I'm trying to extract the certificate information from a list of websites and write it to a CSV file.
I keep hitting the same error, but not always at the same time and not always on the same domain.
I get the error at line 63: err := writer.Write(data)
main.analyzeDomains(0xc0000840c0, 0xc0000126c0)
/root/BreakCert/SSLCert/src/main.go:95 +0x5f
created by main.main
/root/BreakCert/SSLCert/src/main.go:113 +0x1bf
panic: runtime error: slice bounds out of range
goroutine 35 [running]:
bufio.(*Writer).Flush(0xc000024140, 0x400002400, 0x0)
/usr/local/go/src/bufio/bufio.go:590 +0x1c0
bufio.(*Writer).WriteByte(0xc000024140, 0xc0000aa92c, 0xc000452500, 0x4d1)
/usr/local/go/src/bufio/bufio.go:645 +0x96
bufio.(*Writer).WriteRune(0xc000024140, 0xc00000002c, 0x4d1, 0x4d1, 0x0)
/usr/local/go/src/bufio/bufio.go:657 +0x1aa
encoding/csv.(*Writer).Write(0xc0000126c0, 0xc00060a000, 0x5, 0x8, 0x2, 0x1a)
/usr/local/go/src/encoding/csv/writer.go:47 +0x4b8
main.storeCertificate(0xc00018cb00, 0xc0000126c0, 0xc000396380, 0x12)
/root/BreakCert/SSLCert/src/main.go:63 +0x3e9
main.analyzeDomain(0xc000396380, 0x12, 0xc0000126c0)
/root/BreakCert/SSLCert/src/main.go:88 +0x19d
main.analyzeDomains(0xc0000840c0, 0xc0000126c0)
/root/BreakCert/SSLCert/src/main.go:95 +0x5f
created by main.main
/root/BreakCert/SSLCert/src/main.go:113 +0x1bf
exit status 2
My code is
package main
import (
"bufio"
"crypto/rsa"
"crypto/tls"
"crypto/x509"
"encoding/csv"
"fmt"
"log"
"net"
"os"
"strconv"
"strings"
"sync"
"time"
)
// CsvWriter wraps a csv.Writer with a mutex so multiple goroutines can
// share one writer safely (a bare csv.Writer is not concurrency-safe).
type CsvWriter struct {
	mutex     *sync.Mutex
	csvWriter *csv.Writer
}
// NewCsvWriter creates fileName and returns a mutex-guarded CSV writer
// targeting it. The underlying file handle is owned by the writer for
// the life of the process.
func NewCsvWriter(fileName string) (*CsvWriter, error) {
	f, err := os.Create(fileName)
	if err != nil {
		return nil, err
	}
	return &CsvWriter{
		mutex:     &sync.Mutex{},
		csvWriter: csv.NewWriter(f),
	}, nil
}
// Write appends one CSV record, serializing access across goroutines.
func (w *CsvWriter) Write(row []string) {
	w.mutex.Lock()
	defer w.mutex.Unlock()
	w.csvWriter.Write(row)
}
// Flush drains buffered records to the underlying file, serializing
// access across goroutines.
func (w *CsvWriter) Flush() {
	w.mutex.Lock()
	defer w.mutex.Unlock()
	w.csvWriter.Flush()
}
func storeCertificate(cert *x509.Certificate, writer *csv.Writer, domain string) {
if v := cert.PublicKeyAlgorithm.String(); v == "RSA" {
if len(cert.Issuer.Organization) != 0 {
var data []string
// Get Issuer Organization
data = append(data, domain[:len(domain)-4])
data = append(data, cert.Issuer.Organization[0])
rsaPublicKey := cert.PublicKey.(*rsa.PublicKey)
if rsaPublicKey != nil {
data = append(data, rsaPublicKey.N.String())
data = append(data, strconv.Itoa(rsaPublicKey.E))
data = append(data, strconv.Itoa(rsaPublicKey.Size()))
fmt.Println("Done: ", domain)
if 6 <= len(data) {
data = data[:5]
}
err := writer.Write(data)
if err != nil {
log.Fatal(err)
}
}
}
}
}
// analyzeDomain opens a TLS connection to domain ("host:port"), with
// certificate verification disabled, and records every certificate
// presented by the peer via storeCertificate.
func analyzeDomain(domain string, writer *csv.Writer) {
	dialer := net.Dialer{Timeout: 10 * time.Second}
	conn, err := tls.DialWithDialer(&dialer, "tcp", domain, &tls.Config{
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Println(fmt.Sprintf("\x1b[31;1mfailed to connect to %s", domain), err, "\x1b[0m")
		return
	}
	defer conn.Close()
	peerCerts := conn.ConnectionState().PeerCertificates
	for _, cert := range peerCerts {
		storeCertificate(cert, writer, domain)
	}
}
// analyzeDomains is a worker loop: it consumes domains from queue
// forever and analyzes each one. The channel is never closed, so the
// loop only ends when the process exits.
func analyzeDomains(queue chan string, writer *csv.Writer) {
	for {
		analyzeDomain(<-queue, writer)
	}
}
// main spawns 80 worker goroutines reading "host:port" lines from a
// shared channel, feeds them domains from stdin (defaulting the port
// to 443), and writes results to result.csv.
func main() {
	cs := make(chan string)
	file, err := os.Create("result.csv")
	checkError("Cannot create file", err)
	defer file.Close()
	writer := csv.NewWriter(file)
	// Flush what the workers wrote before exiting. The original flushed
	// immediately after starting the workers — before any work had been
	// done — so buffered rows at exit were lost.
	defer writer.Flush()
	for i := 0; i < 80; i++ {
		go analyzeDomains(cs, writer)
	}
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		line := scanner.Text()
		if !strings.Contains(line, ":") {
			line = line + ":443"
		}
		cs <- line
	}
	// Surface stdin read failures instead of silently ignoring them.
	checkError("reading stdin", scanner.Err())
	// Crude grace period for in-flight workers; a sync.WaitGroup would
	// be the robust fix but requires changing the worker interface.
	time.Sleep(2 * time.Second)
}
// checkError aborts the program, logging message and err, when err is
// non-nil; it is a no-op otherwise.
func checkError(message string, err error) {
	if err == nil {
		return
	}
	log.Fatal(message, err)
}
The program is used like this:
cat domains | go run main.go
where `domains` contains one URL per line.
This is one solution to the OP's problem:
// echo -e "google.com\ncnn.com\nstackoverflow.com" | go run main.go
package main
import (
"bufio"
"crypto/rsa"
"crypto/tls"
"crypto/x509"
"encoding/csv"
"io"
"log"
"net"
"os"
"strconv"
"strings"
"sync"
"time"
)
func certToCSV(cert *x509.Certificate, domain string) []string {
var data []string
data = append(data, domain[:len(domain)-4])
var org string
if len(cert.Issuer.Organization) > 0 {
org = cert.Issuer.Organization[0]
}
data = append(data, org)
if cert.PublicKey != nil {
rsaPublicKey := cert.PublicKey.(*rsa.PublicKey)
data = append(data, rsaPublicKey.N.String())
data = append(data, strconv.Itoa(rsaPublicKey.E))
data = append(data, strconv.Itoa(rsaPublicKey.Size()))
}
return data
}
// getCerts dials d over TLS (verification disabled) and returns the
// peer certificates that both use RSA and carry an issuer organization;
// anything else is logged and skipped.
func getCerts(d string) ([]*x509.Certificate, error) {
	out := []*x509.Certificate{}
	dialer := net.Dialer{Timeout: 10 * time.Second}
	conn, err := tls.DialWithDialer(&dialer, "tcp", d, &tls.Config{
		InsecureSkipVerify: true,
	})
	if err != nil {
		return out, err
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		switch {
		case cert.PublicKeyAlgorithm.String() != "RSA":
			log.Printf("%q not using RSA algorithm but %q", d, cert.PublicKeyAlgorithm)
		case len(cert.Issuer.Organization) < 1:
			log.Printf("%q does not have organization", d)
		default:
			out = append(out, cert)
		}
	}
	return out, nil
}
// analyze consumes domains from src until it is closed, pushing one CSV
// record per usable certificate onto dst and reporting fetch failures
// on errs.
func analyze(dst chan []string, src chan string, errs chan error) {
	for domain := range src {
		certs, err := getCerts(domain)
		if err != nil {
			errs <- err
			continue
		}
		for _, c := range certs {
			dst <- certToCSV(c, domain)
		}
	}
}
func readCSVFile(dst chan string, fp string) error {
file, err := os.Create(fp)
if err != nil {
return err
}
defer file.Close()
scanner := bufio.NewScanner(os.Stdin)
for scanner.Scan() {
line := scanner.Text()
if !strings.Contains(line, ":") {
line = line + ":443"
}
dst <- line
}
return scanner.Err()
}
func readCSV(dst chan string, src io.Reader) error {
scanner := bufio.NewScanner(src)
for scanner.Scan() {
line := scanner.Text()
if !strings.Contains(line, ":") {
line = line + ":443"
}
dst <- line
}
return scanner.Err()
}
func writeCSV(dst io.Writer, src chan []string, errs chan error) {
w := csv.NewWriter(dst)
for record := range src {
if err := w.Write(record); err != nil {
errs <- err
}
w.Flush()
}
if err := w.Error(); err != nil {
errs <- err
}
}
// main wires a concurrent pipeline:
//
//	stdin --readCSV--> src --4x analyze--> t1 --writeCSV--> stdout
//
// Every stage reports failures on errs; the process exit code is 1 if
// anything went wrong.
func main() {
	var wg sync.WaitGroup
	errs := make(chan error)
	src := make(chan string)
	t1 := make(chan []string)
	// Close errs exactly once, after every goroutine registered on wg
	// has finished — otherwise the error-drain loop below never ends.
	go func() {
		wg.Wait()
		close(errs)
	}()
	var wg2 sync.WaitGroup
	// Analyze multiple domains in parallel: 4 workers all draining src.
	for i := 0; i < 4; i++ {
		wg.Add(1)
		wg2.Add(1)
		go func() {
			defer wg.Done()
			defer wg2.Done()
			analyze(t1, src, errs)
		}()
	}
	// Close t1 only after all analyze workers are done producing.
	go func() {
		wg2.Wait()
		close(t1)
	}()
	// Write the CSV stream to stdout.
	wg.Add(1)
	go func() {
		defer wg.Done()
		writeCSV(os.Stdout, t1, errs)
	}()
	// Read domains from stdin; closing src lets the analyze workers
	// drain and exit. A read failure aborts the whole process.
	wg.Add(1)
	go func() {
		defer wg.Done()
		err := readCSV(src, os.Stdin)
		if err != nil {
			log.Fatal(err)
		}
		close(src)
	}()
	// Drain and print errors; the range ends when errs is closed by the
	// watcher goroutine above. Exit code reflects whether any occurred.
	var exitCode int
	for err := range errs {
		log.Println(err)
		exitCode = 1
	}
	os.Exit(exitCode)
}
Related
I'm new to golang and json, we are using gorilla mux library and I'd like to do a post request in postman. In config struct entries needs to be a map like that and in post server I need to have an array of *Config in postServer struct. I have 3 go files.
Service.go file is this:
package main
import (
"errors"
"github.com/gorilla/mux"
"mime"
"net/http"
)
type Config struct {
Id string `json:"id"`
entries map[string]string `json:"entries"`
}
// postServer stores all posted config lists, keyed by generated id.
//
// NOTE(review): the map is accessed from concurrent HTTP handlers with
// no lock — confirm whether a sync.Mutex (or sync.RWMutex) is needed.
type postServer struct {
	data map[string][]*Config
}
// createPostHandler accepts a JSON array of configs, stores it under a
// freshly generated id, and echoes the stored value back as JSON.
// Non-JSON content types are rejected with 415.
func (ts *postServer) createPostHandler(w http.ResponseWriter, req *http.Request) {
	mediatype, _, err := mime.ParseMediaType(req.Header.Get("Content-Type"))
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	if mediatype != "application/json" {
		http.Error(w, "Expect application/json Content-Type", http.StatusUnsupportedMediaType)
		return
	}
	rt, err := decodeBody(req.Body)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	ts.data[createId()] = rt
	renderJSON(w, rt)
}
// getAllHandler renders every stored config, across all ids, as one
// flat JSON array.
func (ts *postServer) getAllHandler(w http.ResponseWriter, req *http.Request) {
	all := make([]*Config, 0)
	for _, configs := range ts.data {
		all = append(all, configs...)
	}
	renderJSON(w, all)
}
// getPostHandler renders the configs stored under the {id} path
// variable, or a 404 when the id is unknown.
func (ts *postServer) getPostHandler(w http.ResponseWriter, req *http.Request) {
	task, ok := ts.data[mux.Vars(req)["id"]]
	if !ok {
		http.Error(w, "key not found", http.StatusNotFound)
		return
	}
	renderJSON(w, task)
}
// delPostHandler removes the configs stored under {id} and echoes the
// deleted value, or replies 404 when the id is unknown.
func (ts *postServer) delPostHandler(w http.ResponseWriter, req *http.Request) {
	id := mux.Vars(req)["id"]
	v, ok := ts.data[id]
	if !ok {
		http.Error(w, "key not found", http.StatusNotFound)
		return
	}
	delete(ts.data, id)
	renderJSON(w, v)
}
I wanted to test createPostHandler.
Then I have helper.go file where I decoded json into go and rendered into json:
package main
import (
"encoding/json"
"github.com/google/uuid"
"io"
"net/http"
)
// decodeBody strictly decodes a JSON array of Config from r; unknown
// fields are rejected rather than silently ignored.
func decodeBody(r io.Reader) ([]*Config, error) {
	dec := json.NewDecoder(r)
	dec.DisallowUnknownFields()
	var configs []*Config
	err := dec.Decode(&configs)
	if err != nil {
		return nil, err
	}
	return configs, nil
}
func renderJSON(w http.ResponseWriter, v interface{}) {
js, err := json.Marshal(v)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
w.Write(js)
}
// createId returns a fresh random UUID string used as a storage key.
func createId() string {
	return uuid.New().String()
}
and the last one go file is main.go where I have this:
package main
import (
"context"
"github.com/gorilla/mux"
"log"
"net/http"
"os"
"os/signal"
"syscall"
"time"
)
// main starts the config HTTP service on :8000 and shuts it down
// gracefully (10s budget) when SIGINT/SIGTERM arrives.
func main() {
	// Buffered channel: signal.Notify performs a non-blocking send, so
	// an unbuffered channel can drop a signal delivered before main is
	// ready to receive (go vet flags the unbuffered form).
	quit := make(chan os.Signal, 1)
	signal.Notify(quit, os.Interrupt, syscall.SIGTERM)
	router := mux.NewRouter()
	router.StrictSlash(true)
	server := postServer{
		data: map[string][]*Config{},
	}
	router.HandleFunc("/config/", server.createPostHandler).Methods("POST")
	router.HandleFunc("/configs/", server.getAllHandler).Methods("GET")
	router.HandleFunc("/config/{id}/", server.getPostHandler).Methods("GET")
	router.HandleFunc("/config/{id}/", server.delPostHandler).Methods("DELETE")
	// Start the server; ErrServerClosed is the expected result of the
	// graceful Shutdown below, not a failure.
	srv := &http.Server{Addr: "0.0.0.0:8000", Handler: router}
	go func() {
		log.Println("server starting")
		if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
			log.Fatal(err)
		}
	}()
	<-quit
	log.Println("service shutting down ...")
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	if err := srv.Shutdown(ctx); err != nil {
		log.Fatal(err)
	}
	log.Println("server stopped")
}
And JSON whad I did send is this:
{
"entries":["hello", "world"]
}
And error what I'm getting in postman is this:
json: cannot unmarshal object into Go value of type []*main.Config
I don't know what the problem is — maybe I'm sending the wrong JSON, or I did something wrong in decodeBody. I had to declare rt as []*Config in decodeBody because it wouldn't work otherwise.
Can someone help me fix this, please?
This is an example of how you can define a struct Config that you can parse your sample JSON into.
EDIT: field entries changed to map.
You can play with it on Playground.
package main
import (
"encoding/json"
"fmt"
)
// Config mirrors one element of the posted JSON array. Entries must be
// exported (capitalized) for encoding/json to populate it — the
// original question's unexported `entries` field was silently ignored.
type Config struct {
	Id      string            `json:"id"`
	Entries map[string]string `json:"entries"`
}
// main demonstrates decoding a JSON array into []Config and collecting
// pointers to the decoded elements.
func main() {
	str := `[{"id":"42", "entries":{"hello": "world"}}]`
	var tmp []Config
	err := json.Unmarshal([]byte(str), &tmp)
	if err != nil {
		fmt.Printf("error: %v", err)
	}
	var rt []*Config
	// Take the address of each slice ELEMENT, not of the loop variable:
	// before Go 1.22, `for _, c := range tmp { rt = append(rt, &c) }`
	// reused one variable for every iteration, so every pointer in rt
	// aliased the last decoded item.
	for i := range tmp {
		rt = append(rt, &tmp[i])
	}
	for _, c := range rt {
		for k, v := range c.Entries {
			fmt.Printf("id=%s key=%s value=%s\n", c.Id, k, v)
		}
	}
}
Is there a way to insert csv file using this go library https://github.com/ClickHouse/clickhouse-go in one command (without reading csv and iterating through the content.). If there is a way can you provide me with the example.
if not how can we convert this system command and write it in golang using os/exec library.
cat /home/srijan/employee.csv | clickhouse-client --query="INSERT INTO test1 FORMAT CSV"
It's impossible with that Go library. You can use the HTTP API (https://clickhouse.com/docs/en/interfaces/http/) with any Go HTTP client,
for example
package main
import (
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"os"
)
func compress(data io.Reader) io.Reader {
pr, pw := io.Pipe()
gw, err := gzip.NewWriterLevel(pw, int(3))
if err != nil {
panic(err)
}
go func() {
_, _ = io.Copy(gw, data)
gw.Close()
pw.Close()
}()
return pr
}
// main streams stdin, gzip-compressed, into ClickHouse's HTTP interface
// as an "INSERT INTO test1 FORMAT CSV" query against localhost:8123.
func main() {
	p, err := url.Parse("http://localhost:8123/")
	if err != nil {
		panic(err)
	}
	q := p.Query()
	q.Set("query", "INSERT INTO test1 FORMAT CSV")
	p.RawQuery = q.Encode()
	queryUrl := p.String()
	req, err := http.NewRequest("POST", queryUrl, compress(os.Stdin))
	// Check the error BEFORE touching req: the original added a header
	// first, which dereferences a nil req whenever NewRequest fails.
	if err != nil {
		panic(err)
	}
	req.Header.Add("Content-Encoding", "gzip")
	client := &http.Client{
		Transport: &http.Transport{DisableKeepAlives: true},
	}
	resp, err := client.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := ioutil.ReadAll(resp.Body)
	if resp.StatusCode != 200 {
		panic(fmt.Errorf("clickhouse response status %d: %s", resp.StatusCode, string(body)))
	}
}
I am working on a website scraper. Currently I can only write one JSON object to the JSON file at a time. I want to append each new JSON object to the ones already in the file, so a single JSON file ends up holding hundreds of records, like this:
[
{
"id": 1321931,
"name": "Mike"
},
{
"id": 32139219,
"name": "Melissa"
},
{
"id": 8421921,
"name": "Jordan"
},
{
"id": 4291901,
"name": "David"
}
]
but the output looks like this — when I send new data, only the first JSON object gets updated:
[
{
"id": 1,
"name": "Mike"
}
]
here is the code:
package main
import (
"encoding/json"
"fmt"
"html/template"
"io/ioutil"
"log"
"math/rand"
"net/http"
"os"
"strings"
"github.com/gocolly/colly"
)
// Info is one scraped record persisted to stocky.json.
type Info struct {
	ID   int    `json:"id"`
	Name string `json:"name"`
}
// Package-level state shared by the HTTP handler.
//
// NOTE(review): these are mutated from request handlers with no
// locking, so concurrent requests race on them — confirm single-user
// operation or add synchronization (see the answer below).
var tpl *template.Template
var name string
var stonf Info
var allInfos []Info
var id int
var co = colly.NewCollector()
// main seeds in-memory state from stocky.json, then serves the scraper
// UI on localhost:8080.
func main() {
	fmt.Println("Started...")
	allInfos = make([]Info, 1)
	id = rand.Intn((99999 - 10000) + 10000)
	// Read previously stored records from stocky.json.
	data, err := ioutil.ReadFile("stocky.json")
	if err != nil {
		fmt.Println("ERROR 1 JSON", err)
	}
	var d []Info
	err = json.Unmarshal([]byte(data), &d)
	if err != nil {
		fmt.Println(err)
	}
	// Seed the in-memory list with the stored records. The original
	// decoded them into d and then never used d, which is why the file
	// was rewritten from scratch (one record) on every save.
	if len(d) > 0 {
		allInfos = d
	}
	// Use the package-level ParseGlob via Must: the original called the
	// method on the nil tpl pointer and discarded the error, hiding any
	// template-loading failure until the first request.
	tpl = template.Must(template.ParseGlob("templates/*.html"))
	http.HandleFunc("/mete", hellloHandleFunc)
	staticHandler := http.FileServer(http.Dir("./css/"))
	http.Handle("/css/", http.StripPrefix("/css", staticHandler))
	// Surface listen failures instead of silently returning.
	if err := http.ListenAndServe("localhost:8080", nil); err != nil {
		log.Fatal(err)
	}
}
// hellloHandleFunc serves /mete: it scrapes the page named in the
// "input-link" form value and renders the result template.
//
// NOTE(review): co.OnHTML registers an ADDITIONAL callback on the
// shared collector on every request, so callbacks accumulate; and the
// package-level name/stonf/allInfos variables are written with no
// locking, so concurrent requests race. Confirm single-user operation
// or move to per-request state.
func hellloHandleFunc(w http.ResponseWriter, r *http.Request) {
	err := r.ParseForm()
	if err != nil {
		log.Fatal(err)
	}
	allInfos[0].ID = id // JSON-PRO: stamp the id generated at startup
	// Fetch the product title from the page (originally: "Fiyat GETIR").
	co.OnHTML("div#dp", func(p *colly.HTMLElement) {
		name = p.ChildText("h1#title")
	})
	requestLink := strings.TrimSpace(r.FormValue("input-link"))
	co.Visit(requestLink)
	// Dump the current list to stdout for debugging.
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", " ")
	enc.Encode(allInfos)
	stonf = Info{
		Name: name,
	}
	fmt.Println("Index Running")
	tpl.ExecuteTemplate(w, "form-copy.html", stonf)
}
// writeJson persists the full record list to stocky.json as indented
// JSON.
func writeJson(data []Info) {
	payload, err := json.MarshalIndent(data, "", " ")
	if err != nil {
		log.Println("Could not create JSON", err)
		// Bail out: the original fell through and wrote the nil payload,
		// clobbering the file.
		return
	}
	// Report write failures instead of discarding the error.
	if err := ioutil.WriteFile("stocky.json", payload, 0666); err != nil {
		log.Println("Could not write stocky.json", err)
	}
}
Here is a solution which appends new Info to the list and store in file.
The solution will perform properly only for relatively small list. For large lists, the overhead of writing the entire file each time may be too high. In such case i propose to change the format to ndjson. It will allow to write only the current Info struct instead of the whole list.
I've also added synchronization mechanism to avoid race conditions in case you send multiple HTTP requests at the same time.
I assumed that the identifier must be generated separately for each request, and it is not a problem if collision occur.
package main
import (
"encoding/json"
"fmt"
"html/template"
"io/ioutil"
"log"
"math/rand"
"net/http"
"os"
"strings"
"sync"
"github.com/gocolly/colly"
)
type (
	// Info is one scraped record persisted to stocky.json.
	Info struct {
		ID   int    `json:"id"`
		Name string `json:"name"`
	}
	// Infos guards the record list with an embedded mutex so concurrent
	// HTTP requests can append and persist safely.
	Infos struct {
		List []Info
		sync.Mutex
	}
)
// Process-wide state: the loaded (mutex-guarded) record list, the
// parsed templates, and the shared colly collector.
var (
	infos *Infos
	tpl   *template.Template
	co    = colly.NewCollector()
)
// main loads existing records from stocky.json, parses the templates,
// and serves the scraper UI on localhost:8080.
func main() {
	fmt.Println("Started...")
	var err error
	infos, err = readInfos()
	if err != nil {
		log.Fatal(err)
	}
	// Package-level ParseGlob wrapped in Must: the original called the
	// method on a nil tpl pointer and discarded the error, hiding any
	// template-loading failure until the first request.
	tpl = template.Must(template.ParseGlob("templates/*.html"))
	http.HandleFunc("/mete", hellloHandleFunc)
	staticHandler := http.FileServer(http.Dir("./css/"))
	http.Handle("/css/", http.StripPrefix("/css", staticHandler))
	if err := http.ListenAndServe("localhost:8080", nil); err != nil {
		log.Fatal(err)
	}
}
// hellloHandleFunc serves /mete: it scrapes the page named in the
// "input-link" form value, appends the scraped record to the shared
// list (persisting it to stocky.json), and renders the result template.
//
// NOTE(review): co.OnHTML still registers a NEW callback on the shared
// collector on every request, so callbacks accumulate across requests —
// confirm whether a per-request collector is needed.
func hellloHandleFunc(w http.ResponseWriter, r *http.Request) {
	err := r.ParseForm()
	if err != nil {
		log.Fatal(err)
	}
	// Per-request record with a freshly generated id (collisions are
	// accepted per the answer's stated assumption).
	stonf := Info{
		ID: rand.Intn((99999 - 10000) + 10000),
	}
	// Fetch the product title from the page (originally: "Fiyat GETIR").
	co.OnHTML("div#dp", func(p *colly.HTMLElement) {
		stonf.Name = p.ChildText("h1#title")
	})
	requestLink := strings.TrimSpace(r.FormValue("input-link"))
	if err := co.Visit(requestLink); err != nil {
		log.Fatal(err)
	}
	if err := infos.AppendAndWrite(stonf); err != nil {
		log.Fatal(err)
	}
	// Echo the new record to stdout for debugging.
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", " ")
	enc.Encode(stonf)
	fmt.Println("Index Running")
	tpl.ExecuteTemplate(w, "form-copy.html", stonf)
}
// readInfos loads stocky.json and wraps the decoded records in an Infos
// ready for concurrent use.
func readInfos() (*Infos, error) {
	raw, err := ioutil.ReadFile("stocky.json")
	if err != nil {
		return nil, err
	}
	var list []Info
	if err := json.Unmarshal(raw, &list); err != nil {
		return nil, err
	}
	return &Infos{List: list}, nil
}
// AppendAndWrite appends info to the list and persists the whole list
// to disk, holding the mutex for the duration of both steps.
func (i *Infos) AppendAndWrite(info Info) error {
	i.Lock()
	defer i.Unlock()
	i.List = append(i.List, info)
	err := i.storeLocked()
	if err == nil {
		return nil
	}
	return fmt.Errorf("storing info list failed: %w", err)
}
// storeLocked writes the current list to stocky.json as indented JSON.
// Callers must already hold the mutex.
func (i *Infos) storeLocked() error {
	payload, err := json.MarshalIndent(i.List, "", " ")
	if err != nil {
		return fmt.Errorf("could not marshal infos JSON: %w", err)
	}
	if err := ioutil.WriteFile("stocky.json", payload, 0666); err != nil {
		return fmt.Errorf("could not write 'stocky.json' file: %w", err)
	}
	return nil
}
There is a standard called JSON lines (https://jsonlines.org/) consisting on only one JSON per line instead of wrapping all in a JSON array.
JSON library from Go stdlib works pretty well with JSON lines on both cases, reading and writing.
Write multiple JSON (one per line):
e := json.NewEncoder(yourWriterFile)
e.Encode(object1)
e.Encode(object2)
//...
Read multiple JSON (one per line or concatenated):
d := json.NewDecoder(yourReaderFile)
d.Decode(&object1)
d.Decode(&object2)
//...
More info: https://pkg.go.dev/encoding/json
I am trying to build a TCP server that loads dataset from a CSV file and provide an interface to query the dataset. TCP server will expose port 4040. CSV file contains the following columns related to corona virus cases:
Cumulative Test Positive
Cumulative Tests Performed
Date
Discharged
Expired
Admitted
Region
Users should be able to connect to the server using NetCat nc localhost 4040 command on Linux/Unix based systems.
Once connected to TCP, the user should be able to communicate with the application by sending queries in JSON format.
{
"query": {
"region": "Sindh"
}
}
{
"query": {
"date": "2020-03-20"
}
}
My server.go
package main
import (
"fmt"
"net"
"os"
"flag"
"log"
"encoding/csv"
"encoding/json"
"bufio"
"io"
"strings"
)
// CovidPatient is one row of the covid CSV, exposed over JSON.
//
// NOTE(review): the "Coivd_Performed" tag looks like a typo for
// "Covid_Performed", but it is part of the wire format — renaming it
// would break existing clients, so it is left as-is.
type CovidPatient struct {
	Positive   string `json:"Covid_Positive"`
	Performed  string `json:"Coivd_Performed"`
	Date       string `json:"Covid_Date"`
	Discharged string `json:"Covid_Discharged"`
	Expired    string `json:"Covid_Expired"`
	Region     string `json:"Covid_Region"`
	Admitted   string `json:"Covid_Admitted"`
}

// DataRequest is a client query: a single free-text filter string.
type DataRequest struct {
	Get string `json:"get"`
}

// DataError is the JSON error envelope returned to clients.
type DataError struct {
	Error string `json:"Covid_error"`
}
// Load reads the covid CSV at path fully into memory. It panics if the
// file cannot be opened and exits on a malformed CSV stream, matching
// the original behavior; rows with fewer than 7 columns are skipped
// instead of panicking with an index-out-of-range error.
func Load(path string) []CovidPatient {
	table := make([]CovidPatient, 0)
	file, err := os.Open(path)
	if err != nil {
		panic(err.Error())
	}
	defer file.Close()
	reader := csv.NewReader(file)
	csvData, err := reader.ReadAll()
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	for _, row := range csvData {
		if len(row) < 7 {
			// Short/malformed row: the original indexed row[6] blindly.
			continue
		}
		table = append(table, CovidPatient{
			Positive:   row[0],
			Performed:  row[1],
			Date:       row[2],
			Discharged: row[3],
			Expired:    row[4],
			Region:     row[5],
			Admitted:   row[6],
		})
	}
	return table
}
// Find returns the rows matching filter: "" or "*" matches everything;
// otherwise a row matches on exact equality of Date/Region against the
// upper-cased filter, or a case-insensitive substring match in any
// column.
func Find(table []CovidPatient, filter string) []CovidPatient {
	if filter == "" || filter == "*" {
		return table
	}
	needle := strings.ToUpper(filter)
	contains := func(field string) bool {
		return strings.Contains(strings.ToUpper(field), needle)
	}
	result := make([]CovidPatient, 0)
	for _, cp := range table {
		match := cp.Date == needle || cp.Region == needle ||
			contains(cp.Positive) || contains(cp.Performed) ||
			contains(cp.Date) || contains(cp.Discharged) ||
			contains(cp.Expired) || contains(cp.Region) ||
			contains(cp.Admitted)
		if match {
			result = append(result, cp)
		}
	}
	return result
}
// patientsDetail is loaded once at package init; Load panics or exits
// the process if the CSV file is missing or malformed.
var (
	patientsDetail = Load("./covid_final_data.csv")
)
// main listens on the configured network/address (flags -n / -e) and
// serves each accepted connection on its own goroutine.
func main() {
	var addr string
	var network string
	flag.StringVar(&addr, "e", ":4040", "service endpoint [ip addr or socket path]")
	flag.StringVar(&network, "n", "tcp", "network protocol [tcp,unix]")
	flag.Parse()
	switch network {
	case "tcp", "tcp4", "tcp6", "unix":
	default:
		fmt.Println("unsupported network protocol")
		os.Exit(1)
	}
	ln, err := net.Listen(network, addr)
	if err != nil {
		log.Println(err)
		os.Exit(1)
	}
	defer ln.Close()
	log.Println("Covid19 Condition in Pakistan")
	log.Printf("Service started: (%s) %s\n", network, addr)
	for {
		conn, err := ln.Accept()
		if err != nil {
			// Do NOT touch conn here: Accept returns a nil Conn on
			// error, so the original conn.Close() was a nil-pointer
			// panic waiting to happen.
			log.Println(err)
			continue
		}
		log.Println("Connected to ", conn.RemoteAddr())
		go handleConnection(conn)
	}
}
// handleConnection answers JSON queries arriving on conn with the
// matching patient records.
//
// NOTE(review): ReadSlice('}') stops at the FIRST closing brace, so a
// nested request such as {"query":{...}} is truncated before its outer
// '}' and fails to unmarshal — which is why clients see errors/empty
// replies. The 4-byte bufio.Reader also guarantees ErrBufferFull for
// any realistic payload. A json.Decoder reading directly from conn is
// the robust fix (see the explanation further down).
func handleConnection(conn net.Conn) {
	defer func() {
		if err := conn.Close(); err != nil {
			log.Println("error closing connection:", err)
		}
	}()
	reader := bufio.NewReaderSize(conn, 4)
	for {
		buf, err := reader.ReadSlice('}')
		if err != nil {
			if err != io.EOF {
				log.Println("connection read error:", err)
				return
			}
		}
		// Drop any buffered remainder before the next read.
		reader.Reset(conn)
		var req DataRequest
		if err := json.Unmarshal(buf, &req); err != nil {
			log.Println("failed to unmarshal request:", err)
			cerr, jerr := json.Marshal(DataError{Error: err.Error()})
			if jerr != nil {
				log.Println("failed to marshal DataError:", jerr)
				continue
			}
			if _, werr := conn.Write(cerr); werr != nil {
				log.Println("failed to write to DataError:", werr)
				return
			}
			continue
		}
		// Query the in-memory table and reply with the JSON result.
		result := Find(patientsDetail, req.Get)
		rsp, err := json.Marshal(&result)
		if err != nil {
			log.Println("failed to marshal data:", err)
			if _, err := fmt.Fprintf(conn, `{"data_error":"internal error"}`); err != nil {
				log.Printf("failed to write to client: %v", err)
				return
			}
			continue
		}
		if _, err := conn.Write(rsp); err != nil {
			log.Println("failed to write response:", err)
			return
		}
	}
}
This correctly loads the CSV and converts it into JSON. But when I try to run a query using the netcat command, it returns an empty JSON element. Kindly guide me to where the error is.
Guess you want this:
╭─root#DESKTOP-OCDRD7Q ~
╰─# nc localhost 4040
{"get": "Sindh"}
[{"Covid_Positive":"1","Coivd_Performed":"1","Covid_Date":"1","Covid_Discharged":"1","Covid_Expired":"1","Covid_Region":"Sindh","Covid_Admitted":"1"}]
What you should do is just to modify your json request.
package main
import (
"bufio"
"encoding/csv"
"encoding/json"
"flag"
"fmt"
"io"
"log"
"net"
"os"
)
// CovidPatient is one row of the covid CSV, exposed over JSON.
//
// NOTE(review): "Coivd_Performed" looks like a typo for
// "Covid_Performed", but it is part of the wire format shown in the
// sample session — renaming it would break clients.
type CovidPatient struct {
	Positive   string `json:"Covid_Positive"`
	Performed  string `json:"Coivd_Performed"`
	Date       string `json:"Covid_Date"`
	Discharged string `json:"Covid_Discharged"`
	Expired    string `json:"Covid_Expired"`
	Region     string `json:"Covid_Region"`
	Admitted   string `json:"Covid_Admitted"`
}

// DataRequest is a client query: a partially-filled CovidPatient whose
// non-empty fields act as an AND filter.
type DataRequest struct {
	Get CovidPatient `json:"get"`
}

// DataError is the JSON error envelope returned to clients.
type DataError struct {
	Error string `json:"Covid_error"`
}
// Load reads the covid CSV at path fully into memory. It panics if the
// file cannot be opened and exits on a malformed CSV stream, matching
// the original behavior; rows with fewer than 7 columns are skipped
// instead of panicking with an index-out-of-range error.
func Load(path string) []CovidPatient {
	table := make([]CovidPatient, 0)
	file, err := os.Open(path)
	if err != nil {
		panic(err.Error())
	}
	defer file.Close()
	reader := csv.NewReader(file)
	csvData, err := reader.ReadAll()
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	for _, row := range csvData {
		if len(row) < 7 {
			// Short/malformed row: the original indexed row[6] blindly.
			continue
		}
		table = append(table, CovidPatient{
			Positive:   row[0],
			Performed:  row[1],
			Date:       row[2],
			Discharged: row[3],
			Expired:    row[4],
			Region:     row[5],
			Admitted:   row[6],
		})
	}
	return table
}
// Find returns every row of table matching ALL non-empty fields of
// filter; empty filter fields act as wildcards. The filter and table
// are logged for debugging, as in the original.
func Find(table []CovidPatient, filter CovidPatient) []CovidPatient {
	result := make([]CovidPatient, 0)
	log.Println(filter, table)
	// matches treats an empty wanted value as "accept anything".
	matches := func(want, got string) bool {
		return want == "" || want == got
	}
	for _, cp := range table {
		if matches(filter.Positive, cp.Positive) &&
			matches(filter.Performed, cp.Performed) &&
			matches(filter.Date, cp.Date) &&
			matches(filter.Discharged, cp.Discharged) &&
			matches(filter.Expired, cp.Expired) &&
			matches(filter.Region, cp.Region) &&
			matches(filter.Admitted, cp.Admitted) {
			result = append(result, cp)
		}
	}
	return result
}
// patientsDetail is loaded once at package init; Load panics or exits
// the process if the CSV file is missing or malformed.
var (
	patientsDetail = Load("./covid_final_data.csv")
)
// main listens on the configured network/address (flags -n / -e) and
// serves each accepted connection on its own goroutine.
func main() {
	log.SetFlags(log.Lshortfile | log.Ltime)
	var addr string
	var network string
	flag.StringVar(&addr, "e", ":4040", "service endpoint [ip addr or socket path]")
	flag.StringVar(&network, "n", "tcp", "network protocol [tcp,unix]")
	flag.Parse()
	switch network {
	case "tcp", "tcp4", "tcp6", "unix":
	default:
		fmt.Println("unsupported network protocol")
		os.Exit(1)
	}
	ln, err := net.Listen(network, addr)
	if err != nil {
		log.Println(err)
		os.Exit(1)
	}
	defer ln.Close()
	log.Println("Covid19 Condition in Pakistan")
	log.Printf("Service started: (%s) %s\n", network, addr)
	for {
		conn, err := ln.Accept()
		if err != nil {
			// Do NOT touch conn here: Accept returns a nil Conn on
			// error, so calling conn.Close() (as the original did)
			// panics with a nil-pointer dereference.
			log.Println(err)
			continue
		}
		log.Println("Connected to ", conn.RemoteAddr())
		go handleConnection(conn)
	}
}
// handleConnection reads '|'-terminated JSON requests from conn and
// replies with the matching patient records; the trailing '|' is
// stripped before unmarshaling, which is why clients must end each
// request with '|'.
//
// NOTE(review): on EOF with no pending bytes, buf is empty and
// buf[:len(buf)-1] would panic — confirm clients always terminate
// requests with '|' and close the connection cleanly.
func handleConnection(conn net.Conn) {
	defer func() {
		if err := conn.Close(); err != nil {
			log.Println("error closing connection:", err)
		}
	}()
	reader := bufio.NewReaderSize(conn, 100)
	for {
		buf, err := reader.ReadBytes('|')
		if err != nil {
			if err != io.EOF {
				log.Println("connection read error:", err)
				return
			}
		}
		// Drop any buffered remainder before the next read.
		reader.Reset(conn)
		var req DataRequest
		// Strip the trailing '|' delimiter before decoding.
		if err := json.Unmarshal(buf[:len(buf)-1], &req); err != nil {
			log.Println("failed to unmarshal request:", string(buf), err)
			cerr, jerr := json.Marshal(DataError{Error: err.Error()})
			if jerr != nil {
				log.Println("failed to marshal DataError:", jerr)
				continue
			}
			if _, werr := conn.Write(cerr); werr != nil {
				log.Println("failed to write to DataError:", werr)
				return
			}
			continue
		}
		// Query the in-memory table and reply with the JSON result.
		result := Find(patientsDetail, req.Get)
		rsp, err := json.Marshal(&result)
		if err != nil {
			log.Println("failed to marshal data:", err)
			if _, err := fmt.Fprintf(conn, `{"data_error":"internal error"}`); err != nil {
				log.Printf("failed to write to client: %v", err)
				return
			}
			continue
		}
		if _, err := conn.Write(rsp); err != nil {
			log.Println("failed to write response:", err)
			return
		}
	}
}
The query is:
╭─root#DESKTOP-OCDRD7Q ~
╰─# nc localhost 4040 127 ↵
{
"get": {
"Covid_Region": "Sindh",
"Covid_Date": "2020-03-20"
}
}|
[{"Covid_Positive":"1","Coivd_Performed":"1","Covid_Date":"2020-03-20","Covid_Discharged":"1","Covid_Expired":"1","Covid_Region":"Sindh","Covid_Admitted":"1"}]
Inside function handleConnection, the first thing is "read until you find the first }", imagine the user is sending the request:
{ "get": { "Covid_Region": "Sindh", "Covid_Date": "2020-03-20" } }
then that step read:
{ "get": { "Covid_Region": "Sindh", "Covid_Date": "2020-03-20" }
Notice the trailing } is missing, then the json.Unmarshal is trying to unmarshal the query without the last } (which is an invalid json).
This problem can take advantage of JSON streaming decoding, in other words, use json.NewDecoder(r io.Reader) instead of json.Unmarshal. Let me copy and modify the first part of that function:
func handleConnection(conn net.Conn) {
defer func() {
if err := conn.Close(); err != nil {
log.Println("error closing connection:", err)
}
}()
jsonDecoder := json.NewDecoder(conn) // A json decoder read a stream to find a
// valid JSON and stop just the byte
// after the JSON ends. Process can be
// repeated.
for {
var req DataRequest
err := jsonDecoder.Decode(&req)
if err == io.EOF {
log.Println("finish")
return
}
if err != nil {
log.Println("unmarshal:", err)
return
}
result := Find(patientsDetail, req.Get) // Here query the system
// ...
Probably now it works, but you can also take advantage of json streaming to send the response back with a jsonEncoder := json.NewEncoder(conn) before de for loop and sending the request like this:
err := jsonEncoder.Encode(&result)
if err != nil {
log.Println("failed to marshal data:", err)
// ...
continue
}
I have a list of locations and I want to know if it's possible to get weather data from the Google Maps API.
When I've had to do this in Go, I've done something like this:
package main
import (
"bufio"
"context"
"database/sql"
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"strings"
"sync"
"time"
"googlemaps.github.io/maps"
_ "github.com/lib/pq"
)
// db is the shared Postgres handle, opened in main.
var db *sql.DB

// Command-line configuration (flags, with env-var fallbacks in main).
var (
	apiKey        string
	dsn           string
	locationsFile string
)
// HourlyForecast mirrors the subset of the api.weather.gov hourly
// forecast response that this program consumes.
type HourlyForecast struct {
	Type       string
	Properties Properties
}

// Properties holds the forecast periods.
type Properties struct {
	Periods []Period
}

// Period is a single forecast period; only the temperature is used.
type Period struct {
	Temperature int
}
// main geocodes each location in the locations file via the Google Maps
// Places API, fetches the current hourly forecast for its coordinates
// from api.weather.gov, and upserts the temperature into Postgres —
// one goroutine per location.
//
// NOTE(review): log.Fatal inside the worker goroutines kills the whole
// process on the first failing location; and Candidates[0] /
// Periods[0] are indexed without length checks, so an empty geocoding
// or forecast result would panic — confirm inputs are always resolvable.
func main() {
	flag.StringVar(&apiKey, "api-key", "", "Google Maps API key")
	flag.StringVar(&dsn, "dsn", "", "The database connection string")
	flag.StringVar(&locationsFile, "locations", "locations.txt", "The locations file")
	flag.Parse()
	if apiKey == "" {
		apiKey = os.Getenv("GOOGLE_MAPS_API_KEY")
	}
	if apiKey == "" {
		log.Fatal("The --api-key flag or GOOGLE_MAPS_API_KEY env var must be set and non-empty")
	}
	apiKey = strings.TrimSpace(apiKey)
	if dsn == "" {
		dsn = os.Getenv("DSN")
	}
	var err error
	db, err = sql.Open("postgres", dsn)
	if err != nil {
		log.Fatal(err)
	}
	// Wait (forever, if need be) for the database to become reachable.
	for {
		err := db.Ping()
		if err != nil {
			log.Println(err.Error())
			time.Sleep(3 * time.Second)
			continue
		}
		break
	}
	data, err := os.Open(locationsFile)
	if err != nil {
		log.Fatal(err)
	}
	// One location name per line.
	var locations []string
	scanner := bufio.NewScanner(data)
	for scanner.Scan() {
		locations = append(locations, scanner.Text())
	}
	var wg sync.WaitGroup
	for _, location := range locations {
		wg.Add(1)
		location := location // pre-Go1.22 loop-variable capture guard
		go func() {
			defer wg.Done()
			mc, err := maps.NewClient(maps.WithAPIKey(apiKey))
			if err != nil {
				log.Fatal(err)
			}
			// Resolve the free-text location to a place id.
			r := maps.FindPlaceFromTextRequest{
				Input:     location,
				InputType: maps.FindPlaceFromTextInputTypeTextQuery,
			}
			response, err := mc.FindPlaceFromText(context.Background(), &r)
			if err != nil {
				log.Fatal(err)
			}
			pdr := maps.PlaceDetailsRequest{
				PlaceID: response.Candidates[0].PlaceID,
			}
			log.Printf("retrieving geo coordinates for %s", location)
			pdResponse, err := mc.PlaceDetails(context.Background(), &pdr)
			if err != nil {
				log.Fatal(err)
			}
			lat := pdResponse.Geometry.Location.Lat
			lng := pdResponse.Geometry.Location.Lng
			// api.weather.gov wants lat,lng to 4 decimal places.
			u := fmt.Sprintf("https://api.weather.gov/points/%.4f,%.4f/forecast/hourly", lat, lng)
			log.Printf("retrieving weather data for %s (%.4f,%.4f)", location, lat, lng)
			request, err := http.NewRequest("GET", u, nil)
			if err != nil {
				log.Fatal(err)
			}
			request.Header.Add("User-Agent", "Hightower Weather 1.0")
			request.Header.Add("Accept", "application/geo+json")
			weatherResponse, err := http.DefaultClient.Do(request)
			if err != nil {
				log.Fatal(err)
			}
			data, err := ioutil.ReadAll(weatherResponse.Body)
			if err != nil {
				log.Fatal(err)
			}
			weatherResponse.Body.Close()
			var forecast HourlyForecast
			if err := json.Unmarshal(data, &forecast); err != nil {
				log.Fatal(err)
			}
			log.Printf("setting temperature for %s to %d", location, forecast.Properties.Periods[0].Temperature)
			_, err = db.Exec(query, location, forecast.Properties.Periods[0].Temperature)
			if err != nil {
				log.Fatal(err)
			}
		}()
	}
	wg.Wait()
}
// query upserts a location's temperature; it requires a UNIQUE
// constraint on weather(location) for ON CONFLICT to apply.
var query = `INSERT INTO weather (location, temperature)
VALUES ($1, $2)
ON CONFLICT (location)
DO UPDATE SET temperature = EXCLUDED.temperature;`