Chisel3: Verilog "default" case equivalent

Is it possible to have a Chisel "is" condition that behaves similarly to a Verilog "default" case in a Chisel "switch" statement? This is required to prevent latches from being inferred for combinatorial outputs from an FSM.
Verilog Example:
module detector (clk, rstn, in1, in2, out1, out2);
  input clk, rstn, in1, in2;
  output reg out1, out2;

  localparam [1:0] IDLE    = 2'b01;
  localparam [1:0] GOT_IN1 = 2'b10;

  reg [1:0] state, nxtState;

  always @ (posedge clk or negedge rstn) begin
    if (!rstn) begin
      state <= IDLE;
    end else begin
      state <= nxtState;
    end
  end

  always @ (*) begin
    case (state)
      IDLE: begin
        if (in1) begin
          out1 = 1'b1;
          out2 = 1'b0;
          nxtState = GOT_IN1;
        end else begin
          out1 = 1'b0;
          out2 = 1'b0;
          nxtState = IDLE;
        end
      end
      GOT_IN1: begin
        if (in2) begin
          out1 = 1'b0;
          out2 = 1'b1;
          nxtState = IDLE;
        end else begin
          out1 = 1'b0;
          out2 = 1'b0;
          nxtState = GOT_IN1;
        end
      end
      default: begin
        out1 = 1'b0;
        out2 = 1'b0;
        nxtState = IDLE;
      end
    endcase
  end
endmodule
Chisel2 allowed this type of behavior: default values could be assigned to out1 and out2 inside the switch statement but outside of any "is" condition.
switch (state) {
  io.out1 := UInt(0)
  io.out2 := UInt(0)
  is (IDLE) {
    when (io.in1 === UInt(1)) {
      io.out1 := UInt(1)
      io.out2 := UInt(0)
      state := GOT_IN1
    } .otherwise {
      io.out1 := UInt(0)
      io.out2 := UInt(0)
      state := IDLE
    }
  }
  is (GOT_IN1) {
    when (io.in2 === UInt(1)) {
      io.out1 := UInt(0)
      io.out2 := UInt(1)
      state := IDLE
    } .otherwise {
      io.out1 := UInt(0)
      io.out2 := UInt(0)
      state := GOT_IN1
    }
  }
}
Chisel3 doesn't support this default-assignment syntax. The build flags an error:
exception during macro expansion: java.lang.Exception: Cannot include blocks that do not begin with is() in switch. at chisel3.util.switch
Chisel3 doesn't appear to have any method to prevent a latch from being inferred on the out1 and out2 outputs. I understand that the out1 and out2 assignments can be moved outside of the switch statement and handled using conditional assignments. However, from a code-readability standpoint, it's clearer to handle the assignments within the case statement for large FSMs with dozens of states and combinatorial outputs.

You can add braces deliberately to indicate that some code should be read together.
Something like:
{ // state machine block
  io.out1 := UInt(0)
  io.out2 := UInt(0)
  when (state === IDLE) {
    when (io.in1 === UInt(1)) {
      io.out1 := UInt(1)
      io.out2 := UInt(0)
      state := GOT_IN1
    } .otherwise {
      io.out1 := UInt(0)
      io.out2 := UInt(0)
      state := IDLE
    }
  }
  when (state === GOT_IN1) {
    when (io.in2 === UInt(1)) {
      io.out1 := UInt(0)
      io.out2 := UInt(1)
      state := IDLE
    } .otherwise {
      io.out1 := UInt(0)
      io.out2 := UInt(0)
      state := GOT_IN1
    }
  }
}
I believe Chisel will never generate latches. The first assignment provides the value whenever no later conditional assignment overrides it.


How do I save a physical file starting from a TJSONObject?

How do I save a physical .json file to my C:\ drive starting from my JSONObject3 object?
procedure CreateJSON;
var
  JSONObject2, JSONObject3: TJSONObject;
  JSONValue1, JSONValue2: TJSONValue;
  JSONArray: TJSONArray;
  JSONString1, JSONString2: TJSONString;
  AText, BText: string;
  mStringStream: TStringStream;
begin
  JSONObject2 := TJSONObject.Create;
  JSONObject3 := TJSONObject.Create;
  JSONArray := TJSONArray.Create;
  try
    AText := 'Name';
    BText := '"Charles"';
    JSONString2 := TJSONString.Create(AText);
    JSONValue2 := TJSONObject.ParseJSONValue(BText);
    JSONObject2.AddPair(JSONString2, JSONValue2);
    JSONArray.Add(JSONObject2);
    JSONObject3.AddPair('People', JSONArray);
    mStringStream := TStringStream.Create('', TEncoding.UTF8);
    // m_StringStream.LoadFromStream(JSONObject3.ToString); <---ERROR
    mStringStream.SaveToFile('people.json');
  finally
    JSONObject3.Free;
  end;
end;
Thank you; I am a beginner with JSON.
TJSONObject does not have any streaming support, but it does have several To...() output methods (ToBytes(), ToJSON(), ToString()). The output of any of those methods can be written to a file, such as with the TFile.WriteAll...() methods (WriteAllBytes(), WriteAllText()).
Try this instead:
uses
  ...,
  Data.DBXJSON, // use System.JSON in XE6+
  System.IOUtils,
  System.SysUtils;

procedure CreateJSON;
var
  JSONObject, JSONObject2: TJSONObject;
  JSONValue: TJSONValue;
  JSONArray: TJSONArray;
  AText, BText: string;
begin
  JSONObject := TJSONObject.Create;
  try
    AText := 'Name';
    BText := '"Charles"';
    JSONValue := TJSONObject.ParseJSONValue(BText);
    if JSONValue <> nil then
    try
      JSONObject.AddPair(AText, JSONValue);
    except
      JSONValue.Free;
      raise;
    end;
    JSONArray := TJSONArray.Create;
    try
      JSONObject2 := TJSONObject.Create;
      try
        JSONArray.Add(JSONObject2);
      except
        JSONObject2.Free;
        raise;
      end;
      JSONObject.AddPair('People', JSONArray);
    except
      JSONArray.Free;
      raise;
    end;
    TFile.WriteAllText('people.json', JSONObject.ToJSON, TEncoding.UTF8);
  finally
    JSONObject.Free;
  end;
end;

Cannot use notifs (variable of type []entity.Notif) as string value in array or slice literal

I want to fetch email data from the database, to send an email with that data.
This code is from controller/notif-controller.go:
func (c *notifController) SendNotifEmail(context *gin.Context) {
    email_to := context.Query("sEmailByDiv")
    cc_to := context.Query("cc_to")
    subject := context.Query("subject")
    body := context.Query("body")
    notifs := c.notifService.EmailByDiv(email_to)
    to := []string{notifs}
    mailer := gomail.NewMessage()
    mailer.SetHeader("From", CONFIG_SENDER_NAME)
    mailer.SetHeader("To", to)
    mailer.SetHeader("Cc", cc_to)
    mailer.SetHeader("Subject", subject)
    mailer.SetBody("text/html", body)
    // dialer := &gomail.Dialer{Host: CONFIG_SMTP_HOST, Port: CONFIG_SMTP_PORT}
    dialer := gomail.NewDialer(
        CONFIG_SMTP_HOST,
        CONFIG_SMTP_PORT,
        CONFIG_AUTH_EMAIL,
        CONFIG_AUTH_PASSWORD,
    )
    err := dialer.DialAndSend(mailer)
    if err != nil {
        log.Fatal(err.Error())
    }
    log.Println("Mail sent!")
}
I get these errors:
cannot use notifs (variable of type []entity.Notif) as string value in array or slice literal
and cannot use to (variable of type []string) as string value in argument to mailer.SetHeader.
I've added a loop, and it looks like this:
func (c *notifController) SendNotifEmail(context *gin.Context) {
    email_to := context.Query("sEmailByDiv")
    cc_to := context.Query("cc_to")
    subject := context.Query("subject")
    body := context.Query("body")
    // file := context.Query("file")
    notifs := c.notifService.EmailByDiv(email_to)
    to := []string{notifs}
    mailer := gomail.NewMessage()
    addresses := make([]string, len(to))
    for i, recipient := range to {
        addresses[i] = mailer.FormatAddress(recipient, "")
    }
    mailer.SetHeader("From", CONFIG_SENDER_NAME)
    mailer.SetHeader("To", addresses...)
    mailer.SetHeader("Cc", cc_to)
    mailer.SetHeader("Subject", subject)
    mailer.SetBody("text/html", body)
    // mailer.Attach(file)
    // dialer := &gomail.Dialer{Host: CONFIG_SMTP_HOST, Port: CONFIG_SMTP_PORT}
    dialer := gomail.NewDialer(
        CONFIG_SMTP_HOST,
        CONFIG_SMTP_PORT,
        CONFIG_AUTH_EMAIL,
        CONFIG_AUTH_PASSWORD,
    )
    err := dialer.DialAndSend(mailer)
    if err != nil {
        log.Fatal(err.Error())
    }
    log.Println("Mail sent!")
}
and I still get this error:
cannot use notifs (variable of type []entity.Notif) as string value in array or slice literal
The entity.Notif type, from entity/notif.go, is:
package entity

type Notif struct {
    Email string `gorm:"type:varchar(255)" json:"email"`
}
Write a loop to extract the email addresses from notifs as a slice of strings:
addresses := make([]string, len(notifs))
for i, notif := range notifs {
    addresses[i] = notif.Email
}
Set the header using the slice:
mailer.SetHeader("To", addresses...)

Safely interrupt CSV read and write

I am processing CSV files, and when I interrupt the process, I want to store the unprocessed data in another file.
This is what I've done:
csvFile, err := os.Open(csvPath)
r := csv.NewReader(csvFile)

sigc := make(chan os.Signal, 1)
signal.Notify(sigc,
    syscall.SIGHUP,
    syscall.SIGINT,
    syscall.SIGTERM,
    syscall.SIGQUIT)

go func() {
    <-sigc
    savePending(r)
}()

for {
    record, err := r.Read()
    if err == io.EOF {
        break
    }
    if err != nil {
        log.Println(record, err)
        continue
    }
    doSomethingWithRecord(record)
}
The savePending function:
func savePending(r *csv.Reader) {
    pendingFileName := fmt.Sprintf("%s_pending.csv", fileBaseName)
    csvPendingPath := path.Join(dirname, pendingFileName)
    pendingFile, err := os.Create(csvPendingPath)
    if err != nil {
        log.Fatalln("Couldn't open the csv file", csvPendingPath, err)
    }
    defer pendingFile.Close()
    pendR := csv.NewWriter(pendingFile)
    records, err := r.ReadAll()
    if err == io.EOF {
        log.Println("no pending records")
    }
    err = pendR.WriteAll(records)
    if err != nil {
        log.Println("error writing pending file")
    }
}
But when I run the code and then interrupt the script by pressing CTRL+C, I always get a panic:
panic: runtime error: slice bounds out of range [:7887] with capacity 4096

goroutine 82 [running]:
bufio.(*Reader).ReadSlice(0xc0000c2ea0, 0x105930a, 0x88, 0x90, 0xc00090cab0, 0x0, 0x0)
    /usr/local/Cellar/go/1.13.3/libexec/src/bufio/bufio.go:334 +0x232
encoding/csv.(*Reader).readLine(0xc00015c1b0, 0x9, 0x9, 0xc00090cab0, 0xc00090f680, 0x20e)
    /usr/local/Cellar/go/1.13.3/libexec/src/encoding/csv/reader.go:218 +0x49
encoding/csv.(*Reader).readRecord(0xc00015c1b0, 0x0, 0x0, 0x0, 0xc00090cab0, 0x9, 0x9, 0x0, 0x0)
    /usr/local/Cellar/go/1.13.3/libexec/src/encoding/csv/reader.go:266 +0x115
encoding/csv.(*Reader).ReadAll(0xc00015c1b0, 0xc0005af2c0, 0x1000, 0xc0006fc000, 0xc0001da608, 0x0)
    /usr/local/Cellar/go/1.13.3/libexec/src/encoding/csv/reader.go:202 +0x74
main.savePending(0xc00015c1b0, 0x0, 0x0, 0x0)
What could be the issue?
When the signal fires, savePending starts reading from the csv.Reader while the main goroutine is still reading from it, and csv.Reader is not safe for concurrent use.
How about aborting the for loop on <-sigc and saving the rest from the same goroutine:
csvFile, err := os.Open(csvPath)
r := csv.NewReader(csvFile)

sigc := make(chan os.Signal, 1)
signal.Notify(sigc,
    syscall.SIGHUP,
    syscall.SIGINT,
    syscall.SIGTERM,
    syscall.SIGQUIT)

for {
    select {
    case <-sigc:
        savePending(r)
        return
    default:
    }
    record, err := r.Read()
    if err == io.EOF {
        break
    }
    if err != nil {
        log.Println(record, err)
        continue
    }
    doSomethingWithRecord(record)
}
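savePending can stay as it is, since it now runs on the same goroutine that owns the reader. One detail worth fixing while you're there: csv.Reader.ReadAll never returns io.EOF — it reads until end of input and reports that as a nil error — so the "no pending records" branch never triggers; check the length of the result instead. A sketch, keeping fileBaseName and dirname from the question:

// imports assumed: encoding/csv, fmt, log, os, path
func savePending(r *csv.Reader) {
    pendingFileName := fmt.Sprintf("%s_pending.csv", fileBaseName)
    csvPendingPath := path.Join(dirname, pendingFileName)
    pendingFile, err := os.Create(csvPendingPath)
    if err != nil {
        log.Fatalln("Couldn't create the csv file", csvPendingPath, err)
    }
    defer pendingFile.Close()

    records, err := r.ReadAll() // drains whatever the loop hasn't consumed; EOF is not an error here
    if err != nil {
        log.Println("error reading remaining records:", err)
    }
    if len(records) == 0 {
        log.Println("no pending records")
        return
    }

    w := csv.NewWriter(pendingFile)
    if err := w.WriteAll(records); err != nil { // WriteAll flushes before returning
        log.Println("error writing pending file:", err)
    }
}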

SQL result to JSON as fast as possible

I'm trying to transform the result of Go's built-in sql package to JSON. I'm using goroutines for that, but I ran into problems.
The base problem:
There is a really big database with around 200k users, and I have to serve them through TCP sockets in a microservice-based system. Getting the users from the database takes 20 ms, but transforming that bunch of data to JSON takes 10 seconds with the current solution. This is why I want to use goroutines.
Solution with Goroutines:
func getJSON(rows *sql.Rows, cnf configure.Config) ([]byte, error) {
    log := logan.Log{
        Cnf: cnf,
    }
    cols, _ := rows.Columns()
    defer rows.Close()
    done := make(chan struct{})
    go func() {
        defer close(done)
        for result := range resultChannel {
            results = append(
                results,
                result,
            )
        }
    }()
    wg.Add(1)
    go func() {
        for rows.Next() {
            wg.Add(1)
            go handleSQLRow(cols, rows)
        }
        wg.Done()
    }()
    go func() {
        wg.Wait()
        defer close(resultChannel)
    }()
    <-done
    s, err := json.Marshal(results)
    results = []resultContainer{}
    if err != nil {
        log.Context(1).Error(err)
    }
    rows.Close()
    return s, nil
}

func handleSQLRow(cols []string, rows *sql.Rows) {
    defer wg.Done()
    result := make(map[string]string, len(cols))
    fmt.Println("asd -> " + strconv.Itoa(counter))
    counter++
    rawResult := make([][]byte, len(cols))
    dest := make([]interface{}, len(cols))
    for i := range rawResult {
        dest[i] = &rawResult[i]
    }
    rows.Scan(dest...) // GET PANIC
    for i, raw := range rawResult {
        if raw == nil {
            result[cols[i]] = ""
        } else {
            fmt.Println(string(raw))
            result[cols[i]] = string(raw)
        }
    }
    resultChannel <- result
}
This solution gives me a panic with the following message:
panic: runtime error: invalid memory address or nil pointer dereference
[signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x45974c]

goroutine 408 [running]:
panic(0x7ca140, 0xc420010150)
    /usr/lib/golang/src/runtime/panic.go:500 +0x1a1
database/sql.convertAssign(0x793960, 0xc420529210, 0x7a5240, 0x0, 0x0, 0x0)
    /usr/lib/golang/src/database/sql/convert.go:88 +0x1ef1
database/sql.(*Rows).Scan(0xc4203e4060, 0xc42021fb00, 0x44, 0x44, 0x44, 0x44)
    /usr/lib/golang/src/database/sql/sql.go:1850 +0xc2
github.com/PumpkinSeed/zerodb/operations.handleSQLRow(0xc420402000, 0x44, 0x44, 0xc4203e4060)
    /home/loow/gopath/src/github.com/PumpkinSeed/zerodb/operations/operations.go:290 +0x19c
created by github.com/PumpkinSeed/zerodb/operations.getJSON.func2
    /home/loow/gopath/src/github.com/PumpkinSeed/zerodb/operations/operations.go:258 +0x91
exit status 2
The current solution, which works but takes too much time:
func getJSON(rows *sql.Rows, cnf configure.Config) ([]byte, error) {
    log := logan.Log{
        Cnf: cnf,
    }
    var results []resultContainer
    cols, _ := rows.Columns()
    rawResult := make([][]byte, len(cols))
    dest := make([]interface{}, len(cols))
    for i := range rawResult {
        dest[i] = &rawResult[i]
    }
    defer rows.Close()
    for rows.Next() {
        result := make(map[string]string, len(cols))
        rows.Scan(dest...)
        for i, raw := range rawResult {
            if raw == nil {
                result[cols[i]] = ""
            } else {
                result[cols[i]] = string(raw)
            }
        }
        results = append(results, result)
    }
    s, err := json.Marshal(results)
    if err != nil {
        log.Context(1).Error(err)
    }
    rows.Close()
    return s, nil
}
Question:
Why does the goroutine solution give me an error? It is not an obvious panic, because the first ~200 goroutines run properly.
UPDATE
Performance test for the original working solution:
INFO[0020] setup taken -> 3.149124658s file=operations.go func=operations.getJSON line=260 service="Database manager" ts="2017-04-02 19:45:27.132881211 +0100 BST"
INFO[0025] toJSON taken -> 5.317647046s file=operations.go func=operations.getJSON line=263 service="Database manager" ts="2017-04-02 19:45:32.450551417 +0100 BST"
The SQL-to-map step takes 3 seconds and the JSON marshaling 5 seconds.
Goroutines won't improve performance on CPU-bound operations like JSON marshaling, and they are also the source of the panic here: an *sql.Rows value is not safe for concurrent use, so each row must be scanned before the next call to rows.Next, not in a separate goroutine. What you need is a more efficient JSON marshaler. There are some available, although I haven't used any. A simple Google search for 'faster JSON marshaling' will turn up many results. A popular one is ffjson. I suggest starting there.
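For illustration, here is a minimal sketch of swapping in ffjson (github.com/pquerna/ffjson) — an assumption about that library's usage, not something from the question. Note that ffjson's gains come from generating a MarshalJSON method for a concrete struct type with its code generator; for a []map[string]string like the one in the question it falls back to encoding/json, so mapping rows into a struct first is where the win would come from:

package main

import (
    "fmt"
    "log"

    "github.com/pquerna/ffjson/ffjson"
)

func main() {
    // ffjson.Marshal is a drop-in replacement for encoding/json.Marshal.
    // To actually benefit, define a row struct and run `ffjson file.go`
    // so a specialized marshaler is generated for it.
    results := []map[string]string{
        {"id": "1", "name": "alice"},
    }
    s, err := ffjson.Marshal(results)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(s))
}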

Delphi XE5 JSON nested object

I am trying to generate JSON; the data source is a Firebird database.
I have a schema that maps each DB field to a path, like this:
schema := TDictionary<string, string>.Create;
schema.Add('DBTableName1', 'nest1.valueKeyName1');
schema.Add('DBTableName2', 'nest1.valueKeyName2');
schema.Add('DBTableName3', 'nest2.valueKeyName1');
schema.Add('DBTableName4', 'nest3.valueKeyName1');
schema.Add('DBTableName5', 'nest3.valueKeyName2');
schema.Add('DBTableName6', 'nest4.valueKeyName1');
How do I write a function that follows such a path and creates the nested objects, to produce JSON like this?
{
  "nest1": {
    "valueKeyName1": "DBTableValue1",
    "valueKeyName2": "DBTableValue2"
  },
  "nest2": {
    "valueKeyName1": "DBTableValue3"
  },
  "nest5": {
    "nest6": {
      "key1": "value1",
      "key2": "value2"
    },
    "nest7": {}
  }
}
In JavaScript I can do something like:
if (object.hasOwnProperty('key') == false) object['key'] = {};
object = object['key'];
But in Delphi I have a problem, and I don't know how to go deeper:
function TKlient.wprowadzWartoscDoJSON(wartosc: string; JSON: TJSONObject; sciezka: TStringList): TJSONObject;
var
  i: integer;
  obiekt: TJSONObject;
  para: TJSONPair;
  zagniezdzen: integer;
begin
  zagniezdzen := sciezka.Count - 2;
  obiekt := JSON;
  para := obiekt.Get(sciezka[i]);
  for i := 1 to zagniezdzen do
  begin
    if obiekt.Get(sciezka[i]) = nil then
      obiekt.AddPair(sciezka[i], TJSONObject.Create)
    else
      obiekt := obiekt.Get(sciezka[i]);
  end;
  obiekt.AddPair(sciezka[sciezka.Count - 1], wartosc);
  result := obiekt;
end;
Here I've used a TDictionary<string, TJSONObject> to keep track of the object paths.
var
  schema: TDictionary<string, string>;
  pathSchema: TDictionary<string {path to every object}, TJSONObject>;
  pair: TPair<string, string>;
  values: TStringDynArray;
  jsonObj,
  jsonChildObj,
  jsonParentObj: TJSONObject;
  path: string;
  i: Integer;
begin
  schema := TDictionary<string, string>.Create;
  try
    schema.Add('DBTableName1', 'nest1.valueKeyName1');
    schema.Add('DBTableName2', 'nest1.valueKeyName2');
    schema.Add('DBTableName3', 'nest2.valueKeyName1');
    schema.Add('DBTableName4', 'nest3.valueKeyName1');
    schema.Add('DBTableName5', 'nest3.valueKeyName2');
    schema.Add('DBTableName6', 'nest4.valueKeyName1');
    schema.Add('value1', 'nest5.nest6.key1');
    schema.Add('value2', 'nest5.nest6.key2');
    pathSchema := TDictionary<string, TJSONObject>.Create;
    try
      jsonObj := TJSONObject.Create;
      try
        for pair in schema do begin
          values := SplitString(pair.Value, '.');
          path := '';
          jsonParentObj := jsonObj;
          for i := Low(values) to High(values) - 1 do begin
            if i > 0 then
              path := path + '.';
            path := path + values[i];
            if pathSchema.ContainsKey(path) then
              jsonChildObj := pathSchema[path]
            else begin
              jsonChildObj := TJSONObject.Create;
              jsonParentObj.AddPair(TJSONPair.Create(values[i], jsonChildObj));
              pathSchema.Add(path, jsonChildObj);
            end;
            jsonParentObj := jsonChildObj;
          end;
          jsonChildObj.AddPair(TJSONPair.Create(values[High(values)], pair.Key));
        end;
        WriteLn(jsonObj.ToString);
      finally
        jsonObj.Free;
      end;
    finally
      pathSchema.Free;
    end;
  finally
    schema.Free;
  end;
  ReadLn;
end.
The above prints the following:
{
  "nest4": {
    "valueKeyName1": "DBTableName6"
  },
  "nest2": {
    "valueKeyName1": "DBTableName3"
  },
  "nest5": {
    "nest6": {
      "key1": "value1",
      "key2": "value2"
    }
  },
  "nest1": {
    "valueKeyName1": "DBTableName1",
    "valueKeyName2": "DBTableName2"
  },
  "nest3": {
    "valueKeyName1": "DBTableName4",
    "valueKeyName2": "DBTableName5"
  }
}