Showing the val_acc in TensorFlow.js - JSON

So I'm new to TensorFlow.js and I have been trying to practice with an exercise.
I have a JSON dataset and I want to show the val_acc, but all the examples that I could find use .csv datasets. I would appreciate it if anyone could send me a link to such an example so I can understand it better, or explain it here in the answers.
Here is what I'm doing:
const trainingData = tf.tensor1d(horses.map(item =>
  findAvg(filterOdds(item.prices))
));
const testingData = tf.tensor1d(horsesTesting.map(item =>
  findAvg(filterOdds(item.prices))
));
const outputData = tf.tensor2d(horses.map(item => [
  item.position === 1 ? 1 : 0,
  item.position !== 1 ? 1 : 0,
  // item.position != 1 ? 1 : 0,
]));

model.fit(trainingData, outputData, {epochs: 10})
  .then((history) => {
    // console.log(history)
    model.predict(testingData).print();
  });
Output: 4404ms 184us/step - acc=0.890 loss=0.0866 precision=0.00
What I found in the examples:
const trainingUrl1 = 'wdbc-train.csv';

// Take a look at the 'wdbc-train.csv' file and specify the column
// that should be treated as the label in the space below.
// HINT: Remember that you are trying to build a classifier that
// can predict from the data whether the diagnosis is malignant or benign.
const trainingData = tf.data.csv(trainingUrl1, {
  columnConfigs: {
    diagnosis: {
      isLabel: true
    }
  }
});

const convertedTrainingData = // YOUR CODE HERE
  trainingData.map(({xs, ys}) => {
    return { xs: Object.values(xs), ys: Object.values(ys) };
  }).batch(10);

const testingUrl2 = 'wdbc-test.csv';
const testingData = tf.data.csv(testingUrl2, {
  columnConfigs: {
    diagnosis: {
      isLabel: true
    }
  }
});

const convertedTestingData = // YOUR CODE HERE
  testingData.map(({xs, ys}) => {
    return { xs: Object.values(xs), ys: Object.values(ys) };
  }).batch(10);

await model.fitDataset(convertedTrainingData, {
  epochs: 35,
  validationData: convertedTestingData,
  callbacks: {
    onEpochEnd: async (epoch, logs) => {
      console.info("Epoch: " + epoch + " Loss: " + logs.loss + " Accuracy: " + logs.acc + " val_acc " + logs.val_acc);
    }
  }
});
Output:
Epoch: 1 Loss: 0.08664028346538544 Accuracy: 0.784 val_acc 0.919

There are two options for loading the dataset, as already discussed in this answer:
convert the JSON to CSV and then use the csvDataSet loader
create a custom loader using the JSON. The csvDataSet loader here can help you get started
Regarding acc and val_acc, the first is computed on the training data and the second on the validation data (in the example above, the testing set).
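For completeness, here is a minimal, untested sketch applying the second point to the question's own tensors: since the JSON has already been mapped to tensors, val_acc can also be obtained without tf.data by passing validationData to model.fit. The outputTestingData labels built from horsesTesting are an assumption (the question doesn't show them), and the model is assumed to be compiled with an accuracy metric.
const outputTestingData = tf.tensor2d(horsesTesting.map(item => [
  item.position === 1 ? 1 : 0,  // assumes testing items also carry `position`
  item.position !== 1 ? 1 : 0,
]));

await model.fit(trainingData, outputData, {
  epochs: 10,
  // validation metrics (val_loss, val_acc) are computed on this pair
  validationData: [testingData, outputTestingData],
  callbacks: {
    onEpochEnd: (epoch, logs) =>
      console.info(`Epoch ${epoch} loss=${logs.loss} acc=${logs.acc} val_acc=${logs.val_acc}`)
  }
});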

Accessing JSON returning undefined in React JS

I am trying to grab some JSON data to display in the UI, but when I iterate over it using the map method I keep getting undefined. Any help will be really appreciated. Here is a link on CodeSandbox: https://codesandbox.io/s/late-wood-kx8w2?file=/src/App.js
This line
const [items, setItem] = useState(Object.keys(trees));
is passing the tree keys to the View function, not the actual data. I believe that you meant to pass the data. Your code passes 'name' and 'children' as {items}, which then get displayed by View.js.
The following code shows you how you can parse the tree and get the names and the values. It's incomplete, but it should give you a start on how to do the traversal.
import React, { useState } from "react";

export default function Start() {
  const trees = {
    name: "root",
    children: [
      {
        name: "child1",
        children: [
          { name: "child1-child1", data: "c1-c1 Hello" },
          { name: "child1-child2", data: "c1-c2 JS" }
        ]
      },
      { name: "child2", data: "c2 World" }
    ]
  };

  const treesCopy = trees;

  function Traverse(tree) {
    var treesCopy = tree;
    var str = [];
    for (var prop in treesCopy) {
      console.log(prop);
      if (prop == "children") {
        str = str + "name: " + prop + ",";
        treesCopy = treesCopy[prop][0];
        // console.log('New tree: ', treesCopy);
        return str + Traverse(treesCopy);
      }
      str = str + "name: " + prop + " value: " + treesCopy[prop] + ",";
    }
    return str;
  }

  const str = Traverse(treesCopy);

  return (
    <>
      {str ? str.split(",").map(place => <p> {place} </p>) : ""}
    </>
  );
}
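Since Traverse above only descends into children[0] and returns early, here is a hedged sketch of a complete traversal that visits every node; the function name and output format are my own and purely illustrative:
// Collects a label for every node in the tree, including all children.
function collectNames(node, out = []) {
  out.push(node.data ? node.name + " (" + node.data + ")" : node.name);
  (node.children || []).forEach(child => collectNames(child, out));
  return out;
}

// collectNames(trees) ->
// ["root", "child1", "child1-child1 (c1-c1 Hello)",
//  "child1-child2 (c1-c2 JS)", "child2 (c2 World)"]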

Error while saving JSON data to Firestore collection using cloud function

I am trying to insert an array in my Firebase collection from a cloud function. I need to have multiple lines in one document, so for each line I am inserting an array. Please check my attached screenshot where you can see Line0; in the same way I need to have Line1, Line2, Line3, ..., Line n in the same document.
For Line0 I am passing the array from code like below, and it's working fine.
admin.firestore().collection("qbContestWinners").add(
{
'cmpientryid': context.params.processId,
'qbid': '',
'qbsyncdate': '',
'qbsyncstatus': 'pending',
'Line0':
{
id: "0",
description: 'PRIZE AMOUNT',
amount: 1000,
accountrefid: contestresultData.qbcontestid,
accountrefname: contestresultData.qbcontestname,
contestresultId: context.params.processId,
},
})
When I am looping through data I am getting from another table, I am not able to generate proper JSON to insert.
Below is how I am looping and creating the JSON after getting the data from the other table.
i = 1;
admin.firestore().collection("results").where('cid', '==', 'LKRRk2XXXXXXXX')
  .orderBy("rank", "asc").get().then(snapshots => {
    snapshots.forEach(doc => {
      const contestresultId = doc.id;
      const prizeAmount = doc.data().prizeamt;
      const userId = doc.data().userid;
      const lineNum = "Line" + i;
      console.log("new line numner is: ", lineNum);
      console.log(`lineNum? ${lineNum}`);
      const linetxt = "Line" + String(i);
      const insertData = "{" + linetxt +
        ":{id:'" + i +
        "', description: 'PRIZE AMOUNT'" +
        ", amount:" + prizeAmount + "," +
        "accountrefid:" + contestresultData.qbcontestid + "," +
        "accountrefname:'" + contestresultData.qbcontestname + "'," +
        "contestresultId:'" + contestresultId + "'," +
        "},}"
      const finalInsert = JSON.stringify(insertData);
      const finalJSON = JSON.parse(finalInsert);
      admin.firestore().collection("qbContestWinners").doc(mainID).set(
        finalInsert.toJSON(),
        { merge: true });
      i = i + 1;
    });
  });
Using this code I am getting the error:
finalInsert.toJSON is not a function
Actually, the Line0 field is a map and not an Array, see this doc for more details.
So, if you want to create similar fields (Line1, Line2, ...), you simply need to pass a JavaScript Object to the set() method, as follows:
snapshots.forEach(doc => {
  const contestresultId = doc.id;
  const prizeAmount = doc.data().prizeamt;
  const userId = doc.data().userid;
  const lineNum = "Line" + i;
  console.log("new line numner is: ", lineNum);
  console.log(`lineNum? ${lineNum}`);
  const lineObj = {
    id: i,
    description: 'PRIZE AMOUNT',
    accountrefid: contestresultData.qbcontestid, // Not sure if you have defined contestresultData somewhere...
    //...
  };
  const dataObj = {};
  dataObj["Line" + i] = lineObj; // See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Property_accessors
  admin.firestore().collection("qbContestWinners").doc(mainID).set(dataObj, { merge: true });
  i = i + 1;
});
HOWEVER, note that you must return a promise that resolves when all the asynchronous work in your Cloud Function is complete (i.e. the calls to the Firestore set() method).
This is explained in the official Firebase video series; watch in particular the three videos titled "Learn JavaScript Promises".
Since you are calling the set() method several times in a forEach loop, you need to use Promise.all() in order to return a Promise when all these parallel calls to the set() method are completed.
The following should do the trick:
let i = 1;
return admin.firestore().collection("results") // <-- See the return here
  .where('cid', '==', 'LKRRk2XXXXXXXX')
  .orderBy("rank", "asc").get()
  .then(snapshots => {
    const promises = [];
    snapshots.forEach(doc => {
      const contestresultId = doc.id;
      const prizeAmount = doc.data().prizeamt;
      const userId = doc.data().userid;
      const lineNum = "Line" + i;
      const lineObj = {
        id: i,
        description: 'PRIZE AMOUNT',
        accountrefid: contestresultData.qbcontestid,
        //...
      };
      const dataObj = {};
      dataObj[lineNum] = lineObj;
      promises.push(admin.firestore().collection("qbContestWinners").doc(mainID).set(dataObj, { merge: true }));
      i = i + 1;
    });
    return Promise.all(promises); // <-- See the return here
  });
A last remark: if mainID keeps the same value in the snapshots.forEach loop, you may adopt a totally different approach, consisting of building a single JavaScript object with several LineXX properties and calling the set() method only once, as sketched below. Since you didn't share the entire code of your Cloud Function, it is impossible to say whether this approach should be used or not.
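A minimal sketch of that alternative, assuming mainID is indeed constant across the loop (field names follow the snippets above):
// Accumulate all LineXX fields into one object, then write once.
return admin.firestore().collection("results")
  .where('cid', '==', 'LKRRk2XXXXXXXX')
  .orderBy("rank", "asc").get()
  .then(snapshots => {
    const dataObj = {};
    let i = 1;
    snapshots.forEach(doc => {
      dataObj["Line" + i] = {
        id: i,
        description: 'PRIZE AMOUNT',
        amount: doc.data().prizeamt,
        contestresultId: doc.id,
      };
      i = i + 1;
    });
    // A single set() call replaces the N parallel ones, so no Promise.all is needed.
    return admin.firestore().collection("qbContestWinners")
      .doc(mainID).set(dataObj, { merge: true });
  });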
First, to the error:
You stringify and then parse a string. The problem here seems to be the order: you have to parse a "String" and stringify an "Object". The result won't have a toJSON method either, but you can just stringify the object to get JSON.
The second thing:
Why do you use a string to create your object? You shouldn't. Just use an object.
The third thing:
You should not use objects as arrays. Not even in Firebase.
Just use arrays. Example:
[Line0Object, Line1Object, ...]
Hint: If your array can work as its own collection, just use a subcollection. That might fit your needs.
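A hedged sketch of that array shape (field names illustrative only):
// Store the lines as one array field instead of Line0, Line1, ... map fields.
const lines = snapshots.docs.map((doc, index) => ({
  id: index,
  description: 'PRIZE AMOUNT',
  amount: doc.data().prizeamt,
}));

admin.firestore().collection("qbContestWinners")
  .doc(mainID)
  .set({ lines: lines }, { merge: true });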

Flutter put request for Database

I am working on a Flutter app. We have a PSQL database and a Node server in the background. On the Flutter app I am displaying some geometry, which I fetch from the database successfully. Now, after a modification of the geometry, such as lines, I want to be able to update the database via a PUT request.
The server goes like this:
app.put('/api/shape/:id', async (req, res) => {
  let answer;
  if (req.body.shape_type == "line") {
    answer = await db.db.modify_line(req.params.id, req.body.info_shape);
  }
  res.send(answer);
});
And the db.js file goes like this:
modify_line : async function(id_shape, info_shape) {
  console.log(info_shape);
  const result = await send_query("UPDATE line SET line = $2 WHERE id_shape = $1", [id_shape, info_shape]);
  return result;
},
On the Flutter app I do this:
_makeUpdateRequest() async {
  var url = globals.URL + 'api/shape/' + globals.selectedShapeID.toString();
  Map data;
  if (globals.selectedType == globals.Type.line) {
    String lseg = "(" + globals.pLines[globals.selectedLineIndex].p1.dx.toString() + "," +
        globals.pLines[globals.selectedLineIndex].p1.dy.toString() + "," +
        globals.pLines[globals.selectedLineIndex].p2.dx.toString() + "," +
        globals.pLines[globals.selectedLineIndex].p2.dy.toString() + ")";
    data = {
      'shape_type': 'line',
      'info_shape': {
        'id_shape': globals.selectedShapeID.toString(),
        'line': lseg,
      }
    };
  }
  http.Response response;
  try {
    // encode Map to JSON
    print("encode Map to JSON");
    var body = json.encode(data);
    print(body);
    response = await http.put(url,
        headers: {"Content-Type": "application/json"},
        body: body
    ).catchError((error) => print(error.toString()));
  } catch (e) {
    print(e);
  }
  return response;
}
Database "line" table contains a "shapeID" and "lseg" information on each row.
Currently I am getting an error when I try this code:
{ id_shape: '619',
line: '(19.5,100.6,20.5,50.9)' }
fail____error: invalid input syntax for type lseg: "{"id_shape":"619","line":"(-19.5,100.6,20.5,50.9)"}"
How shall I shape my lseg json?
Thanks
Well, it looks to me like you are passing the whole info_shape object to the SQL query, which looks like this, as per your console.log:
{
  id_shape: '619',
  line: '(19.5,100.6,20.5,50.9)'
}
Obviously, this is invalid for PostgreSQL.
I would say that your backend code should be more like this:
modify_line : async function(id_shape, info_shape) {
  console.log(info_shape);
  const result = await send_query(
    "UPDATE line SET line = $2 WHERE id_shape = $1",
    // Reference the "line" sub-object
    [id_shape, info_shape.line],
  );
  return result;
},
You should also pay attention to the geometric types format for lines; an lseg accepts the following input forms:
[ ( x1 , y1 ) , ( x2 , y2 ) ]
( ( x1 , y1 ) , ( x2 , y2 ) )
( x1 , y1 ) , ( x2 , y2 )
x1 , y1 , x2 , y2
I'm not 100% sure by reading this that your format (with leading and trailing parentheses) is correct.
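For instance (my own illustration, not from the question), the failing request's coordinates written in the first accepted form would be:
[ ( 19.5 , 100.6 ) , ( 20.5 , 50.9 ) ]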
As the issue is now solved, the following is the answer.
db.js is like this:
modify_line : async function(id_shape, info_shape) {
  const result = await send_query("UPDATE line SET line = $2 WHERE id_shape = $1", [info_shape['id_shape'], info_shape['line']]);
  return result;
},
and the Flutter app is like this:
_makeUpdateRequest() async {
  var url = globals.URL + 'api/shape/' + globals.selectedShapeID.toString();
  Map data;
  if (globals.selectedType == globals.Type.line) {
    String lseg =
        "[" + globals.pLines[globals.selectedLineIndex].p1.dx.toString() + "," +
        globals.pLines[globals.selectedLineIndex].p1.dy.toString() + "," +
        globals.pLines[globals.selectedLineIndex].p2.dx.toString() + "," +
        globals.pLines[globals.selectedLineIndex].p2.dy.toString() + "]";
    data = {
      'shape_type': 'line',
      'info_shape': {
        'id_shape': globals.selectedShapeID.toString(),
        'line': lseg,
      }
    };
  }
  http.Response response;
  try {
    // encode Map to JSON
    print("encode Map to JSON");
    var body = json.encode(data);
    print(body);
    response = await http.put(url,
        headers: {"Content-Type": "application/json"},
        body: body
    ).catchError((error) => print(error.toString()));
  } catch (e) {
    print(e);
  }
  return response;
}

Loss and mean squared error values not showing during training. Not getting a predicted line

Currently we are trying to input Uber data that includes time of day and ride fare into our TensorFlow.js model. We noticed that when we run the model in the browser, the points show up on our scatterplot, but during training the loss and mean squared error values are not showing up and, most importantly, our model is not displaying a prediction line.
var userData = [
  {
    "City": "San Francisco",
    "Product_Type": "UberEATS Marketplace",
    "Trip_or_Order_Status": "COMPLETED",
    "Request_Time": "2019-06-16 04:10:44 +0000 UTC",
    "Begin_Trip_Time": "2019-06-16 04:44:40 +0000 UTC",
    "Begin_Trip_Lat": "37.7352602",
    "Begin_Trip_Lng": "-122.4203465",
    "Begin_Trip_Address": "",
    "Dropoff_Time": "2019-06-16 04:44:40 +0000 UTC",
    "Dropoff_Lat": "37.7352602",
    "Dropoff_Lng": "-122.4203465",
    "Dropoff_Address": "",
    "Distance_miles": "2.04",
    "Fare_Amount": "32.34",
    "Fare_Currency": "USD"
  }...]
async function getData() {
  const carsData = await userData;
  // Here we map out the values for each car and filter out the list items that do not have a day or a pay value
  const cleaned = carsData.map(car => ({
      timeInMinutes: calculateMins(car.Request_Time),
      pay_rate: normalizeUberPrice(car.Distance_miles, car.Fare_Amount),
    }))
    .filter(car => (car.day != null && car.pay != null));
  return cleaned;
}

async function run() {
  const data = await getData();
  const values = data.map(d => ({
    x: d.day,
    y: d.pay,
  }));

  tfvis.render.scatterplot(
    { name: 'Horsepower v MPG' },
    { values },
    {
      xAxisDomain: [0, 1600],
      yAxisDomain: [0, 10],
      xLabel: 'Day',
      yLabel: 'Pay',
      height: 300
    }
  );

  const model = createModel();
  tfvis.show.modelSummary({ name: 'Model Summary' }, model);

  // Convert the data to a form we can use for training.
  const tensorData = convertToTensor(data);
  console.log(tensorData);
  const { inputs, labels } = tensorData;

  // Train the model
  await trainModel(model, inputs, labels);
  console.log('Done Training');

  testModel(model, data, tensorData);
}
function createModel() {
  const model = tf.sequential();
  model.add(tf.layers.dense({ inputShape: [1], units: 25, useBias: true }));
  model.add(tf.layers.dense({ units: 50, activation: "sigmoid" }));
  model.add(tf.layers.dense({ units: 1, useBias: true }));
  return model;
}

function convertToTensor(data) {
  return tf.tidy(() => {
    tf.util.shuffle(data);
    const inputs = data.map(d => d.pay);
    const labels = data.map(d => d.day);
    const inputTensor = tf.tensor2d(inputs, [inputs.length, 1]);
    const labelTensor = tf.tensor2d(labels, [labels.length, 1]);

    // Step 3. Normalize the data to the range 0 - 1 using min-max scaling
    const inputMax = inputTensor.max();
    const inputMin = inputTensor.min();
    const labelMax = labelTensor.max();
    const labelMin = labelTensor.min();
    const normalizedInputs = inputTensor.sub(inputMin).div(inputMax.sub(inputMin));
    const normalizedLabels = labelTensor.sub(labelMin).div(labelMax.sub(labelMin));

    return {
      inputs: normalizedInputs,
      labels: normalizedLabels,
      // Return the min/max bounds so we can use them later.
      inputMax,
      inputMin,
      labelMax,
      labelMin,
    };
  });
}
async function trainModel(model, inputs, labels) {
  model.compile({
    optimizer: tf.train.adam(),
    loss: tf.losses.meanSquaredError,
    metrics: ['mse'],
  });

  const batchSize = 32;
  const epochs = 30;

  return await model.fit(inputs, labels, {
    batchSize,
    epochs,
    callbacks: tfvis.show.fitCallbacks(
      { name: 'Training Performance' },
      ['loss', 'mse'],
      {
        xAxisDomain: [0, 100],
        yAxisDomain: [0, 1],
        height: 200,
        callbacks: ['onEpochEnd']
      }
      // ',onBatchEnd'
    ),
    history: tfvis.show.history(
      { name: 'History' },
      history,
      ["loss", "mse"])
  });
}
function testModel(model, inputData, normalizationData) {
  const { inputMax, inputMin, labelMin, labelMax } = normalizationData;

  const [xs, preds] = tf.tidy(() => {
    const xs = tf.linspace(0, 1, 100);
    const preds = model.predict(xs.reshape([100, 1]));
    const unNormXs = xs
      .mul(inputMax.sub(inputMin))
      .add(inputMin);
    const unNormPreds = preds
      .mul(labelMax.sub(labelMin))
      .add(labelMin);
    return [unNormXs.dataSync(), unNormPreds.dataSync()];
  });

  const predictedPoints = Array.from(xs).map((val, i) => {
    return { x: val, y: preds[i] };
  });

  const originalPoints = inputData.map(d => ({
    x: d.pay, y: d.day,
  }));

  console.log("ORIGINAL POINTS:");
  console.log(originalPoints);

  tfvis.render.scatterplot(
    { name: 'Model Predictions vs Original Data' },
    { values: [originalPoints, predictedPoints], series: ['original', 'predicted'] },
    {
      xAxisDomain: [0, 10],
      yAxisDomain: [0, 1600],
      xLabel: 'Horsepower',
      yLabel: 'MPG',
      height: 1000
    }
  );
}

document.addEventListener('DOMContentLoaded', run);
Basically we want to see a predicted line for our data but we're not getting anything back.
It worked when we used data like this:
var userData = [{
  day: 1,
  pay: 20
},...]
The data processing is not performed correctly. Thus, the values used for prediction contain NaN and Infinity. As a result, the error computed by model.fit is NaN and therefore cannot be displayed on the chart of tfjs-vis.
The filtering
.filter(car => (car.day != null && car.pay != null));
is not removing NaN and Infinity. Instead, this condition can be used:
.filter(car => isFinite(car.pay + car.day) && !isNaN(car.pay + car.day));
Though the NaN and Infinity values are found within car.day, here a general filter is applied over both car.pay and car.day (hence the addition) to make sure that these values will not appear anywhere in the cleaned data.
Here you can see how to display the loss; a minimal sketch follows.
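This is an untested sketch of wiring tfvis.show.fitCallbacks into model.fit so the loss and mse curves render at the end of every epoch; the names follow the question's code, and the surrounding setup is assumed unchanged:
// The tfvis callbacks must be passed inside the options object of model.fit.
async function trainModel(model, inputs, labels) {
  model.compile({
    optimizer: tf.train.adam(),
    loss: tf.losses.meanSquaredError,
    metrics: ['mse'],
  });

  return model.fit(inputs, labels, {
    batchSize: 32,
    epochs: 30,
    callbacks: tfvis.show.fitCallbacks(
      { name: 'Training Performance' },
      ['loss', 'mse'],
      { height: 200, callbacks: ['onEpochEnd'] }
    ),
  });
}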

d3 - reading JSON data instead of CSV file

I'm trying to read data into my calendar visualisation using JSON. At the moment it works great using a CSV file:
d3.csv("RSAtest.csv", function(csv) {
var data = d3.nest()
.key(function(d) { return d.date; })
.rollup(function(d) { return d[0].total; })
.map(csv);
rect.filter(function(d) { return d in data; })
.attr("class", function(d) { return "day q" + color(data[d]) +
"-9"; })
.select("title")
.text(function(d) { return d + ": " + data[d]; });
});
It reads the following CSV data:
date,total
2000-01-01,11
2000-01-02,13
...etc
Any pointers on how I can read the following JSON data instead:
{"2000-01-01":19,"2000-01-02":11......etc}
I tried the following, but it is not working for me (datareadCal.php spits out the JSON for me):
d3.json("datareadCal.php", function(json) {
var data = d3.nest()
.key(function(d) { return d.Key; })
.rollup(function(d) { return d[0].Value; })
.map(json);
thanks
You can use d3.entries() to turn an object literal into an array of key/value pairs:
var countsByDate = {'2000-01-01': 10, ...};
var dateCounts = d3.entries(countsByDate);
console.log(JSON.stringify(dateCounts[0])); // {"key": "2000-01-01", "value": 10}
One thing you'll notice, though, is that the resulting array isn't properly sorted. You can sort them by key ascending like so:
dateCounts = dateCounts.sort(function(a, b) {
  return d3.ascending(a.key, b.key);
});
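To tie this back to the calendar code: since the PHP endpoint already returns an object keyed by date, a hedged sketch (rect and color are assumed to exist exactly as in the question) can skip d3.nest entirely and use the object as the lookup map directly:
d3.json("datareadCal.php", function(json) {
  // json is already {"2000-01-01": 19, ...}, i.e. the same
  // date -> total lookup that d3.nest().map(csv) built from the CSV.
  var data = json;

  rect.filter(function(d) { return d in data; })
    .attr("class", function(d) { return "day q" + color(data[d]) + "-9"; })
    .select("title")
    .text(function(d) { return d + ": " + data[d]; });
});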
Turn your .json file into a .js file that is included in your html file. Inside your .js file have:
var countsByDate = {'2000-01-01':10,...};
Then you can reference countsByDate....no need to read from a file per se.
And you can read it with:
var data = d3.nest()
  .key(function(d) { return d.Key; })
  .entries(json);
As an aside, d3.js says it's better to set your JSON up as:
var countsByDate = [
  {Date: '2000-01-01', Total: '10'},
  {Date: '2000-01-02', Total: '11'},
];