Update data in database using Axios and VueJS

I have a program which gets an entry from a database, changes some of the fields in this object, and then sends it back with a PUT using Axios. My problem is that I don't know what to write in the PUT handler on the server side. When I try a PUT request whose handler only console.logs the values, I get the error:
"PayloadTooLargeError: Request entity too large".
selectedRoute contains the ID of the object to be sent back. chosenRoute contains the object.
Client-side:
onUpload(): void {
  console.log(this.chosenRoute);
  this.chosenRoute.name = this.routeName;
  this.chosenRoute.text.paragraphs[0] = this.routeDescription;
  this.chosenRoute.text.preamble;
  if (this.activity != "") this.chosenRoute.activity = this.activity;
  axios.put('http://localhost:8080/editRoute/' + this.selectedRoute, this.chosenRoute, {
    onUploadProgress: uploadEvent => {
      console.log('Upload Progress ' + Math.round((uploadEvent.loaded / uploadEvent.total) * 100) + " % ");
    }
  }).then(
    res => {
      console.log(res);
    });
}
Server-side:
app.put('/editRoute/:id', function(req, res) {
  console.log(req);
  console.log(res);
});
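Since the server side was left open, here is a minimal sketch of a handler that would receive this request, assuming Express 4.16+ with its built-in JSON body parser (the body-parser package takes the same options). "PayloadTooLargeError: Request entity too large" usually means the parser's default 100kb body limit was exceeded by the route object, so the limit is raised explicitly; the '5mb' value is only an assumed example and should be sized to your data.
const express = require('express');
const app = express();

// Raise the JSON body limit so larger route objects are accepted.
app.use(express.json({ limit: '5mb' }));

app.put('/editRoute/:id', function(req, res) {
  console.log(req.params.id); // the selectedRoute id from the URL
  console.log(req.body);      // the chosenRoute object sent by axios
  // ...update the matching database entry here...
  res.sendStatus(200);
});

app.listen(8080);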

Related

Firebase Updating User Data With Custom Fields After Creating User

I want to update the newly created User's data. The returned JSON is:
{
  "user": {
    "uid": "test123",
    "displayName": null,
    "photoURL": null,
    "email": "test12#test.com",
    "emailVerified": false,
    "phoneNumber": null,
    "isAnonymous": false,
    "tenantId": null,
    "providerData": [
      {
        "uid": "test12#test.com",
        "displayName": null,
        "photoURL": null,
        "email": "test12#test.com",
        "phoneNumber": null,
        "providerId": "password"
      }
    ],
    "apiKey": "test123",
    "appName": "[DEFAULT]",
    "authDomain": "test123.firebaseapp.com",
    "stsTokenManager": {
      "apiKey": "test123",
      "refreshToken": "test123",
      "accessToken": "test123",
      "expirationTime": 1571238989357
    },
    "redirectEventId": null,
    "lastLoginAt": "1571235389108",
    "createdAt": "1571235389108"
  },
  "credential": null,
  "additionalUserInfo": {
    "providerId": "password",
    "isNewUser": true
  },
  "operationType": "signIn"
}
This is my callout and update:
createUser = async (userData) => {
  return await firebase.auth().createUserWithEmailAndPassword(userData.get('userName'), userData.get('password'))
    .then((authData) => {
      firebase.database().ref('users/' + authData.user.uid + '/').set({
        fullName: userData.get('fullName'),
        pictures: userData.get('pictures'),
        phoneNumber: userData.get('phoneNumber')
      });
    })
};
Is it possible to add custom fields to the User table?
A few things are happening. It appears that userData cannot be seen in the .then statement. To solve this I tried passing the userData JSON in as a param; this did not work. I then broke each value out of userData, saved it into a const, and passed that value; this did not work either.
I can see that userData has values in it before the .then statement, and I am able to successfully create a new user with the right userName and password. This tells me that either:
A - I am not passing the userData JSON correctly, or
B - I am not allowed to pass data to Firebase the way I am doing it.
My end goal is to sign up a user and then take all of the data they input on the registration form (aka userData) and update the user table with it.
Articles I am using are:
https://firebase.google.com/docs/auth/web/manage-users
https://medium.com/mindorks/firebase-realtime-database-with-react-native-5f357c6ee13b
Main class that calls the createUser function:
const signUp = (dispatch) => {
  return async (userData) => {
    try {
      const response = await config.createUser(userData);
      console.log('sign up response1: ' + response); // coming back as undefined
      // todo: figure out how to parse the apiKey out of response
      await AsyncStorage.setItem('token', '123mockToken');
      dispatch({ type: 'sign_up', payload: '123mockToken' });
      navigate('mainFlow');
    } catch (e) {
      dispatch({ type: 'add_error', payload: '' + e }); // we call dispatch anytime we want to update our state
    }
  }
};
I understand that the parameter userData holds all the data you want to use for creating the user ("all of the data they input from a registration form").
The following should work:
createUser = async userData => {
  try {
    const userCredential = await firebase
      .auth()
      .createUserWithEmailAndPassword(
        userData.get('userName'),
        userData.get('password')
      );
    const userId = userCredential.user.uid;
    await firebase
      .database()
      .ref('users/' + userId + '/')
      .set({
        fullName: userData.get('fullName'),
        pictures: userData.get('pictures'),
        phoneNumber: userData.get('phoneNumber')
      });
    return userId; // As per your comment below
  } catch (error) {
    return error;
  }
};
The createUserWithEmailAndPassword() method returns a UserCredential which contains a User.
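Note that the Auth user record itself only supports the fixed set of fields shown in the JSON above (displayName, photoURL, and so on); custom fields belong in the database, as in the answer. If you also want the built-in displayName filled in on the Auth user, a small additional sketch (my own, not part of the answer) could go inside the same try block, right after the database write:
// Optional: set the built-in profile field on the Auth user itself;
// custom data still goes to the Realtime Database as shown above.
await userCredential.user.updateProfile({
  displayName: userData.get('fullName')
});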

Data not getting changed in Node.js

I am working with Angular, Node and MySQL. I have a simple CRUD app in which I am adding and updating some data. Whenever I add or update any data, I want the change to be reflected in the list grid as well.
I am using popups for add/update. After adding or updating, I call my generic function which gives me the list of data, but I was getting a 304 response and the changes were not displayed in the grid. To get around the 304 I used app.disable('etag') in Node.js, which gives me status 200, but the changes are still not displayed; when I refresh the page manually I can see them.
Also, when I check the data in the network tab just after performing the operation, it still holds the old values.
Node.js
app.post("/addcity", function(req, res) {
  d = req.body;
  var con = mysql.createConnection(connectionString);
  con.connect(function(err) {
    if (err)
      throw err;
    var sql = "INSERT INTO tbl_city(city_name,state_id) VALUES('" + d.city_name + "'," + d.state_id + ")";
    console.log(sql);
    con.query(sql, function(err, result) {
      if (err)
        throw err;
      res.send(result);
    })
  })
})
Angular code
PostData(addNewForm: NgForm) {
  var object = {
    "city_name": addNewForm.value.city_name,
    "state_id": addNewForm.value.state_id
  }
  this.service.addCity(object).subscribe();
  this.modalRef.hide();
  this.getAllCities(); // this still gives old values even new City is added in database
}
Update
getCities(): Observable<any[]> {
  var details = this.http.get(`${this.apiUrl}/cities`);
  return forkJoin([details]);
}
You are calling the two requests asynchronously, so this.getAllCities() runs before the city has been added to the database and therefore returns the old list of cities. Try it like this:
PostData(addNewForm: NgForm) {
  var object = {
    "city_name": addNewForm.value.city_name,
    "state_id": addNewForm.value.state_id
  }
  this.service.addCity(object).subscribe(
    (data) => {
      this.modalRef.hide();
      this.getAllCities();
    }
  );
}
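As a variation (my own sketch, not part of the answer), the add and the reload can also be chained with RxJS switchMap, so the reload only starts after the POST has completed; this.cities is a hypothetical property that backs the grid:
import { switchMap } from 'rxjs/operators';

// ...inside the component:
this.service.addCity(object).pipe(
  switchMap(() => this.service.getCities()) // start the reload only after the add has finished
).subscribe(result => {
  this.modalRef.hide();
  // getCities() wraps the HTTP call in forkJoin, so result is an array with one response
  this.cities = result[0]; // hypothetical property holding the grid data
});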

NodeJS http.request end processing before data processes

Can someone explain why the http.request end function is running before any data is actually retrieved? And how would I debug this further? Should I be checking an HTTP status?
This is going to work with the Google Home app, but I took that code out and I'm getting the same error running locally. The http.request code is what a teacher provided in a class.
You can paste people/?search=Luke%20Skywalker into http://swapi.co (SW = Star Wars API) to see the expected result.
'use strict';
/*eslint no-undef: "error"*/
/*eslint-env node*/
/*eslint-disable no-console */

let http = require('http');
let starWarsAPI = `www.swapi.co`;

//function to get details of the Star Wars Characters
//exports.handler = function(event, context, callback) {
//  console.log("event=" + JSON.stringify(event));
//  console.log("context=" + JSON.stringify(context));
//  let characterName = event.result.parameters.StarWarsCharacter;
let characterName = "Luke Skywalker";
console.log("**** characterName=" + characterName);

let options = searchPeopleRequestOptions(characterName);
console.log("options=" + JSON.stringify(options));

makeRequest(options, function(data, error) {
  console.log(" Processing data.results");
  let person = data.results[0];
  if (person) {
    let height = person.height;
    let mass = person.mass;
    let response = person.name + " is " + height + " centimeters tall, weighs " + mass + " kilograms";
    console.log("**** response=" + response);
    //callback(null, {"speech": response});
  }
  else {
    console.log("No person found");
    //callback(null, {"speech": "I'm not sure that character exists!"});
  }
});
//};
console.log("The end");

//create a function to read first and last names from the API.
function searchPeopleRequestOptions(argCharacterName) {
  var pathValue = `/api/people/?search=` +
    encodeURIComponent(argCharacterName);
  return {
    host: starWarsAPI,
    path: pathValue
  };
}

function makeRequest(options, callback) {
  var responseString = "";
  var request = http.request(options,
    function(response) {
      response.on('data', function(data) {
        responseString += data;
        console.log("responseString=" + responseString);
      });
      response.on('end', function() {
        console.log("end: responseString=" + responseString);
        // dies on next line because responseString is empty
        var responseJSON = JSON.parse(responseString);
        callback(responseJSON, null);
      });
      response.on('error', function(error) {
        console.log('\n Error received: ' + error);
      });
    });
  request.end();
}
This is what I see when I run it:
E:\GitHub\NealWalters\GoogleHomeTest
λ node indexTest.js
**** characterName=Luke Skywalker
options={"host":"www.swapi.co","path":"/api/people/?search=Luke%20Skywalker"}
The end
end: responseString=
undefined:1
I'm not sure what's writing out the "undefined: 1" line.
If you look at the server's response status code, it will be 301: Moved Permanently.
The value of the location header of the response is:
https://swapi.co/api/people/?search=Luke%20Skywalker
instead of
http://swapi.co/api/people/?search=Luke%20Skywalker
As you can see, the protocol changed from http to https.
The problem is that the http client that ships with Node.js does not follow redirects for a permanently moved URL.
So you can use the https module instead of http (just change require('http') to require('https')).
Or use a package that supports redirection, for example axios or request.
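A minimal sketch of the first option (my illustration of the suggestion above, not code from the question): the same makeRequest helper with https required instead of http; the request options and the rest of the program stay the same.
let https = require('https');

function makeRequest(options, callback) {
  var responseString = "";
  var request = https.request(options, function(response) {
    response.on('data', function(chunk) {
      responseString += chunk;
    });
    response.on('end', function() {
      callback(JSON.parse(responseString), null);
    });
  });
  request.on('error', function(error) {
    callback(null, error);
  });
  request.end();
}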

Why do I get undefined field values when inserting multiple documents after converting CSV to a JSON string and parsing it?

Basically, I am trying to write an import function for CSV files, where the CSV file gets converted to JSON before being inserted into MongoDB. This is my code.
//require the csvtojson converter class
var Converter = require("csvtojson").Converter;
// create a new converter object
var converter = new Converter({});
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/myproject';

// call the fromFile function which takes in the path to your
// csv file as well as a callback function
converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function(err, result) {
  // if an error has occured then handle it
  if (err) {
    console.log("An Error Has Occured");
    console.log(err);
  }
  // the result of the conversion
  var jsonResult = result;
  console.log(jsonResult);
  var jsobject = JSON.stringify(jsonResult);
  var jsonobject = JSON.parse(jsobject);
  var f = jsonobject.length;
  console.log(f);
  MongoClient.connect(url, function(err, db) {
    if (err) {
      console.log(err);
    }
    for (i = 0; i < f; i++) {
      var insertDocument = function() {
        db.collection('documents').insertOne({
          //'_id': Object.keys(obj).length,
          'indexNo': jsonobject.indexNo,
          'samID': jsonobject.samID,
          'Type': jsonobject.Type,
          'InventoryStatus': jsonobject.InventoryStatus,
          'dateIn': jsonobject.dateIn,
          'Remarks': jsonobject.Remarks,
          'dateOut': jsonobject.dateOut,
          //'Remarks': jsonobject.remarks,
          'ntaSamRequestRef': jsonobject.ntaSamReqRef
          //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
        }, function(err, results) {
          if (err) throw err;
          console.log(results);
        });
      };
      insertDocument(db, function() {
        if (err)
          throw err;
        else {
          console.log('insert');
        }
        db.close();
      });
    }
  });
  console.log("Inserted " + f + " document into the documents collection.");
});
So far, I have tried converting a random file with 1400 records into a JSON string, parsing it and then inserting it. But somehow I keep getting undefined in my fields whenever I insert; the result shows each field with an undefined value.
Which part of my jsonobject.indexNo (that is, jsonobject.field1value, jsonobject.field2value, etc.) is wrong? How should I get the values out of my JSON string after parsing?
I am using Node.js to run it and MongoDB as the database. The conversion itself works nicely; it is just the part about inserting the documents that fails. Thanks in advance!
db.collection('documents').insertOne is an async method, so you can't run it in a loop like that. A workaround is to use the async library to handle it; I suggest async.each.
Eg:
// npm install async, then:
var async = require('async');

// Consider jsonResult is an array
var jsonResult = result;
async.each(jsonResult,
  // Here, jsonobject is a child object of the jsonResult array
  function(jsonobject, callback) {
    db.collection('documents').insertOne({
      //'_id': Object.keys(obj).length,
      'indexNo': jsonobject.indexNo,
      'samID': jsonobject.samID,
      'Type': jsonobject.Type,
      'InventoryStatus': jsonobject.InventoryStatus,
      'dateIn': jsonobject.dateIn,
      'Remarks': jsonobject.Remarks,
      'dateOut': jsonobject.dateOut,
      //'Remarks': jsonobject.remarks,
      'ntaSamRequestRef': jsonobject.ntaSamReqRef
      //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
    },
    // When this insert is done, trigger the async.each callback
    callback);
  },
  // 3rd param is the function to call when everything's done
  function(err) {
    // All tasks are done now
    doSomethingOnceAllAreDone();
  }
);
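As an aside (my own sketch, not part of the answer above): since the parsed result is already an array, the driver's insertMany can insert all of the documents in one call, which avoids the loop entirely. This assumes the same 2.x-style MongoClient.connect callback used in the question.
MongoClient.connect(url, function(err, db) {
  if (err) throw err;
  // jsonResult is the array produced by the csvtojson conversion above
  db.collection('documents').insertMany(jsonResult, function(err, res) {
    if (err) throw err;
    console.log('Inserted ' + res.insertedCount + ' documents');
    db.close(); // close only after the insert has finished
  });
});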

AWS Lambda S3 function isn't called inside Alexa Skills Kit

I am trying to create a skill for Amazon Echo that will fetch a JSON file from AWS S3. When I run the basic S3 get code on its own it works, and the Amazon Alexa code works on its own.
But when I call them together, the function gets skipped: in the following code the console.log calls before and after s3.getObject() run, but the one in the middle gets skipped. I do not understand why.
I also checked whether S3 was being called, and it is.
let aws = require('aws-sdk');
let s3 = new aws.S3({ apiVersion: '2006-03-01' });

function callS3() {
  console.log('loading S3 function');
  var myData = [];
  const params = {
    Bucket: 'cvo-echo',
    Key: 'data.json'
  };
  console.log("trying to get s3");
  s3.getObject(params, (err, data) => {
    if (err) {
      console.log('error in s3 get: \n' + err);
      //const message = `Error getting object ${key} from bucket ${bucket}.
      // Make sure they exist and your bucket is in same region as this function.
      //console.log(message);
    } else {
      console.log('CONTENT TYPE: ', data.ContentType);
      console.log('Data body: \n' + data.Body.toString());
      myData = JSON.parse(data.Body.toString());
      console.log('myData.length = ' + myData.length);
    }
    console.log('myData >> ' + myData);
  });
  console.log('finished callS3() func');
  return myData;
}
This might be a control flow issue. I've worked with Amazon's SDK before and ran into similar issues. Try implementing async within your code to have better control over what happens when; that way methods won't get skipped.
UPDATE: adding some code examples of what you could do.
function callS3(callback) {
  console.log('loading S3 function');
  var myData = [];
  const params = {
    Bucket: 'cvo-echo',
    Key: 'data.json'
  };
  console.log("trying to get s3");
  s3.getObject(params, (err, data) => {
    if (err) {
      console.log('error in s3 get: \n' + err);
      //const message = `Error getting object ${key} from bucket ${bucket}.
      // Make sure they exist and your bucket is in same region as this function.
      //console.log(message);
      callback(err, null); // callback the error.
    } else {
      console.log('CONTENT TYPE: ', data.ContentType);
      console.log('Data body: \n' + data.Body.toString());
      myData = JSON.parse(data.Body.toString());
      console.log('myData.length = ' + myData.length);
      console.log('myData >> ' + myData);
      console.log('finished callS3() func');
      // Include the callback inside of the S3 call so this function doesn't hand back data until the S3 call completes.
      callback(null, myData); // first element is an error and the second is your data; the first element is null if no error occurred.
    }
  });
}

/*
This MIGHT work without async, but just in case you can read more about
async.waterfall, where functions pass values down to the next function.
*/
var async = require('async'); // npm install async

async.waterfall([
  callS3 // you can include more functions here; the callback from the last function will be available to the next.
  //myNextFunction
], function(err, myData) {
  // you can use myData here.
})
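For completeness, a small usage sketch of my own: once callS3 takes a callback, it can also be used directly, without the async library, by building the Alexa response inside that callback.
// Direct usage of the callback-style callS3 (no async library needed):
callS3(function(err, myData) {
  if (err) {
    console.log('S3 error: ' + err);
    return;
  }
  console.log('Got ' + myData.length + ' items from S3');
  // build and send the Alexa response here, once the data is actually available
});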
It's a timing issue. Here is an example of loading a JSON file from an S3 share when a session is started.
var AWS = require('aws-sdk');

function onLaunch(launchRequest, session, callback) {
  var sBucket = "your-bucket-name";
  var sFile = "data.json";
  var params = { Bucket: sBucket, Key: sFile };
  var s3 = new AWS.S3();
  s3.getObject(params, function(err, data) {
    if (!err) {
      var json = JSON.parse(new Buffer(data.Body).toString("utf8"));
      for (var i = 0; i < json.length; i++) {
        console.log("name:" + json[i].name + ", age:" + json[i].age);
      }
      getWelcomeResponse(callback);
    } else {
      console.log(err.toString());
    }
  });
}