AWS Lambda S3 function isn't called inside Alexa Skills Kit

I am trying to create a skill for Amazon Echo that reads a JSON file from AWS S3. The basic S3 get code works on its own, and the Amazon Alexa code works on its own.
But when I put them together, the S3 callback gets skipped. In the following code, the console logs before and after s3.getObject() both appear, but the log inside the callback never does. I do not understand why.
I also checked whether S3 was being called, and it is.
let aws = require('aws-sdk');
let s3 = new aws.S3({ apiVersion: '2006-03-01' });

function callS3() {
    console.log('loading S3 function');
    var myData = [];
    const params = {
        Bucket: 'cvo-echo',
        Key: 'data.json'
    };
    console.log("trying to get s3");
    s3.getObject(params, (err, data) => {
        if (err) {
            console.log('error in s3 get: \n' + err);
            //const message = `Error getting object ${key} from bucket ${bucket}.
            // Make sure they exist and your bucket is in same region as this function.
            //console.log(message);
        } else {
            console.log('CONTENT TYPE: ', data.ContentType);
            console.log('Data body: \n' + data.Body.toString());
            myData = JSON.parse(data.Body.toString());
            console.log('myData.length = ' + myData.length);
        }
        console.log('myData >> ' + myData);
    });
    console.log('finished callS3() func');
    return myData;
}

This might be a control-flow issue. I've worked with Amazon's SDK before and ran into similar problems. Try adding explicit callbacks (or the async library) to your code to control what happens when, so that methods don't appear to get skipped.
UPDATE: adding some code examples of what you could do.
function callS3(callback) {
    console.log('loading S3 function');
    var myData = [];
    const params = {
        Bucket: 'cvo-echo',
        Key: 'data.json'
    };
    console.log("trying to get s3");
    s3.getObject(params, (err, data) => {
        if (err) {
            console.log('error in s3 get: \n' + err);
            //const message = `Error getting object ${key} from bucket ${bucket}.
            // Make sure they exist and your bucket is in same region as this function.
            //console.log(message);
            callback(err, null); // pass the error back to the caller
        } else {
            console.log('CONTENT TYPE: ', data.ContentType);
            console.log('Data body: \n' + data.Body.toString());
            myData = JSON.parse(data.Body.toString());
            console.log('myData.length = ' + myData.length);
            console.log('myData >> ' + myData);
            console.log('finished callS3() func');
            // Invoke the callback inside the S3 handler so the caller
            // doesn't proceed until the S3 call has completed.
            callback(null, myData); // first argument is the error (null if none), second is your data
        }
    });
}
/*
This MIGHT work with plain callbacks alone, but just in case you can read
up on async.waterfall, where each function passes its results down to the next.
*/
const async = require('async'); // the 'async' npm package

async.waterfall([
    callS3 // pass the function itself, not callS3(); you can add more tasks here,
           // and each task receives the results of the previous one plus a callback
    //myNextFunction
], function(err, myData) {
    // you can use myData here.
});
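For completeness, here is what consuming the rewritten callS3 with a plain callback looks like, without async.waterfall (a minimal sketch; the handler body is a placeholder):

callS3(function(err, myData) {
    if (err) {
        console.log('S3 get failed: ' + err);
        return;
    }
    // myData is only available here, after the S3 call has completed.
    console.log('myData >> ' + myData);
});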

It's a timing issue. Here is an example of loading a JSON file from an S3 bucket when a session is started.
function onLaunch(launchRequest, session, callback) {
    var sBucket = "your-bucket-name";
    var sFile = "data.json";
    var params = { Bucket: sBucket, Key: sFile };
    var s3 = new AWS.S3();
    s3.getObject(params, function(err, data) {
        if (!err) {
            // data.Body is already a Buffer, so decode it directly.
            var json = JSON.parse(data.Body.toString("utf8"));
            for (var i = 0; i < json.length; i++) {
                console.log("name:" + json[i].name + ", age:" + json[i].age);
            }
            // Continue only here, once the JSON has actually arrived.
            getWelcomeResponse(callback);
        } else {
            console.log(err.toString());
        }
    });
}
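For reference, the aws-sdk used above also exposes a .promise() helper on requests, so the same fetch can be written without nested callbacks. A minimal sketch (bucket and key names taken from the question):

const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function loadData() {
    const params = { Bucket: 'cvo-echo', Key: 'data.json' };
    // await suspends until S3 actually responds, so the data is ready below.
    const data = await s3.getObject(params).promise();
    return JSON.parse(data.Body.toString('utf8'));
}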

Resize all existing images stored in firebase storage and update the newly resized image url to database via api call

I have a requirement to resize new and existing images stored in Firebase Storage. For new images, I enabled Firebase's Resize Images extension. For existing images, how can I resize each one and get the newly resized image URL to write back to the database via the API?
Here is my Firebase function that gets the existing image URLs from the database. My question is how to resize the images and get the new image URLs.
const functions = require("firebase-functions");
const axios = require("axios");

async function getAlbums() {
    const endpoint = "https://api.mydomain.com/graphql";
    const headers = {
        "content-type": "application/json",
    };
    const graphqlQuery = {
        "query": `query Albums {
            albums {
                id
                album_cover
            }
        }`
    };
    functions.logger.info("Call API");
    const response = await axios({
        url: endpoint,
        method: 'post',
        headers: headers,
        data: graphqlQuery
    });
    if (response.errors) {
        functions.logger.info("API ERROR : ", response.errors); // errors if any
    } else {
        return response.data.data.albums;
    }
}

exports.manualGenerateResizedImage = functions.https.onRequest(async () => {
    const albums = await getAlbums();
    functions.logger.info("No. of Album : ", albums.length);
});
I think the below answer from Renaud Tarnec will definitely help you.
If you look at the code of the "Resize Images" extension, you will see that the Cloud Function that underlies the extension is triggered by a onFinalize event, which means:
When a new object (or a new generation of an existing object) is
successfully created in the bucket. This includes copying or rewriting
an existing object.
So, without rewriting/regenerating the existing images the Extension will not be triggered.
However, you could easily write your own Cloud Function that does the same thing but is triggered, for example, by a call to a specific URL (HTTPS cloud Function) or by creating a new document in a temporary Firestore Collection (background triggered CF).
This Cloud Function would execute the following steps:
Get all the files of your bucket, see the getFiles() method of the
Google Cloud Storage Node.js Client API. This method returns a
GetFilesResponse object which is an Array of File instances.
By looping over the array, for each file, check if the file has a
corresponding resized image in the bucket (depending on the way you
configured the Extension, the resized images may be in a specific
folder)
If a file does not have a corresponding resized image, execute the
same business logic of the Extension Cloud Function for this File.
There is an official Cloud Function sample which shows how to create a Cloud Storage triggered Firebase Function that creates resized thumbnails from uploaded images and saves their URLs to the database (see the last lines of the index.js file).
Note: If you have a lot of files to process, you should most probably work in batches, since Cloud Function execution is limited to 9 minutes. Also, depending on the number of images, you may need to increase the timeout value and/or the allocated memory of your Cloud Function, see https://firebase.google.com/docs/functions/manage-functions#set_timeout_and_memory_allocation
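As a sketch of that last point, a first-generation HTTPS function can raise its limits with runWith (the values here are illustrative):

const functions = require("firebase-functions");

exports.manualGenerateResizedImage = functions
    // 540 seconds is the maximum timeout for a 1st-gen function.
    .runWith({ timeoutSeconds: 540, memory: "1GB" })
    .https.onRequest(async (req, res) => {
        // ... batch image processing goes here ...
        res.status(200).send("done");
    });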
In case someone needs it, this is how I resized the existing images.
const functions = require("firebase-functions");
const axios = require("axios");
const { Storage } = require("@google-cloud/storage");
const storage = new Storage();
// Don't forget to replace with your bucket name
const bucket = storage.bucket("projectid.appspot.com");

async function getAlbums() {
    const endpoint = "https://api.mydomain.com/graphql";
    const headers = {
        "content-type": "application/json",
    };
    const graphqlQuery = {
        query: `query Albums {
            albums {
                id
                album_cover
            }
        }`,
    };
    const response = await axios({
        url: endpoint,
        method: "post",
        headers: headers,
        data: graphqlQuery,
    });
    if (response.errors) {
        functions.logger.error("API ERROR : ", response.errors); // errors if any
    } else {
        return response.data.data.albums;
    }
}
function getFileName(url) {
    var decodeURI = decodeURIComponent(url);
    var index = decodeURI.lastIndexOf("/") + 1;
    var filenameWithParam = decodeURI.substr(index);
    index = filenameWithParam.lastIndexOf("?");
    var filename = filenameWithParam.substr(0, index);
    return filename;
}

function getFileNameFromFirestore(url) {
    var index = url.lastIndexOf("/") + 1;
    var filename = url.substr(index);
    return filename;
}
const triggerBucketEvent = async () => {
    bucket.getFiles(
        {
            prefix: "images/albums", // you can add a path prefix
            autoPaginate: false,
        },
        async (err, files) => {
            if (err) {
                functions.logger.error(err);
                return;
            }
            const albums = await getAlbums();
            await Promise.all(
                files.map((file) => {
                    var fileName = getFileNameFromFirestore(file.name);
                    var result = albums.find((obj) => {
                        return getFileName(obj.album_cover) === fileName;
                    });
                    if (result) {
                        var file_ext = fileName.substr(
                            (Math.max(0, fileName.lastIndexOf(".")) || Infinity) + 1
                        );
                        var newFileName = result.id + "." + file_ext;
                        // Copy each file with the new name; return the promise
                        // so Promise.all actually waits for the copies.
                        return file.copy("images/albums/" + newFileName);
                    } else {
                        functions.logger.info(file.name, " not found in album list!");
                    }
                })
            );
        }
    );
};

exports.manualGenerateResizedImage = functions.https.onRequest(async () => {
    await triggerBucketEvent();
});

AWS S3 bucket image upload in Angular 6: problem with the returned result

I am using this approach to upload images to an AWS S3 bucket:
https://grokonez.com/aws/angular-4-amazon-s3-example-how-to-upload-file-to-s3-bucket
This works fine as an individual task, but the result arrives later because of the async behavior, and I rely on that result. I would like the next task to execute only after the upload is confirmed.
upload() {
    let file: any;
    // let urltype = '';
    let filename = '';
    // let b: boolean;
    for (let i = 0; i < this.urls.length; i++) {
        file = this.selectedFiles[i];
        // urltype = this.urltype[i];
        filename = file.name;
        const k = uuid() + '.' + filename.substr((filename.lastIndexOf('.') + 1));
        this.uploadservice.uploadfile(file, k);
        console.log('done');
        // console.log('file: ' + file + ' : ' + filename);
        // let x = this.userservice.PostImage('test', file);
        // console.log('value of ' + x);
    }
    // return b;
}
fileupload service:
bucket.upload(params, function (err, data) {
    if (err) {
        console.log('There was an error uploading your file: ', err);
        return false;
    }
    console.log('Successfully uploaded file.', data);
    return true;
}).promise();
}
Here, 'done' is logged before the file upload has actually finished.
I think you should check out a tutorial on asynchronous programming and play around with a couple of examples using simple timeouts to get the hang of it, and then proceed to more complex things like S3 and AWS.
Here is how I suggest you start your journey:
1) Learn the basic concepts of asynchronous programming using pure JS
https://eloquentjavascript.net/11_async.html
2) Play around with your own examples using callbacks and timeouts
3) Replace the callbacks with Promises
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises
4) Do it the "angular" way with rxjs Observables (similar to JS Observable)
http://reactivex.io/rxjs/class/es6/Observable.js~Observable.html
PS: To be more concrete: your code fails because the following line executes asynchronously, so the code calls your uploadfile function and immediately continues without waiting:
this.uploadservice.uploadfile(file, k);
Once you follow all the points I described above you will be able to do something like this (using a Promise):
this.uploadservice.uploadfile(file, k)
    .then(result => {
        console.log('Upload finished');
    })
    .catch(error => {
        console.log('Something went wrong');
    });
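For that chain to work, uploadfile has to return the SDK's promise instead of swallowing it. A minimal sketch of the service method (assuming bucket is the S3 client from the tutorial and params is built as in the question):

uploadfile(file, key) {
    const params = { Bucket: 'your-bucket-name', Key: key, Body: file };
    // Return the managed upload's promise so callers can .then()/.catch() or await it.
    return bucket.upload(params).promise();
}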

NodeJS http.request 'end' fires before data is processed

Can someone explain why the http.request end handler is running before any data is actually retrieved? And how would I debug this further? Should I be checking an HTTP status?
This is eventually going to work with the Google Home app, but I took that code out and get the same error running locally. The http.request code is what a teacher provided in a class.
You can paste people/?search=Luke%20Skywalker
into http://swapi.co (SW = StarWars API) to see the expected result.
'use strict';
/*eslint no-undef: "error"*/
/*eslint-env node*/
/*eslint-disable no-console */
let http = require('http');
let starWarsAPI = `www.swapi.co`;

//function to get details of the Star Wars Characters
//exports.handler = function(event, context, callback) {
//  console.log("event=" + JSON.stringify(event));
//  console.log("context=" + JSON.stringify(context));
//  let characterName = event.result.parameters.StarWarsCharacter;
let characterName = "Luke Skywalker";
console.log("**** characterName=" + characterName);
let options = searchPeopleRequestOptions(characterName);
console.log("options=" + JSON.stringify(options));
makeRequest(options, function(data, error) {
    console.log(" Processing data.results");
    let person = data.results[0];
    if (person) {
        let height = person.height;
        let mass = person.mass;
        let response = person.name + " is " + height + " centimeters tall, weighs " + mass + " kilograms";
        console.log("**** response=" + response);
        //callback(null, {"speech": response});
    }
    else {
        console.log("No person found");
        //callback(null, {"speech": "I'm not sure that character exists!"});
    }
});
//};
console.log("The end");

//build the request options for the people search on the API.
function searchPeopleRequestOptions(argCharacterName) {
    var pathValue = `/api/people/?search=` +
        encodeURIComponent(argCharacterName);
    return {
        host: starWarsAPI,
        path: pathValue
    };
}

function makeRequest(options, callback) {
    var responseString = "";
    var request = http.request(options,
        function(response) {
            response.on('data', function(data) {
                responseString += data;
                console.log("responseString=" + responseString);
            });
            response.on('end', function() {
                console.log("end: responseString=" + responseString);
                // dies on next line because responseString is empty
                var responseJSON = JSON.parse(responseString);
                callback(responseJSON, null);
            });
            response.on('error', function(error) {
                console.log('\n Error received: ' + error);
            });
        });
    request.end();
}
This is what I see when I run it:
E:\GitHub\NealWalters\GoogleHomeTest
λ node indexTest.js
**** characterName=Luke Skywalker
options={"host":"www.swapi.co","path":"/api/people/?search=Luke%20Skywalker"}
The end
end: responseString=
undefined:1
I'm not sure what's writing out the "undefined: 1" line.
If you look at the server's response status code, it is 301 Moved Permanently, and the value of the location header of the response is
https://swapi.co/api/people/?search=Luke%20Skywalker
instead of
http://swapi.co/api/people/?search=Luke%20Skywalker
As you can see, the protocol changed from http to https. The problem is that the http client that ships with Node.js does not follow redirects, so responseString stays empty and JSON.parse throws; that SyntaxError is what prints the "undefined:1" line.
So, you can use the https module instead of http (just change require('http') to require('https')). Or use packages that follow redirects, for example axios or request.
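A minimal sketch of that change against the code above (only the require changes; the options stay the same):

let https = require('https'); // was: require('http')

function makeRequest(options, callback) {
    var responseString = "";
    // https.request has the same interface as http.request but speaks TLS,
    // so the request goes straight to the https URL and no redirect occurs.
    var request = https.request(options, function(response) {
        response.on('data', function(data) { responseString += data; });
        response.on('end', function() {
            callback(JSON.parse(responseString), null);
        });
    });
    request.end();
}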

I get undefined values in my fields when inserting multiple documents after converting CSV to a JSON string and parsing. Why?

Basically, I am trying to write an import function for CSV files where the CSV file gets converted to JSON before being inserted into MongoDB. This is my code.
//require the csvtojson converter class
var Converter = require("csvtojson").Converter;
// create a new converter object
var converter = new Converter({});
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/myproject';

// call the fromFile function which takes in the path to your
// csv file as well as a callback function
converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function(err, result) {
    // if an error has occured then handle it
    if (err) {
        console.log("An Error Has Occured");
        console.log(err);
    }
    // the result of the conversion
    var jsonResult = result;
    console.log(jsonResult);
    var jsobject = JSON.stringify(jsonResult);
    var jsonobject = JSON.parse(jsobject);
    var f = jsonobject.length;
    console.log(f);
    MongoClient.connect(url, function(err, db) {
        if (err) {
            console.log(err);
        }
        for (i = 0; i < f; i++) {
            var insertDocument = function() {
                db.collection('documents').insertOne({
                    //'_id': Object.keys(obj).length,
                    'indexNo': jsonobject.indexNo,
                    'samID': jsonobject.samID,
                    'Type': jsonobject.Type,
                    'InventoryStatus': jsonobject.InventoryStatus,
                    'dateIn': jsonobject.dateIn,
                    'Remarks': jsonobject.Remarks,
                    'dateOut': jsonobject.dateOut,
                    //'Remarks': jsonobject.remarks,
                    'ntaSamRequestRef': jsonobject.ntaSamReqRef
                    //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
                }, function(err, results) {
                    if (err) throw err;
                    console.log(results);
                });
            };
            insertDocument(db, function() {
                if (err)
                    throw err;
                else {
                    console.log('insert');
                }
                db.close();
            });
        }
    });
    console.log("Inserted " + f + " document into the documents collection.");
});
So far, I have tried converting a random file with 1400 records into a JSON string, parsing it, and then inserting it. But somehow I keep getting undefined in my fields whenever I insert: the result shows every field with an undefined value.
Is jsonobject.indexNo the wrong way to access the field values (i.e. jsonobject.field1value, jsonobject.field2value, etc.)? How should I get the values from my JSON string after parsing?
I am using Node.js to run it and MongoDB as the database. The conversion itself works nicely; it's just the part about inserting the documents. Thanks in advance!
db.collection('documents').insertOne is an async method; you can't run it in a loop like that. A workaround is to use the async library to handle it. I suggest async.each.
Eg:
var async = require('async'); // the 'async' npm package

// Consider jsonResult is an array
var jsonResult = result;
async.each(jsonResult,
    // Here, jsonobject is a child object of the jsonResult array
    function(jsonobject, callback) {
        db.collection('documents').insertOne({
            //'_id': Object.keys(obj).length,
            'indexNo': jsonobject.indexNo,
            'samID': jsonobject.samID,
            'Type': jsonobject.Type,
            'InventoryStatus': jsonobject.InventoryStatus,
            'dateIn': jsonobject.dateIn,
            'Remarks': jsonobject.Remarks,
            'dateOut': jsonobject.dateOut,
            //'Remarks': jsonobject.remarks,
            'ntaSamRequestRef': jsonobject.ntaSamReqRef
            //'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
        }, function(err) {
            // Trigger the callback from inside insertOne's handler,
            // so async.each waits for the insert to finish.
            callback(err);
        });
    },
    // 3rd param is the function to call when everything's done
    function(err) {
        // All tasks are done now
        doSomethingOnceAllAreDone();
    }
);
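As an aside, since the converter already returns an array of row objects, a single bulk insert may be simpler than looping at all. A sketch, assuming every CSV row should become one document as-is:

// jsonResult is the array produced by csvtojson
db.collection('documents').insertMany(jsonResult, function(err, results) {
    if (err) throw err;
    console.log('Inserted ' + results.insertedCount + ' documents.');
    db.close();
});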

Unable to send data in response to the view in nodejs

I am trying to create a simple web application which fires an http.request call, gets the data, and displays it in the HTML (EJS here). I am able to fire the request, get the data, massage it, etc., but I'm unable to pass it to the view. Sample code is below:
var searchData = [];
router.post('/', requesthandler);

function requesthandler(req, res) {
    var options = {
        host: url,
        port: 9999,
        path: qstring,
        method: 'GET'
    };
    var reqget = http.request(options, responsehandler);
    reqget.end();
    console.log('Rendering now:............................ ');
    res.render('result', { title: 'Results Returned', searchdata: searchData });
}

function responsehandler(ress) {
    console.log('STATUS: ' + ress.statusCode);
    ress.on('data', function(chunk) {
        output += chunk;
        console.log('BODY: ');
    });
    /* reqget.write(output); */
    ress.on('end', parseresponse);
}

function parseresponse() {
    var data = JSON.parse(output);
    console.log(data.responseHeader);
    // populate searchData here from the data object
    searchData.push({ /* some data */ });
}

function errorhandler(e) {
    console.error(e);
}

module.exports = router;
The problem is that I am unable to pass the object searchData to the view via res.render(): 'Rendering now: ...' executes before parseresponse() runs, so the page is rendered without the data. This is consistent with how callbacks work, so how can I pass the data object to the view once searchData has been loaded in parseresponse()?
PS: I am able to print all the console statements.
Define the res variable globally:

var res;

function requesthandler(req, resObj) {
    res = resObj; // set it to the resObj
}

Wrap res.render inside a function like this:

function renderPage() {
    res.render('result', { title: 'Results Returned', searchdata: searchData });
}

Then in the parseresponse function do this:

function parseresponse() {
    var data = JSON.parse(output);
    searchData.push({ /* some data */ });
    renderPage();
}
Hope this solves your problem.
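One caveat with the global res approach: if two requests overlap, the second one overwrites the shared variable and the first client gets the wrong response. A safer sketch keeps everything inside the request handler so each request closes over its own res (url and qstring as in the question):

function requesthandler(req, res) {
    var options = { host: url, port: 9999, path: qstring, method: 'GET' };
    var reqget = http.request(options, function(apiRes) {
        var output = '';
        apiRes.on('data', function(chunk) { output += chunk; });
        apiRes.on('end', function() {
            var data = JSON.parse(output);
            var searchData = [ /* populate from data */ ];
            // Render only now, when the data has actually arrived.
            res.render('result', { title: 'Results Returned', searchdata: searchData });
        });
    });
    reqget.end();
}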