Looking for advice / a second opinion. I'm trying to pass JSON through an HTTP API (API Gateway) to Lambda. The data is arriving (see the CloudWatch screenshot), but I get undefined when trying to extract values. The file is being written to S3, but its contents are undefined.
I've included the Lambda code and a picture of the CloudWatch logs. I'm almost there :) . Newbie here...
Logs (CloudWatch screenshot not reproduced here)
Lambda Code
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event, context, callback) => {
    var bucketName = process.env.bucketName;
    var folder = process.env.folder;
    var filename = getFileName();
    console.log("Filename:" + filename);

    var raw = JSON.stringify(event.body);
    console.log("raw after stringify:" + raw);
    var results = JSON.parse(raw);
    console.log("results:" + results);

    let firstname = results.firstName;
    console.log("firstName:" + firstname);
    let lastname = results.lastName;
    console.log("lastName:" + lastname);
    let message = results.Message;
    console.log("Message:" + message);

    var content = message + "," + firstname + "," + lastname;
    console.log("content:" + content);

    var keyName = getKeyName(folder, filename);
    var params = { Bucket: bucketName, Key: keyName, Body: content };
    s3.putObject(params, function (err, data) {
        if (err)
            console.log(err);
        else
            console.log("Successfully saved object to: " + bucketName + "/" + keyName);
    });

    function getKeyName(folder, filename) {
        return folder + '/' + filename;
    }

    function getFileName() {
        var _uuid = uuidv4();
        var _date = Date.now();
        return _uuid + "-" + _date;
    }

    function uuidv4() {
        return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
            var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
            return v.toString(16);
        });
    }

    var html = '<html><head><title>Prayer received result</title></head>' +
        '<body><h1>Your Prayer has been received!</h1></body></html>';

    //callback(null, res); - use this when using proxy model
    callback(null, html);
};
Solved it: event.body already arrives from API Gateway as a JSON string, so stringifying it again double-encodes it. Made the following changes:
//var raw = JSON.stringify(event.body);
//console.log("raw after stringify:" + raw);
var results = JSON.parse(event.body);
console.log("results:" + results);
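To see why the original version printed undefined: JSON.stringify of a string produces an escaped string, and parsing that just returns the original string, not an object. A quick illustration (the sample payload is made up):

// event.body arrives from API Gateway as a JSON *string* (illustrative value)
var body = '{"firstName":"Ada","lastName":"Lovelace","Message":"hi"}';

var raw = JSON.stringify(body);  // double-encoded: '"{\"firstName\":\"Ada\",...}"'
var results = JSON.parse(raw);   // undoes the stringify: back to the original string
console.log(results.firstName);  // undefined - a string has no firstName property

var parsed = JSON.parse(body);   // parse the body directly instead
console.log(parsed.firstName);   // "Ada"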
Hope this helps others. As of this post I'm new to Lambda and JSON.
Related
I am trying to implement Twitter media upload in Google Apps Script via OAuth 1.0a, since there has been no OAuth2 support for media uploads for two years. The code is below. I've been getting 400/401/402/403 return codes for the last week. Is this endpoint not working? Does anyone have any info, or ideas on why it keeps failing?
I'm using the apps-script-oauth1 library (https://github.com/googleworkspace/apps-script-oauth1/tree/3f3a6697d95a3ed9a91d09c65ffc34941136f587).
var url = 'https://upload.twitter.com/1.1/media/upload.json?media_category=tweet_image';
var baseUrl = 'https://upload.twitter.com/1.1/media/upload.json';
var params = {
    'payload': {'media': imageBlob},
    'method': 'POST',
    'muteHttpExceptions': true
};

var token = JSON.parse(PropertiesService.getUserProperties().getProperty("oauth1." + account));
var oauth_token = token.oauth_token;
var oauth_token_secret = token.oauth_token_secret;
var oauth_consumer_key = PropertiesService.getUserProperties().getProperty("TWITTER_CONSUMER_KEY");
var oauth_consumer_secret = PropertiesService.getUserProperties().getProperty("TWITTER_CONSUMER_SECRET");

// RFC 3986 percent-encoder assumed by the calls below
function _encode(str) {
    return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
        return '%' + c.charCodeAt(0).toString(16).toUpperCase();
    });
}

const method = params['method'] || 'post';
params['method'] = method;

const oauthParameters = {
    oauth_version: "1.0",
    oauth_token: oauth_token,
    oauth_consumer_key: oauth_consumer_key,
    oauth_signature_method: "HMAC-SHA1",
    oauth_timestamp: (Math.floor((new Date()).getTime() / 1000)).toString(),
};
oauthParameters.oauth_nonce = oauthParameters.oauth_timestamp + Math.floor(Math.random() * 100000000);

const payload = params['payload'] || {};
const q = {"media_category": "tweet_image"}; // params from the query string

// signature base string built from sorted oauth, payload and query-string keys
const queryKeys = Object.keys(oauthParameters).concat(Object.keys(payload)).concat(Object.keys(q)).sort();
const baseString = queryKeys.reduce(function (acc, key, idx) {
    if (idx) acc += encodeURIComponent("&");
    if (oauthParameters.hasOwnProperty(key))
        acc += _encode(key + "=" + oauthParameters[key]);
    else if (payload.hasOwnProperty(key))
        acc += _encode(key + "=" + _encode(payload[key]));
    return acc;
}, method.toUpperCase() + '&' + _encode(baseUrl) + '&');

oauthParameters.oauth_signature = Utilities.base64Encode(
    Utilities.computeHmacSignature(
        Utilities.MacAlgorithm.HMAC_SHA_1,
        baseString, oauth_consumer_secret + '&' + oauth_token_secret
    )
);

if (!params['headers']) params['headers'] = {};
params['headers']['authorization'] = "OAuth " + Object.keys(oauthParameters)
    .sort().reduce(function (acc, key) {
        acc.push(key + '="' + _encode(oauthParameters[key]) + '"');
        return acc;
    }, []).join(', ');

params['payload'] = Object.keys(payload).reduce(function (acc, key) {
    acc.push(key + '=' + _encode(payload[key]));
    return acc;
}, []).join('&');

console.log(params);
var response = UrlFetchApp.fetch(url, params);
For info: other than this, I also tried this repo - https://github.com/airhadoken/twitter-lib - and still faced similar issues.
EDIT: it works in Postman, so something's wrong with the code then.. :(
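One thing worth checking, given that Postman succeeds: when UrlFetchApp is given a blob payload it sends multipart/form-data, and OAuth 1.0a only includes body parameters in the signature when the body is application/x-www-form-urlencoded - for a multipart upload the base string should contain just the oauth_* and query-string parameters, never the media itself. Note also that the reduce above mixes q's keys into queryKeys but never appends their key=value pairs, which leaves stray separators in the base string. A minimal sketch of a base string built that way, reusing the _encode helper from above:

// Build the OAuth 1.0a signature base string from oauth_* and
// query-string parameters only; the multipart body is NOT signed.
function buildBaseString(method, baseUrl, oauthParameters, queryParams) {
    var all = {};
    Object.keys(oauthParameters).forEach(function (k) { all[k] = oauthParameters[k]; });
    Object.keys(queryParams).forEach(function (k) { all[k] = queryParams[k]; });
    var pairs = Object.keys(all).sort().map(function (k) {
        return _encode(k) + '=' + _encode(all[k]);
    });
    return method.toUpperCase() + '&' + _encode(baseUrl) + '&' + _encode(pairs.join('&'));
}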
I am attempting to authenticate with a service account to act on behalf of a user account on the domain. I have delegated domain-wide authority and added the client in the GSuite admin console. I can get an access token with the code below, but making batch requests to copy Drive files returns "code: 404, message: 'File not found:'". The code below is written in Google Apps Script. Am I missing something from the process of creating and authenticating the service account?
var CREDENTIALS = {
    private_key: "-----BEGIN PRIVATE KEY----- XXXXXXX \n-----END PRIVATE KEY-----\n",
    client_email: "XXXXXX#fXXXXXX.iam.gserviceaccount.com",
    client_id: "1XXXXXXXXXXXXXXXX",
    user_email: "XXXXX#XXXX.XXX.XXX",
    scopes: ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/spreadsheets", "https://www.googleapis.com/auth/userinfo.email", "https://www.googleapis.com/auth/script.external_request"]
};

function oAuthToken() {
    var url = "https://www.googleapis.com/oauth2/v3/token";
    var header = {
        alg: "RS256",
        typ: "JWT",
    };
    var now = Math.floor(Date.now() / 1000);
    var claim = {
        iss: CREDENTIALS.client_id,
        sub: CREDENTIALS.user_email,
        scope: CREDENTIALS.scopes.join(" "),
        aud: url,
        exp: (now + 3600).toString(),
        iat: now.toString(),
    };
    var signature = Utilities.base64Encode(JSON.stringify(header)) + "." + Utilities.base64Encode(JSON.stringify(claim));
    var jwt = signature + "." + Utilities.base64Encode(Utilities.computeRsaSha256Signature(signature, CREDENTIALS.private_key));
    var params = {
        method: "post",
        payload: {
            assertion: jwt,
            grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer",
        },
    };
    var res = UrlFetchApp.fetch(url, params).getContentText();
    return JSON.parse(res);
}
The batch process is a bit rough but this is the gist of it.
var request = {
    batchPath: 'batch/drive/v3', // assumed value; the original post left this blank
    requests: []
};
var backoff = 0;

function batch(request) {
    var oAuth = oAuthToken().access_token;
    var url = 'https://www.googleapis.com/' + request.batchPath;
    var body = request.requests;
    if (body.length < 1) {
        return [];
    }

    // build the multipart/mixed body, one application/http part per request
    var boundary = 'xxxxxxxxxx';
    var contentId = 0;
    var data = '--' + boundary + '\r\n';
    for (var i in body) {
        if (typeof body[i] == 'object') {
            data += 'Content-Type: application/http\r\n';
            data += 'Content-ID: ' + ++contentId + '\r\n\r\n';
            data += body[i].method + ' ' + body[i].endpoint + '\r\n';
            data += body[i].requestBody ? 'Content-Type: application/json; charset=utf-8\r\n\r\n' : '\r\n';
            data += body[i].requestBody ? JSON.stringify(body[i].requestBody) + '\r\n' : '';
            data += '--' + boundary + '\r\n';
        }
    }
    // terminate the multipart body: the final boundary needs a trailing "--"
    data = data.substring(0, data.length - 2) + '--\r\n';

    var parseBatchRes = function (res) {
        var splittedRes = res.split('--batch');
        return splittedRes.slice(1, splittedRes.length - 1).map(function (e) {
            return {
                contentId: Number(e.match(/Content-ID: response-(\d+)/)[1]),
                status: Number(e.match(/HTTP\/\d+.\d+ (\d+)/)[1]),
                object: JSON.parse(e.match(/{[\S\s]+}/)[0]),
            };
        });
    };

    var payload = Utilities.newBlob(data).getBytes();
    var head = { Authorization: 'Bearer ' + oAuth };
    var options = {
        method: 'POST',
        contentType: 'multipart/mixed; boundary=' + boundary,
        payload: payload,
        headers: head,
        muteHttpExceptions: false
    };

    var complete = false;
    var finalResponse = [];
    for (var n = 0; n <= backoff; n++) {
        if (complete) {
            break;
        }
        complete = true;
        console.log('backoff', n);
        var response = UrlFetchApp.fetch(url, options).getContentText();
        finalResponse = parseBatchRes(response); // parse the raw text before checking statuses
        for (var j = 0; j < finalResponse.length; j++) {
            if (finalResponse[j].status != 200) {
                complete = false;
            }
        }
    }
    return finalResponse;
}
Add the supportsAllDrives = true query parameter to the request.
This parameter indicates whether the requesting application supports both My Drives and shared drives; its default value is false.
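For illustration, this is roughly how the parameter could be attached to each copy request pushed into the batch above (the file and folder IDs are hypothetical):

// hypothetical IDs, for illustration only
var fileId = 'FILE_ID';
var destinationFolderId = 'FOLDER_ID';
request.requests.push({
    method: 'POST',
    endpoint: '/drive/v3/files/' + fileId + '/copy?supportsAllDrives=true',
    requestBody: { parents: [destinationFolderId] }
});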
Reference
Drive API Parameters
I am trying to make a single JSON file out of all the JSON files in a directory. Every time a user uploads a new JSON file, a new combined JSON should be generated. I want the new JSON to have a custom structure, hence I can't use any libraries. I have the following code:
router.post('/upload', function (req, res) {
    var sampleFile;
    var bbbid = req.body.bbbid;
    DDLFile = req.files.DDLFile;
    j++;
    DDLFile.mv('/uploads/' + bbbid + '/device' + j + '.json', function (err) {
        if (err) {
            res.status(500).send(err);
        }
        else {
            res.redirect("fileuploaded");
        }
    });

    var myfiles = [];
    var fs = require('fs');
    var arrayOfFiles = fs.readdirSync('/uploads/' + bbbid);
    arrayOfFiles.forEach(function (file) {
        myfiles.push(file);
        console.log(myfiles);
    });
    console.log('No of Files:', myfiles.length);
    var files = myfiles.length;
    console.log('Files:', files);
    console.log('J', j);

    var cddl = "{ BBBID:" + bbbid;
    if (files == 0) {
        cddl = cddl + '}';
        console.log('Entered if loop');
    }
    else {
        var i = 0;
        /*var obj;
        fs.readFile('/uploads/' + bbbid + '/device' + j + '.json', 'utf8', function (err, data) {
            if (err) throw err;
            obj = JSON.parse(data);
        });*/
        for (i = 0; i < files; i++) {
            console.log('Entered For loop');
            console.log('Count:', count);
            console.log('Sensor:', sensor);
            try {
                var obj = fs.readFileSync('/uploads/' + bbbid + '/device' + count + '.json', 'utf8');
            }
            catch (err) {
                console.log(err);
            }
            console.log('everything good');
            var obj1 = JSON.parse(obj);
            console.log('hi');
            //JSON.stringify(obj);
            var ddl = require('/uploads/' + bbbid + '/device' + count + '.json');
            console.log('o');
            cddl = cddl + ", {" + obj1.DDL.Sensor.Description.Verbose_Description + ":" + JSON.stringify(ddl) + "}";
            JSON.stringify(cddl);
            console.log(cddl);
            count++;
            sensor++;
            console.log('Count:', count);
            console.log('Sensor:', sensor);
        }
        cddl = cddl + '}';
        JSON.stringify(cddl);
        console.log(cddl);
    }
});
I want to generate a new cddl every time a new file is uploaded. I'm having a lot of problems. Help please!
I see two problems. First, instead of this:
var obj = fs.readFileSync('/uploads/' + bbbid + '/device' + count + '.json', 'utf8');}
catch(err){
console.log(err);
}
console.log('everything good');
var obj1 = JSON.parse(obj);
you can write this instead (fix the path if necessary):
var obj1 = require('./uploads/' + bbbid + '/device' + count + '.json')
Then, when you call:
JSON.stringify(cddl);
you're not saving the result anywhere, so you should save it where you need it:
var a = JSON.stringify(cddl);
And when all is set, don't forget to write the file back using fs.writeFileSync or its async counterpart fs.writeFile.
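As a side note, building the combined document by string concatenation (cddl = "{ BBBID:" + bbbid + ...) produces invalid JSON, since the keys are never quoted. It may be simpler to build a plain object and stringify it once at the end. A minimal sketch, with illustrative paths and structure:

var fs = require('fs');

// assemble a plain object instead of concatenating strings
var combined = { BBBID: bbbid, devices: [] };
for (var k = 1; k <= files; k++) {
    var device = JSON.parse(fs.readFileSync('/uploads/' + bbbid + '/device' + k + '.json', 'utf8'));
    combined.devices.push(device);
}

// serialize once and write the result back to disk
fs.writeFile('/uploads/' + bbbid + '/combined.json', JSON.stringify(combined, null, 2), function (err) {
    if (err) console.error('write failed:', err);
});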
I have a really simple GET request that returns JSON that I am trying to implement. I have followed the tutorials for the Express web framework REST API, but for some reason I keep getting the same error.
ERROR:
TypeError: res.status is not a function
or
TypeError: res.json is not a function
index.js:
var express = require('express');
var router = express.Router();
var pg = require('pg');
var connectionString = 'pg://postgres:postgres#postgres/feed';

/* GET home page. */
router.get('/', function (req, res, next) {
    res.render('index', { title: 'Express' });
});

router.get('/api/leaderboard', function (resp, req) {
    var results = [];
    pg.connect(connectionString, function (err, client, done) {
        if (err) {
            done();
            console.log(err);
            return res.status(500).json({ success: false, data: err });
        }
        var query = client.query("SELECT * FROM log WHERE (logged >= date_trunc('week', CURRENT_TIMESTAMP - interval '1 week') AND logged <= date_trunc('week', CURRENT_TIMESTAMP));");
        var counter = 0;
        var b1 = {};
        var b2 = {};
        var b3 = {};
        var b4 = {};
        b1.energy_sum_week = 0;
        b2.energy_sum_week = 0;
        b3.energy_sum_week = 0;
        b4.energy_sum_week = 0;
        b1.zne_sum_week = 30000;
        b2.zne_sum_week = 30000;
        b3.zne_sum_week = 30000;
        b4.zne_sum_week = 30000;
        query.on('row', function (row) {
            //results.push(row);
            if (row['address'] == 215) {
                b1.energy_sum_week = row['kitchen'] + row['plugload'] + row['lights'] + row['ev'] + row['hvac'] + row['instahot'] - row['solar'];
            }
            else if (row['address'] == 1590) {
                b2.energy_sum_week = row['kitchen'] + row['plugload'] + row['lights'] + row['ev'] + row['hvac'] + row['instahot'] - row['solar'];
            } else if (row['address'] == 1605) {
                console.log(row);
                b3.energy_sum_week = row['kitchen'] + row['plugload'] + row['lights'] + row['ev'] + row['hvac'] + row['instahot'] - row['solar'];
            } else if (row['address'] == 1715) {
                b4.energy_sum_week = row['kitchen'] + row['plugload'] + row['lights'] + row['ev'] + row['hvac'] + row['instahot'] - row['solar'];
            }
        });
        query.on('end', function () {
            done();
            //make zne lower than everything
            results.push(b1);
            results.push(b2);
            results.push(b3);
            results.push(b4);
            resp.json(results);
        });
    });
});

module.exports = router;
It seems like it can't recognize the response object. I've tried a bunch of different things, like passing the request and response into the query callbacks, and using promises.
Getting kinda desperate here :/
The res variable doesn't exist in the current context; you probably expected the line
router.get('/api/leaderboard', function(resp, req){
to have this form:
router.get('/api/leaderboard', function(req, res){
Express always passes the request first and the response second, so as written you are receiving the request in resp and the response in req. Try changing the order:
router.get('/api/leaderboard', function(req, resp){...}
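A minimal illustration of the conventional order (the handler body is just an example):

router.get('/api/leaderboard', function (req, resp) {
    // first argument = incoming request, second = response object
    resp.status(200).json({ success: true }); // .status()/.json() live on the response
});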
I'm trying to build a JSON response out of multiple requests to my MongoDB.
Since I'm not using DBRef, I have to build the "table joins" myself, and that's how I ended up in this mess.
This is the code that has been giving me headaches for a couple of days now.
(The mongo part is done with mongoskin.)
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");

    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};

    userfeed.find({}, args).toArray(function(e, feed) {
        if (e) console.log("error: ", e);
        // gather apparel infos
        var i = 0;
        var ret_feeds = [];
        feed.forEach(function(cur_feed) {
            var outfits = [];
            console.log("beginning with: " + cur_feed.url);
            var resfeed = "";
            resfeed = cur_feed;
            resfeed.url = baseurl + snapurl + resfeed.url + "_small.jpg";
            i = 0;
            cur_feed.apparel_ids.forEach(function(item) {
    /*>>*/      apparel.find({"_id": item}, {limit: 1}).toArray(function(e, results) {
                    console.log(">>>>>>>>>>> APPAREL_FIND { i:" + i + "}");
                    if (e) console.log("error: ", e);
                    results = results[0];
                    if (results.apparel_cat == 1)
                        url_subcat = "pants/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "shirts/";
                    else if (results.apparel_cat == 2)
                        url_subcat = "tshirts/";
                    results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                    results.size = "M"; ///// TOBE REAL VERY SOON
                    results.gallery = [
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                        baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                    ];
                    outfits.push(results); // quick and dirty, 2 b refined..
                    i++;
                    if (i >= cur_feed.apparel_ids.length) {
                        // pack it up
                        // resfeed.url = resfeed.url;
                        resfeed.outfits = outfits;
                        resfeed.fav = false;
                        resfeed.bough = false;
                        // retrieve store infos
    /*>>>*/             store.find({"_id": resfeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t############# STORE_FIND { i:" + i + "}");
                            if (e) console.log("error: ", e);
                            resfeed.store = resstore[0];
                            resfeed.store.class = "hem";
                            ret_feeds.push(resfeed);
                            if (ret_feeds.length >= feed.length) {
                                console.log("\t\t###################### calling return [ ret_feeds.length = " + ret_feeds.length + " feed.length = " + feed.length);
                                out.send(ret_feeds);
                            }
                        });
                    }
                });
            });
        });
    });
}
This code fails because it returns the JSON before finishing its task, so the next time it tries to return another JSON it crashes miserably, due to the fact that the headers have already been sent.
Now as you can see, I have 3 collections: userfeed, apparel and stores.
The goal of this function is to retrieve all the items in the userfeed collection, extract the outfits (based on the outfit_id array that is part of the userfeed collection), and also extract the store infos related in the same way to each userfeed entry.
I know that async.js or an equivalent is the way to go: I've read like a gazillion other posts here on SO, but I still can't get my head around it, probably because the whole mechanism behind async.js, or flow control in general, is still out of focus in my mind.
I'm still a noob at node :)
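For reference, the core mechanism is small: each task receives a callback it must call exactly once, and the library runs the final function after every task has finished (or on the first error). A tiny sketch of async.series, unrelated to the code above:

async.series([
    function (callback) { setTimeout(function () { callback(null, 'first'); }, 100); },
    function (callback) { callback(null, 'second'); }
], function (err, results) {
    // results === ['first', 'second'] once both tasks have called back
    console.log(err, results);
});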
UPDATE
I think I found the right path to understanding here: http://www.sebastianseilund.com/nodejs-async-in-practice
This guy did a terrific job of describing, use case by use case, all the ways to apply async.js to your code.
I'll post the solution as soon as I get my head around it.
UPDATE 2
Thanks to the above guide I was able to work out a working solution; see the answer below.
After so much struggling I have finally managed to get a solution.
async.js was the answer, as I was (obviously) suspecting.
FYI, here's the working code.
If you'd like to point out improvements or anything else, you are more than welcome.
var getUserFeed = function(thelimit, out) {
    userfeed = db.collection("userfeed");
    apparel = db.collection("apparel");
    store = db.collection("stores");

    var args;
    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};

    var outfits = [];
    var feeds = [];

    async.series([
        // userfeed find
        function(callback) {
            userfeed.find({}, args).toArray(function(e, feed) {
                if (e) return callback(e);
                feeds = feed;
                console.log(feeds.length + " retrieved. stepping in");
                callback(null, null);
            });
        },
        // join
        function(callback) {
            async.forEach(feeds, function(thefeed, callback) {
                var i = feeds.indexOf(thefeed);
                async.parallel([
                    // load apparel infos
                    function(callback) {
                        console.log("\t >>> analyzing thefeed id " + thefeed._id);
                        async.forEach(thefeed.apparel_ids, function(apparel_id, callback) {
                            apparel.find({"_id": apparel_id}, {limit: 1}).toArray(function(e, results) {
                                if (e) console.log("error: ", e);
                                results = results[0];
                                if (results.apparel_cat == 1)
                                    url_subcat = "pants/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "shirts/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "tshirts/";
                                results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                                results.size = "M"; ///// TOBE REAL VERY SOON
                                results.gallery = [
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                                ];
                                console.log("\t\t### pushing data into thefeed_index: " + i);
                                if (!util.isArray(feeds[i].outfits)) feeds[i].outfits = [];
                                feeds[i].outfits.push(results);
                                callback(null, null);
                            });
                        }, callback);
                    },
                    // load store infos
                    function(callback) {
                        store.find({"_id": thefeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t### STORE_FIND");
                            if (e) console.log("error: ", e);
                            feeds[i].store = resstore[0];
                            feeds[i].store.class = "hem";
                            callback(null, null);
                        });
                    }
                ], callback);
            }, callback);
        }
        // MAIN
    ], function(err, result) {
        console.log("feed retrieval completed. stepping out");
        if (err) return next(err);
        out.send(feeds);
    });
};
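Since improvements were invited: on current Node versions the same fan-out/fan-in can be expressed without async.js. A rough sketch, assuming the official mongodb driver (which returns promises; mongoskin does not):

// rough modern equivalent of the async.js version above
async function getUserFeed(thelimit, out) {
    const args = thelimit ? { limit: thelimit, sort: { date: -1 } } : {};
    const feeds = await db.collection('userfeed').find({}, args).toArray();
    await Promise.all(feeds.map(async (thefeed) => {
        const [outfits, stores] = await Promise.all([
            db.collection('apparel').find({ _id: { $in: thefeed.apparel_ids } }).toArray(),
            db.collection('stores').find({ _id: thefeed.store_id }).limit(1).toArray()
        ]);
        thefeed.outfits = outfits;
        thefeed.store = stores[0];
    }));
    out.send(feeds); // single response, sent once every join has resolved
}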