Protractor: HTTP Response Testing - json

I'm using Protractor for e2e testing.
I want to test an HTTP response in Protractor.
Basically:
I have a Node.js server running.
I want to send a request to this server,
receive some JSON data,
parse that data,
and check that it is correct.
I'm using the Node.js "http" library to make GET and POST calls.
var http = require('http');
describe("Some test", function() {
  function httpGet(siteUrl) {
    // deferred promise, fulfilled once the full response body has arrived
    var defer = protractor.promise.defer();
    var bodyString = '';
    http.get(siteUrl, function(response) {
      response.setEncoding('utf8');
      response.on("data", function(chunk) {
        bodyString += chunk;
      });
      response.on('end', function() {
        defer.fulfill({
          bodyString: bodyString
        });
      });
    }).on('error', function(e) {
      defer.reject("Got http.get error: " + e.message);
    });
    return defer.promise;
  }
  it('Test case', function() {
    httpGet("http://localhost:3333/path/1/10").then(function(result) {
      var json_data = JSON.parse(result.bodyString);
      for (var i = 0; i < json_data.length; ++i) {
        console.log("label: " + json_data[i].label);
        expect(json_data[i].label).toEqual('abc');
      }
    });
  });
});
I can see the response nicely parsed in the console.log output, but I'm not able to test anything; my test results are:
Finished in 0.019 seconds
1 test, 0 assertions, 0 failures
label: Text1
label: Text2
[launcher] 0 instance(s) of WebDriver still running
[launcher] chrome #1 passed
Process finished with exit code 0
The console output is only written after the test finishes, and no assertions are run.
Any help, please: how can I test these JSON responses from the server in Protractor?

For async tests, you'll need to pass the done callback to your function. Then call done() on success or done.fail() on failure. See Jasmine's Asynchronous support documentation.
it('Test case', function(done) {
  httpGet("http://localhost:3333/path/1/10").then((result) => {
    var json_data = JSON.parse(result.bodyString);
    for (var i = 0; i < json_data.length; ++i) {
      console.log("label: " + json_data[i].label);
      expect(json_data[i].label).toEqual('abc');
    }
    done();
  }).catch(err => {
    done.fail();
  });
});
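Passing the rejection through, e.g. done.fail(err), will also surface the underlying error message in Jasmine's report instead of a bare failure.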


The action did not produce a valid response and exited unexpectedly

I want to call a Node-RED flow from IBM Cloud Functions.
const https = require('https');
function main(params) {
  const path = "/" + params.route + "?" + params.query_params;
  const options = {
    hostname: params.hostname,
    path: path,
    port: 443,
    method: 'GET'
  };
  return new Promise((resolve, reject) => {
    https.get(options, (resp) => {
      resp.on('data', (d) => {
        let s = d.toString();
        obj = JSON.parse(s);
        resolve({ "gw_result": obj });
      });
    });
  });
}
In the Node-RED flow I'm using an HTTP request node to get data from another server. For test purposes I used a GET request to google.com, but I get the same results using another Node-RED endpoint.
As soon as I invoke the web action I get the error message "The action did not produce a valid response and exited unexpectedly". The output of the Node-RED flow appears a few seconds later in the web action's log, although the Node-RED flow itself works properly and promptly (I used Node-RED debug nodes to check this).
The https GET request to Node-RED works well when I replace the HTTP request node in Node-RED with something else, e.g. a Function node, even when I use a Delay node to delay the response for a second or so.
This code works, although google.com does not return an object, of course.
var rp = require('request-promise');
function main(params) {
  var uri = params.hostname + params.route + params.query_params;
  return new Promise(function (resolve, reject) {
    rp(uri)
      .then(function (parsedBody) {
        obj = JSON.parse(parsedBody);
        resolve({ "gw_result": obj });
      })
      .catch(function (err) {
        resolve({ message: 'failed!!', error: err.toString() });
      });
  });
}
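In case it helps others: a likely reason the plain https version fails is that it calls resolve() inside the 'data' handler, which fires on the first chunk; for chunked responses JSON.parse then runs on a partial body and throws. A minimal sketch (same params shape assumed) that buffers chunks and parses only on 'end':
const https = require('https');
function main(params) {
  const options = {
    hostname: params.hostname,
    path: '/' + params.route + '?' + params.query_params,
    port: 443,
    method: 'GET'
  };
  return new Promise((resolve, reject) => {
    https.get(options, (resp) => {
      let body = '';
      // 'data' can fire many times; buffer every chunk
      resp.on('data', (chunk) => { body += chunk; });
      // parse only once the whole response has arrived
      resp.on('end', () => {
        try {
          resolve({ gw_result: JSON.parse(body) });
        } catch (e) {
          reject(e);
        }
      });
    }).on('error', reject);
  });
}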

Nodejs, Cloud Firestore Upload Tasks - Auth error:Error: socket hang up

I'm coding a function that runs API calls and requests JSON from a huge database in sequence via offsets. The JSON response is parsed and then the subsequent data within is uploaded to our Cloud Firestore server.
Nodejs (Node 6.11.3) & Latest Firebase Admin SDK
The information is parsed as expected and prints to the console perfectly. When the data is uploaded to our Firestore database, however, the console is spammed with the error message:
Auth error:Error: socket hang up
(node:846) UnhandledPromiseRejectionWarning: Unhandled promise rejection
(rejection id: -Number-): Error: Getting metadata from plugin failed with
error: socket hang up
and occasionally:
Auth error:Error: read ECONNRESET
The forEach function collects the items from the downloaded JSON and processes the data before uploading to the Firestore database. Each JSON has up to 1000 items of data (1000 documents worth) to pass through the forEach function. I understand that this might be a problem if the function repeats before the upload set finishes?
I'm a coding newbie and understand that the control flow of this function isn't the best. However, I can't find any information on the error that the console prints. I can find plenty of information on socket hang ups, but none on the Auth error section.
I'm using a generated service account JSON as a credential to access our database, which uses the firebase-adminsdk account. Our read/write rules for the database are currently open to allow any access (as we're in development with no real users).
Here's my function:
Firebase initialisation & offset zero-ing
const admin = require('firebase-admin');
var serviceAccount = require("JSON");
admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  databaseURL: "URL"
});
var db = admin.firestore();
var offset = 0;
var failed = false;
Running the function & setting HTTP Headers
var runFunction = function runFunction() {
  var https = require('https');
  var options = {
    host: 'website.com',
    path: (path including an offset and 1000 row specifier),
    method: 'GET',
    json: true,
    headers: {
      'content-type': 'application/json',
      'Authorization': 'Basic ' + new Buffer('username' + ':' + 'password').toString('base64')
    }
  };
Running the HTTP Request & Re-running the function if we haven't reached the end of the response from the API
  if (failed === false) {
    var req = https.request(options, function (res) {
      var body = '';
      res.setEncoding('utf8');
      res.on('data', function (chunk) {
        body += chunk;
      });
      res.on('end', () => {
        console.log('Successfully processed HTTPS response');
        body = JSON.parse(body);
        if (body.hasOwnProperty('errors')) {
          console.log('Body ->' + body);
          console.log('API Call failed due to server error');
          console.log('Function failed at ' + offset);
          req.end();
          return;
        } else {
          if (body.hasOwnProperty('result')) {
            let result = body.result;
            if (Object.keys(result).length === 0) {
              console.log('Function has completed');
              failed = true;
              return;
            } else {
              result.forEach(function (item) {
                var docRef = db.collection('collection').doc(name);
                console.log(name);
                var upload = docRef.set({
                  thing: data,
                  thing2: data,
                });
              });
              console.log('Finished offset ' + offset);
              offset = offset + 1000;
              failed = false;
            }
            if (failed === false) {
              console.log('Function will repeat with new offset');
              console.log('offset = ' + offset);
              req.end();
              runFunction();
            } else {
              console.log('Function will terminate');
            }
          }
        }
      });
    });
    req.on('error', (err) => {
      console.log('Error -> ' + err);
      console.log('Function failed at ' + offset);
      console.log('Repeat from the given offset value or diagnose further');
      req.end();
    });
    req.end();
  } else {
    req.end();
  }
};
runFunction();
Any help would be greatly appreciated!
UPDATE
I've just tried changing the rows of JSON that I pull at a time and subsequently upload at a time using the function - from 1000 down to 100. The socket hang up errors are less frequent so it is definitely due to overloading the database.
Ideally it would be perfect if each forEach array iteration waited for the previous iteration to complete before commencing.
UPDATE #2
I've installed the async module and I'm currently using the async.eachSeries function to perform one document upload at a time. All errors mid-upload disappear - however the function will take an insane amount of time to finish (roughly 9 hours for 158,000 documents). My updated loop code is this, with a counter implemented:
async.eachSeries(result, function (item, callback) {
  // result.forEach(function (item) {
  var docRef = db.collection('collection').doc(name);
  console.log(name);
  var upload = docRef.set({
    thing: data,
    thing2: data,
  }, { merge: true }).then(ref => {
    counter = counter + 1;
    if (counter == result.length) {
      console.log('Finished offset ' + offset);
      offset = offset + 1000;
      console.log('Function will repeat with new offset');
      console.log('offset = ' + offset);
      failed = false;
      counter = 0;
      req.end();
      runFunction();
    }
    callback();
  });
});
Also, after a period of time the database returns this error:
(node:16168) UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: -Number-): Error: The datastore operation timed out, or the data was temporarily unavailable.
It seems as if now my function is taking too long... instead of not long enough. Does anyone have any advice on how to make this run faster without stated errors?
The write requests as part of this loop were simply exceeding Firestore's quota - thus the server was rejecting the majority of them.
To solve this issue I converted my requests to upload in chunks of 50 or so items at a time, with Promises confirming when to move onto the next chunk upload.
The answer was posted here -> Iterate through an array in blocks of 50 items at a time in node.js, and the template for my working code is as below:
async function uploadData(dataArray) {
  try {
    const chunks = chunkArray(dataArray, 50);
    for (const [index, chunk] of chunks.entries()) {
      console.log(`--- Uploading chunk ${index + 1} started ---`);
      await uploadDataChunk(chunk);
      console.log(`--- Uploading chunk ${index + 1} finished ---`);
    }
  } catch (error) {
    console.log(error); // catch and log any error here
  }
}
function uploadDataChunk(chunk) {
  return Promise.all(
    chunk.map((item) => new Promise((resolve, reject) => {
      setTimeout(
        () => {
          console.log(`Chunk item ${item} uploaded`);
          resolve();
        },
        Math.floor(Math.random() * 500)
      );
    }))
  );
}
function chunkArray(array, chunkSize) {
  return Array.from(
    { length: Math.ceil(array.length / chunkSize) },
    (_, index) => array.slice(index * chunkSize, (index + 1) * chunkSize)
  );
}
Pass the data array to uploadData using uploadData(data); and put your per-item upload code into uploadDataChunk, inside the setTimeout callback (before the resolve() line), within the chunk.map function.
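For illustration, a version of uploadDataChunk with the setTimeout placeholder swapped for the question's docRef.set call might look like the sketch below; item.name, item.thing and item.thing2 are hypothetical stand-ins for the redacted name/data fields:
function uploadDataChunk(chunk) {
  // resolves only once every write in this chunk has been acknowledged
  return Promise.all(
    chunk.map((item) => db.collection('collection').doc(item.name).set({
      thing: item.thing,
      thing2: item.thing2,
    }, { merge: true }))
  );
}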
I got around this by chaining the promises in the loop with a wait of 50 milliseconds in between each.
function Wait() {
  return new Promise(r => setTimeout(r, 50));
}
function writeDataToFirestoreParentPhones(data) {
  let chain = Promise.resolve();
  for (let i = 0; i < data.length; ++i) {
    // const (not var) so each chained callback closes over its own docRef
    const docRef = db.collection('parent_phones').doc(data[i].kp_ID_for_Realm);
    chain = chain.then(() => {
      var setAda = docRef.set({
        parent_id: data[i].kf_ParentID,
        contact_number: data[i].contact_number,
        contact_type: data[i].contact_type
      }).then(ref => {
        console.log(i + ' - Added parent_phones with ID: ', data[i].kp_ID_for_Realm);
      }).catch(function (error) {
        console.error("Error writing document: ", error);
      });
    })
    .then(Wait);
  }
  return chain; // lets callers await completion of the whole sequence
}
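As a design note beyond the original answers: firebase-admin also supports batched writes, which commit up to 500 operations at a time and tend to be much faster than one chained set() per document. A minimal sketch under that assumption (async/await needs Node 8+, newer than the question's Node 6.11.3), reusing the field names above:
async function writeInBatches(data) {
  // Firestore batches are limited to 500 operations each
  for (let start = 0; start < data.length; start += 500) {
    const batch = db.batch();
    data.slice(start, start + 500).forEach((row) => {
      const docRef = db.collection('parent_phones').doc(row.kp_ID_for_Realm);
      batch.set(docRef, {
        parent_id: row.kf_ParentID,
        contact_number: row.contact_number,
        contact_type: row.contact_type
      });
    });
    await batch.commit(); // wait for this batch before starting the next
  }
}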
For me this turned out to be a network issue.
Uploading 180,000 documents in batches of 10,000 was no trouble for me before and today having used a public, slower wifi connection, I received that error.
Switching back to my 4G mobile connection sorted the problem for me. Not sure whether it's a speed issue - could have been a security issue - but I'll go with that assumption.

I'm not using Express, but I'm trying to extract POST data and then send JSON as a response, with no luck

I'm still very new to Node.js, so please bear with me.
I'm trying to extract POST data and then send JSON as a response, but I don't seem to be able to extract the data from the POST request. Even worse, I can't find the syntax for sending JSON for people who are NOT using Express; it keeps telling me res.json is not a function.
EDIT: I figured out the JSON part, I was being dumb. I finally remembered what I was told: JSON is sent as a string.
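In other words, without Express you serialize the response yourself: res.end(JSON.stringify(obj)) rather than res.json(obj).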
var http = require('http');
var qs = require("querystring");
server = http.createServer(function (req, res) {
  try {
    var body = "";
    var post = qs.parse("");
    if (req.method == "POST") {
      res.writeHeader(200, {"Content-Type": "application/json"});
      req.on("data", function (data) {
        body += data;
        console.log(data); // gives something like <Buffer xx xx xx ...>
        if (body.length > 1e6)
          req.connection.destroy();
      });
      req.on("end", function () {
        post = qs.parse(body);
        console.log(post.test); // gives "undefined"
      });
      res.end(JSON.stringify({ a: 1 }));
    }
  } catch (err) {
    console.dir(err);
    res.writeHeader(200, {"Content-Type": "text/plain"});
    res.end("Hi hi");
  }
});
server.listen(8000);
console.log("http start #8000");
Any help? Thanks in advance.
The following solves the data-to-string issue (i.e. converting the buffer to a string):
res.on('data', function(chunk) {
  var textChunk = chunk.toString('utf8');
  // console.log(textChunk); // will give you a stream of text from data
});
You could store textChunk outside the on('data') handler and then use it later if required (say, for returning relevant data back to the user).
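Putting the pieces together, a minimal no-Express handler might look like the sketch below. The key changes from the question's code are assumptions on my part: the JSON response is sent only after the 'end' event (not before the body has been read), and qs.parse expects a form-encoded body such as test=123:
var http = require('http');
var qs = require('querystring');
http.createServer(function (req, res) {
  if (req.method === 'POST') {
    var body = '';
    req.on('data', function (chunk) {
      body += chunk; // string concatenation converts each Buffer chunk
      if (body.length > 1e6) req.connection.destroy(); // guard against floods
    });
    req.on('end', function () {
      var post = qs.parse(body); // e.g. "test=123" -> { test: '123' }
      console.log(post.test);
      // no res.json() without Express; stringify by hand
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ a: 1 }));
    });
  } else {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('Hi hi');
  }
}).listen(8000);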

How do I return JSON in my Node.js proxy?

I had to proxy the requests to the Dota 2 API. After some research I found a way to do it in the API I am building using the MEAN stack. However, I can't figure out how to return the data from the request.
Here is my route in my node server code:
router.get('/api/allheros', function (req, res) {
  Https.get('https://api.steampowered.com/IEconDOTA2_570/GetHeroes/v0001/?key=FB900D42DC33F4B4FCC62F6C7779BE5D', function (res) {
    var str = '';
    console.log('Response is ' + res.statusCode);
    res.on('data', function (chunk) {
      str += chunk;
    });
    res.on('end', function () {
      console.log(str); // this console.logs all the heros correctly
    });
  });
});
The console log works but I need the data to be returned to the front end.
All you need to do is parse your result into a JSON object and send it back using res.json(). Modify your code as follows:
router.get('/api/allheros', function (req, res) {
  Https.get('https://api.steampowered.com/IEconDOTA2_570/GetHeroes/v0001/?key=FB900D42DC33F4B4FCC62F6C7779BE5D', function (response) {
    var str = '';
    console.log('Response is ' + response.statusCode);
    response.on('data', function (chunk) {
      str += chunk;
    });
    response.on('end', function () {
      res.json(JSON.parse(str));
    });
  });
});
Note: the res in the inner function had to be renamed to response so that it doesn't shadow the res parameter of the outer function.
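If the upstream API could ever return something that isn't valid JSON, a slightly more defensive variant (a sketch, using Express's res.status) would guard the parse:
response.on('end', function () {
  try {
    res.json(JSON.parse(str));
  } catch (e) {
    res.status(502).send('Upstream returned invalid JSON');
  }
});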

how to get session from codeigniter with node.js

I'm writing an application like a social network, where the application shows status updates and chat. When I searched the internet I found Node.js and its long-polling approach, and I think I can use that for the chat and streaming pages in my application. But when I use Node.js I get stuck.
This is the technology I want for my project:
1) CodeIgniter as the framework with a MySQL database, at localhost:81/myproject
2) Node.js at 127.0.0.1:8080 for the chat and streaming pages
This is the JavaScript server code for Node.js, named server.js:
var sys = require("sys"),
http = require("http"),
url = require("url"),
path = require("path"),
fs = require("fs"),
events = require("events");
function load_static_file(uri, response) {
var filename = path.join(process.cwd(), uri);
path.exists(filename, function(exists) {
if(!exists) {
response.writeHead(404, {"Content-Type": "text/plain"});
response.write("404 Not Found\n");
response.end();
return;
}
fs.readFile(filename, "binary", function(err, file) {
if(err) {
response.writeHead(500, {"Content-Type": "text/plain"});
response.write(err + "\n");
response.end();
return;
}
response.writeHead(200);
response.write(file, "binary");
response.end();
});
});
}
var local_client = http.createClient(81, "localhost");
var local_emitter = new events.EventEmitter();
function get_users() {
var request = local_client.request("GET", "/myproject/getUser", {"host": "localhost"});
request.addListener("response", function(response) {
var body = "";
response.addListener("data", function(data) {
body += data;
});
response.addListener("end", function() {
var users = JSON.parse(body);
if(users.length > 0) {
local_emitter.emit("users", users);
}
});
});
request.end();
}
setInterval(get_users, 5000);
http.createServer(function(request, response) {
var uri = url.parse(request.url).pathname;
if(uri === "/stream") {
var listener = local_emitter.addListener("users", function(users) {
response.writeHead(200, { "Content-Type" : "text/plain" });
response.write(JSON.stringify(users));
response.end();
clearTimeout(timeout);
});
var timeout = setTimeout(function() {
response.writeHead(200, { "Content-Type" : "text/plain" });
response.write(JSON.stringify([]));
response.end();
local_emitter.removeListener(listener);
}, 10000);
}
else {
load_static_file(uri, response);
}
}).listen(8383);
sys.puts("Server running at http://localhost:8383/");
Now, on the CodeIgniter side, I made a web service at the URL http://localhost:81/myproject/getUser whose response is in JSON format. I access it with session authentication; if not authenticated, it redirects to the login page.
[{"user_id":"2a20f5b923ffaea7927303449b8e76daee7b9b771316488679","token":"3m5biVJMjkCNDk79pGSo","username":"rakhacs","password":"*******","name_first":"rakha","name_middle":"","name_last":"cs","email_id":"email#gmail.com","picture":"img\/default.png","active":"1","online":"0","created_at":"2011-09-20 11:14:43","access":"2","identifier":"ae70c3b56df19a303a7693cdc265f743af5b0a6e"},{"user_id":"9a6e55977e906873830018d95c31d2bf664c2f211316493932","token":"RoUvvPyVt7bGaFhiMVmj","username":"ferdian","password":"*****","name_first":"willy","name_middle":"","name_last":";f;w","email_id":"email1#gmail.com","picture":"img\/default.png","active":"1","online":"0","created_at":"2011-09-20 11:47:20","access":"2","identifier":"1ccd4193fa6b56b96b3889e59c5205cc531177c9"}]
This is the problem: when I execute node server.js I get an error like this:
undefined:0
SyntaxError: Unexpected end of input
    at Object.parse (native)
I don't know what causes that error. I think it's because I'm using sessions, but I'm not sure.
When I test my web service using this test.js:
var http = require("http")
var options = {
host: 'localhost',
port: 81,
path: '/myproject/getUser',
method: 'GET'
};
var req = http.request(options, function(res) {
console.log('STATUS: ' + res.statusCode);
console.log('HEADERS: ' + JSON.stringify(res.headers));
res.setEncoding('utf8');
res.on('data', function (chunk) {
console.log('BODY: ' + chunk);
});
});
req.on('error', function(e) {
console.log('problem with request: ' + e.message);
});
// write data to request body
req.write('data\n');
req.write('data\n');
req.end();
And this is the output:
problem with request: parse error
But when I use another web service that also responds with JSON and doesn't use a session, it works and I can get the response body. If this problem is the session, how can I fix it? This has me confused. Thanks for your comments.
Hard to say, but I would first try to send a JSON object rather than an array directly, i.e. just encapsulate the array in an object:
{"data":[…]}
This may be what's causing the parse error.
Beyond that, for this purpose it wouldn't hurt to add the following to your responding PHP method:
header('Cache-Control: no-cache, must-revalidate');
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Content-type: application/json');
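On the session question itself: if getUser sits behind session authentication, the Node.js poller never sends the CodeIgniter session cookie, so the endpoint most likely returns the login redirect (or an empty body) instead of JSON, and JSON.parse fails with "Unexpected end of input". A hedged sketch of forwarding a session cookie with the request; ci_session is CodeIgniter's default cookie name and sessionValue is a hypothetical variable holding a valid session id:
var request = local_client.request("GET", "/myproject/getUser", {
  "host": "localhost",
  // "ci_session" is CodeIgniter's default cookie name (an assumption;
  // check your config), and sessionValue must hold a valid session id
  "cookie": "ci_session=" + sessionValue
});
Alternatively, exposing getUser without session authentication (e.g. guarded by a shared secret) avoids the cookie handling entirely.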