can't read a file from tmp dir in Linux - json

I am writing an electron app which creates a file in user's system in tmp dir with an extension (.tms) and afterwords read that tmp dir and get all the files with an extension (.tms) and pick one file and read it's content and further parse the content using JSON.parse() . The files has contents in JSON string , so I need to parse that string obj. But my code read the dir like {/tmp/12321212.tms} but can't read the content from that dir. That's why JSON.parse function is giving error. Below is the piece of code.
function getScreenshotName (){
return new Promise( (resolve,reject) =>{
var files = readDir.readSync(os.tmpdir()+'/',["**.tms"])
if (files.length > 0 ) {
return resolve(files[0])
}
else{
return reject(err)
}
})
}
function getScreenshotObj (pathToFirstFile) {
return new Promise ((resolve,reject) =>{
console.log("Path to temp dir : " + pathToFirstFile)
fs.readFile(pathToFirstFile, 'utf8',function(err,fileContents){
if (err) {
return reject(err)
}
else{
console.log("File contents : ")
console.log(fileContents)
screenshotObject = JSON.parse(fileContents)
obj = {pathToFirstFile : pathToFirstFile , screenshotObject:screenshotObject ,accesstoken : accesstoken}
return resolve(obj)
}
})
})
}
Error is in JSON.Parse function because it can't read the contents .

Related

Cypress fixtures structure json and type

i have one form with two inputs
context('Include contains from json file', function () {
beforeEach(() => {
cy.server()
cy.fixture("example.json")
.as('data')
.then((data) => {
cy.route('GET', 'example.json', data)
})
})
it('Výběr klienta', function () {
cy.visit('/info')
cy.get('[data-cy=username]').type(JSON.stringify(this.data)
cy.get('[data-cy=surname]').type(JSON.stringify(this.data)
})
})
How to type contains for two or more inputs from external file .json
My .json file
{
"name": "Jane"
"surname": "Doe"
}
The .json file should be in the fixtures folder as cypress will automatically search for test data inside this folder by default until and unless the path is specified otherwise.
You have to load the fixtures file in your tests in beforeEach() cy.fixture('example.json').as('data')
Then your code would be:
cy.get('[data-cy=username]').type(data.name)
cy.get('[data-cy=surname]').type(data.surname)
Works for me perfectly in single fixture JSON with multiple data and not stub API
before describe('Test Single Input Field Form', function() declare :
const testData = require("../../fixtures/multipleInputFields.json")
and then
`testData.forEach((data) => {
const message = data.message
it('Test Case', function(){
cy.log("data is:" + data)
cy.get('#user-message').type(message).should('have.value', message)
cy.get('#get-input > button').click()
cy.wait(200)
cy.get('span#display').should('have.text', message)
})
});`

I get undefined values in my field value when inserting multiple documents after converting csv to json string and parsing. Why?

Basically, I am trying to do an import function for csv files where the csv file will get converted to json before then being inserted into the mongodb. This is my code.
//require the csvtojson converter class
var Converter = require("csvtojson").Converter;
// create a new converter object
var converter = new Converter({});
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/myproject';
// call the fromFile function which takes in the path to your
// csv file as well as a callback function
converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-
New_2017.csv",function(err,result){
// if an error has occured then handle it
if(err){
console.log("An Error Has Occured");
console.log(err);
}
// the result of the conversion
var jsonResult = result;
console.log(jsonResult);
var jsobject= JSON.stringify(jsonResult);
var jsonobject= JSON.parse(jsobject);
var f = jsonobject.length;
console.log(f);
MongoClient.connect(url, function(err, db) {
if(err) {
console.log(err);
}
for(i = 0; i < f; i++){
var insertDocument = function() {
db.collection('documents').insertOne({
//'_id': Object.keys(obj).length,
'indexNo' : jsonobject.indexNo,
'samID': jsonobject.samID,
'Type': jsonobject.Type,
'InventoryStatus': jsonobject.InventoryStatus,
'dateIn':jsonobject.dateIn,
'Remarks':jsonobject.Remarks,
'dateOut':jsonobject.dateOut,
//'Remarks':jsonobject.remarks,
'ntaSamRequestRef': jsonobject.ntaSamReqRef
//'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
}, function(err, results) {
if(err) throw err;
console.log(results);
});
};
insertDocument(db, function() {
if(err)
throw err;
else{
console.log('insert');
}
db.close();
});
}
});
console.log("Inserted " + f + " document into the documents
collection.");
});
So far, I have tried doing this of converting a random file with 1400 records into a json string before parsing and then inserting it. But somehow I keep getting undefined from my fields whenever I insert, the result show my respective field with undefined values.
Which part of my jsonobject.indexNo is wrong in the sense that is jsonobject.field1value and jsonobject.field2value etc. How should I get the values from my json string then after parsing?
I am using node.js to run it and mongodb as database. I can convert nicely just this part about inserting the documents inside. Thanks in advance!
db.collection('documents').insertOne is an async method , you can't run it in a loop like that. Workaround is you can use async to handle it. Suggest to use async.each
Eg:
// Consider jsonResult is an array
var jsonResult = result;
async.each(jsonResult,
// Here, jsonobject is a child object of jsonResult array
function(jsonobject, callback){
db.collection('documents').insertOne({
//'_id': Object.keys(obj).length,
'indexNo' : jsonobject.indexNo,
'samID': jsonobject.samID,
'Type': jsonobject.Type,
'InventoryStatus': jsonobject.InventoryStatus,
'dateIn':jsonobject.dateIn,
'Remarks':jsonobject.Remarks,
'dateOut':jsonobject.dateOut,
//'Remarks':jsonobject.remarks,
'ntaSamRequestRef': jsonobject.ntaSamReqRef
//'Created Date': "<b>" + day + "/" + month + "/" + year + "</b>"
});
// Async call is done, trigger callback
callback();
},
// 3rd param is the function to call when everything's done
function(err){
// All tasks are done now
doSomethingOnceAllAreDone();
}
);

aws lambda s3 function isn't called inside alexa skills kit

I am trying to create a skill for Amazon Echo that will call a JSON file from AWS S3. When I call the code from s3 basic get function it works. And the Amazon Alexa code works on its own.
But when I call them together the function gets skipped. So for the following code the console gets called before and after s3.getObject(). But the middle one gets skipped. I do not understand why.
I also checked whether s3 was being called, and it is.
let aws = require('aws-sdk');
let s3 = new aws.S3({ apiVersion: '2006-03-01'});
function callS3() {
console.log('loading S3 function');
var myData = [];
const params = {
Bucket: 'cvo-echo',
Key: 'data.json'
};
console.log("trying to get s3");
s3.getObject(params, (err, data) => {
if (err) {
console.log('error in s3 get: \n' + err);
//const message = `Error getting object ${key} from bucket ${bucket}.
// Make sure they exist and your bucket is in same region as this function.
//console.log(message);
} else {
console.log('CONTENT TYPE: ', data.ContentType);
console.log('Data body: \n' + data.Body.toString());
myData = JSON.parse(data.Body.toString());
console.log('myData.length = ' + myData.length);
}
console.log('myData >> ' + myData);
});
console.log('finished callS3() func');
return myData;
}
This might be a control flow issue, I've worked with amazons sdk before and was running into similar issues. Try implementing async within your code to have a better control of what happens when. This way methods won't skip.
UPDATE: adding some code examples of what you could do.
function callS3(callback) {
console.log('loading S3 function');
var myData = [];
const params = {
Bucket: 'cvo-echo',
Key: 'data.json'
};
console.log("trying to get s3");
s3.getObject(params, (err, data) => {
if (err) {
console.log('error in s3 get: \n' + err);
//const message = `Error getting object ${key} from bucket ${bucket}.
// Make sure they exist and your bucket is in same region as this function.
//console.log(message);
callback(err,null);//callback the error.
} else {
console.log('CONTENT TYPE: ', data.ContentType);
console.log('Data body: \n' + data.Body.toString());
myData = JSON.parse(data.Body.toString());
console.log('myData.length = ' + myData.length);
console.log('myData >> ' + myData);
console.log('finished callS3() func');
//Include the callback inside of the S3 call to make sure this function returns until the S3 call completes.
callback(null,myData); // first element is an error and second is your data, first element is null if no error ocurred.
}
});
}
/*
This MIGHT work without async but just in case you can read more about
async.waterfall where functions pass down values to the next function.
*/
async.waterfall([
callS3()//you can include more functions here, the callback from the last function will be available for the next.
//myNextFunction()
],function(err,myData){
//you can use myData here.
})
It's a timing issue. Here is an example of loading a JSON file from an S3 share when a session is started.
function onLaunch(launchRequest, session, callback) {
var sBucket = "your-bucket-name";
var sFile = "data.json";
var params = {Bucket: sBucket, Key: sFile};
var s3 = new AWS.S3();
var s3file = s3.getObject(params)
new AWS.S3().getObject(params, function(err, data) {
if (!err) {
var json = JSON.parse(new Buffer(data.Body).toString("utf8"));
for(var i = 0; i < json.length; i++) {
console.log("name:" + json[i].name + ", age:" + json[i].age);
}
getWelcomeResponse(callback);
} else {
console.log(err.toString());
}
});
}

TypeError: Failed to execute 'readAsText' on 'FileReader': parameter 1 is not of type 'Blob'

I'm writing an chrome application with scala.js, and have some file reading problem.
When I use chrome.fileSystem.chooseEntry with openDirectory to select a directory, I want to read the _meta_.json file inside, the code is:
chrome.fileSystem.chooseEntry(js.Dynamic.literal("type" -> "openDirectory"), (dir: Entry) => {
dir.getFile("_meta_.json", js.Dynamic.literal(), (entry: FileEntry) => {
entry.file((file: FileEntry) => {
val reader = new FileReader()
reader.onload = (event: UIEvent) => {
println("############ read file: " + event)
}
reader.onloadend = (event: ProgressEvent) => {
println("############ read file: " + reader.result)
()
}
reader.onerror = (event: Event) => {
println("######### read error")
()
}
println("###### going to read")
reader.readAsText(entry.asInstanceOf[Blob]) // !!!!
()
})
})
})
(The code here is simplified, if you want to see the accurate code, please refer to https://github.com/freewind/fast-links/blob/master/src/main/scala/in/freewind/fastlinks/chrome_app/config/Header.scala#L45)
But when the code is running, it doesn't print anything, seems the file is never read. Then I set a debugger and stepped into the line ends with // !!!!, and run the code in console:
reader.readAsText(this.entry$1$2)
It reports error:
TypeError: Failed to execute 'readAsText' on 'FileReader': parameter 1 is not of type 'Blob'.
It clearly shows the reason, but I don't know how to fix it. I searched but all the similar examples are using the file input file from html DOM.
How to read the file correctly?
I ran into something similar and found this answer:
Problems with HTML5 FileReader
TLDR:
The parameter you are passing the file reader is the file name (a string), not the file object itself.
Try something like this :
loadFile: function(e) {
if(e != undefined) {
var file = e.target.files[0];
if(file != null && file.size > 0) {
reader.readAsText(this.entry$1$2);
//process file
} else {
//error
}
}

getting error while download the file in mvc2

In my site, i gave download option to download the file. when i am checking in local server it is working properly. But after deploy the server, if i click the link means it will show the following error,
This request has been blocked because sensitive information could be disclosed to third party web sites when this is used in a GET request. To allow GET requests, set JsonRequestBehavior to AllowGet.
My code here
public ActionResult Download(string fileName)
{
string pfn = Server.MapPath("~/Content/Files/" + fileName);
if (!System.IO.File.Exists(pfn))
{
//throw new ArgumentException("Invalid file name or file not exists!");
return Json(new JsonActionResult { Success = false, Message = "Invalid file name or file not exists!" });
}
else
{
return new BinaryContentResult()
{
FileName = fileName,
ContentType = "application/octet-stream",
Content = System.IO.File.ReadAllBytes(pfn)
};
}
}
This is my code. I don't know what mistake here, Can anyone find my problem and tell me ?
The Problem with ur code is that u r missing 'JsonRequestBehavior.AllowGet' while returning json.
public ActionResult Download(string fileName)
{
string pfn = Server.MapPath("~/Content/Files/" + fileName);
if (!System.IO.File.Exists(pfn))
{
//throw new ArgumentException("Invalid file name or file not exists!");
return Json(new JsonActionResult { Success = false, Message = "Invalid file name or file not exists!" },JsonRequestBehavior.AllowGet });
}
else
{
return new BinaryContentResult()
{
FileName = fileName,
ContentType = "application/octet-stream",
Content = System.IO.File.ReadAllBytes(pfn)
};
}
}