json file not updating in react native - json

i want to write to a JSON file so i used react-native-fs
here is the code:
const add = (n, p, pr) => {
var RNFS = require('react-native-fs');
var filePath = RNFS.DocumentDirectoryPath + '/items.json';
RNFS.writeFile(filePath, '{name:hello}', 'utf8')
.then((success) => {
console.log('SUCCESS');
})
.catch((err) => {
console.log(err.message);
});
};
it log success but didn't update the file any ideas?

Your file is updating successfully and if you want to check it please run the following code after your file is written. You will see the file's path and data of your saved file.
// get a list of files and directories in the main bundle
RNFS.readDir(RNFS.DocumentDirectoryPath)
.then((result) => {
console.log('GOT RESULT', result);
// stat the first file
return Promise.all([RNFS.stat(result[0].path), result[0].path]);
})
.then((statResult) => {
if (statResult[0].isFile()) {
// if we have a file, read it
return RNFS.readFile(statResult[1], 'utf8');
}
return 'no file';
})
.then((contents) => {
// log the file contents
console.log("contents");
console.log(contents); // You will see the updated content here which is "{name:hello}"
})
.catch((err) => {
console.log(err.message, err.code);
});

Related

Cypress - Can't use custom task in tests when separating the infra from tests

I've tried to added a new custom task to my plugins file that located outside the tested project.
I've compiled it and configured his path in the config.json as well.
All the other plugins from this file it works ok.
The error I got from Cypress during the execution is ->
"value": "CypressError: `cy.task('queryDb')` failed with the following error:\n\nThe task 'queryDb' was not handled in the plugins file. The following tasks are registered: log\n\nFix this in your plugins file here:\n./../testilize/cypress/plugins/index.ts\n at ...
The configuration file is extend to the base config file outside the tested project ->
{
"extends": "./../testilize/cypress.json",
"baseUrl": "https://www.blabla.com/",
"env": {
"client": "https://www.blabla.com/",
"server": "https://www.blabla.com/"
},
"pluginsFile": "./../testilize/cypress/plugins/index.ts",
"supportFile": "./../testilize/cypress/support/index.js",
"fixturesFolder": "e2e-tests/fixtures",
"integrationFolder": "e2e-tests/test-files"
}
plugins file ->
// cypress/plugins/index.ts
/// <reference types="cypress" />
/**
* #type {Cypress.PluginConfig}
*/
const preprocess = require('./preprocess');
const deepmerge = require('deepmerge')
const path = require('path');
require('dotenv').config({ path: './../testilize/.env' , override: true })
import { my_connection } from '../support/db-handlers/connections';
function queryTestDb(query, config) {
// start connection to db
my_connection.connect();
// exec query + disconnect to db as a Promise
return new Promise((resolve, reject) => {
my_connection.query(query, (error, results) => {
if (error) reject(error);
else {
my_connection.end();
// console.log(results)
return resolve(results);
}
});
});
}
module.exports = (on, config) => {
require('cypress-log-to-output').install(on)
on('task', {
log (message) {
console.log(message)
return true
}
})
const configJson = require(config.configFile)
if (configJson.extends) {
const baseConfigFilename = path.join(config.projectRoot, configJson.extends)
const baseConfig = require(baseConfigFilename)
console.log('merging %s with %s', baseConfigFilename, config.configFile)
configJson.env.my_db_name = process.env.my_DB_NAME;
configJson.env.my_db_host = process.env.my_DB_HOST;
configJson.env.my_db_user = process.env.my_DB_USER;
configJson.env.my_db_password = process.env.my_DB_PASSWORD;
configJson.env.my_db_port = process.env.my_DB_PORT;
return deepmerge(baseConfig, configJson);
}
on("file:preprocessor", preprocess);
on('before:browser:launch', (browser , launchOptions) => {
if (browser.name === 'chrome' && browser.isHeadless) {
launchOptions.args.push('--disable-gpu', '--no-sandbox', '--disable-dev-shm-usage', '--window-size=1920,1080');
return launchOptions
}
})
// Usage: cy.task('queryDb', query)
on('task', {
'queryDb': query => {
return queryTestDb(query, config);
}
});
return configJson
}
Test file ->
/// <reference types="./../../../testilize/node_modules/cypress" />
let allProjectIDs: any = [];
describe('Tests', () => {
it('send graphQL request for internal api', () => {
cy.task(
'queryDb',
`SELECT project_id FROM table_name LIMIT 100;`
).then(res => {
console.log(res);
allProjectIDs.push(res);
console.log(allProjectIDs);
});
});
});
Stack::
TypeScript 4.6
Node 14x
Cypress 9.6
It might be because you have two on('task', { sections in plugins.
The first one looks like the default supplied by Cypress, try commenting it out.

Import csv file and send to backend

I try to create a redux-react app where the users can import an csv-file that later is stored in a database. Right now I am working on the frontend where I want to create a code where the user can chose a csv file from their computer that they want to download and then the file is sent to the backend. I have therfore used the csvReader to read the csv-file but I don't know how to send the data to the backend. I am using nestJS in the backend. I want to send the whole csv-file in one go but i dont know how to tackle the problem. I am a beginner :))) Do you know how to solve my problem?
I can't help you with react but maybe this NestJS part can help you. You can use multer to config your api and setting a store path.
Create multer options
// multer.ts
const excelMimeTypes = [
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/wps-office.xlsx',
'application/vnd.ms-excel',
];
export const multerOptions = {
fileFilter: (req: any, file: any, cb: any) => {
const mimeType = excelMimeTypes.find(im => im === file.mimetype);
if (mimeType) {
cb(null, true);
} else {
cb(new HttpException(`Unsupported file type ${extname(file.originalname)}`, HttpStatus.BAD_REQUEST), false);
}
},
storage: diskStorage({
destination: (req: any, file: any, cb: any) => {
const uploadPath = '/upload'; // use env var
if (!existsSync(uploadPath)) {
mkdirSync(uploadPath); // create if not exists
}
cb(null, uploadPath);
},
filename: (req: any, file: any, cb: any) => {
cb(null, file.originalname);
},
}),
};
Import multerOption recent created and use FileInterceptor and UploadedFile decorator to get the file.
#Post()
#UseInterceptors(FileInterceptor('file', multerOptions))
uploadFile(#UploadedFile() file) {
console.log(file) // call service or whathever to manage uploaded file.. handleFile in the example below..
}
Manage file (example) using xlsx library.
handleFile(file: any): Promise<any> {
return new Promise(async (resolve: (result: any) => void, reject: (reason: any) => void): Promise<void> => {
try {
const workbook = XLSX.readFile(`${uploadLocation}/${file.filename}`);
resolve(workbook.Sheets[sheetName]);
} catch (error) {
reject(error);
}
});
}
I hope it helps!

How do I return files as base-64 data URL strings from my API using MongoDB/GridFS?

I have a collection of Contacts inside my MongoDB
Those Contacts have avatars (or "profile pictures")
Here is the profile picture for the above user:
... and a chunk of that file (there's only one).
I'm trying to take ^^^ that ^^^ chunk and parse it into a base-64 data URL in order to return it from my server back to my application and use it inside an <img>'s src attribute.
app.get('/queryContacts', (req, res) => {
const getContacts = async query => {
let contacts = await db
.collection('contacts')
.find(query)
.toArray();
return contacts;
};
const getImages = async id => {
let imageUrl = 'data:image/jpg;base64';
await bucket
.openDownloadStream(new ObjectID(id))
.on('data', chunk => {
imageUrl += chunk.toString('base64');
})
.on('end', () => {
return imageUrl;
});
}
getContacts({account_id: new ObjectID(req.query.id)}).then(contacts => {
Object.keys(contacts).forEach(key => {
getImages(contacts[key].image_id).then(url => {
console.log(url); // undefined
contacts[key].imageUrl = url;
});
});
res.json(contacts);
});
});
The problem is that when I try this, the URL is undefined because getImages() isn't waiting for the 'end' event to finish.

Firebase functions RangeError: Maximum call stack size exceeded

I have a Callable function that uploads an image and update Firestore and Storage accordingly. The function does what it should do. but I still get this error:
Unhandled error RangeError: Maximum call stack size exceeded
here is the function:
export const uploadImageToStripe = functions.https.onCall(async (data, context) => {
let businessDoc: DocumentSnapshot
try {
if (!fireStoreDB) {
fireStoreDB = admin.firestore();
fireStoreDB.settings(settings);
}
businessDoc = await fireStoreDB.collection('businesses').doc(data.business_id).get()
const bucketName = functions.config().storage.default_bucket;
const tempLocalFile = path.join(os.tmpdir(), 'img.jpg').trim();
const tempLocalDir = path.dirname(tempLocalFile);
const bucket = admin.storage().bucket(bucketName);
// Create the temp directory where the storage file will be downloaded.
await mkdirp(tempLocalDir);
console.log('Temporary directory has been created', tempLocalDir);
// Download file from bucket.
await bucket.file(data.photo_location).download({ destination: tempLocalFile });
console.log('The file has been downloaded to', tempLocalFile);
// Downloads the file
console.log(`gs://${bucketName}/${data.photo_location} downloaded to ${tempLocalDir}.`)
const uploadedFile: stripeM.fileUploads.IFileUpdate = await stripe.fileUploads.create({
file: {
data: fs.readFileSync(tempLocalFile),
name: 'img.jpg',
type: 'application.octet-stream',
}
});
if (!businessDoc.exists) {
throw new functions.https.HttpsError('not-found', `Couldn't find business document ` + data.business_id);
}
await stripe.accounts.update(businessDoc.data().stripeId,
{ document: uploadedFile.id });
await businessDoc.ref.update({ "photoNeeded": false })
return await bucket.file(data.photo_location).delete()
} catch (error) {
console.error(error);
await businessDoc.ref.update({ "photoNeeded": true })
throw new functions.https.HttpsError('unavailable', `failed to upload photo to stripe`);
}
})
Any ideas why I get this error?
This line throw the error:
return await bucket.file(data.photo_location).delete()
splitting it to:
await bucket.file(data.photo_location).delete()
return "Success"
solve it.

Getting file from hash in js-ipfs

I'm running the script below and using an ipfs node to upload and get a file using its hash, but the ipfs cat function only returns the path of the file from the hash, not the content.
const node = new Ipfs()
node.once('ready', () => console.log('IPFS node is ready'))
$("#saveIt").click(function(){
var toStore = document.getElementById('fileInput').value
node.files.add(new node.types.Buffer.from(toStore), (err, res) => {
if (err || !res) {
return console.error('ipfs add error', err, res)
}
res.forEach((file) => {
if (file && file.hash) {
var newVar = file.hash
var newVar1 = newVar.slice(0, 23)
var leng = newVar.length
var newVar2 = newVar.slice(24, leng)
console.log(newVar1 + ' ' + newVar2)
mediachain.setUserFile($("#passwordSetter").val(), newVar1, newVar2)
node.files.cat(file.hash, (err, data) => {
if (err) {
return console.error('ipfs cat error', err)
}
document.getElementById('fileDisplayArea').innerText = data
})
} else {
console.error("Error: invalid file")
}
})
})
})
Does anyone have experience with js-ipfs and can help me out?
I had a similar problem with ipfs.
if you have hash already you can get the content this way.
const validCID = 'QmQFPQ5f94byxs7zvHMLJcx5WzThRhN4MfAF4ZisSXofKC'
ipfs.files.get(validCID, function (err, files) {
files.forEach((file) => {
console.log(file.path)
console.log("File content >> ",file.content.toString('utf8'))
})
})