I am trying to create an Excel workbook using excelbuilder and populate it with data from a MySQL database. This is my GET handler:
app.get('/excel', function(req, res) {
    var locationSpread = ['location1.xlsx', 'location2.xlsx', 'location3.xlsx'];
    locationSpread.forEach(function(filename) {
        fs.unlink("./Spreadsheets/" + filename, (err) => {
            if (err) {
                console.log('Spreadsheet ' + filename + ' not found');
            } else {
                console.log('Spreadsheet ' + filename + ' successfully deleted');
            }
        });
        var query = connection.query('SELECT * from ' + filename.slice(0, -5), function(err, rows) {
            var workbook = excelbuilder.createWorkbook('./Spreadsheets/', filename);
            var sheet = workbook.createSheet(filename.slice(0, -5), 3, rows.length);
            sheet.set(1, 1, 'First Name');
            sheet.set(2, 1, 'Last Name');
            sheet.set(3, 1, 'Company');
            // *** TROUBLE CODE STARTS HERE ***
            for (var j = 2, z = 0; z < rows.length; j++, z++) {
                sheet.set(1, j, rows[z].firstName);
                sheet.set(2, j, rows[z].lastName);
                sheet.set(3, j, rows[z].company);
            }
            // *** TROUBLE CODE ENDS HERE ***
            workbook.save(function(err) {
                console.log('workbook save ' + (err ? 'failed' : 'ok'));
            });
        });
    });
});
The first thing I do is delete the files if they exist in the Spreadsheets folder. Then I query the database based on the location, create a new workbook and sheet, and write to it. The for loop crashes the program with the error:
TypeError: Cannot read property '1' of undefined
I believe it is something async-related.
One possible fault I see here is that you're using the async version of unlink. If the file has not been deleted yet when you try to create the workbook, an error may occur.
You can try switching to unlinkSync and verifying that the workbook object is not null before creating a sheet (and, as an additional precaution, that the sheet object is not null before setting values).
Beyond that, it's impossible to assist further without your error stack.
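For illustration, that synchronous cleanup could look like this (a sketch only, reusing the filenames from the question; the ENOENT check stands in for "file was already gone"):
var fs = require('fs');

locationSpread.forEach(function(filename) {
    try {
        fs.unlinkSync('./Spreadsheets/' + filename); // blocks until deleted
        console.log('Spreadsheet ' + filename + ' deleted');
    } catch (err) {
        if (err.code !== 'ENOENT') throw err; // ENOENT: nothing to delete
        console.log('Spreadsheet ' + filename + ' not found');
    }
});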
So the error was caused by the initial setup. createSheet takes an initial number of columns and rows, and if you try to write a value outside the bounds you declared up front, it throws an error. Hence why my for loop kept crashing: the header occupies row 1, so the data writes run one row past the rows.length rows the sheet was created with.
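Concretely, a minimal sketch of that fix, assuming excelbuilder's createSheet(name, cols, rows) signature as used in the question: reserve one extra row so the header plus rows.length data rows all stay in bounds.
var sheet = workbook.createSheet(filename.slice(0, -5), 3, rows.length + 1);

sheet.set(1, 1, 'First Name');
sheet.set(2, 1, 'Last Name');
sheet.set(3, 1, 'Company');

// data starts on row 2, so the last write lands on row rows.length + 1
for (var j = 2, z = 0; z < rows.length; j++, z++) {
    sheet.set(1, j, rows[z].firstName);
    sheet.set(2, j, rows[z].lastName);
    sheet.set(3, j, rows[z].company);
}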
I'm trying to import a large JSON file (177k records) into Cloud Firestore. First I tried the code below;
Uploading Code
var admin = require("firebase-admin");
var serviceAccount = require("./service_key.json");

admin.initializeApp({
    credential: admin.credential.cert(serviceAccount),
    databaseURL: "my service key"
});

const firestore = admin.firestore();
const path = require("path");
const fs = require("fs");
const directoryPath = path.join(__dirname, "files");

fs.readdir(directoryPath, function(err, files) {
    if (err) {
        return console.log("Unable to scan directory: " + err);
    }
    files.forEach(function(file) {
        var lastDotIndex = file.lastIndexOf(".");
        var menu = require("./files/" + file);
        menu.forEach(function(obj) {
            firestore
                .collection('academicians2')
                .add({
                    'department': obj['department'],
                    'designation': obj['designation'],
                    'field': obj['field'],
                    'name': obj['name'],
                    'university': obj['university'],
                    'reviewList': [],
                    'rating': 0
                })
                .then(function(docRef) {
                    console.log("Document written");
                })
                .catch(function(error) {
                    console.error("Error adding document: ", error);
                });
        });
    });
});
but after uploading 10-15k records it started giving errors (a memory error, I guess). So I decided to schedule a Cloud Function with a 1.2-second timeout and batch-write the JSON to Firestore, but I really have no idea how to take 499 rows per run from my JSON.
Scheduled Cloud Function
/* eslint-disable */
const functions = require("firebase-functions");
const admin = require('firebase-admin');
const { user } = require("firebase-functions/lib/providers/auth");
admin.initializeApp();

const firestore = admin.firestore();
const userRef = admin.firestore().collection('academicians2');

exports.scheduledFunction = functions.pubsub.schedule('every 1.2 seconds').onRun((context) => {
    // do I need to create a for loop for the batch, or how can I approach this problem?
});
I would do something like this (a sketch follows the notes below):
1. Make the scheduled function get 500 records at a time with a "start after" clause.
2. Perform a batch write to the db (batch writes are limited to 500, as you may know).
3. If successful, copy the last record (or a reference to it, e.g. the record's ID) of those 500 records into a document in your db. It can be a document called "upload_tracker" with a field called "last_uploaded".
4. On subsequent runs, the function queries that last_uploaded record from your db, then performs another batch starting AFTER that last record.
Notes:
- The scheduled function can write multiple batches before terminating if you want to finish quickly.
- In your Google Cloud Console / Cloud Functions settings, you may want to extend the function's timeout value to 9 minutes if you know it's going to run for a long time.
- The document IDs should reflect your record IDs, if you have them, to make sure there are no duplicates.
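Here is a minimal sketch of that flow, adapted to the fact that the source is a JSON array rather than a Firestore collection, so it tracks a numeric index instead of a document cursor. The records.json filename, the meta/upload_tracker document path, and the last_index field are all assumptions; also note that Cloud Scheduler's minimum interval is one minute, so a schedule of "every 1.2 seconds" will not deploy:
const functions = require("firebase-functions");
const admin = require("firebase-admin");
admin.initializeApp();

const firestore = admin.firestore();
const records = require("./records.json"); // assumed name for the 177k-record file
const BATCH_SIZE = 499;                    // stay under the 500-write batch limit

exports.scheduledUpload = functions.pubsub
    .schedule("every 5 minutes") // Cloud Scheduler cannot fire every 1.2 seconds
    .onRun(async (context) => {
        const trackerRef = firestore.collection("meta").doc("upload_tracker");
        const snap = await trackerRef.get();
        const start = snap.exists ? snap.data().last_index : 0;
        if (start >= records.length) return null; // everything uploaded already

        const batch = firestore.batch();
        records.slice(start, start + BATCH_SIZE).forEach((obj) => {
            // use .doc(String(obj.id)) instead if your records carry stable IDs,
            // so re-runs overwrite rather than duplicate
            const docRef = firestore.collection("academicians2").doc();
            batch.set(docRef, {
                department: obj.department,
                designation: obj.designation,
                field: obj.field,
                name: obj.name,
                university: obj.university,
                reviewList: [],
                rating: 0
            });
        });
        await batch.commit();

        // record progress only after the batch succeeds (step 3 above)
        await trackerRef.set({ last_index: start + BATCH_SIZE });
        return null;
    });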
I need to fetch some data with a MySQL query and use the result to build up an email message with those results in Node.
I put the code inside a function, but the call to the query still appears to be async, as the result is never available before the end of the function, and the returned variable is always empty.
I tried different approaches with async/await, but the execution still seems async.
In the following code I only get as far as step 3 in the console log before the function returns; step 4 is reached only after the end of the function call, no matter what I try.
async function querydb(utente) {
    console.log("sono in querydb");
    var messageHTMLAllegati = "";
    var risultatoquery;
    console.log("step 1");
    var connection = mysql.createConnection({
        host     : process.env.IP_address,
        user     : process.env.nome_utente,
        password : process.env.password,
        port     : process.env.port,
        database : process.env.DB,
    });
    console.log("step 2");
    const query = util.promisify(connection.query).bind(connection);
    (async () => {
        try {
            console.log("step 3");
            var result = await query('SELECT Link FROM Link_Foto where ID_Utente="' + utente + '"');
            var i = result.length;
            console.log("step 4");
            var j;
            for (j = 0; j < i; j++) {
                messageHTMLAllegati += 'Immagine ' + (j + 1) + '<BR>';
                console.log("print the link found in the DB and added to the text to be printed" + result[j].Link);
            }
        } finally {
            connection.end();
        }
    })()
    return messageHTMLAllegati;
}
I expect the final variable messageHTMLAllegati to contain some text plus the query fields needed, but it is always empty. In the log I can see that the variable does get filled, but only after the function has returned; therefore the text used to put the email together is missing the DB section.
async/await only works when the awaited expression is a promise. Functions like query in mysql use a callback to deliver their result. So if you want to use it with async/await, wrap it in another function that turns the callback result into a promise, like this:
function query_promise(q_string) {
    return new Promise((resolve, reject) => {
        // wrap the callback-style connection.query in a promise
        connection.query(q_string, (err, result) => {
            if (err) return reject(err);
            resolve(result);
        });
    });
}
then in your code:
var result = await query_promise('SELECT Link FROM Link_Foto where ID_Utente="' + utente + '"');
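For completeness, the asker's querydb only returns the populated string if the inner IIFE is dropped and the promisified query is awaited directly. A sketch (same names as the question; the ? placeholder replaces the string concatenation):
const util = require('util');
const mysql = require('mysql');

async function querydb(utente) {
    var messageHTMLAllegati = '';
    var connection = mysql.createConnection({
        host     : process.env.IP_address,
        user     : process.env.nome_utente,
        password : process.env.password,
        port     : process.env.port,
        database : process.env.DB,
    });
    const query = util.promisify(connection.query).bind(connection);
    try {
        const result = await query('SELECT Link FROM Link_Foto WHERE ID_Utente = ?', [utente]);
        for (var j = 0; j < result.length; j++) {
            messageHTMLAllegati += 'Immagine ' + (j + 1) + '<BR>';
        }
    } finally {
        connection.end();
    }
    return messageHTMLAllegati; // callers must await querydb(utente)
}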
I am trying to write a little script that reads an XML file and then imports the data I fetch into a MySQL database. The fetching works fine, but the inserting is a problem. I've worked with .forEach to stay async, which helped me a lot and worked fine until I needed to insert the data. When I tried to execute the "INSERT" query outside of the loops, everything worked fine, but as soon as I try to execute it inside one, it just inserts once and then stops. Also, the console.log() message inside connection.query() doesn't get executed, even though no error gets thrown.
This is my code:
console.log("Running import!");
//Import necessary modules
var fs = require("fs");
var config = require("./config");
var mysql = require("mysql");
var path = require("path");
var xml2js = require("xml2js");
var parser = new xml2js.Parser();
var connection = mysql.createConnection({
host : config.mysql.host,
user : config.mysql.user,
password : config.mysql.password,
database : config.mysql.db
});
connection.connect(function(err) {
if (err) { //throw error
console.error(err.stack);
throw err;
}else{
console.log("Succesfully connected to the database!");
// Get Folders in Folder and iterate through every of them
var dirs = getDirectories(config.misc.path);
dirs.forEach(function(dir, index){
var file = config.misc.path + "\\" + dir + "\\" + config.misc.xml_name;
// read xml file of that directory
fs.readFile(file, function(err, data){
parser.parseString(data, function(err, json){
if(err) throw err;
json.resultset.forecast.forEach(function(forecast, index){
forecast.parking_house.forEach(function(ph, index){
var ph_id = ph["$"].id;
ph.estimate.forEach(function(estimate, index){
var value = estimate.value[0]["_"];
var time = estimate.timestamp[0]["_"];
//console.log(time);
connection.query("INSERT INTO lb_data.estimates VALUES (DEFAULT,'23:00:00', 213, 44);", function(error, results, fields){
if(error) throw error;
//Printing %
var perc = (index / dirs.length * 100);
console.log("Scanned and imported " + Math.round(perc) + "% of the files");
});
});
});
});
});
});
});
}
});
/**
 * Gets the directories in a directory
 * @param {string} srcpath the source path of the directory you want to scan
 * @return {string[]} the names of the subdirectories
 */
function getDirectories(srcpath) {
    return fs.readdirSync(srcpath)
        .filter(file => fs.statSync(path.join(srcpath, file)).isDirectory());
}
A big thanks in advance!
PS: Rest assured that the data in config.json is fine; I've tested that outside the loop, so that is not the problem.
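One way to see where the inserts stall is to serialize them with promises, so a failing or hanging INSERT rejects visibly instead of disappearing into a nested callback. This is a sketch only, using util.promisify on the same connection from the question (the column order in VALUES is an assumption based on the hard-coded test query above):
const util = require('util');
const query = util.promisify(connection.query).bind(connection);

async function importEstimates(json) {
    for (const forecast of json.resultset.forecast) {
        for (const ph of forecast.parking_house) {
            const ph_id = ph['$'].id;
            for (const estimate of ph.estimate) {
                const value = estimate.value[0]['_'];
                const time = estimate.timestamp[0]['_'];
                // placeholders quote the values and surface errors per row
                await query(
                    'INSERT INTO lb_data.estimates VALUES (DEFAULT, ?, ?, ?)',
                    [time, ph_id, value]
                );
            }
        }
    }
}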
I am using Parse Cloud Code to retrieve JSON data from an external API. The JSON data is updated every 2 minutes.
To accomplish this, I am using a Cloud Job to run my method every 2 minutes to keep the data fresh. I am also storing the whole JSON payload in Parse.
When I run this code for the first time everything works well, but when the code runs for the second, third, or fourth time, instead of updating the objects it creates new ones.
How do I update the objects instead of creating new ones when there is already some data?
Code:
Parse.Cloud.define("getCars", function(request, response) {
var promise = new Parse.Promise();
var Cars = Parse.Object.extend('Cars');
var query = new Parse.Query(Cars);
query.find().then(function (results){
Parse.Cloud.httpRequest({
method: 'GET',
url: urlLink,
success: function(httpResponse) {
var stations = new Array();
var data = JSON.parse(decodeURIComponent(escape(httpResponse.text))); // utf-8 decode
for (var i = 0; i < data.length; i++) {
var Cars = Parse.Object.extend('Cars'),
car = new Cars(),
content = data[i];
car.set('number', content.number);
car.set('name', content.name);
car.set('address', content.address);
car.set('position', content.position);
car.set('status', content.status);
cars.push(station);
};
Parse.Object.saveAll(cars, {
success: function(objects) {
promise.resolve();
},
error: function(error) {
alert("Error: " + error.code + " " + error.message);
promise.reject(error.message);
}
});
},
error: function(error) {
alert("Error: " + error.code + " " + error.message);
promise.reject(error.message);
}
});
});
return promise;
});
SOLUTION
I have finally found the solution. Maybe this will be useful for other people facing the same problem.
I added a simple if statement which checks the query result. If the query does not return data, a new object is created; otherwise the old object is filled with new information and saved to the database:
car = (results.length == 0) ? new Cars() : results[i];
You are creating a new object (car = new Cars()) every time. Instead, query with the objectId; in the success response you will get a list (array) of ParseObjects.
Update the first ParseObject from that list.
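Combining both answers, the loop could key the already-fetched results by a field and reuse a match instead of always constructing a new object. A sketch, assuming 'number' uniquely identifies a car:
// index the objects the query already returned
var existing = {};
results.forEach(function(obj) {
    existing[obj.get('number')] = obj;
});

var cars = [];
for (var i = 0; i < data.length; i++) {
    var content = data[i];
    var car = existing[content.number] || new Cars(); // update or create
    car.set('number', content.number);
    car.set('name', content.name);
    car.set('address', content.address);
    car.set('position', content.position);
    car.set('status', content.status);
    cars.push(car);
}
Parse.Object.saveAll(cars, { /* success / error handlers as before */ });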
I've attempted to write a basic cron script that runs and 'dumps' a MySQL database. For some reason, when it 'successfully saves the file', it does create the file, but the file is empty. If, instead of saving the file, I do a console.log, it prints an empty string. Any thoughts on what I may be doing wrong?
Thanks in advance.
var mysql_backup = function() {
    this.backup = '';
    this.mysql = require('mysql');

    this.init = function() {
        this.connection = this.mysql.createConnection({
            user     : 'root',
            password : 'root',
            database : 'test'
        });
    };

    this.query = function(sql, callback) {
        this.connection.query(sql, function(error, results, fields) {
            if (error) {
                throw error;
            }
            if (results.length > 0) {
                callback(results);
            }
        });
    };

    this.get_tables = function(callback) {
        var me = this;
        me.query('SHOW TABLES',
            function(tables) {
                for (var table in tables) {
                    me.query(
                        'SHOW CREATE TABLE ' + tables[table].Tables_in_test,
                        function(r) {
                            for (var t in r) {
                                me.backup += "DROP TABLE " + r[t].Table + "\n\n";
                                me.backup += r[t]["Create Table"] + "\n\n";
                            }
                        }
                    );
                }
                me.save_backup();
            });
    };

    this.save_backup = function() {
        var fs = require('fs');
        fs.writeFile("./backup_test.txt", this.backup, function(err) {
            if (err) {
                console.log(err);
            } else {
                console.log("The file was saved!");
            }
        });
    };
};

var db = new mysql_backup;
db.init();
db.get_tables();
db.connection.destroy();
The code as written didn't even get as far as saving a file for me. There seem to be a few issues; not sure if this is the actual code or whether some things got lost in the copy-paste. However, based on what you've got:
A big one is that you never connect to the database in your code with connection.connect().
The code you want to run once connected should be inside the connection.connect() callback. e.g.
connection.connect(function (err, empty) {
    if (err)
        throw new Error('Panic');

    // if no error, we are off to the races...
});
However, even if you quickly refactor your code to wrap your last lines inside that connection callback, you'll still have problems, because you are destroying the connection before the various SQL calls are made, so you will want to move that code into some sort of final callback.
Even after you do that, you'll still have an empty file, because you're calling save_backup from your 'SHOW TABLES' callback rather than after the backup property has actually been populated by the inner callback, where you get the CREATE TABLE statement.
This is a minimal rewriting of your code which will do what you intend. An important thing to note is the "counter", which manages when to write the file and close the connection. I would make other changes if it were mine, including:
- Using 'self' instead of 'me'
- Using a numeric for loop rather than the for (... in ...) syntax
- Having my own callbacks follow the node convention of (err, stuff)
A more substantial change is that I would rewrite this to use promises, as doing so can spare you some grief over the confusion inherent in deeply nested callbacks. I personally like the Q library, but there are several options here.
Hope this helped.
var mysql_backup = function() {
    this.backup = '';
    this.mysql = require('mysql');

    this.init = function() {
        this.connection = this.mysql.createConnection({
            user     : 'root',
            password : 'root',
            database : 'test'
        });
    };

    this.query = function(sql, callback) {
        this.connection.query(sql, function(error, results, fields) {
            if (error) {
                throw error;
            }
            if (results.length > 0) {
                callback(results);
            }
        });
    };

    this.get_tables = function(callback) {
        var counter = 0;
        var me = this;
        this.query('SHOW TABLES',
            function(tables) {
                for (table in tables) {
                    counter++;
                    me.query(
                        'SHOW CREATE TABLE ' + tables[table].Tables_in_mvc,
                        function(r) {
                            for (t in r) {
                                me.backup += "DROP TABLE " + r[t].Table + "\n\n";
                                me.backup += r[t]["Create Table"] + "\n\n";
                            }
                            counter--;
                            if (counter === 0) {
                                me.save_backup();
                                me.connection.destroy();
                            }
                        }
                    );
                }
            });
    };

    this.save_backup = function() {
        var fs = require('fs');
        fs.writeFile("./backup_test.txt", this.backup, function(err) {
            if (err) {
                console.log(err);
            } else {
                console.log("The file was saved!");
            }
        });
    };
};

var db = new mysql_backup;
db.init();
db.connection.connect(function (err) {
    if (err) console.log(err);
    db.get_tables(function(x) {;});
});
Update: If you are curious, here is a heavily-commented implementation using promises. Note that without the comments explaining the Q promise library functions, it is somewhat shorter than the original version and also offers more comprehensive error handling.
var MysqlBackup = function(connectionInfo, filename) {
    var Q = require('q');
    var self = this;
    this.backup = '';

    // my personal preference is to simply require() inline if I am only
    // going to use something a single time. I am certain some will find
    // this a terrible practice
    this.connection = require('mysql').createConnection(connectionInfo);

    function getTables() {
        // return a promise from invoking the node-style 'query' method
        // of self.connection with parameter 'SHOW TABLES'.
        return Q.ninvoke(self.connection, 'query', 'SHOW TABLES');
    };

    function doTableEntries(theResults) {
        // note that because promises only pass a single parameter around,
        // if the 'denodeify-ed' callback has more than two parameters (the
        // first being the err param), the parameters will be stuffed into
        // an array. In this case, the content of the 'fields' param of the
        // mysql callback is in theResults[1]
        var tables = theResults[0];

        // create an array of promises resulting from another Q.ninvoke()
        // query call, chained to .then(). Note that then() expects a function,
        // so recordEntry() in fact builds and returns a new one-off function
        // for actually recording the entry (see recordEntry() impl. below)
        var tableDefinitionGetters = [];
        for (var i = 0; i < tables.length; i++) {
            // I noticed in your original code that your Tables_in_[] did not
            // match your connection details ('mvc' vs 'test'), but the below
            // should work and is a more generalized solution
            var tableName = tables[i]['Tables_in_' + connectionInfo.database];
            tableDefinitionGetters.push(
                Q.ninvoke(self.connection, 'query', 'SHOW CREATE TABLE ' + tableName)
                    .then(recordEntry(tableName))
            );
        }

        // now that you have an array of promises, you can use Q.allSettled
        // to return a promise which will be settled (resolved or rejected)
        // when all of the promises in the array are settled. Q.all is similar,
        // but its promise will be rejected (immediately) if any promise in the
        // array is rejected. I tend to use allSettled() in most cases.
        return Q.allSettled(tableDefinitionGetters);
    };

    function recordEntry(tableName) {
        return function(createTableQryResult) {
            self.backup += "DROP TABLE " + tableName + "\n\n";
            self.backup += createTableQryResult[0][0]["Create Table"] + "\n\n";
        };
    };

    function saveFile() {
        // Q.denodeify returns a promise-enabled version of a node-style function;
        // the below is probably excessively terse with its immediate invocation
        return (Q.denodeify(require('fs').writeFile))(filename, self.backup);
    }

    // with the above all done, now you can actually make the magic happen,
    // starting with the promise-returning Q.ninvoke to connect to the DB.
    // note that the successive .then()s will be executed iff (if and only
    // if) the preceding item resolves successfully, .catch() will get
    // executed in the event of any upstream error, and finally() will
    // get executed no matter what.
    Q.ninvoke(this.connection, 'connect')
        .then(getTables)
        .then(doTableEntries)
        .then(saveFile)
        .then(function() { console.log('Success'); })
        .catch(function(err) { console.log('Something went awry', err); })
        .finally(function() { self.connection.destroy(); });
};

var myConnection = {
    host     : '127.0.0.1',
    user     : 'root',
    password : 'root',
    database : 'test'
};

// I have left this as a constructor-based calling approach, but the
// constructor just does it all, so I simply ignore the return value
new MysqlBackup(myConnection, './backup_test.txt');