node js script exits prematurely - mysql

Promise newbie here.
I'm trying to retrieve the icon_name field from the Equipment table in the asset database (MongoDB)
and update the icon_id field in the equipments table in the equipments database (MySQL).
I have about 12,000 records with icon_name field in Equipment.
The script runs successfully; however, it doesn't seem to go through all the records.
When I check the equipments table there are only about 3,000 records updated.
I tried running the script several times and it appears to update a few more records each time.
My suspicion is that the database connection is closed before all the queries finish, but since I use Promise.all I don't see why that would happen.
Here is the script
const _ = require('lodash'),
  debug = require('debug')('update'),
  Promise = require('bluebird')

const asset = require('../models/asset'),
  equipments = require('../models/equipments')

const Equipment = asset.getEquipment(),
  my_equipments = equipments.get_equipments(),
  icons = equipments.get_icons()

Promise.resolve()
  .then(() => {
    debug('Retrieve asset equipments, icons')
    return Promise.all([
      icons.findAll(),
      Equipment.find({ icon_name: { $ne: null } })
    ])
  })
  .then(([my_icons, asset_equipments]) => {
    debug('Update equipments')
    const updates = []
    console.log(asset_equipments.length)
    asset_equipments.forEach((aeq, i) => {
      const icon_id = my_icons.find(icon => icon.name === aeq.icon_name).id
      up = my_equipments.update(
        { icon_id },
        { where: { code: aeq.eq_id } }
      )
      updates.push(up)
    })
    return Promise.all(updates)
  })
  .then(() => {
    debug('Success: all done')
    asset.close()
    equipments.close()
  })
  .catch(err => {
    debug('Error:', err)
    asset.close()
    equipments.close()
  })
Thanks in advance.

Code looks fine, but spawning 12,000 promises in parallel might cause trouble at the database connection level. I would suggest batching the concurrent requests and limiting them to, say, 100. You could use batch-promises (https://www.npmjs.com/package/batch-promises).
Basically something like
return batchPromises(100, asset_equipments, aeq => {
  const icon_id = my_icons.find(icon => icon.name === aeq.icon_name).id;
  return my_equipments.update({ icon_id }, { where: { code: aeq.eq_id } });
});
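Since the script already requires bluebird, an alternative with no extra dependency is bluebird's Promise.map with a concurrency option. A minimal sketch, reusing the my_icons and asset_equipments variables from the question (the missing-icon guard is an extra defensive check, not in the original):
// Throttle the updates so at most 100 are in flight at any time.
return Promise.map(asset_equipments, aeq => {
  const icon = my_icons.find(icon => icon.name === aeq.icon_name);
  if (!icon) return null; // skip records whose icon_name has no matching icon instead of throwing
  return my_equipments.update(
    { icon_id: icon.id },
    { where: { code: aeq.eq_id } }
  );
}, { concurrency: 100 });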

Related

How to delete both element and the associated elements in join table with express.js and react.js

I have a project where I have two main tables: Contacts and Workers
I also have a join table called WorkerContacts
In my project, I give the user the option of deleting contacts, which also requires deleting the corresponding rows in the join table. My concern is that with my current setup (seen below), I could successfully delete a contact but then fail to delete the associated join-table rows (because of an error), which would throw everything off. So my question is: is there a way to refactor this so that both deletions are guaranteed to complete together before the promise (response) is sent to the front end?
Here's my current situation:
Frontend:
export const destroyContact = (contact_id) => dispatch => {
  axios.post(`http://localhost:3001/contacts/destroy`, {id: contact_id})
    .then(() => {
      dispatch({type: 'CONTACT_DESTROYED', payload: contact_id});
      axios.post(`http://localhost:3001/workerContacts/destroy`, {id: contact_id}) // I'm scared that the first thing will run but the second one won't, causing a lot of problems. We can deal with this by just throwing a big error message for the user hopefully
        .then(() => {
          dispatch({type: 'JOIN_TABLE_ROWS_DESTROYED', payload: contact_id});
        })
        .catch(err => dispatch({type: 'ERROR_CAUGHT', payload: {err_message: err.response.data.message, err_code: err.response.request.status, err_value: err.response.request.statusText}}))
    })
    .catch(err => dispatch({type: 'ERROR_CAUGHT', payload: {err_message: err.response.data.message, err_code: err.response.request.status, err_value: err.response.request.statusText}}))
}
I'm using redux as well so that's why I have all of the dispatch and whatnot, but essentially I've split the deletions into two axios calls: one where I delete the contact and one where I delete the join tables.
Backend:
For the contact I have this:
export const destroy = (req, res) => {
  // Here is when we want to remove an existing contact
  Contact.deleteMe(req.body.id)
    .then(() => res.json("Contact deleted"))
    .catch((err) => res.status(500).json({message: "Something went wrong when trying to save delete this. Try and reload the page and try again "}))
}
And the associated deleteMe function:
static deleteMe(customer_id){
  // Uses SQL to delete an individual customer element
  return db.execute('DELETE FROM contacts WHERE id = ?', [customer_id]);
}
For the join table, I have this:
export const destroy = (req, res) => {
  // Here is when we want to remove an existing contact
  JoinTable.deleteMe(req.body.id)
    .then(() => res.json("Join tables deleted"))
    .catch(err => res.status(500).json({message: "Something went wrong on our end. Try to reload the page and start again"}))
}
And the associated deleteMe function:
static deleteMe(customer_id){
  // Uses SQL to delete an individual customer element
  return db.execute('DELETE FROM workercontacts WHERE workerContacts.contact_id = ?', [customer_id]);
}
I'm using a MySQL database if that helps.
Hopefully this is enough information, but if you require more, I can definitely provide you with it.
Just use a single call and execute the DELETE commands in a transaction:
export const destroyContact = (contact_id) => (dispatch) => {
  axios
    .post(`http://localhost:3001/contacts/destroy`, { id: contact_id })
    .then(() => {
      dispatch({ type: 'CONTACT_DESTROYED', payload: contact_id });
      dispatch({ type: 'JOIN_TABLE_ROWS_DESTROYED', payload: contact_id });
    })
    .catch((err) =>
      dispatch({
        type: 'ERROR_CAUGHT',
        payload: {
          err_message: err.response.data.message,
          err_code: err.response.request.status,
          err_value: err.response.request.statusText,
        },
      })
    );
};
On the backend:
static async deleteMe(customer_id) {
  await db.execute('START TRANSACTION');
  try {
    await db.execute('DELETE FROM contacts WHERE id = ?', [customer_id]);
    await db.execute('DELETE FROM workercontacts WHERE workerContacts.contact_id = ?', [customer_id]);
    await db.execute('COMMIT');
  } catch (err) {
    await db.execute('ROLLBACK');
    throw err; // rethrow so the route handler's .catch can send a 500
  }
}
...
export const destroy = (req, res) => {
  // Here is when we want to remove an existing contact
  Contact.deleteMe(req.body.id)
    .then(() => res.json("Contact deleted"))
    .catch((err) => res.status(500).json({message: "Something went wrong when trying to save delete this. Try and reload the page and try again "}))
}
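Note that if db is a connection pool rather than a single connection, the three execute calls above may land on different pooled connections, so the START TRANSACTION won't actually cover the DELETEs. A sketch of a safer variant, assuming mysql2/promise and a pool (the pool options and the function name are illustrative, not from the original code):
const mysql = require('mysql2/promise');

// Assumed pool; the connection options here are placeholders.
const pool = mysql.createPool({ host: 'localhost', user: 'root', database: 'mydb' });

// Delete a contact and its join-table rows atomically on one checked-out connection.
async function deleteContactWithJoins(customer_id) {
  const conn = await pool.getConnection();
  try {
    await conn.beginTransaction();
    // Child rows first, so a FOREIGN KEY on workercontacts.contact_id cannot block the parent delete.
    await conn.execute('DELETE FROM workercontacts WHERE contact_id = ?', [customer_id]);
    await conn.execute('DELETE FROM contacts WHERE id = ?', [customer_id]);
    await conn.commit();   // both deletes are kept...
  } catch (err) {
    await conn.rollback(); // ...or neither is
    throw err;             // let the route handler respond with a 500
  } finally {
    conn.release();        // always return the connection to the pool
  }
}
Deleting the workercontacts rows before the contacts row also avoids foreign-key errors if such a constraint exists.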

Retrieving data from another table and inserting it into a new one with Sequelize

I've been working on a project that involves Sequelize (MySQL DB).
So I have a table "InterestRate" that takes ids from two other tables (foreign keys).
bankId is stored in the "Banks" table and interId is stored in another table called "Interest".
Now I want to populate this table with Postman by sending this data:
{
"bank": "BankName",
"inter": "Interest",
"nks": "4.11",
"eks": "6.24",
"time": "36"
}
BUT I want to populate the table with the primary keys of these values (if they exist in their own tables). E.g. when I send the request with Postman, I want to look up "BankName" in the "Banks" table, grab its id from that table, and put it in the new table. Same thing for the inter value.
The code I have right now is trash; I know why it doesn't work, but I'm really stuck.
(req, res) => {
  const bank = req.body.bank;
  const type = req.body.type;
  const nks = req.body.nks;
  const eks = req.body.eks;
  const time = req.body.time;
  InterestRate.create({
    bankId: bank,
    interId: type,
    NKS: nks,
    EKS: eks,
    time: time,
  })
    .then(() => {
      res.status(200).json({ message: 'Added successfully' });
    })
    .catch((err) => {
      res.status(500).send('Error -> ' + err);
    });
};
Note that all models etc. are set up correctly, it works if I enter things manually!
Thank you!
You just need to get the Bank and Interest records by name and use the found model instances to get their ids when creating the InterestRate record:
async (req, res) => {
  const bank = req.body.bank;
  const type = req.body.type;
  const nks = req.body.nks;
  const eks = req.body.eks;
  const time = req.body.time;

  const foundBank = await Bank.findOne({
    where: {
      name: bank // use a real field name instead of "name"
    }
  })
  const foundInterest = await Interest.findOne({
    where: {
      name: type // use a real field name instead of "name"
    }
  })

  if (!foundBank) {
    // here should be some "res.status(...).send(...)" with error message
    return
  }
  if (!foundInterest) {
    // here should be some "res.status(...).send(...)" with error message
    return
  }

  try {
    await InterestRate.create({
      bankId: foundBank.id,
      interId: foundInterest.id,
      NKS: nks,
      EKS: eks,
      time: time,
    })
    res.status(200).json({ message: 'Added successfully' });
  } catch (err) {
    res.status(500).send('Error -> ' + err);
  }
};
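The two lookups are independent, so they could also run concurrently. A small sketch of just that part, assuming the same Bank and Interest models and the same request fields as above (the 404 response is an example, not from the original answer):
// Run both lookups in parallel instead of one after the other.
const [foundBank, foundInterest] = await Promise.all([
  Bank.findOne({ where: { name: bank } }),   // "name" is still a placeholder field
  Interest.findOne({ where: { name: type } })
]);
if (!foundBank || !foundInterest) {
  return res.status(404).json({ message: 'Unknown bank or interest type' });
}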

Kill running query with Sequelize

I am working with Sequelize and a MySQL DB. I have some heavy queries that the users can cancel by clicking a 'cancel' button in the GUI.
I tried to do it with a transaction, but when I do t.rollback() the query doesn't get killed in the DB. Is there any way to kill a query using Sequelize?
I prefer to use Sequelize to do it, but even getting the query ID and killing it manually is fine.
.transaction(async (t) => {
  if (transaction) {
    transaction.rollback();
  }
  transaction = t;
  return db.myTable.findAll(data);
})
.then((data) => {
  transaction = {};
  return data;
})
.catch((error) => {
  transaction = {};
  throw error;
});
This is not natively supported by Sequelize. There is an open, inactive issue to add this ability, with a workaround used for PostgreSQL:
const killableQuery = async (query, req, options = {}) => {
  const connection = await sequelize.connectionManager.getConnection();
  req.on('close', () => {
    sequelize.query(`SELECT pg_terminate_backend(${connection.processID});`)
      .catch((error) => {
        console.error('Unable to terminate query!', error);
      });
  });
  const result = await sequelize.query(query, {
    ...options,
    transaction: { connection },
  });
  await sequelize.connectionManager.releaseConnection(connection);
  return result;
};
Sorry, I cannot find details about the processID property and whether it corresponds to a property that MySQL tracks. If it does, one could replace the PostgreSQL statement with a MySQL counterpart like:
SELECT GROUP_CONCAT(CONCAT('KILL ',id,';') SEPARATOR ' ')
FROM information_schema.processlist
WHERE user <> 'system user'
AND -- <condition based on processID>;
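For the MySQL dialect, the raw mysql/mysql2 connection object exposes the server thread id as threadId (the same id you see in SHOW PROCESSLIST), and MySQL's KILL QUERY <id> aborts just the running statement while keeping the connection open. A sketch adapting the workaround above under that assumption (untested; names are illustrative):
// MySQL variant: kill only the statement running on this checked-out connection.
const killableQuery = async (query, req, options = {}) => {
  const connection = await sequelize.connectionManager.getConnection();
  req.on('close', () => {
    // The KILL QUERY statement runs on a different pooled connection, since this one is busy.
    sequelize.query(`KILL QUERY ${connection.threadId}`)
      .catch((error) => console.error('Unable to terminate query!', error));
  });
  try {
    return await sequelize.query(query, { ...options, transaction: { connection } });
  } finally {
    await sequelize.connectionManager.releaseConnection(connection);
  }
};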

One response after few async functions

I have a web page with a form where a user can edit personal info, education, work history, etc.
The user can add more than one degree, for example: BS, MS, PhD, and a few job positions as well.
When the user pushes the 'save' button I send all this data to my server in one request. On the server I have an endpoint to handle the request.
app.post(config.version + '/profile', (req, res, next) => {});
There I do a few MySQL queries to insert/update/delete data. I use the mysql package from npm to do that.
new Promise((resolve, reject) => {
  const userQuery = `INSERT INTO user ...;`;
  const degreesQuery = 'INSERT INTO degree ...;';
  const positionsQuery = 'UPDATE position SET ...;';
  this.connection.query(userQuery, err => {});
  this.connection.query(degreesQuery, err => {});
  this.connection.query(positionsQuery, err => {});
  resolve({});
})
At the end I do resolve({}), but I want to select the updated profile and send it back (because in the MySQL tables for degrees I add ids that help me avoid inserting duplicate data again). So my question is: how do I call resolve({}) only when all my async this.connection.query calls have finished?
My suggestion is to run all the queries in a Promise.all().
Example:
const queries = [
  `INSERT INTO user ...;`,
  'INSERT INTO degree ...;',
  'UPDATE position SET ...;'
];

Promise.all(queries.map((query) => {
  return new Promise((resolve, reject) => {
    this.connection.query(query, err => {
      return err ? reject(err) : resolve();
    });
  });
}))
  .then(() => {
    // continue
    // get your updated data here and send it as the response
  })
If your db library has Promise support, write it this way:
Promise.all(queries.map((query) => {
  return this.connection.query(query);
}))
  .then(() => {
    // continue
    // get your updated data here and send it as the response
  })
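Since the mysql package is callback-based, connection.query doesn't return a promise by itself; Node's util.promisify can wrap it so the second variant works. A minimal sketch, reusing the query variables from the question (the final SELECT is elided just as in the original):
const util = require('util');

// Wrap the callback-style connection.query in a promise-returning function.
const query = util.promisify(this.connection.query).bind(this.connection);

Promise.all([
  query(userQuery),
  query(degreesQuery),
  query(positionsQuery)
])
  .then(() => query('SELECT ...')) // fetch the updated profile here
  .then((rows) => {
    // send the updated profile back as the response
  })
  .catch((err) => {
    // handle the error
  });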

how do I force PouchDB to really delete records?

I am creating a PouchDB like so:
var db = new PouchDB('my_db',
  { auto_compaction: true, revs_limit: 1, adapter: 'websql' });
Then I create and delete a number of records:
db.put({ _id: '1'});
db.put({ _id: '2'});
db.put({ _id: '3'});

db.get('1')
  .then(function(doc) {
    db.remove(doc)
  });
db.get('2')
  .then(function(doc) {
    db.remove(doc)
  });
db.get('3')
  .then(function(doc) {
    db.remove(doc)
  });
From my reading of the documentation, this is the correct way to delete and remove records.
And this SO question and answer seems to suggest also that this is the way to do things.
However, if I use the Chrome inspector to look at my Web SQL DB, the records are still there.
I don't believe this is a timing issue or anything like that, as I can refresh with just the delete code and then get a 404 not_found error.
My application creates and keeps records in a local PouchDB until they have been synced to a central server, at which time I want to clear them from the local database.
I'm creating lots of records, and if I cannot clear them out then eventually I'm going to run out of space on the device (it is a hybrid HTML5 mobile app).
Is it even possible to actually remove records from a local PouchDB?
If so, how do I do it?
If not, what is a good solution that I can easily swap in place of PouchDB?
(I'm really hoping it is possible because I've gone down this path of development, so if the answer to the first question is No, then I need a good answer to the third question)
As mentioned in the comments above, this is not yet possible but is being worked on (source 1, source 2). However, there is a workaround which you might be able to use.
The workaround is to replicate the database locally to another PouchDB database and, once the replication is complete, delete the original database. Deleted documents won't be replicated (source).
Here is a working demo:
(() => {
  // DECLARATION
  const dbName = 'testdb';
  const tmpDBName = 'tmpdb';
  const deleteFilter = (doc, req) => !doc._deleted;
  const doc1 = { _id: 'd1' };
  const doc2 = { _id: 'd2' };

  // CREATION
  // create database
  const maindb = new PouchDB(dbName);
  // insert two documents
  maindb.post(doc1)
    .then(() => maindb.post(doc2))
    // query for one document
    .then(() => maindb.get(doc1._id))
    // delete this document
    .then((doc) => { console.log(doc); return maindb.remove(doc) })
    // query for the same document
    .then(() => maindb.get(doc1._id))
    .catch((err) => { console.log(err) });

  // CLEANUP
  // delete a database with tmpdb name
  new PouchDB(tmpDBName).destroy()
    // create a database with tmpdb name
    .then(() => Promise.resolve(new PouchDB(tmpDBName)))
    // replicate original database to tmpdb with filter
    .then((tmpDB) => new Promise((resolve, reject) => {
      maindb.replicate.to(tmpDB, { filter: deleteFilter })
        .on('complete', () => { resolve(tmpDB) })
        .on('denied', reject)
        .on('error', reject)
    }))
    // destroy the original db
    .then((tmpDB) => {
      console.log(tmpDB.name);
      return maindb.destroy().then(() => Promise.resolve(tmpDB))
    })
    // create the original db
    .then((tmpDB) => new Promise((resolve, reject) => {
      console.log(tmpDB.name);
      try {
        resolve({ db: new PouchDB(dbName), tmpDB: tmpDB })
      } catch (e) {
        reject(e)
      }
    }))
    // replicate the tmpdb to original db
    .then(({db, tmpDB}) => new Promise((resolve, reject) => {
      tmpDB.replicate.to(db)
        .on('complete', () => { resolve(tmpDB) })
        .on('denied', reject)
        .on('error', reject)
    }))
    // destroy the tmpdb
    .then((tmpDB) => tmpDB.destroy())
    .then(() => { console.log('Cleanup complete') })
    .catch((err) => { console.log(err) });
})()
If you check the state of the database after executing this code, it'll contain only one document. Note that at times, I had to refresh the browser to be able to see the latest state of the database (a right click + Refresh IndexedDB wasn't enough).
If you want to cleanup the database while testing this, you can use this snippet:
['testdb', 'tmpdb'].forEach((d) => { new PouchDB(d).destroy() })