Create or Update Sequelize - mysql

I'm using Sequelize in my Node.js project and I ran into a problem that I'm having a hard time solving.
Basically I have a cron job that gets an array of objects from a server and then inserts them into my database as objects (in this case, cartoons). But if I already have one of the objects, I have to update it.
Basically I have an array of objects and I could use the bulkCreate() method. But since the cron runs again, that doesn't solve it, so I need some sort of update with an upsert: true flag. And the main issue: I must have a callback that fires just once after all of these creates or updates. Does anyone have an idea of how I can do that? Iterate over an array of objects, creating or updating each one, and then get a single callback afterwards?
Thanks for your attention.

From the docs, you don't need to query where to perform the update once you have the object. Also, using promises should simplify the callbacks:
Implementation
function upsert(values, condition) {
    return Model
        .findOne({ where: condition })
        .then(function(obj) {
            // update
            if (obj)
                return obj.update(values);
            // insert
            return Model.create(values);
        })
}
Usage
upsert({ first_name: 'Taku' }, { id: 1234 }).then(function(result) {
    res.status(200).send({ success: true });
});
Note
This operation is not atomic.
Creates 2 network calls.
which means it is advisable to re-think the approach and probably just update values in one network call and either:
Look at the value returned (i.e. rows_affected) and decide what to do.
Return success if update operation succeeds. This is because whether the resource exists is not within this service's responsibility.
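For illustration, here is a minimal sketch of that first option, assuming a generic Model and that Model.update resolves to an array whose first element is the number of affected rows:

// Hedged sketch: assumes Model.update resolves to [affectedCount]
function updateOrInsert(values, condition) {
    return Model
        .update(values, { where: condition })
        .then(function(result) {
            var affectedCount = result[0];
            if (affectedCount > 0) {
                // a matching row existed and was updated
                return { created: false };
            }
            // nothing matched, fall back to an insert
            return Model.create(values).then(function(obj) {
                return { item: obj, created: true };
            });
        });
}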

You can use upsert
It's way easier.
Implementation details:
MySQL - Implemented as a single query INSERT values ON DUPLICATE KEY UPDATE values
PostgreSQL - Implemented as a temporary function with exception handling: INSERT EXCEPTION WHEN unique_constraint UPDATE
SQLite - Implemented as two queries INSERT; UPDATE. This means that the update is executed regardless of whether the row already existed or not
MSSQL - Implemented as a single query using MERGE and WHEN (NOT) MATCHED THEN
Note that SQLite returns undefined for created, no matter if the row was created or updated. This is because SQLite always runs INSERT OR IGNORE + UPDATE, in a single query, so there is no way to know whether the row was inserted or not.
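Applied to the original question, a minimal sketch could look like this (the Cartoon model and the cartoons array are hypothetical names standing in for the question's data; Promise.all gives you the single callback after every row has been inserted or updated):

// Hedged sketch: Cartoon and cartoons are placeholder names, not from the question
Promise.all(cartoons.map(function(cartoon) {
    return Cartoon.upsert(cartoon); // insert or update based on the primary/unique key
})).then(function(results) {
    // fires once, after all upserts have finished
    console.log('processed ' + results.length + ' cartoons');
});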

Update 07/2019 now with async/await
async function updateOrCreate(model, where, newItem) {
    // First try to find the record
    const foundItem = await model.findOne({ where });
    if (!foundItem) {
        // Item not found, create a new one
        const item = await model.create(newItem)
        return { item, created: true };
    }
    // Found an item, update it
    const item = await model.update(newItem, { where });
    return { item, created: false };
}
I liked the idea of Ataik, but made it a little shorter:
function updateOrCreate(model, where, newItem) {
    // First try to find the record
    return model
        .findOne({ where: where })
        .then(function(foundItem) {
            if (!foundItem) {
                // Item not found, create a new one
                return model
                    .create(newItem)
                    .then(function(item) { return { item: item, created: true }; });
            }
            // Found an item, update it
            return model
                .update(newItem, { where: where })
                .then(function(item) { return { item: item, created: false }; });
        });
}
Usage:
updateOrCreate(models.NewsItem, { slug: 'sometitle1' }, { title: 'Hello World' })
    .then(function(result) {
        result.item;    // the model
        result.created; // bool, if a new item was created.
    });
Optional: add error handling here, but I strongly recommend chaining all promises of one request and having a single error handler at the end.
updateOrCreate(models.NewsItem, { slug: 'sometitle1' }, { title: 'Hello World' })
    .then(..)
    .catch(function(err) {});

This might be an old question, but this is what I did:
var updateOrCreate = function(model, where, newItem, onCreate, onUpdate, onError) {
    // First try to find the record
    model.findOne({ where: where }).then(function(foundItem) {
        if (!foundItem) {
            // Item not found, create a new one
            model.create(newItem)
                .then(onCreate)
                .catch(onError);
        } else {
            // Found an item, update it
            model.update(newItem, { where: where })
                .then(onUpdate)
                .catch(onError);
        }
    }).catch(onError);
}
updateOrCreate(
    models.NewsItem, { title: 'sometitle1' }, { title: 'sometitle' },
    function() {
        console.log('created');
    },
    function() {
        console.log('updated');
    },
    console.log);

User.upsert({ a: 'a', b: 'b', username: 'john' })
It will try to find a record matching the object in the first param and update it; if it cannot find one, a new record will be created.
Here is an example of usage from the sequelize tests:
it('works with upsert on id', function() {
    return this.User.upsert({ id: 42, username: 'john' }).then(created => {
        if (dialect === 'sqlite') {
            expect(created).to.be.undefined;
        } else {
            expect(created).to.be.ok;
        }
        this.clock.tick(1000);
        return this.User.upsert({ id: 42, username: 'doe' });
    }).then(created => {
        if (dialect === 'sqlite') {
            expect(created).to.be.undefined;
        } else {
            expect(created).not.to.be.ok;
        }
        return this.User.findByPk(42);
    }).then(user => {
        expect(user.createdAt).to.be.ok;
        expect(user.username).to.equal('doe');
        expect(user.updatedAt).to.be.afterTime(user.createdAt);
    });
});

Sounds like you want to wrap your Sequelize calls inside an async.each.
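For illustration, a minimal sketch of that idea, assuming the upsert(values, condition) helper from the earlier answer and an array called cartoons; the final callback of async.each fires once after every item has been processed:

var async = require('async');

// Hedged sketch: upsert() and cartoons are placeholders from the question/answers above
async.each(cartoons, function(cartoon, done) {
    upsert(cartoon, { id: cartoon.id })
        .then(function() { done(); })
        .catch(done);
}, function(err) {
    // single callback, fired once when all creates/updates have finished
    if (err) return console.error(err);
    console.log('all cartoons processed');
});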

This can be done with the custom event emitter.
Assuming your data is in a variable called data.
new Sequelize.Utils.CustomEventEmitter(function(emitter) {
    if (data.id) {
        Model.update(data, { id: data.id })
            .success(function() {
                emitter.emit('success', data.id);
            }).error(function(error) {
                emitter.emit('error', error);
            });
    } else {
        Model.build(data).save().success(function(d) {
            emitter.emit('success', d.id);
        }).error(function(error) {
            emitter.emit('error', error);
        });
    }
}).success(function(data_id) {
    // Your callback stuff here
}).error(function(error) {
    // error stuff here
}).run(); // kick off the queries

You can use the findOrCreate and then update methods in Sequelize. Here is a sample with async.js:
async.auto({
    getInstance: function(cb) {
        Model.findOrCreate({
            attribute: value,
            ...
        }).complete(function(err, result) {
            if (err) {
                cb(null, false);
            } else {
                cb(null, result);
            }
        });
    },
    updateInstance: ['getInstance', function(cb, result) {
        if (!result || !result.getInstance) {
            cb(null, false);
        } else {
            result.getInstance.updateAttributes({
                attribute: value,
                ...
            }, ['attribute', ...]).complete(function(err, result) {
                if (err) {
                    cb(null, false);
                } else {
                    cb(null, result);
                }
            });
        }
    }]
}, function(err, allResults) {
    if (err || !allResults || !allResults.updateInstance) {
        // job not done
    } else {
        // job done
    }
});

Here is a simple example that either updates a deviceID -> pushToken mapping or creates it:
var Promise = require('promise');
var PushToken = require("../models").PushToken;

var createOrUpdatePushToken = function(deviceID, pushToken) {
    return new Promise(function(fulfill, reject) {
        PushToken
            .findOrCreate({
                where: {
                    deviceID: deviceID
                }, defaults: {
                    pushToken: pushToken
                }
            })
            .spread(function(foundOrCreatedPushToken, created) {
                if (created) {
                    fulfill(foundOrCreatedPushToken);
                } else {
                    foundOrCreatedPushToken
                        .update({
                            pushToken: pushToken
                        })
                        .then(function(updatedPushToken) {
                            fulfill(updatedPushToken);
                        })
                        .catch(function(err) {
                            reject(err);
                        });
                }
            });
    });
};

2022 update:
You can use the upsert function:
https://sequelize.org/api/v6/class/src/model.js~model#static-method-upsert
Insert or update a single row. An update will be executed if a row which matches the supplied values on either the primary key or a unique key is found. Note that the unique index must be defined in your sequelize model and not just in the table. Otherwise you may experience a unique constraint violation, because sequelize fails to identify the row that should be updated.
Implementation details:
MySQL - Implemented with ON DUPLICATE KEY UPDATE
PostgreSQL - Implemented with ON CONFLICT DO UPDATE. If update data contains PK field, then PK is selected as the default conflict key. Otherwise first unique constraint/index will be selected, which can satisfy conflict key requirements.
SQLite - Implemented with ON CONFLICT DO UPDATE
MSSQL - Implemented as a single query using MERGE and WHEN (NOT) MATCHED THEN
Note that Postgres/SQLite returns null for created, no matter if the row was created or updated
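A minimal v6 usage sketch (the model and field names here are assumptions for illustration, not from the question):

// Hedged sketch: Cartoon is a hypothetical model with a unique "slug" column
const [instance, created] = await Cartoon.upsert({
    slug: 'tom-and-jerry',
    title: 'Tom and Jerry'
});
console.log(created); // true if a new row was inserted; false or null depending on dialect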

Related

How to update array of objects with same value in reference_id (FK Column) in Sequelize JS?

I have an array of objects which somewhat looks like this:
[
    {
        id: '5b29c08b-597c-460c-a3c7-ac8852b7a5dc',
        option_text: 'njnj',
        answer: false
    },
    {
        id: '8ff5bda6-9335-495c-9c72-15ef258b899b',
        option_text: 'jnjn',
        answer: true
    }
]
Here the answer column is inter-related: if one object's answer is set to true, the others come through as false from the frontend. So I have to update all the rows associated with the reference id.
The problem I am facing is that the update query is not running, but the code still goes into the then block instead of throwing an error. Below is my code for the same:
// UPDATE Option
exports.updateOption = (req, res, next) => {
    try {
        console.log(req.body);
        db.Option.update(req.body, {
            where: { question_id: req.params.id }
        }).then(() => {
            console.log('A');
            return res.status(200).send(errors.UPDATED_SUCESSFULLY);
        }).catch(err => {
            console.log('B');
            return res.status(204).send(errors.INTERNAL_SERVER);
        });
    } catch (err) {
        console.log('C');
        return res.status(204).send(errors.INTERNAL_SERVER);
    }
};
Sample Table Data for the same:
What I am thinking is to first set the answer column to false for all the rows associated with the same question_id and then update the particular row which has answer set to true.
But is this a good approach, or can anyone suggest a better solution?
You should execute all updates in the same transaction (to avoid inconsistencies in DB):
sequelize.transaction(async transaction => {
    const options = req.body;
    for (const option of options) {
        await db.Option.update(option, {
            where: { question_id: req.params.id },
            transaction
        });
    }
}).then(...
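If you want the two-step approach described in the question (reset every option to false, then flag the correct one), a hedged sketch inside the same transaction could look like this; the question_id, option ids and the answer field come from the question, the rest is an assumption:

// Hedged sketch of the "reset then set" idea, not the answer's exact code
sequelize.transaction(async transaction => {
    // 1. mark every option of this question as not being the answer
    await db.Option.update({ answer: false }, {
        where: { question_id: req.params.id },
        transaction
    });
    // 2. mark only the option that came in as true from the frontend
    const correct = req.body.find(option => option.answer === true);
    if (correct) {
        await db.Option.update({ answer: true }, {
            where: { id: correct.id },
            transaction
        });
    }
});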

Sequelize: only the last few rows are updated

I have an array of (a few thousand) data objects that I need to insert or update depending on a condition. A simple upsert() method is implemented in my model class.
Implementation
csv.upsert = async function(values, condition) {
    let obj = await csv.findOne({ where: condition });
    if (obj) {
        // update
        console.log("existing record updated")
        return await obj.update(values);
    } else {
        // insert
        console.log("new record inserted")
        return await csv.create(values);
    }
}
This upsert method is then used where I loop through the array of objects to insert or update them in the db.
Usage
try {
    await models.sequelize.authenticate();
    await models.sequelize.sync();
    let dataToBeInserted = getArrayOfDatatoInsert();
    dataToBeInserted.map(async function(data) {
        let condition = {
            'categorygroup': data.categorygroup,
            'category': data.category,
            'country': data.country,
            'city': data.city
        };
        await csvModel.upsert(data, condition);
    })
    // await restofthestuff();
} catch (error) {
    console.log("error", error);
}
For a test I took a dataset where all of my data needs to be updated.
When I run this method:
With Sequelize logging turned on, I can see in the log that the "existing record updated" message is printed for every record that exists, which is the desired output. But only the last few (30) records actually get updated in the db, whereas it works fine with csv.create(values).
How can I update all the records, and obviously not just the last 30? Any help is appreciated.
EDIT: Apparently I got this to work by using csv.update(values, {where: condition}) instead of obj.update(values).
New question: I didn't look further into Sequelize's update method, but is this a bug or am I doing something wrong here?
As detailed in the commented code below, your log is after your return and so will never be executed.
Also you were not using await in an async function, so either don't make it async or use await.
csv.upsert = async function(values, condition) {
    const obj = await csv.findOne({ where: condition });
    // you can await here, no need for then as you are in an async function
    if (obj) { // update
        // you need to log before your return condition
        console.log("existing record updated")
        return obj.update(values);
        // since we return here, the rest of the code will not be executed
        // you can skip the use of else
    }
    // we are in this part of the code only if object is falsy
    // insert
    console.log("new record inserted")
    return csv.create(values);
}
You could also use Promise.all to ensure all the upsert are done:
await Promise.all(dataToBeInserted.map(async function(data) {
    let condition = {
        'categorygroup': data.categorygroup,
        'category': data.category,
        'country': data.country,
        'city': data.city
    };
    await csvModel.upsert(data, condition);
}))
This will also ensure that if an error occurs it gets caught by your try / catch.
Maybe that will help you find what's causing the unexpected behaviours.

knex transaction not working in nodejs

I'm using knex in loopback for DB operations with mysql.
My task is to update 2 tables using a transaction.
When I insert a new entry into one table, I want to use the id of that entry for the second query.
But when the transaction throws an error, it does not roll back the data / remove the first table's entry when the second insert fails. In my case the transaction always commits and never rolls back. I've put my example code below:
addTest: (data) => {
    return new promise(function(resolve, reject) {
        knex.transaction(function(t) {
            return knex('foo')
                .transacting(t)
                .insert({
                    foo_id: data.foo_id ? data.foo_id : null,
                    foo_name: data.foo_name ? data.foo_name : null,
                    date_entered: new Date()
                })
                .then(function() {
                    return knex('bar')
                        .transacting(t)
                        .insert({
                            bar_id: data.bar_id ? data.bar_id : null,
                            bar_name: data.bar_name ? data.bar_name : null
                        })
                })
                .then(t.commit)
                .catch(function(e) {
                    t.rollback();
                    throw e;
                })
        })
        .then(function() {
            // it worked
            // resolve('sucess');
            console.log('success');
        })
        .catch(function(e) {
            // it failed
            console.log('error' + e);
        });
    });
}
Please provide a suitable suggestion.
Thank you.
You can avoid having to call t.commit or t.rollback yourself. See the docs.
Make the code inside the transaction function something like this:
return t.insert({}).into('foo').returning('id')
    .then(function(idArray) {
        return t.insert({ fooId: idArray[0] }).into('bar')
    })
That lets knex handle committing and rolling back itself based on the result of that promise. Also, note how I got the inserted fooId and applied it to the bar object for insertion. That was kind of mentioned in the question.
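Putting that into the question's addTest shape, a minimal sketch could look like this; the column names are taken from the question except for the foo reference on the bar row, which is an assumed name, and the promise returned from knex.transaction drives commit/rollback as described above:

// Hedged sketch, assuming the same data object as in the question
addTest: (data) => {
    return knex.transaction(function(t) {
        // insert into foo and get the generated id back
        return t.insert({
                foo_id: data.foo_id || null,
                foo_name: data.foo_name || null,
                date_entered: new Date()
            }).into('foo').returning('id')
            .then(function(idArray) {
                // reuse the new foo id for the bar row; any rejection here rolls both inserts back
                return t.insert({
                    bar_id: data.bar_id || null,
                    bar_name: data.bar_name || null,
                    foo_id: idArray[0] // assumed foreign key column
                }).into('bar');
            });
    }).then(function() {
        console.log('success'); // committed
    }).catch(function(e) {
        console.log('error' + e); // rolled back
    });
}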

Nested collection in models Sails.js [duplicate]

I've got a question regarding associations in Sails.js version 0.10-rc5. I've been building an app in which multiple models are associated with one another, and I've arrived at a point where I need to nest associations somehow.
There are three parts:
First there's something like a blog post that's written by a user. In the blog post I want to show the associated user's information, like their username. Everything works fine here, until the next step: I'm trying to show the comments associated with the post.
The comments are a separate model, called Comment. Each of these also has an author (user) associated with it. I can easily show a list of the comments, but when I want to display the user's information associated with a comment, I can't figure out how to populate the comment with the user's information.
In my controller I'm trying to do something like this:
Post
    .findOne(req.param('id'))
    .populate('user')
    .populate('comments') // I want to populate this comment with .populate('user') or something
    .exec(function(err, post) {
        // Handle errors & render view etc.
    });
In my Post's 'show' action I'm trying to retrieve the information like this (simplified):
<ul>
    <%- _.each(post.comments, function(comment) { %>
        <li>
            <%= comment.user.name %>
            <%= comment.description %>
        </li>
    <% }); %>
</ul>
The comment.user.name will be undefined though. If I try to just access the 'user' property, like comment.user, it'll show its ID. Which tells me it's not automatically populating the user's information on the comment when I associate the comment with another model.
Anyone have any ideas to solve this properly :)?
Thanks in advance!
P.S.
For clarification, this is how I've basically set up the associations in the different models:
// User.js
posts: {
    collection: 'post'
},
hours: {
    collection: 'hour'
},
comments: {
    collection: 'comment'
}

// Post.js
user: {
    model: 'user'
},
comments: {
    collection: 'comment',
    via: 'post'
}

// Comment.js
user: {
    model: 'user'
},
post: {
    model: 'post'
}
Or you can use the built-in Bluebird Promise feature to do it (working on Sails#v0.10.5).
See the code below:
var _ = require('lodash');
...
Post
    .findOne(req.param('id'))
    .populate('user')
    .populate('comments')
    .then(function(post) {
        var commentUsers = User.find({
            id: _.pluck(post.comments, 'user')
            // _.pluck: retrieves the value of the 'user' property from all elements in the post.comments collection.
        })
        .then(function(commentUsers) {
            return commentUsers;
        });
        return [post, commentUsers];
    })
    .spread(function(post, commentUsers) {
        commentUsers = _.indexBy(commentUsers, 'id');
        // _.indexBy: creates an object composed of keys generated from the results of running each element of the collection through the given callback. The corresponding value of each key is the last element responsible for generating the key.
        post.comments = _.map(post.comments, function(comment) {
            comment.user = commentUsers[comment.user];
            return comment;
        });
        res.json(post);
    })
    .catch(function(err) {
        return res.serverError(err);
    });
Some explanation:
I'm using Lo-Dash to deal with the arrays. For more details, please refer to the official docs.
Notice the return value inside the first "then" function: the objects in the returned array "[post, commentUsers]" are themselves promise objects, which means they don't contain the actual data when they are first returned. The "spread" function waits until the actual values arrive and then continues with the rest.
At the moment, there's no built in way to populate nested associations. Your best bet is to use async to do a mapping:
async.auto({
    // First get the post
    post: function(cb) {
        Post
            .findOne(req.param('id'))
            .populate('user')
            .populate('comments')
            .exec(cb);
    },
    // Then all of the comment users, using an "in" query by
    // setting "id" criteria to an array of user IDs
    commentUsers: ['post', function(cb, results) {
        User.find({ id: _.pluck(results.post.comments, 'user') }).exec(cb);
    }],
    // Map the comment users to their comments
    map: ['commentUsers', function(cb, results) {
        // Index comment users by ID
        var commentUsers = _.indexBy(results.commentUsers, 'id');
        // Get a plain object version of post & comments
        var post = results.post.toObject();
        // Map users onto comments
        post.comments = post.comments.map(function(comment) {
            comment.user = commentUsers[comment.user];
            return comment;
        });
        return cb(null, post);
    }]
},
// After all the async magic is finished, return the mapped result
// (or an error if any occurred during the async block)
function finish(err, results) {
    if (err) { return res.serverError(err); }
    return res.json(results.map);
});
It's not as pretty as nested population (which is in the works, but probably not for v0.10), but on the bright side it's actually fairly efficient.
I created an NPM module for this called nested-pop. You can find it at the link below.
https://www.npmjs.com/package/nested-pop
Use it in the following way.
var nestedPop = require('nested-pop');

User.find()
    .populate('dogs')
    .then(function(users) {
        return nestedPop(users, {
            dogs: [
                'breed'
            ]
        }).then(function(users) {
            return users;
        }).catch(function(err) {
            throw err;
        });
    }).catch(function(err) {
        throw err;
    });
Worth saying there's a pull request to add nested population: https://github.com/balderdashy/waterline/pull/1052
Pull request isn't merged at the moment but you can use it installing one directly with
npm i Atlantis-Software/waterline#deepPopulate
With it you can do something like .populate('user.comments ...)'.
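For this thread's models, a hedged sketch of what that might look like, assuming the dot-path populate syntax from that branch applies to the comments -> user association:

// Hedged sketch: relies on the deepPopulate branch mentioned above being installed
Post
    .findOne(req.param('id'))
    .populate('user')
    .populate('comments.user')
    .exec(function(err, post) {
        if (err) { return res.serverError(err); }
        // post.comments[i].user should now be a populated user object
        return res.json(post);
    });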
Sails v0.11 doesn't support _.pluck and _.indexBy; use sails.util.pluck and sails.util.indexBy instead.
async.auto({
    // First get the post
    post: function(cb) {
        Post
            .findOne(req.param('id'))
            .populate('user')
            .populate('comments')
            .exec(cb);
    },
    // Then all of the comment users, using an "in" query by
    // setting "id" criteria to an array of user IDs
    commentUsers: ['post', function(cb, results) {
        User.find({ id: sails.util.pluck(results.post.comments, 'user') }).exec(cb);
    }],
    // Map the comment users to their comments
    map: ['commentUsers', function(cb, results) {
        // Index comment users by ID
        var commentUsers = sails.util.indexBy(results.commentUsers, 'id');
        // Get a plain object version of post & comments
        var post = results.post.toObject();
        // Map users onto comments
        post.comments = post.comments.map(function(comment) {
            comment.user = commentUsers[comment.user];
            return comment;
        });
        return cb(null, post);
    }]
},
// After all the async magic is finished, return the mapped result
// (or an error if any occurred during the async block)
function finish(err, results) {
    if (err) { return res.serverError(err); }
    return res.json(results.map);
});
You could use the async library, which is very clean and simple to understand. For each comment related to a post you can populate as many fields as you want with dedicated tasks, execute them in parallel and retrieve the results when all tasks are done. Finally, you only have to return the final result.
Post
    .findOne(req.param('id'))
    .populate('user')
    .populate('comments') // I want to populate this comment with .populate('user') or something
    .exec(function(err, post) {
        // populate each comment in parallel
        async.each(post.comments, function(comment, callback) {
            // you can populate many elements or only one...
            var populateTasks = {
                user: function(cb) {
                    User.findOne({ id: comment.user })
                        .exec(function(err, result) {
                            cb(err, result);
                        });
                }
            }
            async.parallel(populateTasks, function(err, resultSet) {
                if (err) { return next(err); }
                comment.user = resultSet.user; // attach the populated user to this comment
                // finish
                callback();
            });
        }, function(err) { // final callback
            if (err) { return next(err); }
            return res.json(post);
        });
    });
As of sailsjs 1.0 the "deep populate" pull request is still open, but the following async function solution looks elegant enough IMO:
const post = await Post
    .findOne({ id: req.param('id') })
    .populate('user')
    .populate('comments');

if (post && post.comments.length > 0) {
    const ids = post.comments.map(comment => comment.id);
    post.comments = await Comment
        .find({ id: ids }) // re-fetch the comments by id, this time with their user populated
        .populate('user');
}
Granted this is an old question, but a much simpler solution would be to loop over the comments, replacing each comment's 'user' property (which is an id) with the user's full details using async/await.
async function getPost(postId) {
    let post = await Post.findOne(postId).populate('user').populate('comments');
    for (let comment of post.comments) {
        comment.user = await User.findOne({ id: comment.user });
    }
    return post;
}
Hope this helps!
In case anyone is looking to do the same but for multiple posts, here's one way of doing it:
find all user IDs in posts
query all users in 1 go from DB
update posts with those users
Given that same user can write multiple comments, we're making sure we're reusing those objects. Also we're only making 1 additional query (whereas if we'd do it for each post separately, that would be multiple queries).
await Post.find()
    .populate('comments')
    .then(async (posts) => {
        // Collect all comment user IDs
        const userIDs = posts.reduce((acc, curr) => {
            for (const comment of curr.comments) {
                acc.add(comment.user);
            }
            return acc;
        }, new Set());
        // Get users
        const users = await User.find({ id: Array.from(userIDs) });
        const usersMap = users.reduce((acc, curr) => {
            acc[curr.id] = curr;
            return acc;
        }, {});
        // Assign users to comments
        for (const post of posts) {
            for (const comment of post.comments) {
                if (comment.user) {
                    const userID = comment.user;
                    comment.user = usersMap[userID];
                }
            }
        }
        return posts;
    });

Nested mysql transactions between 2 tables using sequelize.js

I have 2 sequelize models (Event and Inventory, associated with 2 tables). I created an Event._create method so that I can use it to create an event in the event db with multiple products recorded in the inventory db at the same time. Each inventory row is associated with the event_id of the newly created event.
Because all of this should succeed or fail together, I use sequelize's transactions to achieve it.
Initially I was thinking about doing something like this:
sequelize.transactionPromise = Promise.promisify(sequelize.transaction, sequelize);
return sequelize.transactionPromise({ autocommit: 0 })
    .then(function(t) {
        return Event.create(ev, { transaction: t })
            .then(function(event) {
                var event_id = event.id; // ------ (*)
                return Promise.resolve([1, ..., event_number])
                    .then(function() {
                        Inventory.create({ product_id: some_product_id, event_id: event_id },
                            { transaction: t });
                    })
                    .then(function() {
                        return Promise.cast(t.commit())
                            .then(function() { // successfully committed
                                return res.json(d);
                            }).catch(function(err) { // cannot commit somehow
                                return res.json(500, err.toString());
                            });
                    }).catch(function(err) { // error rollback
                        return Promise.cast(t.rollback())
                            .then(function() {
                                return Promise.reject('rollback: ' + err.toString());
                            });
                    });
            });
    });
But this doesn't work because before the transaction is committed the (*) has no value and gives me an event_id of NULL.
Instead I do something like below:
var Event = sequelize.model('Event');
var Inventory = sequelize.model('Inventory');

var _create = function(t, ev) {
    var ev_id_secret = { secret: 'some random secret' };
    return Promise.cast(Event.create(ev_id_secret))
        .then(function(d) {
            ev_id_secret.id = d.id;
            return true;
        }).then(function() {
            return Promise.resolve(_.range(ev.number_of_products))
                .map(function() {
                    var inventory = {
                        event_id: ev_id_secret.id,
                        product_id: ev.product_id
                    };
                    return Promise.cast(Inventory._create(t, inventory));
                });
        }).then(function() { // thennable a transaction
            return Promise.cast(Event.update(ev, ev_id_secret, { transaction: t }));
        });
};
So I can do something like this.
sequelize.transactionPromise = Promise.promisify(sequelize.transaction, sequelize);
return sequelize.transactionPromise({ autocommit: 0 })
    .then(function(t) {
        return Event._create(t, ev)
            .then(function() {
                return Promise.cast(t.commit())
                    .then(function() {
                        return res.json(d);
                    }).catch(function(err) {
                        return res.json(500, err.toString());
                    });
            }).catch(function(err) {
                return Promise.cast(t.rollback())
                    .then(function() {
                        return Promise.reject('rollback: ' + err.toString());
                    });
            });
    }).catch(function(err) {
        console.log(err.stack);
        res.json(500, { error: err.toString() });
    });
What I do with _create is insert an empty event (with a randomly generated secret) into the Event db and some empty products into the Inventory db, then later use this secret to query for the event_id and update the event and the inventory accordingly.
The thing is, when the promise is rejected and the transaction's rollback() is called, it leaves empty event and product records in the db. So I have to deal with those empty records later, which is really disturbing.
So my question is: how do I do a transaction between 2 tables correctly? Am I on the right track?
P.S.: as a side question, you can see my code is full of return Promise.xxx statements. This ensures the control flow, but the promise chain gets really messy. Is there something I can do to improve my code? Thanks.
Your original implementation was correct. However, after you create the event, the object should come back with the id already populated, without having to commit (I'm assuming you're using the standard auto-incremented sequelize ids). All SQL databases function this way. I recommend debugging your application to determine the root of the problem.
Here's a sample implementation of a sequelize transaction from one of my applications. I've just tested and confirmed that calling db.Deposit.create() resolves to an object with a valid id. I am using Bluebird, Sequelize 2.0.0-rc2, and Postgresql 9.3.
module.exports.createDeposit = function(deposit) {
    return db.sequelize.transaction({ isolationLevel: 'READ COMMITTED' })
        .then(function(t) {
            var strQuery = 'UPDATE "Accounts" '
                + 'SET "balance"="balance" + :amount, "updatedAt"=NOW() '
                + 'WHERE "id"= :AccountId RETURNING *';
            return sql.query(strQuery,
                db.Account.build(),
                { raw: true, transaction: t },
                { AccountId: deposit.AccountId, amount: deposit.amount })
                .then(function(account) {
                    if (!account) throw new Error('Account does not exist');
                    return db.Deposit.create(deposit, { transaction: t });
                })
                .then(function(dbDeposit) {
                    // If successful, dbDeposit object contains a valid id
                    if (!dbDeposit) throw new Error('Failed to create deposit');
                    return t.commit()
                        .then(function() {
                            return dbDeposit;
                        });
                })
                .catch(function(e) {
                    return t.rollback()
                        .then(function() {
                            throw e;
                        });
                });
        });
};
Your original implementation was correct as far as I can tell, but it can be improved by leveraging managed transactions and full promise support in the latest versions of Sequelize:
return sequelize.transaction({ autocommit: false }, function(t) {
    return Event.create(ev, { transaction: t }).then(function(event) {
        return Promise.map(_.range(event_number), function(number) {
            return Inventory.create({ product_id: some_product_id, event_id: event.get('id') }, { transaction: t });
        });
    });
}).then(function() {
    // Automatically committed at this point if the promise chain returned to the transaction successfully resolved
    return res.status(200).json(d);
}).catch(function(err) {
    // Automatically rolled back at this point if the promise chain returned to the transaction was rejected
    return res.status(403).json();
});
You can read more about managed transactions on the docs: http://sequelize.readthedocs.org/en/latest/docs/transactions/