Sequelize delete multiple associations - mysql

Hi, I have the following models:
models.User = sequelize.define("User", {
  name: {
    type: DataTypes.STRING, allowNull: false, unique: true
  }
});
models.Group = sequelize.define("Group", {
  name: {
    type: DataTypes.STRING, allowNull: false, unique: true
  }
});
db.User.belongsToMany(db.Group, {
  "through": "UsersGroup"
});
db.Group.belongsToMany(db.User, {
  "through": "UsersGroup"
});
Is there a way to delete many users from a group at once, given the group name and the user names to delete?
like:
var groupName = 'Work';
var users = ['Alice','Bob']
So I need to delete the associations between the Work group and Alice and Bob.

Using "through": "UsersGroup" will automatically create a new Model UsersGroup however it won't associate it with User and Group directly. So what i'd do is to create a new sequelize model UsersGroup with whatever attributes and keys and define the association to User and Group. Then instead of passing "UsersGroup" string to belongsToMany you use the model directly as db.UsersGroup.
models.UsersGroup = sequelize.define("UsersGroup", {});
db.UsersGroup.belongsTo(db.User);
db.UsersGroup.belongsTo(db.Group);

db.UsersGroup.findAll({
  attributes: ['id'],
  include: [
    { model: db.User, where: { name: { $in: ['Alice', 'Bob'] } } },
    { model: db.Group, where: { name: 'Work' } }
  ]
}).then(function (toBeDeleted) {
  return db.UsersGroup.destroy({
    where: { id: { $in: toBeDeleted.map(function (d) { return d.id; }) } }
  });
}).then(function () {
  // ...
}).catch(function (dbErr) {
  throw dbErr;
});
You also might want to check that toBeDeleted.length > 0 before calling destroy, as I remember Sequelize acting funny when an empty array is passed to $in.
Also, in case you don't have id as the primary key in the UsersGroup table, you can easily modify the code above to use the combination of user_id and group_id instead; just adjust the where statements.
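For instance, a minimal sketch of that variant, assuming the join table's foreign key columns are named UserId and GroupId (substitute whatever foreign keys your schema actually generates):
// Sketch only: the UserId/GroupId column names are assumptions, adjust to your schema.
db.UsersGroup.findAll({
  attributes: ['UserId', 'GroupId'],
  include: [
    { model: db.User, where: { name: { $in: ['Alice', 'Bob'] } } },
    { model: db.Group, where: { name: 'Work' } }
  ]
}).then(function (toBeDeleted) {
  if (!toBeDeleted.length) { return; } // nothing to delete
  return db.UsersGroup.destroy({
    where: {
      $or: toBeDeleted.map(function (d) {
        return { UserId: d.UserId, GroupId: d.GroupId };
      })
    }
  });
});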
The other approach would be to select the Users and Groups separately and use the resulting sets in UsersGroup.destroy(), but I prefer to have an association even from a through model.
If you were to delete associations based on just the group name or just the user names, you could do that using removeAssociation or removeAssociations; have a look here.
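In the original User/Group terms, that approach might look roughly like this (a sketch, assuming the default removeUsers accessor that belongsToMany generates on Group instances):
// Sketch: removes only the join rows in UsersGroup, not the users themselves.
db.Group.findOne({ where: { name: 'Work' } }).then(function (group) {
  if (!group) throw new Error('group not found');
  return db.User.findAll({ where: { name: { $in: ['Alice', 'Bob'] } } })
    .then(function (users) {
      return group.removeUsers(users);
    });
});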

Ended up doing the following (role → group, action → user):
var role = JSON.parse(req.body.role);
var actions = JSON.parse(req.body.actions);
var actionNames = _.map(actions, function (action) { return action.name; });

models.Role.findOne({ where: { name: role.name } }).then(function (_role) {
  if (_role == null) {
    res.status(500).send("role was not found");
  }
  else {
    models.Action.findAll({ where: { name: actionNames } }).then(function (actionsToRemove) {
      if (actionsToRemove == null || actionsToRemove.length === 0) {
        res.status(500).send("No actions [" + actionNames + "] were found");
      }
      else {
        _role.removeActions(actionsToRemove).then(function (removedActions) {
          if (removedActions == null || removedActions != actions.length) {
            res.status(500).send(actions.length + " actions were expected to be removed, but only [" + removedActions + "] were removed");
          }
          else {
            res.send("" + removedActions);
          }
        }).catch(function (err) {
          throw err;
        });
      }
    });
  }
}).catch(function (err) {
  res.status(500).send(err);
});


Check if a user exists in my database with Node and MySQL [duplicate]

I need to check if an entry with a specific ID exists in the database, using Sequelize in Node.js:
function isIdUnique (id) {
  db.Profile.count({ where: { id: id } })
    .then(count => {
      if (count != 0) {
        return false;
      }
      return true;
    });
}
I call this function in an if statement, but the result is always undefined:
if (isIdUnique(id)) {...}
I don't recommend using count to check for record existence. Suppose you have a hundred matches among a million records: why count them all if you just want a boolean, true if a match exists and false if not?
findOne gets the job done, returning as soon as it finds the first match.
const isIdUnique = id =>
  db.Profile.findOne({ where: { id } })
    .then(profile => profile === null);
Update: see the answer above which suggests using findOne(), which I personally prefer; this answer describes an alternative approach.
You are not returning from the isIdUnique function:
function isIdUnique (id) {
  return db.Profile.count({ where: { id: id } })
    .then(count => {
      if (count != 0) {
        return false;
      }
      return true;
    });
}
isIdUnique(id).then(isUnique => {
  if (isUnique) {
    // ...
  }
});
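If you are on a Node version with async/await support, the same logic reads a little more directly (just a sketch of the same check, not a different API):
// count === 0 means no row with that id exists, i.e. the id is unique.
async function isIdUnique(id) {
  const count = await db.Profile.count({ where: { id: id } });
  return count === 0;
}

// usage, inside another async function:
// if (await isIdUnique(id)) { ... }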
You can also count and find in a single query with findAndCountAll:
Project
  .findAndCountAll({
    where: {
      title: {
        [Op.like]: 'foo%'
      }
    },
    offset: 10,
    limit: 2
  })
  .then(result => {
    console.log(result.count);
    console.log(result.rows);
  });
Doc link, v5 Beta Release
I found the answer by #alecxe to be unreliable in some instances, so I tweaked the logic:
function isIdUnique (id) {
  return db.Profile.count({ where: { id: id } })
    .then(count => count > 0);
}
As Sequelize is designed around promises anyway, alecxe's answer probably makes most sense, but for the sake of offering an alternative, you can also pass in a callback:
function isIdUnique (id, done) {
  db.Profile.count({ where: { id: id } })
    .then(count => {
      done(count == 0);
    });
}

isIdUnique(id, function (isUnique) {
  if (isUnique) {
    // stuff
  }
});
Extending #Jalal's answer: if you're very conscious about performance implications while keeping a simple Sequelize structure, and you don't need the row data, I suggest requesting only one column from the database. Why waste bandwidth and time asking the database to return all the columns when you won't even use them?
const isIdUnique = id =>
  db.Profile.findOne({ where: { id }, attributes: ['id'] })
    .then(profile => profile === null);
The attributes option tells Sequelize to request only the id column from the database rather than the whole row. This may seem a bit excessive, but at scale, and if you have many columns holding a lot of data, it can make a big difference in performance.
Try the below solution. I tried it and it works well.
const isIdUnique = async (id, model) => {
  return await model.count({ where: { id: id } });
};

const checkExistId = await isIdUnique(idUser, User);
console.log("checkExistId: ", checkExistId);

Transform Request to Autoquery friendly

We are working with a 3rd-party grid (Telerik Kendo) that has paging/sorting/filtering built in. It sends GET requests in a particular format, and I'm trying to determine if there is a way to translate these requests into AutoQuery-friendly requests.
Query string params
Sort Pattern:
sort[{0}][field] and sort[{0}][dir]
Filtering:
filter[filters][{0}][field]
filter[filters][{0}][operator]
filter[filters][{0}][value]
So this, which is populated in the querystring:
filter[filters][0][field]
filter[filters][0][operator]
filter[filters][0][value]
would need to be translated to:
FieldName=1 // filter[filters][0][field]+filter[filters][0][operator]+filter[filters][0][value] in a nutshell (not exactly true)
Should I manipulate the querystring object in a plugin by removing the filters (or just adding the ones I need)? Is there a better option here?
I'm not sure there is a clean way to do this on the Kendo side either.
I will explain the two routes I went down; I hope to see a better answer.
First, I tried to modify the querystring in a request filter, but could not. I ended up having to run the AutoQuery queries manually, by getting the params and modifying them before calling AutoQueryDb.Execute. Something like this:
var requestparams = Request.ToAutoQueryParams();
var q = AutoQueryDb.CreateQuery(requestobject, requestparams);
AutoQueryDb.Execute(requestobject, q);
I wish there was a more global way to do this. The extension method just loops over all the querystring params and adds the ones that I need.
After doing the above work, I wasn't very happy with the result so I investigated doing it differently and ended up with the following:
Register the Kendo grid filter operations against their equivalent ServiceStack AutoQuery conventions:
var aq = new AutoQueryFeature { MaxLimit = 100, EnableAutoQueryViewer=true };
aq.ImplicitConventions.Add("%neq", aq.ImplicitConventions["%NotEqualTo"]);
aq.ImplicitConventions.Add("%eq", "{Field} = {Value}");
Next, on the grid's read operation, we need to reformat the querystring:
read: {
  url: "/api/stuff?format=json&isGrid=true",
  data: function (options) {
    if (options.sort && options.sort.length > 0) {
      options.OrderBy = (options.sort[0].dir == "desc" ? "-" : "") + options.sort[0].field;
    }
    if (options.filter && options.filter.filters.length > 0) {
      for (var i = 0; i < options.filter.filters.length; i++) {
        var f = options.filter.filters[i];
        console.log(f);
        options[f.field + f.operator] = f.value;
      }
    }
  }
}
Now the grid will send its operations in an AutoQuery-friendly manner.
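To make the mapping concrete, here is roughly what a single filter plus sort turns into with the data callback above (the field names are purely illustrative):
// Kendo hands the data() callback something like:
//   options.filter = { filters: [{ field: 'Name', operator: 'eq', value: 'foo' }] }
//   options.sort   = [{ field: 'CreatedOn', dir: 'desc' }]
// which the code above flattens into:
//   options.Nameeq  = 'foo'         // picked up by the '%eq' convention => Name = 'foo'
//   options.OrderBy = '-CreatedOn'  // AutoQuery descending sort
// so the GET request ends up roughly as:
//   /api/stuff?format=json&isGrid=true&Nameeq=foo&OrderBy=-CreatedOn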
I created an AutoQueryKendoDataSource TypeScript class that you may or may not find useful.
Its usage is along the lines of:
this.gridDataSource = AutoQueryKendoDataSource.getDefaultInstance<dtos.QueryDbSubclass, dtos.ListDefinition>('/api/autoQueryRoute', { orderByDesc: 'createdOn' });
export default class AutoQueryKendoDataSource<queryT extends dtos.QueryDb_1<T>, T> extends kendo.data.DataSource {
  private constructor(options: kendo.data.DataSourceOptions = {}, public route?: string, public request?: queryT) {
    super(options)
  }

  defer: ng.IDeferred<any>;

  static exportToExcel(columns: kendo.ui.GridColumn[], dataSource: kendo.data.DataSource, filename: string) {
    let rows = [{ cells: columns.map(d => { return { value: d.field }; }) }];
    dataSource.fetch(function () {
      var data = this.data();
      for (var i = 0; i < data.length; i++) {
        // push single row for every record
        rows.push({
          cells: _.map(columns, d => { return { value: data[i][d.field] } })
        })
      }
      var workbook = new kendo.ooxml.Workbook({
        sheets: [
          {
            columns: _.map(columns, d => { return { autoWidth: true } }),
            // Title of the sheet
            title: filename,
            // Rows of the sheet
            rows: rows
          }
        ]
      });
      // save the file as an Excel file with extension xlsx
      kendo.saveAs({ dataURI: workbook.toDataURL(), fileName: filename });
    })
  }

  static getDefaultInstance<queryT extends dtos.QueryDb_1<T>, T>(route: string, request: queryT, $q?: ng.IQService, model?: any) {
    let sortInfo: {
      orderBy?: string,
      orderByDesc?: string,
      skip?: number
    } = {};
    let opts = {
      transport: {
        read: {
          url: route,
          dataType: 'json',
          data: request
        },
        parameterMap: (data, type) => {
          if (type == 'read') {
            if (data.sort) {
              data.sort.forEach((s: any) => {
                if (s.field.indexOf('.') > -1) {
                  var arr = _.split(s.field, '.');
                  s.field = arr[arr.length - 1];
                }
              })
            } // for autoquery to work, need only field names not entity names
            sortInfo = {
              orderByDesc: _.join(_.map(_.filter(data.sort, (s: any) => s.dir == 'desc'), 'field'), ','),
              orderBy: _.join(_.map(_.filter(data.sort, (s: any) => s.dir == 'asc'), 'field'), ','),
              skip: 0
            }
            if (data.page) {
              sortInfo.skip = (data.page - 1) * data.pageSize;
              _.extend(data, request);
            }
            // override sorting if done via grid
            if (sortInfo.orderByDesc) {
              (<any>data).orderByDesc = sortInfo.orderByDesc;
              (<any>data).orderBy = null;
            }
            if (sortInfo.orderBy) {
              (<any>data).orderBy = sortInfo.orderBy;
              (<any>data).orderByDesc = null;
            }
            (<any>data).skip = sortInfo.skip;
            return data;
          }
          return data;
        },
      },
      requestStart: (e: kendo.data.DataSourceRequestStartEvent) => {
        let ds = <AutoQueryKendoDataSource<queryT, T>>e.sender;
        if ($q)
          ds.defer = $q.defer();
      },
      requestEnd: (e: kendo.data.DataSourceRequestEndEvent) => {
        new DatesToStringsService().convert(e.response);
        let ds = <AutoQueryKendoDataSource<queryT, T>>e.sender;
        if (ds.defer)
          ds.defer.resolve();
      },
      schema: {
        data: (response: dtos.QueryResponse<T>) => {
          return response.results;
        },
        type: 'json',
        total: 'total',
        model: model
      },
      pageSize: request.take || 40,
      page: 1,
      serverPaging: true,
      serverSorting: true
    };
    let ds = new AutoQueryKendoDataSource<queryT, T>(opts, route, request);
    return ds;
  }
}

Sails / WaterLine ORM / Has Many Through: Insert data into join table

I'm a newbie with Sails/Waterline ORM.
I'm following http://sailsjs.org/documentation/concepts/models-and-orm/associations/through-associations
One question: how do I insert data into the join table?
For example: User m - m Pet
User model
module.exports = {
  attributes: {
    name: {
      type: 'string'
    },
    pets: {
      collection: 'pet',
      via: 'owner',
      through: 'petuser'
    }
  }
};
Pet model
module.exports = {
  attributes: {
    name: {
      type: 'string'
    },
    color: {
      type: 'string'
    },
    owners: {
      collection: 'user',
      via: 'pet',
      through: 'petuser'
    }
  }
};
PetUser model (join table)
module.exports = {
  attributes: {
    owner: {
      model: 'user'
    },
    pet: {
      model: 'pet'
    }
  }
};
Pet data is already available (some records with IDs 1, 2, 3, ...).
I want to add a new user with some pets, so the PetUser table (id, id_of_user, id_of_pet) would end up with rows like:
1, U1, P1
2, U1, P2
The request body looks like:
{
  "name": "John",
  "pets": [2, 3]
}
UserController
module.exports = {
  addUserWithPets: function (req, res) {
    User.create(req.body).exec(function (err, user) {
      if (err) {
        throw err;
      } else {
        /*pets.forEach(function (pet, index) {
          user.pets.add(pet);
        });
        user.save(function (err) {});*/
        user.pets.add(req.body.pets);
        user.save(function (err) {});
      }
      return res.ok({
        data: user
      });
    });
  }
};
Thanks!
I think this hasn't been implemented in Sails yet.
Refer to this question: through associations in sails.js on SO.
Here is what the Waterline docs say:
Many-to-Many through associations behave the same way as many-to-many associations with the exception of the join table being automatically created for you. This allows you to attach additional attributes onto the relationship inside of the join table.
Coming Soon
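Until that lands, a workaround some people use is to write to the join model directly, since PetUser is a normal model here. A rough sketch, assuming the three models above (createEach is the newer Waterline API; older Sails versions accept an array passed to create()):
// Sketch: create the user, then insert the join rows into PetUser yourself.
addUserWithPets: function (req, res) {
  var petIds = req.body.pets || [];
  User.create({ name: req.body.name }).exec(function (err, user) {
    if (err) { return res.serverError(err); }
    var joinRows = petIds.map(function (petId) {
      return { owner: user.id, pet: petId };
    });
    PetUser.createEach(joinRows).exec(function (err) {
      if (err) { return res.serverError(err); }
      return res.ok({ data: user });
    });
  });
}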

Unable to Insert Data into a collection

I have an officer schema; when a user wants to fix an appointment, an entry is made for them in the DB. The schema is:
officerSchema = mongoose.Schema({
  email: {
    type: String,
    index: { unique: true }
  },
  appointmentList: Array // array of JSON objects of dates and userIDs
});
The appointmentList is an array of JSON objects, each containing the ID of the officer with whom the appointment has to be made, the date, and the userID (the user who wants to fix the appointment).
To avoid duplicate appointment entries, I have tried several methods mentioned on the internet; none of them have worked for me so far. I am posting the code below. The problem with it is that it NEVER inserts any data into appointmentList. However, if I use save() instead of update(), insertion occurs but duplicates also get inserted.
Here is the JSON object that I want to add to the array in the DB:
{
  "id": "1321231231",
  "appointment": {
    "userID": "31321",
    "date": "24 March"
  }
}
var ID = requestObject.id;
var newObject = {$addToSet: requestObject.appointment};

OfficerModel.findOne({_id: ID}, function (err, foundData) {
  if (err) {
    console.log(err);
    return;
  }
  else {
    var dbList = foundData.list;
    dbList.push(newObject);
    foundData.update(function (err, updatedData) {
      if (err) {
        console.log(err);
      }
      else {
        console.log("successful");
      }
    });
  }
});
Using the $addToSet operator might work for you.
var appt = {
  id: "1321231231",
  appointment: {
    userID: "31321",
    date: "24 March"
  }
};

Officer.update(
  {_id: ID},
  {$addToSet: {appointmentList: appt}},
  function (err) { ... }
);
But it's not a perfect solution because {one: 1, two: 2} and {two: 2, one: 1} aren't interpreted as equal, so they could both get added to an array with $addToSet.
To totally avoid duplicates, you could do something like this:
var appt = {
  id: "1321231231",
  appointment: {
    userID: "31321",
    date: "24 March"
  }
};

Officer.findOne(
  {_id: ID, 'appointmentList.id': appt.id},
  function (err, officerDoc) {
    if (err) { ... }
    // since no document matched your query, add the appointment
    if (!officerDoc) {
      Officer.update(
        {_id: ID},
        {$push: {appointmentList: appt}},
        function (err) { ... }
      );
    }
    // since that appointment already exists, update it
    else {
      Officer.update(
        {_id: ID, 'appointmentList.id': appt.id},
        {$set: {'appointmentList.$.appointment': appt.appointment}},
        function (err) { ... }
      );
    }
  }
);
The operation above that updates the existing appointment uses the positional operator.
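As an aside, the two-step check can also be collapsed into a single guarded update, a common MongoDB pattern: only push when no element with that id exists yet. Note it only prevents duplicates; it doesn't update an existing entry, so the findOne branch above is still needed if you want that behaviour. A sketch with the same schema:
// Only pushes if no appointment with this id is already in the array.
Officer.update(
  { _id: ID, 'appointmentList.id': { $ne: appt.id } },
  { $push: { appointmentList: appt } },
  function (err, result) {
    // if nothing was modified, an appointment with that id already existed
  }
);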

mongodb + nodejs to find and return specific fields in documents

I'm trying to extract specific document fields from a MongoDB collection (v3.0.8, hosted at MongoLab). The returned documents must fall within a date range, and my goal is to extract specific fields from these documents. My Node.js code:
var query = {}, operator1 = {}, operator2 = {}, operator3 = {};
operator1.$gte = +startDate;
operator2.$lte = +endDate;
operator3.$ne = 'move';
query['xid'] = 1; // Problem here?
query['date'] = Object.assign(operator1, operator2);
query['type'] = operator3;
console.log(query);

MongoClient.connect(connection, function (err, db) {
  if (err) {
    res.send(err);
  } else {
    db.collection('jbone')
      .find(query)
      .toArray(function (err, result) {
        console.log(err);
        res.json(result);
      });
  }
});
If I opt to return all fields in the date range, the query works fine. If I select only the field xid, I get no results. My query object looks sensible according to the docs. The console.log output is:
{ xid: 1,
  date: { '$gte': 20160101, '$lte': 20160107 },
  type: { '$ne': 'move' } }
null
The final null is the err.
Can anyone help me understand what I'm doing wrong?
Or point me to a similar SO question with an answer?
Thanks
Selecting a specific field can be done as below:
.find(
  { date: { '$gte': 20160101, '$lte': 20160107 }, type: { '$ne': 'move' } },
  { xid: 1 }
)
Sample code as follows:
query['date'] = Object.assign(operator1, operator2);
query['type'] = operator3;

db.collection('jbone')
  .find(query, { xid: 1 })
  .toArray(function (err, result) {
    console.log(err);
    res.json(result);
  });
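One caveat worth flagging: in newer versions of the Node.js MongoDB driver (3.x and later), the second argument to find() is an options object, so the projection has to be nested under a projection key. The equivalent call would then look something like:
db.collection('jbone')
  .find(query, { projection: { xid: 1 } })
  .toArray(function (err, result) {
    console.log(err);
    res.json(result);
  });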