The documentation found here states the following:
upsert(values, [options]) -> Promise.<created>
Insert or update a single row. An update will be executed if a row which matches the supplied values on either the primary key or a unique key is found. Note that the unique index must be defined in your sequelize model and not just in the table. Otherwise you may experience a unique constraint violation, because sequelize fails to identify the row that should be updated.
So my expectation is that upserting with a unique key should replace the existing value. However, when my code runs, instead of updating the existing database record it adds a new one. What am I doing wrong?
Here is a sample of my model:
'use strict'
module.exports = (db, dataTypes) => {
const titanJob = db.define('titanJob', {
titanId: {
type: dataTypes.STRING,
allowNull: false,
unique: true
},
name: {
type: dataTypes.STRING,
allowNull: false
}
}, {
timestamps: true
})
return titanJob
}
and here is an example of my upsert:
await asyncForEach(res.data.hits.hits, async es => {
const src = es._source
try {
await titanJob.upsert({
name: src.name,
titanId: src.id,
}, { titanId: src.id })
logger.debug(`[${file}] upsert successful`)
} catch (err) {
logger.warn(`[${file}] failed to save to database`)
logger.warn(`[${file}] ${err}`)
}
})
First, you should add a unique index (constraint) to your table. The data you are upserting should contain the field set of that unique index (constraint).
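If the table was created before unique: true was added to the model, the constraint may exist only in the model definition and not in the database itself. A minimal sketch of adding it by hand (assuming PostgreSQL and a table literally named titanJob; the constraint name here is made up, so adjust both to your schema, or do the equivalent in a migration):
// hypothetical one-off statement; normally this belongs in a migration
await sequelize.query(
  'ALTER TABLE "titanJob" ADD CONSTRAINT "titanJob_titanId_unique" UNIQUE ("titanId");'
);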
It should work. Here is an example using "sequelize": "^5.21.3":
index.ts:
import { Model, DataTypes } from 'sequelize';
import { sequelize } from '../../db';
import assert from 'assert';
class TitanJob extends Model {}
TitanJob.init(
{
titanId: {
type: DataTypes.STRING,
allowNull: false,
unique: true,
},
name: {
type: DataTypes.STRING,
allowNull: false,
},
},
{ sequelize, modelName: 'titanJob', timestamps: true },
);
(async function test() {
try {
await sequelize.sync({ force: true });
const datas = [
{ titanId: '1', name: 'programmer' },
{ titanId: '2', name: 'teacher' },
];
const jobs = await TitanJob.bulkCreate(datas);
assert.deepEqual(
jobs.map((job) => ({ titanId: job.titanId, name: job.name })),
datas,
'Should bulk create programmer and teacher datas',
);
const rval = await TitanJob.upsert({ titanId: '1', name: 'driver' }, { returning: true });
assert.equal(rval[0].titanId, '1', 'Should update the row which titanId is "1"');
} catch (error) {
console.log(error);
} finally {
await sequelize.close();
}
})();
Execution results:
{ POSTGRES_HOST: '127.0.0.1',
POSTGRES_PORT: '5430',
POSTGRES_PASSWORD: 'testpass',
POSTGRES_USER: 'testuser',
POSTGRES_DB: 'node-sequelize-examples' }
Executing (default): DROP TABLE IF EXISTS "titanJob" CASCADE;
Executing (default): DROP TABLE IF EXISTS "titanJob" CASCADE;
Executing (default): CREATE TABLE IF NOT EXISTS "titanJob" ("id" SERIAL , "titanId" VARCHAR(255) NOT NULL UNIQUE, "name" VARCHAR(255) NOT NULL, "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL, "updatedAt" TIMESTAMP WITH TIME ZONE NOT NULL, PRIMARY KEY ("id"));
Executing (default): SELECT i.relname AS name, ix.indisprimary AS primary, ix.indisunique AS unique, ix.indkey AS indkey, array_agg(a.attnum) as column_indexes, array_agg(a.attname) AS column_names, pg_get_indexdef(ix.indexrelid) AS definition FROM pg_class t, pg_class i, pg_index ix, pg_attribute a WHERE t.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = t.oid AND t.relkind = 'r' and t.relname = 'titanJob' GROUP BY i.relname, ix.indexrelid, ix.indisprimary, ix.indisunique, ix.indkey ORDER BY i.relname;
Executing (default): INSERT INTO "titanJob" ("id","titanId","name","createdAt","updatedAt") VALUES (DEFAULT,'1','programmer','2020-02-14 08:09:45.506 +00:00','2020-02-14 08:09:45.506 +00:00'),(DEFAULT,'2','teacher','2020-02-14 08:09:45.506 +00:00','2020-02-14 08:09:45.506 +00:00') RETURNING *;
Executing (default): CREATE OR REPLACE FUNCTION pg_temp.sequelize_upsert(OUT created boolean, OUT primary_key text) AS $func$ BEGIN INSERT INTO "titanJob" ("titanId","name","createdAt","updatedAt") VALUES ('1','driver','2020-02-14 08:09:45.524 +00:00','2020-02-14 08:09:45.524 +00:00') RETURNING "id" INTO primary_key; created := true; EXCEPTION WHEN unique_violation THEN UPDATE "titanJob" SET "titanId"='1',"name"='driver',"updatedAt"='2020-02-14 08:09:45.524 +00:00' WHERE ("id" IS NULL OR "titanId" = '1') RETURNING "id" INTO primary_key; created := false; END; $func$ LANGUAGE plpgsql; SELECT * FROM pg_temp.sequelize_upsert();
Executing (default): SELECT "id", "titanId", "name", "createdAt", "updatedAt" FROM "titanJob" AS "titanJob" WHERE "titanJob"."id" = '1';
No assertion fails. It works as expected. Check the data rows in the database:
node-sequelize-examples=# select * from "titanJob";
id | titanId | name | createdAt | updatedAt
----+---------+---------+----------------------------+----------------------------
2 | 2 | teacher | 2020-02-14 08:09:45.506+00 | 2020-02-14 08:09:45.506+00
1 | 1 | driver | 2020-02-14 08:09:45.506+00 | 2020-02-14 08:09:45.524+00
(2 rows)
source code: https://github.com/mrdulin/node-sequelize-examples/tree/master/src/examples/stackoverflow/59686743
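One more note on your snippet: the second argument you pass to upsert ({ titanId: src.id }) is not a documented upsert option, so as far as I can tell it is simply ignored; upsert determines the conflicting row from the values themselves plus the model's primary/unique keys. Once the unique index really exists on the table, the call can be reduced to something like this (a sketch based on your own loop):
await titanJob.upsert({
  name: src.name,
  titanId: src.id,
})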
I've been building an application for tracking user finances with NestJS, TypeORM and MySQL on the backend, and all was going well until I hit a brick wall: saving an entity called Financialchange, which represents one unit of financial expense or gain. The problem is that in the table representing the many-to-many relationship between user-defined tags and the financial change, an empty row is inserted every time I save this entity, along with the row that is correctly inserted.
The saving is done like this:
const financialChange = await this.financialChangeRepository.save({
amount: payload.amount,
description: payload.description,
expense: payload.expense,
paymentSourceId: payload.paymentSourceId,
appUserId: payload.appUserId,
createdAt: format(new Date(), "yyyy-MM-dd hh:mm:ss")
});
payload.tagIds.forEach(async (tagId) => {
await this.financialChangeTagRepository.save({
financialChangeId: financialChange.id,
tagId
});
});
An example of sent data:
mutation {
addFinancialChange(
financialChange: {
appUserId: 1
amount: 200
description: "Description"
expense: true
paymentSourceId: 1
tagIds: [1]
}
)
}
And when selecting data from the many-to-many table I get this:
The entity data is as follows, first the MySQL table creation:
CREATE TABLE financialchange (
id INT AUTO_INCREMENT PRIMARY KEY,
amount DOUBLE,
createdAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP(),
description VARCHAR(255),
expense BOOLEAN,
paymentSourceId INT,
appUserId INT,
FOREIGN KEY (appUserId) REFERENCES appuser(id),
FOREIGN KEY (paymentSourceId) REFERENCES paymentsource(id)
);
And then the TypeORM entity in Nest:
#Index("appUserId", ["appUserId"], {})
#Index("paymentSourceId", ["paymentSourceId"], {})
#Entity("financialchange", { schema: "finapp" })
export class Financialchange {
#PrimaryGeneratedColumn({ type: "int", name: "id" })
public id?: number;
#Column("double", { name: "amount", nullable: true, precision: 22 })
public amount?: number | null;
#Column("datetime", { name: "createdAt", default: () => "CURRENT_TIMESTAMP" })
public createdAt?: Date;
#Column("varchar", { name: "description", nullable: true, length: 255 })
public description?: string | null;
#Column("tinyint", { name: "expense", nullable: true, width: 1 })
public expense?: boolean | null;
#Column("int", { name: "paymentSourceId", nullable: true })
public paymentSourceId?: number | null;
#Column("int", { name: "appUserId", nullable: true })
public appUserId?: number | null;
#ManyToOne(() => Appuser, (appuser) => appuser.financialchanges, {
onDelete: "NO ACTION",
onUpdate: "NO ACTION"
})
#JoinColumn([{ name: "appUserId", referencedColumnName: "id" }])
public appUser?: Appuser;
#ManyToOne(
() => Paymentsource,
(paymentsource) => paymentsource.financialchanges,
{ onDelete: "NO ACTION", onUpdate: "NO ACTION" }
)
#JoinColumn([{ name: "paymentSourceId", referencedColumnName: "id" }])
public paymentSource?: Paymentsource;
#OneToMany(
() => Financialchangetag,
(financialchangetag) => financialchangetag.financialChange
)
public financialchangetags?: Financialchangetag[];
}
Conceptually it is defined as having any number of tags bound to it, which is why I have created a table that represents the many-to-many relation, called financialchangetag:
CREATE TABLE financialchangetag (
id INT AUTO_INCREMENT PRIMARY KEY,
financialChangeId INT,
tagId INT,
FOREIGN KEY (financialChangeId) REFERENCES financialchange(id),
FOREIGN KEY (tagId) REFERENCES tag(id)
);
#Index("financialChangeId", ["financialChangeId"], {})
#Index("tagId", ["tagId"], {})
#Entity("financialchangetag", { schema: "finapp" })
export class Financialchangetag {
#PrimaryGeneratedColumn({ type: "int", name: "id" })
public id?: number;
#Column("int", { name: "financialChangeId", nullable: true })
public financialChangeId?: number | null;
#Column("int", { name: "tagId", nullable: true })
public tagId?: number | null;
#ManyToOne(
() => Financialchange,
(financialchange) => financialchange.financialchangetags,
{ onDelete: "NO ACTION", onUpdate: "NO ACTION" }
)
#JoinColumn([{ name: "financialChangeId", referencedColumnName: "id" }])
public financialChange?: Financialchange;
#ManyToOne(() => Tag, (tag) => tag.financialchangetags, {
onDelete: "NO ACTION",
onUpdate: "NO ACTION"
})
#JoinColumn([{ name: "tagId", referencedColumnName: "id" }])
public tag?: Tag;
}
And the tag table is defined in this manner:
CREATE TABLE tag (
id INT AUTO_INCREMENT PRIMARY KEY,
description VARCHAR(255)
);
#Entity("tag", { schema: "finapp" })
export class Tag {
#PrimaryGeneratedColumn({ type: "int", name: "id" })
public id?: number;
#Column("varchar", { name: "description", nullable: true, length: 255 })
public description?: string | null;
#OneToMany(
() => Financialchangetag,
(financialchangetag) => financialchangetag.tag
)
public financialchangetags?: Financialchangetag[];
}
I have no clue why this happens. I have tried inserting a record into financialchange with a transaction manager and through other methods of saving data that TypeORM offers, but I still get an empty row in my M:N table.
I have two tables:
//User.js
module.exports = (sequelize, DataTypes) => {
const User = sequelize.define("User", {
userId: {
type: DataTypes.STRING,
allowNull: false,
unique: true,
primaryKey: true,
},
email: {
type: DataTypes.STRING,
},
firstName: {
type: DataTypes.STRING,
allowNull: false,
},
lastName: {
type: DataTypes.STRING,
defaultValue: "",
},
password: {
type: DataTypes.STRING,
allowNull: false,
},
chapterId: {
type: DataTypes.STRING,
},
});
User.associate = (models) => {
User.belongsTo(models.Chapter, {
foreignKey: "chapterId",
targetKey: "chapterId",
as: "chapter",
});
};
return User;
};
and
//chapter table
module.exports = (sequelize, DataTypes) => {
const Chapter = sequelize.define("Chapter", {
chapterId: {
type: DataTypes.STRING,
allowNull: false,
unique: true,
primaryKey: true,
},
chapterName: {
type: DataTypes.STRING,
allowNull: false,
},
isChapterLocal: {
type: DataTypes.BOOLEAN,
allowNull: false,
},
});
Chapter.associate = (models) => {
};
return Chapter;
};
and I am trying to fetch users with their chapters included.
let getAll = async (req, res) => {
try {
const userData = await db.User.findAll({
include: [
{
model: Chapter,
as: "chapter",
},
],
});
res.send(userData);
} catch (e) {
res.send(e);
}
};
How do I include the chapter id and chapter name from the chapter table, matched on the chapterId column of the user table?
I am new to Sequelize and MySQL and am unsure if the relation I have defined in the user model is good.
Do we need to define associations in both tables?
It should work as expected. E.g.
import { sequelize } from '../../db';
import { Model, DataTypes } from 'sequelize';
class User extends Model {}
User.init(
{
userId: {
type: DataTypes.STRING,
allowNull: false,
unique: true,
primaryKey: true,
},
email: {
type: DataTypes.STRING,
},
firstName: {
type: DataTypes.STRING,
allowNull: false,
},
lastName: {
type: DataTypes.STRING,
defaultValue: '',
},
password: {
type: DataTypes.STRING,
allowNull: false,
},
chapterId: {
type: DataTypes.STRING,
},
},
{ sequelize, modelName: 'User' },
);
class Chapter extends Model {}
Chapter.init(
{
chapterId: {
type: DataTypes.STRING,
allowNull: false,
unique: true,
primaryKey: true,
},
chapterName: {
type: DataTypes.STRING,
allowNull: false,
},
isChapterLocal: {
type: DataTypes.BOOLEAN,
allowNull: false,
},
},
{ sequelize, modelName: 'Chapter' },
);
User.belongsTo(Chapter, { foreignKey: 'chapterId', targetKey: 'chapterId', as: 'chapter' });
(async function test() {
try {
await sequelize.sync({ force: true });
// seed
await User.create(
{
userId: '1',
email: 'example#gmail.com',
firstName: 'Lin',
lastName: 'Du',
password: '123',
chapter: {
chapterId: '1',
chapterName: 'ok',
isChapterLocal: false,
},
},
{ include: [{ model: Chapter, as: 'chapter' }] },
);
// test
const userData = await User.findAll({
include: [{ model: Chapter, as: 'chapter' }],
raw: true,
});
console.log('userData:', userData);
} catch (error) {
console.log(error);
} finally {
sequelize.close();
}
})();
The execution results:
Executing (default): DROP TABLE IF EXISTS "User" CASCADE;
Executing (default): DROP TABLE IF EXISTS "Chapter" CASCADE;
Executing (default): DROP TABLE IF EXISTS "Chapter" CASCADE;
Executing (default): CREATE TABLE IF NOT EXISTS "Chapter" ("chapterId" VARCHAR(255) NOT NULL UNIQUE , "chapterName" VARCHAR(255) NOT NULL, "isChapterLocal" BOOLEAN NOT NULL, PRIMARY KEY ("chapterId"));
Executing (default): SELECT i.relname AS name, ix.indisprimary AS primary, ix.indisunique AS unique, ix.indkey AS indkey, array_agg(a.attnum) as column_indexes, array_agg(a.attname) AS column_names, pg_get_indexdef(ix.indexrelid) AS definition FROM pg_class t, pg_class i, pg_index ix, pg_attribute a WHERE t.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = t.oid AND t.relkind = 'r' and t.relname = 'Chapter' GROUP BY i.relname, ix.indexrelid, ix.indisprimary, ix.indisunique, ix.indkey ORDER BY i.relname;
Executing (default): DROP TABLE IF EXISTS "User" CASCADE;
Executing (default): CREATE TABLE IF NOT EXISTS "User" ("userId" VARCHAR(255) NOT NULL UNIQUE , "email" VARCHAR(255), "firstName" VARCHAR(255) NOT NULL, "lastName" VARCHAR(255) DEFAULT '', "password" VARCHAR(255) NOT NULL, "chapterId" VARCHAR(255) REFERENCES "Chapter" ("chapterId") ON DELETE NO ACTION ON UPDATE CASCADE, PRIMARY KEY ("userId"));
Executing (default): SELECT i.relname AS name, ix.indisprimary AS primary, ix.indisunique AS unique, ix.indkey AS indkey, array_agg(a.attnum) as column_indexes, array_agg(a.attname) AS column_names, pg_get_indexdef(ix.indexrelid) AS definition FROM pg_class t, pg_class i, pg_index ix, pg_attribute a WHERE t.oid = ix.indrelid AND i.oid = ix.indexrelid AND a.attrelid = t.oid AND t.relkind = 'r' and t.relname = 'User' GROUP BY i.relname, ix.indexrelid, ix.indisprimary, ix.indisunique, ix.indkey ORDER BY i.relname;
Executing (default): INSERT INTO "Chapter" ("chapterId","chapterName","isChapterLocal") VALUES ($1,$2,$3) RETURNING *;
Executing (default): INSERT INTO "User" ("userId","email","firstName","lastName","password","chapterId") VALUES ($1,$2,$3,$4,$5,$6) RETURNING *;
Executing (default): SELECT "User"."userId", "User"."email", "User"."firstName", "User"."lastName", "User"."password", "User"."chapterId", "chapter"."chapterId" AS "chapter.chapterId", "chapter"."chapterName" AS "chapter.chapterName", "chapter"."isChapterLocal" AS "chapter.isChapterLocal" FROM "User" AS "User" LEFT OUTER JOIN "Chapter" AS "chapter" ON "User"."chapterId" = "chapter"."chapterId";
userData: [ { userId: '1',
email: 'example#gmail.com',
firstName: 'Lin',
lastName: 'Du',
password: '123',
chapterId: '1',
'chapter.chapterId': '1',
'chapter.chapterName': 'ok',
'chapter.isChapterLocal': false } ]
Check the database:
node-sequelize-examples=# select * from "User";
userId | email | firstName | lastName | password | chapterId
--------+-------------------+-----------+----------+----------+-----------
1 | example#gmail.com | Lin | Du | 123 | 1
(1 row)
node-sequelize-examples=# select * from "Chapter";
chapterId | chapterName | isChapterLocal
-----------+-------------+----------------
1 | ok | f
(1 row)
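About your last question: no, for the include above you only need the User.belongsTo(Chapter, ...) side. Declaring the reverse association is optional and only useful if you also want to eager-load users from a chapter; a sketch under that assumption (the 'users' alias is made up):
Chapter.hasMany(User, { foreignKey: 'chapterId', sourceKey: 'chapterId', as: 'users' });
// then, going the other way:
const chapters = await Chapter.findAll({ include: [{ model: User, as: 'users' }] });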
Hello, I want to insert data with bulkCreate, e.g.:
[
{
"typeId": 5,
"devEui": "0094E796CBFCFEF9",
"application_name": "Pressure No.10",
"createdAt": "2020-02-05T08:07:17.000Z",
"updatedAt": "2020-02-05T08:07:17.000Z"
}
]
and my Sequelize code:
return models.sequelize.transaction(t=>{
return models.iot_nodes.bulkCreate(data,{
updateOnDuplicate: ["devEui",]
})
})
When I hit this code the first time, the data is inserted into the db.
My problem is that when I hit it again with the same data, it does not update; it just inserts a new row.
I am using a MySQL db (Laragon).
Log:
Executing (f202b84c-c5d8-4c67-954c-e22f96fb93d8): START TRANSACTION;
Executing (default): INSERT INTO `iot_nodes` (`id`,`typeId`,`devEui`,`application_name`,`createdAt`,`updatedAt`) VALUES (NULL,5,'0094E796CBFCFEF9','Pressure No.10','2020-02-05 08:07:17','2020-02-05 08:07:17') ON DUPLICATE KEY UPDATE `id`=VALUES(`id`),`devEui`=VALUES(`devEui`);
Executing (f202b84c-c5d8-4c67-954c-e22f96fb93d8): COMMIT;
Based on the information given, this fits the following scenario: you want to update the devEui field. The updateOnDuplicate option is documented as:
Fields to update if row key already exists (on duplicate key update)?
So "row key already exists" means the table must have a unique key (or primary key) that is duplicated when you insert the data.
E.g.
import { sequelize } from '../../db';
import { Model, DataTypes } from 'sequelize';
class IotNode extends Model {}
IotNode.init(
{
typeId: {
type: DataTypes.INTEGER,
unique: true,
},
devEui: DataTypes.STRING,
application_name: DataTypes.STRING,
},
{ sequelize, modelName: 'iot_nodes' },
);
(async function test() {
try {
await sequelize.sync({ force: true });
const datas = [
{
typeId: 5,
devEui: '0094E796CBFCFEF9',
application_name: 'Pressure No.10',
createdAt: '2020-02-05T08:07:17.000Z',
updatedAt: '2020-02-05T08:07:17.000Z',
},
];
await IotNode.bulkCreate(datas, { updateOnDuplicate: ['devEui'] });
await IotNode.bulkCreate(datas, { updateOnDuplicate: ['devEui'] });
} catch (error) {
console.log(error);
} finally {
await sequelize.close();
}
})();
As you can see, I make the typeId unique and execute IotNode.bulkCreate twice. The generated SQL logs:
Executing (default): INSERT INTO "iot_nodes" ("id","typeId","devEui","application_name") VALUES (DEFAULT,5,'0094E796CBFCFEF9','Pressure No.10') ON CONFLICT ("typeId") DO UPDATE SET "devEui"=EXCLUDED."devEui" RETURNING *;
Executing (default): INSERT INTO "iot_nodes" ("id","typeId","devEui","application_name") VALUES (DEFAULT,5,'0094E796CBFCFEF9','Pressure No.10') ON CONFLICT ("typeId") DO UPDATE SET "devEui"=EXCLUDED."devEui" RETURNING *;
Sequelize uses the unique typeId field as the duplicate key. Check the rows in the database:
=# select * from iot_nodes;
id | typeId | devEui | application_name
----+--------+------------------+------------------
1 | 5 | 0094E796CBFCFEF9 | Pressure No.10
(1 row)
The data row is upserted as expected.
If we remove unique: true from the typeId field, Sequelize will use the primary key as the duplicate key. Take a look at the generated SQL and the data rows in the database below:
Executing (default): INSERT INTO "iot_nodes" ("id","typeId","devEui","application_name") VALUES (DEFAULT,5,'0094E796CBFCFEF9','Pressure No.10') ON CONFLICT ("id") DO UPDATE SET "devEui"=EXCLUDED."devEui" RETURNING *;
Executing (default): INSERT INTO "iot_nodes" ("id","typeId","devEui","application_name") VALUES (DEFAULT,5,'0094E796CBFCFEF9','Pressure No.10') ON CONFLICT ("id") DO UPDATE SET "devEui"=EXCLUDED."devEui" RETURNING *;
=# select * from iot_nodes;
id | typeId | devEui | application_name
----+--------+------------------+------------------
1 | 5 | 0094E796CBFCFEF9 | Pressure No.10
2 | 5 | 0094E796CBFCFEF9 | Pressure No.10
(2 rows)
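So for your MySQL table, the second run will only update the row if the column you treat as the identifier actually carries a unique index. A sketch along the lines of your model (assuming devEui is meant to be the natural key and that application_name and updatedAt are the fields you want refreshed on conflict; adjust the field list to your needs):
// in the model definition
devEui: {
  type: DataTypes.STRING,
  unique: true,
},
// when inserting
await models.iot_nodes.bulkCreate(data, {
  updateOnDuplicate: ['application_name', 'updatedAt'],
});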
I have rows in my MySQL database and I need a Sequelize.js query.
Every row has a column of type JSON that contains, for example, this:
[
{id: 1234, blah: "test"},
{id: 3210, blah: "test"},
{id: 5897, blah: "test"}
]
I have an id and I need to select the rows that contain this id in at least one object of the array.
The raw MySQL query would look like this:
SELECT * FROM `user` WHERE JSON_CONTAINS(`comments`, '{"id": 1234}');
A simple Sequelize example:
const { fn, col, cast } = this.sequelize;
const User = this.sequelize.define('user', {
id: {
type: Sequelize.INTEGER,
primaryKey: true,
autoIncrement: true
},
comments: {
type: DataTypes.JSON,
defaultValue: [],
},
})
User.findAll({
where: fn('JSON_CONTAINS', col('comments'), cast('{"id": 1234}', 'CHAR CHARACTER SET utf8')),
})
.then(users => console.log('result', users.map(u => u.get())))
.catch(err => console.log('error', err));
The cast function is used to keep the double quotes around "id" from being escaped, which would otherwise produce a wrong query string like this:
SELECT * FROM `user` WHERE JSON_CONTAINS(`comments`, '{\"id\": 1234}');
For completeness, there is one more, dirty, MySQL query variant (don't use it):
SELECT * FROM `user` WHERE `comments` LIKE '%"id": 1234%';
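If the id comes from a variable, the same call can be built with JSON.stringify so the argument stays valid JSON (a sketch, reusing the model defined above):
const targetId = 1234; // e.g. taken from the request
User.findAll({
  where: fn('JSON_CONTAINS', col('comments'), cast(JSON.stringify({ id: targetId }), 'CHAR CHARACTER SET utf8')),
})
  .then(users => console.log('result', users.map(u => u.get())))
  .catch(err => console.log('error', err));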
When I use the order option in findAll, it generates this SQL:
SELECT
`id`, `first_name` AS `firstName`,
`last_name` AS `lastName` FROM `customers` AS `Customer`
ORDER BY `Customer`.`firstName` DESC;
but this SQL causes an error:
ER_BAD_FIELD_ERROR: Unknown column 'Customer.firstName' in 'order clause'
Code example:
var Customer = sequelize.define("Customer", {
id: {
type: Sequelize.INTEGER({unsigned: true}),
primaryKey: true
},
firstName: {
type: Sequelize.STRING(32),
field: "first_name"
},
lastName: {
type: Sequelize.STRING(32),
field: "last_name"
}
}, {
name: {
singular: "customer",
plural: "customers"
},
tableName: "customers",
timestamps: false,
underscored: true
});
Customer.findAll({
order: [["firstName", "DESC"]]
}).then(function(list) {
console.log(list);
}).catch(function(err) {
console.log(err);
});
Mysql: 5.6.20
Sequelize: 3.14.2
Are there any solutions to this issue?
Use order: [["first_name", "DESC"]]; the ORDER BY looks at column names, not your SELECT alias firstName.
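In code, that means ordering by the underlying column rather than the attribute alias; a sketch against the model above:
Customer.findAll({
  order: [["first_name", "DESC"]]
}).then(function(list) {
  console.log(list);
}).catch(function(err) {
  console.log(err);
});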