mysql inner join with 3 tables issue with order count

I have the following sample tables.
CREATE TABLE IF NOT EXISTS `my_customer` (
`customer_id` int(11) NOT NULL AUTO_INCREMENT,
`customer_email` text NOT NULL,
PRIMARY KEY (`customer_id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=64 ;
INSERT INTO `my_customer` (`customer_id`, `customer_email`) VALUES
(4, 'muthu.d#test.in'),
(5, 'nsrirengan#test.in'),
(6, 'vinothini.k#test.in'),
(8, 'vinothini.k111#test.in'),
(63, 'sri.n321#test.in'),
(56, 'vesri.n#test.in'),
(57, 'veesri.n#test.in'),
(58, 'veeisri.n#test.in'),
(59, 'ren#test.in'),
(60, 'ren1#test.in'),
(61, 'nsrirengan123#test.in'),
(62, 'nsrirengan321#test.in'),
(53, 'sri.n#test.in'),
(54, 'royalrenga#test.in'),
(55, 'vesri#test.in');
CREATE TABLE IF NOT EXISTS `my_order` (
`orderid` int(11) NOT NULL AUTO_INCREMENT,
`ordergenerateid` varchar(20) NOT NULL,
`restaurant_id` int(11) NOT NULL,
`customer_id` int(11) NOT NULL,
`usertype` varchar(10) NOT NULL,
`customeremail` varchar(200) NOT NULL,
PRIMARY KEY (`orderid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=152 ;
INSERT INTO `my_order` (`orderid`, `ordergenerateid`, `restaurant_id`, `customer_id`, `usertype`, `customeremail`) VALUES
(1, 'ORD0001', 3, 6, 'C', 'vinothini.k#test.in'),
(2, 'ORD0002', 1, 6, 'C', 'vinothini.k#test.in'),
(3, 'ORD0003', 3, 6, 'C', 'vinothini.k#test.in'),
(4, 'ORD0004', 3, 6, 'C', 'vinothini.k#test.in'),
(5, 'ORD0005', 3, 0, 'G', 'vinothini.k5555555#test.in'),
(6, 'ORD0006', 3, 6, 'C', 'vinothini.k#test.in'),
(7, 'ORD0007', 3, 6, 'C', 'vinothini.k#test.in'),
(8, 'ORD0008', 3, 6, 'C', 'vinothini.k#test.in'),
(9, 'ORD0009', 3, 6, 'C', 'vinothini.k#test.in'),
(10, 'ORD0010', 3, 6, 'C', 'vinothini.k#test.in'),
(11, 'ORD0011', 3, 6, 'C', 'vinothini.k#test.in'),
(12, 'ORD0012', 3, 6, 'C', 'vinothini.k#test.in'),
(13, 'ORD0013', 3, 6, 'C', 'vinothini.k#test.in'),
(14, 'ORD0014', 3, 6, 'C', 'vinothini.k#test.in'),
(15, 'ORD0015', 2, 3, 'C', 'sri.n#test.in'),
(16, 'ORD0016', 2, 3, 'C', 'sri.n#test.in'),
(17, 'ORD0017', 2, 3, 'C', 'sri.n#test.in'),
(18, 'ORD0018', 2, 3, 'C', 'sri.n#test.in'),
(19, 'ORD0019', 2, 3, 'C', 'sri.n#test.in'),
(20, 'ORD0020', 8, 3, 'C', 'sri.n#test.in'),
(21, 'ORD0021', 5, 3, 'C', 'sri.n#test.in'),
(22, 'ORD0022', 13, 3, 'C', 'sri.n#test.in'),
(23, 'ORD0023', 13, 3, 'C', 'sri.n#test.in'),
(24, 'ORD0024', 13, 3, 'C', 'sri.n#test.in'),
(25, 'ORD0025', 13, 3, 'C', 'sri.n#test.in'),
(26, 'ORD0026', 13, 3, 'C', 'sri.n#test.in'),
(27, 'ORD0027', 13, 3, 'C', 'sri.n#test.in'),
(28, 'ORD0028', 13, 3, 'C', 'sri.n#test.in'),
(29, 'ORD0029', 13, 3, 'C', 'sri.n#test.in'),
(30, 'ORD0030', 13, 3, 'C', 'sri.n#test.in'),
(31, 'ORD0031', 13, 3, 'C', 'sri.n#test.in'),
(32, 'ORD0032', 13, 3, 'C', 'sri.n#test.in'),
(33, 'ORD0033', 13, 3, 'C', 'sri.n#test.in'),
(34, 'ORD0034', 13, 3, 'C', 'sri.n#test.in'),
(35, 'ORD0035', 13, 3, 'C', 'sri.n#test.in'),
(36, 'ORD0036', 13, 3, 'C', 'sri.n#test.in'),
(37, 'ORD0037', 13, 3, 'C', 'sri.n#test.in'),
(38, 'ORD0038', 13, 3, 'C', 'sri.n#test.in'),
(39, 'ORD0039', 19, 3, 'C', 'sri.n#test.in'),
(40, 'ORD0040', 13, 3, 'C', 'sri.n#test.in'),
(41, 'ORD0041', 13, 3, 'C', 'sri.n#test.in'),
(42, 'ORD0042', 13, 3, 'C', 'sri.n#test.in'),
(43, 'ORD0043', 13, 3, 'C', 'sri.n#test.in'),
(44, 'ORD0044', 13, 3, 'C', 'sri.n#test.in'),
(45, 'ORD0045', 13, 3, 'C', 'sri.n#test.in'),
(46, 'ORD0046', 13, 3, 'C', 'sri.n#test.in'),
(47, 'ORD0047', 13, 3, 'C', 'sri.n#test.in'),
(48, 'ORD0048', 13, 3, 'C', 'sri.n#test.in'),
(49, 'ORD0049', 19, 3, 'C', 'sri.n#test.in'),
(51, 'ORD0051', 13, 3, 'C', 'sri.n#test.in'),
(52, 'ORD0052', 13, 3, 'C', 'sri.n#test.in'),
(53, 'ORD0053', 13, 3, 'C', 'sri.n#test.in'),
(54, 'ORD0054', 13, 10, 'G', 'sri.nas#test.in'),
(55, 'ORD0055', 13, 11, 'G', 'sri.nasqw#test.in'),
(56, 'ORD0056', 13, 12, 'G', 'sri.nqw#test.in'),
(57, 'ORD0057', 13, 13, 'G', 'sri.nas1123#test.in'),
(58, 'ORD0058', 13, 14, 'G', 'sri.nqw13#test.in'),
(59, 'ORD0059', 13, 15, 'G', 'sri.nas123#test.in'),
(60, 'ORD0060', 13, 16, 'G', 'sri.nas12345#test.in'),
(61, 'ORD0061', 13, 17, 'G', 'sri.nqw123#test.in'),
(62, 'ORD0062', 13, 18, 'G', 'sri.nas123111#test.in'),
(63, 'ORD0063', 13, 19, 'G', 'sri#test.in'),
(64, 'ORD0064', 13, 20, 'G', 'sri.nas111#test.in'),
(65, 'ORD0065', 13, 21, 'G', 'sri.nas12354klk#test.in'),
(66, 'ORD0066', 13, 22, 'G', 'sri.nas123879#test.in'),
(67, 'ORD0067', 13, 23, 'G', 'sri.nasasd#test.in'),
(68, 'ORD0068', 13, 24, 'G', 'sri.nasqwe#test.in'),
(69, 'ORD0069', 13, 25, 'G', 'sri.nas121212#test.in'),
(70, 'ORD0070', 13, 26, 'G', 'sri.nasqwqw#test.in'),
(71, 'ORD0071', 13, 27, 'G', 'sri.nqw321#test.in'),
(72, 'ORD0072', 13, 28, 'G', 'sri.nas123123#test.in'),
(73, 'ORD0073', 13, 3, 'C', 'sri.n#test.in'),
(74, 'ORD0074', 13, 3, 'C', 'sri.n#test.in'),
(75, 'ORD0075', 13, 3, 'C', 'sri.n#test.in'),
(76, 'ORD0076', 13, 3, 'C', 'sri.n#test.in'),
(77, 'ORD0077', 13, 3, 'C', 'sri.n#test.in'),
(78, 'ORD0078', 13, 3, 'C', 'sri.n#test.in'),
(79, 'ORD0079', 13, 3, 'C', 'sri.n#test.in'),
(121, 'ORD0121', 13, 52, 'G', 'sssri.n123123#test.in'),
(122, 'ORD0122', 13, 3, 'C', 'sri.n#test.in'),
(123, 'ORD0123', 13, 3, 'C', 'sri.n#test.in'),
(84, 'ORD0084', 13, 3, 'C', 'sri.n#test.in'),
(86, 'ORD0086', 13, 3, 'C', 'sri.n#test.in'),
(87, 'ORD0087', 13, 3, 'C', 'sri.n#test.in'),
(89, 'ORD0089', 13, 31, 'G', 'royalrenga#test.in'),
(90, 'ORD0090', 13, 32, 'G', 'nsri.n#test.in'),
(91, 'ORD0091', 13, 33, 'G', 'nnsri.n#test.in'),
(92, 'ORD0092', 13, 3, 'C', 'sri.n#test.in'),
(93, 'ORD0093', 13, 34, 'G', 'sssri.n#test.in'),
(94, 'ORD0094', 13, 35, 'G', 'qwsri.n#test.in'),
(95, 'ORD0095', 13, 36, 'G', 'snsri.n#test.in'),
(96, 'ORD0096', 13, 37, 'G', 'ncnsri.n#test.in'),
(97, 'ORD0097', 13, 38, 'G', 'nwnsri.n#test.in'),
(98, 'ORD0098', 13, 39, 'G', 'ncnasri.n#test.in'),
(99, 'ORD0099', 13, 40, 'G', 'ncnsasri.n#test.in'),
(100, 'ORD0100', 13, 41, 'G', 'ncqqnasri.n#test.in'),
(101, 'ORD0101', 13, 42, 'G', 'asdqazsri.nas123#test.in'),
(102, 'ORD0102', 13, 43, 'G', 'nacqqnasri.n#test.in'),
(103, 'ORD0103', 13, 3, 'C', 'sri.n#test.in'),
(104, 'ORD0104', 13, 3, 'C', 'sri.n#test.in'),
(105, 'ORD0105', 13, 3, 'C', 'sri.n#test.in'),
(106, 'ORD0106', 13, 3, 'C', 'sri.n#test.in'),
(107, 'ORD0107', 13, 3, 'C', 'sri.n#test.in'),
(108, 'ORD0108', 13, 0, 'G', 'sri.n#test.in'),
(109, 'ORD0109', 13, 3, 'C', 'sri.n#test.in'),
(110, 'ORD0110', 13, 3, 'C', 'sri.n#test.in'),
(111, 'ORD0111', 13, 44, 'G', 'qsw#test.in'),
(112, 'ORD0112', 13, 45, 'G', 'asdasd#test.in'),
(113, 'ORD0113', 13, 46, 'G', 'qweee#test.in'),
(114, 'ORD0114', 13, 47, 'G', 'qweqwe#test.in'),
(115, 'ORD0115', 13, 48, 'G', 'nsv123sri.n#test.in'),
(116, 'ORD0116', 13, 49, 'G', 'asdasdasd#test.in'),
(117, 'ORD0117', 13, 50, 'G', 'asdasdasdasd#test.in'),
(118, 'ORD0118', 13, 51, 'G', 'qwerew#test.in'),
(119, 'ORD0119', 13, 7, 'C', 'kvinocse86#test.in'),
(120, 'ORD0120', 13, 3, 'C', 'sri.n#test.in'),
(124, 'ORD0124', 13, 53, 'C', 'sri.n#test.in'),
(125, 'ORD0125', 13, 0, 'G', 'sri.n#test.in'),
(126, 'ORD0126', 13, 53, 'C', 'sri.n#test.in'),
(127, 'ORD0127', 13, 53, 'C', 'sri.n#test.in'),
(128, 'ORD0128', 13, 53, 'C', 'sri.n#test.in'),
(129, 'ORD0129', 13, 53, 'C', 'sri.n#test.in'),
(130, 'ORD0130', 13, 53, 'C', 'sri.n#test.in'),
(131, 'ORD0131', 13, 53, 'C', 'sri.n#test.in'),
(132, 'ORD0132', 13, 53, 'C', 'sri.n#test.in'),
(133, 'ORD0133', 13, 53, 'C', 'sri.n#test.in'),
(134, 'ORD0134', 13, 53, 'C', 'sri.n#test.in'),
(135, 'ORD0135', 13, 53, 'C', 'sri.n#test.in'),
(136, 'ORD0136', 13, 53, 'C', 'sri.n#test.in'),
(137, 'ORD0137', 13, 53, 'C', 'sri.n#test.in'),
(138, 'ORD0138', 13, 53, 'C', 'sri.n#test.in'),
(139, 'ORD0139', 13, 0, 'G', 'sri.n321#test.in'),
(140, 'ORD0140', 13, 53, 'C', 'sri.n123#test.in'),
(141, 'ORD0141', 13, 53, 'C', 'sri.n#test.in'),
(142, 'ORD0142', 13, 53, 'C', 'sri.n#test.in'),
(143, 'ORD0143', 13, 55, 'G', 'vesri#test.in'),
(144, 'ORD0144', 13, 56, 'G', 'vesri.n#test.in'),
(145, 'ORD0145', 13, 57, 'G', 'veesri.n#test.in'),
(146, 'ORD0146', 13, 58, 'G', 'veeisri.n#test.in'),
(147, 'ORD0147', 13, 59, 'G', 'ren#test.in'),
(148, 'ORD0148', 13, 60, 'G', 'ren1#test.in'),
(149, 'ORD0149', 13, 53, 'C', 'sri.n#test.in'),
(150, 'ORD0150', 13, 53, 'C', 'sri.n#test.in'),
(151, 'ORD0151', 13, 53, 'C', 'sri.n#test.in');
CREATE TABLE IF NOT EXISTS `my_restaurant` (
`restaurant_id` int(11) NOT NULL AUTO_INCREMENT,
`restaurant_name` varchar(100) NOT NULL,
PRIMARY KEY (`restaurant_id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=23 ;
INSERT INTO `my_restaurant` (`restaurant_id`, `restaurant_name`) VALUES
(1, 'Fuji Sushi'),
(2, 'Big Pete''s Pizza'),
(3, 'Vinos Pizza'),
(4, 'Wafaa and Mikes Cafe'),
(5, 'Fuji Sushi (San Marco Blvd)'),
(6, 'Midtown Deli + Cafe'),
(7, 'De Real Ting Cafe'),
(8, 'Alex Delicatessen'),
(9, 'Two Doors Down Restaurant'),
(10, 'Ginas Deli'),
(11, 'The Mudville Grille (Beach Blvd)'),
(12, 'Casbah Cafe'),
(13, 'Alexander Grill'),
(14, 'The Southern Grill'),
(15, 'Cool Moose Cafe'),
(16, 'Basil: Thai and Sushi'),
(17, 'Hot Wok'),
(18, 'China Joy'),
(19, 'Blu Diner'),
(21, 'New Test restaurant'),
(22, 'testing res');
I have written the following SQL to find the most orders placed by a customer, per restaurant.
SELECT o.customeremail AS custemail, o.restaurant_id, rest.restaurant_name, COUNT( o.customeremail ) AS totalordercount
FROM my_order AS o
INNER JOIN my_customer AS cust ON cust.customer_email = o.customeremail
INNER JOIN my_restaurant AS rest ON rest.restaurant_id = o.restaurant_id
WHERE o.orderid IS NOT NULL
GROUP BY o.restaurant_id
ORDER BY totalordercount DESC
I'm getting a result like this:
custemail restaurant_id restaurant_name totalordercount
sri.n#test.in 13 Alexander Grill 79
vinothini.k#test.in 3 Vinos Pizza 12
sri.n#test.in 2 Big Pete's Pizza 5
sri.n#test.in 19 Blu Diner 2
vinothini.k#test.in 1 Fuji Sushi 1
sri.n#test.in 8 Alex Delicatessen 1
sri.n#test.in 5 Fuji Sushi (San Marco Blvd) 1
SELECT *
FROM `my_order`
WHERE `restaurant_id` =13
AND `customeremail` = 'sri.n#test.in'
LIMIT 0 , 30
The above query returns only 71 rows, but my grouped query reports 79:
sri.n#test.in 13 Alexander Grill 79
But I need the output to look like this:
custemail restaurant_id restaurant_name totalordercount
sri.n#test.in 13 Alexander Grill 71
vinothini.k#test.in 3 Vinos Pizza 12
sri.n#test.in 2 Big Pete's Pizza 5
sri.n#test.in 19 Blu Diner 2
vinothini.k#test.in 1 Fuji Sushi 1
sri.n#test.in 8 Alex Delicatessen 1
sri.n#test.in 5 Fuji Sushi (San Marco Blvd) 1
Thanks in advance

First, you're misusing the pernicious nonstandard MySQL extension to GROUP BY by trying to display a customer email in your result set aggregated by restaurant. Your misuse of this causes your result set to contain arbitrary values of custemail.
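As a guard against this class of bug, you can tell MySQL to reject such under-specified queries instead of silently picking arbitrary values. A minimal sketch (note this replaces any other SQL modes set for the session):
-- make the server error out on GROUP BY queries that select
-- non-aggregated, non-grouped columns (the default in MySQL 5.7.5+)
SET SESSION sql_mode = 'ONLY_FULL_GROUP_BY';
With that mode on, your original query fails with an error about custemail instead of returning an arbitrary email per group.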
Second, this simple query reveals that there are 121 orders for the Alexander Grill. Your claim that there are only 71 seems to be incorrect.
SELECT COUNT(*), o.restaurant_id, r.restaurant_name
FROM my_order AS o
LEFT JOIN my_restaurant AS r ON o.restaurant_id = r.restaurant_id
GROUP BY o.restaurant_id, r.restaurant_name
Third, there are quite a few orders with a customer_id that isn't found in your customer table. You're using INNER JOIN so your query is dropping and not counting those orders. LEFT JOIN will restore those dropped records. But still, you are not using the results of that JOIN.
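To see exactly which orders the INNER JOIN was silently dropping, here's a diagnostic sketch against the tables above:
-- orders whose customeremail has no match in my_customer
SELECT o.orderid, o.customer_id, o.customeremail
FROM my_order AS o
LEFT JOIN my_customer AS cust ON cust.customer_email = o.customeremail
WHERE cust.customer_id IS NULL;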
Fourth, your WHERE o.orderid IS NOT NULL clause is pointless: that column is a primary key.
Try this query leaving out the my_customer table JOIN:
SELECT o.restaurant_id,
rest.restaurant_name,
COUNT( DISTINCT o.customer_id ) AS distinctcustomercount,
COUNT( * ) AS totalordercount
FROM my_order AS o
LEFT JOIN my_restaurant AS rest ON rest.restaurant_id = o.restaurant_id
GROUP BY o.restaurant_id, rest.restaurant_name /* fix GROUP BY misuse */
ORDER BY COUNT( * ) DESC, rest.restaurant_name
I think this does the trick for you. It fully specifies the GROUP BY, and it imposes a deterministic order among the restaurant rows that have only one order each.

I think you are looking for the per-restaurant visit count for customer emails that are present in the customer table. The other numbers just turn out to be right because, apart from the 71-order customer, the others have only visited one restaurant.
You need to add the customer email to the group by if you want customers for each restaurant.
SELECT o.customeremail AS custemail,
o.restaurant_id, rest.restaurant_name, COUNT(o.customeremail ) AS totalordercount
FROM my_order AS o
INNER JOIN my_customer AS cust ON cust.customer_email = o.customeremail
INNER JOIN my_restaurant AS rest ON rest.restaurant_id = o.restaurant_id
GROUP BY o.restaurant_id, o.customeremail
ORDER BY totalordercount DESC
My answer is the same as Ollie's, except I leave the customer join in; I think that presence is required to get the expected results.
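If the goal is strictly one row per restaurant showing its single top customer, a greatest-n-per-group sketch along these lines should also work on older MySQL versions (assuming tied customers should all be shown):
SELECT x.custemail, x.restaurant_id, rest.restaurant_name, x.totalordercount
FROM (
    -- order counts per (restaurant, customer) pair, known customers only
    SELECT o.customeremail AS custemail, o.restaurant_id, COUNT(*) AS totalordercount
    FROM my_order AS o
    INNER JOIN my_customer AS cust ON cust.customer_email = o.customeremail
    GROUP BY o.restaurant_id, o.customeremail
) AS x
INNER JOIN my_restaurant AS rest ON rest.restaurant_id = x.restaurant_id
-- keep only the pair(s) matching the restaurant's highest count
WHERE x.totalordercount = (
    SELECT COUNT(*)
    FROM my_order AS o2
    INNER JOIN my_customer AS c2 ON c2.customer_email = o2.customeremail
    WHERE o2.restaurant_id = x.restaurant_id
    GROUP BY o2.customeremail
    ORDER BY COUNT(*) DESC
    LIMIT 1
)
ORDER BY x.totalordercount DESC;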

OR operator applying to entire WHERE clause

SELECT DISTINCT
customers.customer_id,
services.name
FROM users
INNER JOIN customers ON users.user_id=customers.user_id
LEFT JOIN appointments ON customers.customer_id=appointments.customer_id
INNER JOIN pets ON customers.customer_id=pets.customer_id
INNER JOIN services on appointments.service_id=services.service_id
WHERE ((appointments.customer_id IS NULL)
OR NOT (appointments.date > (SELECT SUBDATE(CURDATE(), 365)))
OR ((appointments.date > (SELECT SUBDATE(CURDATE(), 365)))
AND services.name NOT LIKE '%General Health Checkup%'))
GROUP BY customers.customer_id
I am trying to find all customers who are due a yearly general health checkup.
This requires them to have either:
a) never had an appointment
b) not had an appointment in the past year
c) had an appointment in the past year, but not a general health checkup
I assumed the final OR in my WHERE clause would only apply to that one condition, i.e.
OR ((appointments.date > (SELECT SUBDATE(CURDATE(), 365))) AND services.name NOT LIKE '%General Health Checkup%'))
However, it is ruling out all rows for services named General Health Checkup.
How can I apply this to only that part of the WHERE clause, so that general health checkup appointments can get through, but only if they are from over a year ago?
Thanks !
appointments table:
CREATE TABLE `appointments` (
`appointment_id` int(8) NOT NULL,
`customer_id` int(8) DEFAULT NULL,
`service_id` int(4) DEFAULT NULL,
`staff_id` int(6) DEFAULT NULL,
`pet_id` int(9) DEFAULT NULL,
`date` date NOT NULL,
`start_time` time NOT NULL,
`status` enum('Open','Cancelled','Completed','') NOT NULL,
`create_date` datetime NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `appointments`
--
INSERT INTO `appointments` (`appointment_id`, `customer_id`, `service_id`, `staff_id`, `pet_id`, `date`, `start_time`, `status`, `create_date`) VALUES
(1, 1, 2, 1, 1, '2017-03-22', '10:00:00', 'Completed', '2022-03-16 11:28:46'),
(2, 3, 2, 1, 6, '2021-06-18', '12:00:00', 'Completed', '2021-06-15 11:01:43'),
(3, 2, 2, 1, 2, '2020-07-17', '13:00:00', 'Completed', '2020-05-14 11:30:18'),
(4, 3, 2, 1, 5, '2020-07-10', '14:00:00', 'Completed', '2020-05-21 11:30:18'),
(5, 4, 3, 1, 7, '2020-09-17', '10:00:00', 'Completed', '2022-03-16 12:31:59'),
(6, 8, 2, 1, 11, '2022-03-17', '12:00:00', 'Cancelled', '2022-03-17 23:44:56'),
(7, 4, 2, 7, 7, '2022-03-17', '10:00:00', 'Cancelled', '2022-03-17 23:50:11'),
(8, 1, 1, 13, 1, '2022-03-17', '13:00:00', 'Completed', '2022-03-18 00:28:10'),
(9, 7, 2, 13, 9, '2022-03-18', '15:00:00', 'Cancelled', '2022-03-18 13:16:37'),
(10, 7, 1, 13, 10, '2022-03-18', '16:00:00', 'Cancelled', '2022-03-18 13:48:12'),
(11, 1, 1, 13, 1, '2022-03-22', '11:00:00', 'Completed', '2022-03-22 12:34:55'),
(12, 11, 1, 13, 11, '2022-03-23', '13:00:00', 'Completed', '2022-03-23 15:28:22'),
(13, 9, 3, 13, 12, '2022-03-26', '13:00:00', 'Completed', '2022-03-26 13:13:46'),
(14, 35, 2, 13, 16, '2022-03-27', '10:00:00', 'Completed', '2022-03-27 16:09:14'),
(15, 34, 2, 13, 20, '2022-03-28', '10:00:00', 'Completed', '2022-03-28 10:05:41'),
(16, 33, 1, 13, 20, '2022-03-28', '12:00:00', 'Completed', '2022-03-28 11:40:50'),
(17, 8, 2, 13, 20, '2022-03-16', '14:00:00', 'Completed', '2022-03-28 12:31:42'),
(18, 15, 2, 13, 20, '2022-03-28', '14:00:00', 'Completed', '2022-03-28 12:33:47'),
(19, 31, 4, 13, 20, '2022-03-29', '00:00:00', 'Completed', '2022-03-29 14:20:04'),
(20, 31, 4, 13, 20, '2022-03-29', '10:00:00', 'Completed', '2022-03-29 14:20:42'),
(21, 1, 1, 13, 1, '2022-03-30', '11:00:00', 'Completed', '2022-03-30 15:18:23'),
(22, 33, 4, 13, 22, '2022-03-30', '12:00:00', 'Completed', '2022-03-30 15:22:02'),
(23, 3, 1, 13, 5, '2022-03-30', '13:00:00', 'Open', '2022-03-30 15:22:02'),
(24, 4, 1, 13, 7, '2022-03-30', '13:30:00', 'Completed', '2022-03-30 15:24:52'),
(25, 7, 2, 13, 10, '2022-03-30', '14:30:00', 'Open', '2022-03-30 15:26:11'),
(26, 12, 1, 7, 8, '2022-04-21', '10:00:00', 'Open', '2022-04-21 12:54:10'),
(27, 2, 1, 1, 8, '2022-04-21', '10:00:00', 'Cancelled', '2022-04-21 13:16:23'),
(28, 17, 4, 1, 20, '2022-04-21', '10:00:00', 'Completed', '2022-04-21 13:18:41'),
(29, NULL, 6, 7, 21, '2022-04-21', '12:00:00', 'Completed', '2022-04-21 16:22:44'),
(30, 13, 2, 3, 5, '2022-04-21', '14:00:00', 'Open', '2022-04-21 17:42:10'),
(31, 5, 1, 9, 14, '2022-04-22', '11:00:00', 'Open', '2022-04-22 16:16:57');
services table:
CREATE TABLE `services` (
`service_id` int(4) NOT NULL,
`name` varchar(100) NOT NULL,
`description` text NOT NULL,
`average_time` int(3) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `services`
--
INSERT INTO `services` (`service_id`, `name`, `description`, `average_time`) VALUES
(1, 'Consultation', 'General consultation to help you find the best path to good health for your pet.', 30),
(2, 'General Health Checkup', 'Review of your pets health.', 30),
(3, 'Microchip Insertion', 'Keep your dog safe and trackable with microchip.', 90),
(4, 'Puppy Vaccination', 'Initial puppy vaccination', 30),
(6, 'Booster Vaccination', 'Regular booster vaccincation service.', 30);
I would just check if the customer had a service_id=2 in the last year:
SELECT
customers.customer_id,
GROUP_CONCAT(CONCAT(services.name,'(',appointments.date,')')) as services
FROM users
INNER JOIN customers ON users.user_id=customers.user_id
LEFT JOIN appointments ON customers.customer_id=appointments.customer_id
INNER JOIN pets ON customers.customer_id=pets.customer_id
INNER JOIN services on appointments.service_id=services.service_id
WHERE SUBDATE(CURDATE(), 365) > (SELECT MAX(date)
FROM appointments
WHERE customers.customer_id=appointments.customer_id
AND appointments.service_id=2)
GROUP BY customers.customer_id
output:
customer_id  services
1            General Health Checkup(2017-03-22),Consultation(2022-03-30),Consultation(2022-03-22),Consultation(2022-03-17)
2            General Health Checkup(2020-07-17),Consultation(2022-04-21)
see: DBFIDDLE
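One caveat with the query above: for a customer who has never had a service_id=2 appointment at all, MAX(date) is NULL, so the comparison is NULL and that customer drops out of the result. A NOT EXISTS sketch (assuming the customers table referenced in the question) also keeps those customers:
SELECT c.customer_id
FROM customers AS c
WHERE NOT EXISTS (
    SELECT 1
    FROM appointments AS a
    WHERE a.customer_id = c.customer_id
      AND a.service_id = 2                  -- General Health Checkup
      AND a.date > SUBDATE(CURDATE(), 365)  -- within the last year
);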

Update one table column data to another table column along with unique,duplicate check and update with suffix on duplicate

This needs to work on lower MySQL versions as well, like 4.9 to 5.6.
I need to copy one table's column data to another table, but with a uniqueness check: if a duplicate is found, add a suffix to the data and continue the update (I don't want query execution to stop because of duplicate data).
Let me clarify things:
My first table is tbl_categories:
cat_id cat_parent_id cat_active cat_display_order cat_suggested_hourly_rate
1 0 1 1 10
2 1 1 2 10
And second table is tbl_categories_metadata:
cdata_cat_id cdata_lang_id cdata_name
1 1 A
1 2 B
1 3 C
2 1 A
2 2 B
3 1 D
3 2 E
3 3 F
So in my second table, category names are added per language ID.
Now I need to add a unique column named cat_identifier to the first table, so I did:
ALTER TABLE `tbl_categories` ADD `cat_identifier` VARCHAR(100) NOT NULL AFTER `cat_id`;
That worked fine. Now I have to make the column unique, but that cannot be done straight away due to the identical values present after running the above query, so I did:
UPDATE
`tbl_categories` a
INNER JOIN `tbl_categories` b ON `a`.cat_id = `b`.cat_id
SET
`a`.cat_identifier = `b`.cat_id;
That worked fine and cat_id was copied into the cat_identifier column; now I am able to make the column unique via the query below:
ALTER TABLE `tbl_categories`
ADD UNIQUE KEY `cat_identifier` (`cat_identifier`);
That worked fine, and my table now looks like this:
cat_id cat_identifier cat_parent_id cat_active cat_display_order cat_suggested_hourly_rate
1 1 0 1 1 10
2 2 1 1 2 10
Where I am stuck:
I need to update the cat_identifier values with data taken from the cdata_name column for language ID 1, but in case language ID 1 has the same name for two categories, I need to append -cat_id as a suffix to that name and continue the update.
So I tried the query below:
UPDATE
`tbl_categories`
INNER JOIN `tbl_categories_metadata` ON `tbl_categories`.cat_id = `tbl_categories_metadata`.cdata_cat_id
SET
`tbl_categories`.cat_identifier = `tbl_categories_metadata`.cdata_name
WHERE
`tbl_categories_metadata`.cdata_lang_id = 1;
It works, but as soon as a duplicate is found for language 1, it stops.
What I want is: in case a duplicate is found, append -cat_id (the row's category ID) as a suffix and continue the update, like clean-3, clean-4, etc.
Purpose: sometimes the admin/front-end seller does not add language-specific names for categories, and sometimes they add the same name, so we added cat_identifier, which will be unique, language-independent, and mandatory. This works straightforwardly for new installations of our project, but in already-running systems (previous versions of our project) we have to do it with minimal changes so the system keeps working.
Note: Queries to create both table along with data
CREATE TABLE `tbl_categories` (
`cat_id` int(11) UNSIGNED NOT NULL,
`cat_identifier` varchar(100) NOT NULL,
`cat_parent_id` int(11) UNSIGNED NOT NULL COMMENT '0 defaults to parent category',
`cat_active` tinyint(4) UNSIGNED NOT NULL COMMENT '0 - Inactive, 1 - Active',
`cat_display_order` decimal(4,2) NOT NULL,
`cat_suggested_hourly_rate` decimal(10,2) NOT NULL COMMENT 'This will be used as suggestion hourly rate for this category.'
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
INSERT INTO `tbl_categories` (`cat_id`, `cat_identifier`, `cat_parent_id`, `cat_active`, `cat_display_order`, `cat_suggested_hourly_rate`) VALUES
(1, '', 0, 1, '1.00', '20.00'),
(2, '', 1, 1, '4.00', '15.00'),
(3, '', 1, 0, '3.00', '12.00'),
(4, '', 1, 1, '1.00', '18.00'),
(5, '', 1, 1, '2.00', '15.00'),
(6, '', 1, 1, '5.00', '10.00'),
(7, '', 0, 1, '2.00', '25.00'),
(8, '', 7, 1, '1.00', '20.00'),
(9, '', 7, 1, '2.00', '20.00'),
(10, '', 7, 1, '3.00', '20.00'),
(11, '', 0, 1, '3.00', '25.00'),
(12, '', 11, 1, '1.00', '20.00'),
(13, '', 11, 1, '2.00', '25.00'),
(14, '', 0, 1, '4.00', '20.00'),
(15, '', 14, 1, '1.00', '18.00'),
(16, '', 14, 1, '2.00', '25.00'),
(17, '', 0, 1, '5.00', '30.00'),
(18, '', 17, 1, '1.00', '0.00'),
(19, '', 17, 1, '2.00', '0.00'),
(20, '', 17, 1, '3.00', '0.00'),
(21, '', 0, 0, '2.00', '20.00'),
(22, '', 0, 0, '4.00', '25.00'),
(23, '', 0, 1, '5.00', '15.00'),
(24, '', 0, 0, '8.00', '22.00'),
(25, '', 0, 0, '9.00', '28.00'),
(26, '', 0, 1, '1.00', '20.00'),
(27, '', 26, 1, '1.00', '20.00'),
(28, '', 26, 1, '2.00', '45.00'),
(29, '', 26, 1, '3.00', '40.00'),
(30, '', 0, 0, '2.00', '15.00'),
(31, '', 0, 1, '3.00', '30.00'),
(32, '', 31, 1, '1.00', '22.00'),
(33, '', 31, 1, '2.00', '0.00'),
(34, '', 0, 0, '4.00', '15.00'),
(35, '', 0, 1, '5.00', '25.00'),
(36, '', 35, 1, '1.00', '25.00'),
(37, '', 35, 1, '2.00', '10.00'),
(38, '', 0, 0, '1.00', '40.00'),
(39, '', 0, 1, '3.00', '25.00'),
(40, '', 39, 1, '1.00', '22.00'),
(41, '', 39, 1, '2.00', '25.00'),
(42, '', 0, 0, '6.00', '35.00'),
(43, '', 0, 1, '7.00', '15.00'),
(44, '', 23, 1, '1.00', '22.00'),
(45, '', 23, 1, '2.00', '20.00'),
(46, '', 7, 1, '4.00', '25.00'),
(47, '', 43, 1, '1.00', '35.00'),
(48, '', 43, 1, '2.00', '18.00'),
(49, '', 43, 1, '3.00', '20.00'),
(50, '', 43, 1, '4.00', '40.00'),
(51, '', 7, 1, '5.00', '28.00'),
(52, '', 0, 1, '1.00', '10.00'),
(53, '', 0, 1, '1.00', '10.00');
ALTER TABLE `tbl_categories`
ADD PRIMARY KEY (`cat_id`);
ALTER TABLE `tbl_categories`
MODIFY `cat_id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=54;
CREATE TABLE `tbl_categories_metadata` (
`cdata_cat_id` int(11) UNSIGNED NOT NULL COMMENT 'ID of table tbl_categories',
`cdata_lang_id` int(11) UNSIGNED NOT NULL,
`cdata_name` varchar(255) NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
INSERT INTO `tbl_categories_metadata` (`cdata_cat_id`, `cdata_lang_id`, `cdata_name`) VALUES
(1, 3, 'Limpieza'),
(1, 2, 'Nettoyage'),
(1, 1, 'Cleaning'),
(2, 1, 'Bathroom Deep Cleaning'),
(2, 2, 'Nettoyage en profondeur de la salle de bain'),
(2, 3, 'Limpieza profunda de baño'),
(3, 3, 'Limpieza de alfombras'),
(3, 2, 'Nettoyage de tapis'),
(3, 1, 'Carpet Cleaning'),
(4, 3, 'Limpieza profunda en el hogar'),
(4, 2, 'Nettoyage en profondeur'),
(4, 1, 'Home Deep Cleaning'),
(5, 1, 'Kitchen Deep Cleaning'),
(5, 2, 'Nettoyage en profondeur de la cuisine'),
(5, 3, 'Limpieza profunda de cocina'),
(6, 1, 'Car Cleaning'),
(6, 2, 'Nettoyage de voiture'),
(6, 3, 'Limpieza de coches'),
(7, 3, 'Experto'),
(7, 2, 'Qualifié'),
(7, 1, 'Skilled'),
(8, 1, 'Electricians'),
(8, 2, 'Électriciens'),
(8, 3, 'Electricistas'),
(9, 1, 'Plumbers'),
(9, 2, 'Plombiers'),
(9, 3, 'Fontaneros'),
(10, 1, 'Carpenters'),
(10, 2, 'Charpentiers'),
(10, 3, 'Carpinteros'),
(11, 1, 'Fitness & Yoga'),
(11, 2, 'Fitness et yoga'),
(11, 3, 'Fitness y yoga'),
(12, 1, 'Fitness Trainer at Home'),
(12, 2, 'Fitness Trainer à domicile'),
(12, 3, 'Entrenador de fitness en casa'),
(13, 1, 'Yoga Trainer at Home'),
(13, 2, 'Formateur de yoga à domicile'),
(13, 3, 'Entrenador de yoga en casa'),
(14, 1, 'Salon at Home'),
(14, 2, 'Salon à domicile'),
(14, 3, 'Salon en casa'),
(15, 3, 'Salon en casa'),
(15, 2, 'Salon à domicile'),
(15, 1, 'Salon at home'),
(16, 1, 'Makeup and Hairstyling'),
(16, 2, 'Maquillage et Coiffure'),
(16, 3, 'Maquillaje y Peluquería'),
(17, 3, 'Servicios de fotografia'),
(17, 2, 'Services de photographie'),
(17, 1, 'Photography Services'),
(18, 1, 'Wedding Photography & Filming'),
(18, 2, 'Photographie et tournage de mariage'),
(18, 3, 'Fotografía y filmación de bodas'),
(19, 3, 'Fotografía y rodaje de cumpleaños'),
(19, 2, 'Photographie et tournage d\'anniversaire'),
(19, 1, 'Birthday Photography & Filming'),
(20, 1, 'Family Function Shoots'),
(20, 2, 'Prise de vue en famille'),
(20, 3, 'Disparos de funciones familiares'),
(21, 3, 'Pintura mural'),
(21, 2, 'Peinture murale'),
(22, 2, 'Charpenterie'),
(22, 1, 'Carpentry'),
(23, 3, 'Personal de mantenimiento'),
(23, 2, 'Bricoleur'),
(23, 1, 'Handyman'),
(24, 3, 'Actividades de jardinería'),
(24, 2, 'Activités de jardinage'),
(24, 1, 'Gardening Activities'),
(25, 2, 'Déménagement d\'une maison complète / déménagement d\'une maison'),
(25, 3, 'Remoción de casa completa / mudanza de casa'),
(25, 1, 'Full House Removal / House moving'),
(26, 1, 'Performing Arts'),
(26, 2, 'Arts performants'),
(26, 3, 'Las artes escénicas'),
(27, 1, 'Party Host'),
(27, 2, 'Hôte de fête'),
(27, 3, 'Anfitrión de la fiesta'),
(28, 1, 'DJ'),
(28, 2, 'DJ'),
(28, 3, 'DJ'),
(29, 1, 'Choreographer'),
(29, 2, 'Chorégraphe'),
(29, 3, 'Coreógrafo'),
(30, 3, 'Mesas de barman / espera'),
(30, 2, 'Tables de barman / d\'attente'),
(30, 1, 'Bartending / Waiting Tables'),
(31, 2, 'Connectivité réseau'),
(31, 1, 'Network Connectivity'),
(31, 3, 'Conectividad de red'),
(32, 1, 'Broadband Connection installation'),
(32, 2, 'Installation de connexion à large bande'),
(32, 3, 'Instalación de conexión de banda ancha'),
(33, 1, 'Leased Line Connection'),
(33, 2, 'Connexion de ligne louée'),
(33, 3, 'Conexión de línea arrendada'),
(34, 3, 'Vigilancia de los niños'),
(34, 2, 'Baby-sitting'),
(34, 1, 'Baby Sitting'),
(35, 1, 'Pet Services'),
(35, 2, 'Services pour animaux'),
(35, 3, 'Servicios para mascotas'),
(36, 1, 'Pet Bathing & Grooming'),
(36, 2, 'Bain et toilettage d\'animaux'),
(36, 3, 'Baño y aseo de mascotas'),
(37, 1, 'Walking the pet'),
(37, 2, 'Promener l\'animal'),
(37, 3, 'Paseando a la mascota'),
(38, 2, 'Antiparasitaire'),
(39, 1, 'Personal Training'),
(39, 2, 'Formation personnelle'),
(39, 3, 'Entrenamiento personal'),
(40, 1, 'Voice Modulation / Speech'),
(40, 2, 'Modulation vocale / discours'),
(40, 3, 'Modulación de voz / habla'),
(41, 1, 'Personality Trainer'),
(41, 2, 'Entraîneur de personnalité'),
(41, 3, 'Entrenador de personalidad'),
(42, 3, 'Carta de presentación / Redactor'),
(42, 2, 'Lettre d\'accompagnement / Rédacteur de CV'),
(42, 1, 'Cover Letter / Resume Writer'),
(43, 3, 'Otros'),
(43, 2, 'Autres'),
(43, 1, 'Others'),
(21, 1, 'Wall Painting'),
(44, 1, 'Gardening Activities'),
(44, 2, 'Activités de jardinage'),
(44, 3, 'Actividades de jardinería'),
(45, 1, 'House moving'),
(45, 2, 'déménagement'),
(45, 3, 'mudanza'),
(22, 3, 'Carpintería'),
(46, 1, 'Carpentry'),
(46, 2, 'Charpenterie'),
(46, 3, 'Carpintería'),
(47, 1, 'Cover letter/Resume Writer'),
(47, 2, 'Lettre de motivation / Rédacteur de CV'),
(47, 3, 'Carta de presentación / Redactor'),
(48, 1, 'Baby Sitting'),
(48, 2, 'Baby-sitting'),
(48, 3, 'Vigilancia de los niños'),
(49, 1, 'Bartending/ Waiting Tables'),
(49, 2, 'Tables de barman / d\'attente'),
(49, 3, 'Mesas de barman / espera'),
(50, 1, 'Pest Control'),
(50, 2, 'Antiparasitaire'),
(50, 3, 'Control de plagas'),
(38, 1, 'Pest Control'),
(38, 3, 'Control de plagas'),
(51, 1, 'Wall Painting'),
(51, 2, 'Peinture murale'),
(51, 3, 'Pintura mural'),
(52, 1, 'Cat1'),
(53, 1, 'Cleaning');
ALTER TABLE `tbl_categories_metadata`
ADD UNIQUE KEY `cat_id` (`cdata_cat_id`,`cdata_lang_id`);
This might work.
UPDATE tbl_categories a
INNER JOIN (
SELECT a.cat_id, MAX(b.cdata_name) cdata_name, ROW_NUMBER() OVER (PARTITION BY cdata_name ORDER BY cat_id) rn
FROM tbl_categories a
INNER JOIN tbl_categories_metadata b ON a.cat_id = b.cdata_cat_id
WHERE b.cdata_lang_id = 1
GROUP BY a.cat_id
) b ON a.cat_id = b.cat_id
SET a.cat_identifier = (CASE WHEN b.rn = 1 THEN b.cdata_name ELSE CONCAT(b.cdata_name, '-', a.cat_id) END)
https://dbfiddle.uk/?rdbms=mysql_8.0&fiddle=8f620a00e3d81012a3e1332f13914ed8
Revised version for MySQL 5.6
UPDATE tbl_categories a
INNER JOIN (
SELECT a.cat_id, MAX(b.cdata_name) cdata_name
FROM tbl_categories a
INNER JOIN tbl_categories_metadata b ON a.cat_id = b.cdata_cat_id
WHERE b.cdata_lang_id = 1
GROUP BY a.cat_id
) b ON a.cat_id = b.cat_id
LEFT JOIN (
SELECT MIN(a.cat_id) cat_id, b.cdata_name
FROM tbl_categories a
INNER JOIN tbl_categories_metadata b ON a.cat_id = b.cdata_cat_id
WHERE b.cdata_lang_id = 1
GROUP BY b.cdata_name
) c ON a.cat_id = c.cat_id AND b.cdata_name = c.cdata_name
SET a.cat_identifier = (CASE WHEN c.cat_id IS NOT NULL THEN b.cdata_name ELSE CONCAT(b.cdata_name, '-', a.cat_id) END)
;
https://dbfiddle.uk/?rdbms=mysql_5.6&fiddle=2c433ca4f20af22f7578dfe31e66db7b
Aside
cat_identifier and cat_id have the same meaning, which is confusing. A more appropriate name for the new column, given its use, would be default_name (or default_en_name). This answer will use the former.
Answer
First, set the column values to guaranteed unique values as planned, using both tbl_categories_metadata.cdata_name and tbl_categories.cat_id:
UPDATE `tbl_categories` AS tc
JOIN `tbl_categories_metadata` AS tcm
ON tc.cat_id = tcm.cdata_cat_id
SET `default_name` = CONCAT(tcm.cdata_name, '-', tc.cat_id)
WHERE
tcm.cdata_lang_id = 1;
The column could simply be left as-is. However, if you don't want the cat_id suffix on some of the fields, remove it. Which expression to use for removing it depends on the version of MySQL server used (and what UDFs are loaded). If using MySQL 8.0, make use of REGEXP_REPLACE:
UPDATE IGNORE `tbl_categories`
SET `default_name` = REGEXP_REPLACE(`default_name`,
CONCAT('-', cat_id, '$'),
'')
ORDER BY cat_id
Similarly, if you have a UDF that adds regex functionality, use that. If using a version before 8.0, use a combination of SUBSTRING() and CHAR_LENGTH():
...
SET `default_name` = SUBSTRING(`default_name`, 1,
CHAR_LENGTH(`default_name`) - 1 - CHAR_LENGTH(cat_id)
)
...
A simpler (though more error-prone) solution would be to use REPLACE()
...
SET `default_name` = REPLACE(`default_name`,
CONCAT('-', cat_id), '')
...
Alternate Answer
More as an exercise, it can be done in a single query in a few ways; here's a systematic approach.
Whenever rows might depend on other rows (such as with unique indices, but not only then), a single-query solution can generally be done with an additional join on one of the tables, usually grouped and by the use of aggregate functions, though sometimes with non-equality join conditions (e.g. sometimes you can use something like tbl_alias_0.col < tbl_alias_1.col). To get the related rows, the join goes through tbl_categories_metadata. The table references clause would thus be:
...
`tbl_categories` AS tc
JOIN `tbl_categories_metadata` AS tcm
ON tc.cat_id = tcm.cdata_cat_id
JOIN `tbl_categories_metadata` AS tcm_groups
ON tcm.cdata_name = tcm_groups.cdata_name
AND tcm.cdata_lang_id = tcm_groups.cdata_lang_id
...
(Note that tcm is only used to join through in this example, though in some places some of the tcm_groups column references could be replaced with tcm column references.)
For this example, since each row (identified by cat_id or cdata_cat_id) will get assigned a cdata_name, these naturally form groups.
...
GROUP BY tc.cat_id, tcm_groups.cdata_name
...
The cdata_name in each group will potentially come from multiple rows in tbl_categories (via cdata_cat_id). As only one row from tbl_categories in the group won't have a suffix appended to the default name, this must be specified. One simple option is to pick the row with minimal cat_id, but other options (e.g. maximal cat_id, random) could be implemented instead. This is implemented with a CASE using a comparison with tcm_groups.cdata_cat_id to distinguish the cases.
...
CASE tc.cat_id
WHEN MIN(tcm_groups.cdata_cat_id) THEN tcm.cdata_name
ELSE CONCAT(tcm_groups.cdata_name, '-', tc.cat_id)
END
...
If this were a simple SELECT, the above components are all you'd need. (Starting with a SELECT is useful to check the work.) Combined, they are:
SELECT tc.cat_id,
CASE tc.cat_id
WHEN MIN(tcm_groups.cdata_cat_id) THEN tcm_groups.cdata_name
ELSE CONCAT(tcm_groups.cdata_name, '-', tc.cat_id)
END AS default_name
FROM `tbl_categories` AS tc
JOIN `tbl_categories_metadata` AS tcm
ON tc.cat_id = tcm.cdata_cat_id
JOIN `tbl_categories_metadata` AS tcm_groups
ON tcm.cdata_name = tcm_groups.cdata_name
AND tcm.cdata_lang_id = tcm_groups.cdata_lang_id
WHERE tcm_groups.cdata_lang_id = 1
GROUP BY tc.cat_id, tcm_groups.cdata_name
ORDER BY tc.cat_id
The one issue with this is that GROUP BY isn't allowed in UPDATE statements. To address this, the joined table, grouping and aggregate functions need to instead take place in a sub-SELECT. The groups therein should be the columns in JOIN conditions and any in the grouping clause. The aggregate functions get used in the result columns. This gives the sub-SELECT:
SELECT MIN(cdata_cat_id) AS cdata_cat_id, cdata_lang_id, cdata_name
FROM `tbl_categories_metadata`
GROUP BY cdata_name, cdata_lang_id
Rewriting the table references using that gives:
...
`tbl_categories` AS tc
JOIN `tbl_categories_metadata` AS tcm
ON tc.cat_id = tcm.cdata_cat_id
JOIN (
SELECT MIN(cdata_cat_id) AS cdata_cat_id, cdata_lang_id, cdata_name
FROM `tbl_categories_metadata`
GROUP BY cdata_name, cdata_lang_id
) AS tcm_groups
ON tcm.cdata_name = tcm_groups.cdata_name
AND tcm.cdata_lang_id = tcm_groups.cdata_lang_id
...
Aggregate functions are replaced with references to the sub-SELECT columns:
...
CASE tc.cat_id
WHEN tcm_groups.cdata_cat_id THEN tcm_groups.cdata_name
ELSE CONCAT(tcm_groups.cdata_name, '-', tc.cat_id)
END
...
These parts can be combined into an UPDATE:
UPDATE `tbl_categories` AS tc
JOIN `tbl_categories_metadata` AS tcm
ON tc.cat_id = tcm.cdata_cat_id
JOIN (
SELECT MIN(cdata_cat_id) AS cdata_cat_id, cdata_lang_id, cdata_name
FROM `tbl_categories_metadata`
GROUP BY cdata_name, cdata_lang_id
) AS tcm_groups
ON tcm.cdata_name = tcm_groups.cdata_name AND tcm.cdata_lang_id = tcm_groups.cdata_lang_id
SET default_name = CASE tc.cat_id
WHEN tcm_groups.cdata_cat_id THEN tcm.cdata_name
ELSE CONCAT(tcm.cdata_name, '-', tc.cat_id)
END
WHERE tcm.cdata_lang_id = 1
Performance
The sub-SELECT is less performant than the flat join, but can't be avoided in the UPDATE.
A WHERE tcm.cdata_lang_id = 1 could be added to the sub-SELECT. This won't affect correctness, but could result in a more efficient query if there's an index on cdata_lang_id. The most efficient query will result from an index on (cdata_lang_id, cdata_name).
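For example (the index name is illustrative; the 191-character prefix keeps the key within utf8mb4 index-length limits):
ALTER TABLE `tbl_categories_metadata`
    ADD INDEX `idx_lang_name` (`cdata_lang_id`, `cdata_name`(191));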

using sub query in sql

I have 4 Tables:
Travelers (TravelerID,FirstName,LastName)
Guides(GuideID,FirstName,LastName)
Locations(LocationID,LocationName)
Trips(TravelerID,GuideID,LocationID,Stars,StartDate,ReturnDate)
For each guide, I want to return the name of the location where he guided the maximum number of travelers. The result needs to contain all guides, even those that didn't have any travelers at all.
I have tried to use this subquery, but it doesn't work:
SELECT G.FirstName,L.LocationName,count(distinct(TravelerID))as
number_of_travelers_per_guide
FROM Guides AS G
LEFT JOIN Trips AS T USING (GuideID)
LEFT JOIN Locations AS L USING (LocationID)
GROUP BY G.FirstName,L.LocationName
HAVING max((SELECT T1.number_of_travelers_per_guide
FROM Trips AS T1
WHERE T.GuideID=T1.GuideID));
the result should be like this:
I will appreciate any help
Edit by me Patrick Artner:
/* data creation script - python 3.6.2
import random
random.seed(815) # fixed 0815 seed
Travelers = [[x,'a' + str(x), 'A' + str(x)] for x in range(20)]
Guides = [ [x,"guide_" + str(x), "G_" + str(x)] for x in range(10)]
Locations = [[x, "location_" + str(x)] for x in range(6)]
# Trips(TravelerID,GuideID,LocationID,Stars,StartDate,ReturnDate)
Trips = []
for n in range(300):
Trips.append([random.choice(range(20)),random.choice(range(10)),random.choice(range(6)),random.randint(1,6),None, None])
def prnList(lst):
def prn(lst):
for i in lst:
yield " ,".join([str(x) for x in i]) + "\n"
yield "\n\n"
return "".join(prn(lst))
with open("demodata.txt","w") as f:
f.write("Travelers\nTravelerID,FirstName,LastName\n")
f.write(prnList(Travelers))
f.write("Guides\nGuideID,FirstName,LastName\n")
f.write(prnList(Guides))
f.write("Locations\nLocationID,LocationName\n")
f.write(prnList(Locations))
f.write("Trips\nTravelerID,GuideID,LocationID,Stars,StartDate,ReturnDate\n")
f.write(prnList(Trips))
*/
This data was cropped due to limitations in sqlfiddle length:
CREATE TABLE Travelers (`TravelerID` int, `FirstName` varchar(3), `LastName` varchar(3));
INSERT INTO Travelers (`TravelerID`, `FirstName`, `LastName`)
VALUES
(0, 'a0', 'A0'),
(1, 'a1', 'A1'),
(2, 'a2', 'A2'),
(3, 'a3', 'A3'),
(4, 'a4', 'A4'),
(5, 'a5', 'A5'),
(6, 'a6', 'A6'),
(7, 'a7', 'A7'),
(8, 'a8', 'A8'),
(9, 'a9', 'A9'),
(10, 'a10', 'A10'),
(11, 'a11', 'A11'),
(12, 'a12', 'A12'),
(13, 'a13', 'A13'),
(14, 'a14', 'A14'),
(15, 'a15', 'A15'),
(16, 'a16', 'A16'),
(17, 'a17', 'A17'),
(18, 'a18', 'A18'),
(19, 'a19', 'A19')
;
CREATE TABLE Guides (`GuideID` int, `FirstName` varchar(7), `LastName` varchar(3));
INSERT INTO Guides (`GuideID`, `FirstName`, `LastName`)
VALUES
(0, 'guide_0', 'G_0'),
(1, 'guide_1', 'G_1'),
(2, 'guide_2', 'G_2'),
(3, 'guide_3', 'G_3'),
(4, 'guide_4', 'G_4'),
(5, 'guide_5', 'G_5'),
(6, 'guide_6', 'G_6'),
(7, 'guide_7', 'G_7'),
(8, 'guide_8', 'G_8'),
(9, 'guide_9', 'G_9')
;
CREATE TABLE Locations (`LocationID` int, `LocationName` varchar(10));
INSERT INTO Locations (`LocationID`, `LocationName`)
VALUES
(0, 'location_0'),
(1, 'location_1'),
(2, 'location_2'),
(3, 'location_3'),
(4, 'location_4'),
(5, 'location_5')
;
CREATE TABLE Trips (`TravelerID` int, `GuideID` int, `LocationID` int, `Stars` int, `StartDate` varchar(4), `ReturnDate` varchar(4));
INSERT INTO Trips (`TravelerID`, `GuideID`, `LocationID`, `Stars`, `StartDate`, `ReturnDate`)
VALUES
(0, 4, 0, 5, 'None', 'None'),
(9, 5, 3, 1, 'None', 'None'),
(16, 9, 0, 6, 'None', 'None'),
(6, 3, 2, 4, 'None', 'None'),
(5, 0, 2, 4, 'None', 'None'),
(1, 4, 4, 5, 'None', 'None'),
(5, 4, 1, 1, 'None', 'None'),
(8, 7, 3, 3, 'None', 'None'),
(8, 2, 1, 5, 'None', 'None'),
(5, 6, 3, 5, 'None', 'None'),
(2, 8, 1, 3, 'None', 'None'),
(17, 0, 1, 4, 'None', 'None'),
(6, 8, 3, 4, 'None', 'None'),
(0, 3, 3, 2, 'None', 'None'),
(4, 8, 3, 4, 'None', 'None'),
(16, 5, 0, 6, 'None', 'None'),
(10, 9, 5, 4, 'None', 'None'),
(19, 8, 0, 4, 'None', 'None'),
(7, 7, 4, 3, 'None', 'None'),
(7, 4, 1, 5, 'None', 'None'),
(13, 7, 5, 6, 'None', 'None'),
(0, 9, 5, 2, 'None', 'None'),
(4, 5, 5, 5, 'None', 'None'),
(19, 1, 2, 3, 'None', 'None'),
(2, 6, 1, 1, 'None', 'None'),
(13, 3, 0, 4, 'None', 'None'),
(8, 0, 0, 2, 'None', 'None'),
(18, 6, 2, 5, 'None', 'None'),
(14, 4, 5, 3, 'None', 'None'),
(12, 8, 1, 6, 'None', 'None'),
(8, 1, 3, 4, 'None', 'None'),
(15, 1, 5, 2, 'None', 'None'),
(5, 1, 4, 6, 'None', 'None'),
(5, 5, 1, 1, 'None', 'None'),
(14, 6, 0, 5, 'None', 'None'),
(3, 8, 3, 6, 'None', 'None'),
(3, 1, 1, 4, 'None', 'None'),
(17, 1, 0, 4, 'None', 'None'),
(6, 4, 0, 1, 'None', 'None'),
(16, 6, 5, 6, 'None', 'None'),
(6, 8, 3, 1, 'None', 'None'),
(13, 8, 1, 1, 'None', 'None'),
(13, 3, 5, 5, 'None', 'None'),
(12, 4, 4, 2, 'None', 'None'),
(13, 8, 3, 3, 'None', 'None'),
(3, 1, 1, 1, 'None', 'None'),
(6, 4, 2, 2, 'None', 'None'),
(0, 8, 0, 6, 'None', 'None'),
(3, 8, 3, 2, 'None', 'None'),
(17, 7, 1, 3, 'None', 'None'),
(0, 4, 4, 1, 'None', 'None'),
(13, 0, 2, 6, 'None', 'None'),
(5, 5, 2, 6, 'None', 'None'),
(14, 8, 0, 2, 'None', 'None'),
(12, 0, 1, 2, 'None', 'None'),
(3, 7, 3, 1, 'None', 'None'),
(8, 2, 0, 2, 'None', 'None'),
(17, 0, 4, 4, 'None', 'None'),
(4, 6, 0, 4, 'None', 'None'),
(2, 2, 1, 1, 'None', 'None'),
(9, 6, 4, 6, 'None', 'None'),
(11, 1, 4, 3, 'None', 'None'),
(6, 5, 2, 3, 'None', 'None'),
(1, 5, 5, 5, 'None', 'None'),
(13, 6, 3, 3, 'None', 'None'),
(15, 4, 5, 2, 'None', 'None'),
(5, 3, 5, 2, 'None', 'None'),
(2, 5, 5, 5, 'None', 'None'),
(6, 2, 1, 1, 'None', 'None'),
(19, 0, 1, 4, 'None', 'None'),
(15, 3, 3, 1, 'None', 'None'),
(13, 1, 5, 1, 'None', 'None'),
(14, 8, 3, 4, 'None', 'None'),
(19, 5, 4, 5, 'None', 'None'),
(17, 1, 0, 5, 'None', 'None'),
(6, 8, 3, 4, 'None', 'None'),
(0, 4, 4, 2, 'None', 'None'),
(16, 5, 3, 4, 'None', 'None'),
(13, 4, 5, 6, 'None', 'None'),
(0, 8, 4, 3, 'None', 'None'),
(10, 0, 3, 3, 'None', 'None'),
(0, 8, 0, 4, 'None', 'None'),
(9, 6, 5, 1, 'None', 'None'),
(9, 7, 3, 5, 'None', 'None'),
(18, 2, 4, 3, 'None', 'None'),
(15, 9, 3, 5, 'None', 'None'),
(5, 5, 3, 3, 'None', 'None'),
(17, 4, 1, 4, 'None', 'None'),
(7, 6, 4, 5, 'None', 'None'),
(7, 9, 3, 5, 'None', 'None'),
(12, 3, 3, 3, 'None', 'None'),
(13, 2, 3, 6, 'None', 'None'),
(17, 8, 5, 5, 'None', 'None'),
(15, 8, 0, 3, 'None', 'None'),
(3, 3, 2, 4, 'None', 'None'),
(11, 1, 2, 5, 'None', 'None'),
(17, 1, 0, 3, 'None', 'None'),
(17, 4, 0, 5, 'None', 'None'),
(1, 5, 1, 4, 'None', 'None'),
(16, 8, 4, 4, 'None', 'None'),
(15, 5, 3, 3, 'None', 'None'),
(17, 9, 4, 1, 'None', 'None'),
(1, 8, 2, 5, 'None', 'None'),
(15, 9, 2, 6, 'None', 'None'),
(17, 2, 1, 1, 'None', 'None'),
(12, 1, 2, 6, 'None', 'None'),
(6, 9, 1, 3, 'None', 'None'),
(5, 1, 3, 5, 'None', 'None'),
(8, 1, 5, 2, 'None', 'None'),
(12, 9, 1, 5, 'None', 'None'),
(3, 4, 0, 5, 'None', 'None'),
(11, 6, 2, 4, 'None', 'None'),
(8, 0, 5, 1, 'None', 'None'),
(5, 1, 1, 1, 'None', 'None'),
(15, 3, 0, 3, 'None', 'None'),
(15, 1, 2, 4, 'None', 'None'),
(2, 6, 1, 5, 'None', 'None'),
(19, 7, 4, 6, 'None', 'None'),
(2, 2, 4, 1, 'None', 'None'),
(19, 2, 2, 6, 'None', 'None'),
(10, 4, 4, 2, 'None', 'None'),
(0, 1, 1, 1, 'None', 'None'),
(7, 2, 4, 3, 'None', 'None'),
(16, 5, 3, 4, 'None', 'None'),
(11, 3, 4, 3, 'None', 'None'),
(15, 1, 2, 5, 'None', 'None'),
(9, 4, 0, 3, 'None', 'None'),
(16, 3, 5, 5, 'None', 'None'),
(7, 8, 4, 6, 'None', 'None'),
(14, 5, 0, 5, 'None', 'None'),
(19, 6, 3, 1, 'None', 'None'),
(17, 5, 3, 5, 'None', 'None'),
(12, 7, 0, 5, 'None', 'None'),
(7, 0, 1, 2, 'None', 'None'),
(0, 1, 4, 4, 'None', 'None'),
(16, 2, 0, 3, 'None', 'None')
;
If you want all the guides, but ordered so that the guide with the max number of travelers comes first, you could use an ORDER BY ... DESC, e.g.:
SELECT G.FirstName, L.LocationName, count(distinct(TravelerID))as number_of_travelers_per_guide
FROM Guides AS G
LEFT JOIN Trips AS T USING (GuideID)
LEFT JOIN Locations AS L USING (LocationID)
GROUP BY G.FirstName,L.LocationName
ORDER BY number_of_travelers_per_guide DESC
If you need the max guide and also the other guides' names, then you could join the result of the max with all the guides:
select Guides.FirstName, my_t2.LocationName, my_t2.number_of_travelers_per_guide
from Guides
left join (
SELECT G.GuideId, G.FirstName,L.LocationName, count(distinct T.TravelerID ) as number_of_travelers_per_guide
FROM Guides AS G
LEFT JOIN Trips AS T USING (GuideID)
LEFT JOIN Locations AS L USING (LocationID)
GROUP BY G.FirstName,L.LocationName
HAVING count(distinct TravelerID ) = (
select max(my_count) from (SELECT count(distinct T.TravelerID) my_count
FROM Guides AS G
LEFT JOIN Trips AS T USING (GuideID)
LEFT JOIN Locations AS L USING (LocationID)
GROUP BY G.FirstName, L.LocationName ) my_t )
) my_t2
on Guides.GuideID = my_t2.GuideID
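And for the per-guide maximum (one row per guide, including guides with no trips), here is a sketch assuming MySQL 8.0+, where window functions are available:
SELECT FirstName, LocationName, number_of_travelers_per_guide
FROM (
    SELECT G.GuideID, G.FirstName, L.LocationName,
           COUNT(DISTINCT T.TravelerID) AS number_of_travelers_per_guide,
           -- rank each guide's locations by traveler count
           ROW_NUMBER() OVER (PARTITION BY G.GuideID
                              ORDER BY COUNT(DISTINCT T.TravelerID) DESC) AS rn
    FROM Guides AS G
    LEFT JOIN Trips AS T USING (GuideID)
    LEFT JOIN Locations AS L USING (LocationID)
    GROUP BY G.GuideID, G.FirstName, L.LocationName
) ranked
WHERE rn = 1;
The LEFT JOIN keeps guides with no trips: they form a single group with a NULL location and a count of 0, which ranks first for that guide.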

SQL query: How to validate that prerequisite courses are finished and how to find time conflicts between courses?

I am trying to write a query that advises a student which courses to register for. The query will select the suitable courses and will validate 1) the courses they have finished, and what is left for them to take, 2) the prerequisite courses that must be finished, and 3) time conflicts, in order to recommend the best courses for them.
I created these tables and joined them, but the join operation is not working. What is the correct syntax? And if there is no prerequisite, how do I check that? Some prerequisites are senior or junior level; does that need a separate table?
ERROR 1064 (42000): You have an error in your SQL syntax; check the manual that corresponds to your
MYSQL server version for the right syntax to use near 'studyplan sp on (t.std_id=sp.std_is)
left outer join prerequsit p on (p.preid = c.' at line 3
select c.*
from std t
inner join schedule22 c studyplan sp
on (t.std_id=sp.std_id)
left outer join prerequsit p
on (p.preid=c.courseid)
inner join schedule22 c
on (c.courseid=p.courseid)
where t.std=1 AND
sp.complated='No' AND
sp.passed='No' AND
p.preid=courseid;
Student
std_id username pass fname email
1 hjh 154 jdf example#live.com
Studyplan
Courseid  code  prerequisite  std_id  completed  passed
(2, 'UNS 100', 'No Prerequisite', 1, 'Y', 'Y'),
(3, 'ENG 100', 'No Prerequisite', 1, 'Y', 'Y'),
(5, 'MTT 101', 'MTG 100', 1, 'Y', 'Y'),
(6, 'MTT 202', 'MTT 101', 1, 'Y', 'N'),
(7, 'STT 100', 'No Prerequisite', 1, 'N', 'N'),
(8, 'MTT 102', 'MTT 101', 1, 'N', 'N'),
(9, 'ENG 200', 'english1', 1, 'N', 'N'),
(10, 'OE1', NULL, 1, 'N', 'N'),
(11, 'ENG 201', 'ENG 200', 1, 'N', 'N'),
(12, 'CSC 302', 'MTT 202', 1, 'N', 'N'),
(13, 'STT 201', 'STT 100', 1, 'N', 'N'),
(15, 'CSC 201', 'MTT 101 or MTT 102', 1, 'N', 'N'),
(16, 'CSC 202', 'CSC 201', 1, 'N', 'N'),
(17, 'PSY 201', 'ENG 100 + UNS 100', 1, 'N', 'N'),
(18, 'NSC 201', 'No Prerequisite', 1, 'N', 'N'),
(19, 'CSC 307', 'CSC 201', 1, 'N', 'N'),
(20, 'CSC 301', 'CSC 202', 1, 'N', 'N'),
(21, 'ITE 390', 'Junior Level', 1, 'N', 'N'),
(22, 'CSC 305', 'Junior Level', 1, 'Y', 'Y'),
(23, 'ITE 305', 'Junior Level', 1, 'Y', 'Y'),
(24, 'ITE 414', 'Junior Level', 1, 'Y', 'Y'),
(25, 'CSC 308', 'CSC 301', 1, 'N', 'N'),
(26, 'ITE 402', 'CSC 305', 1, 'N', 'N'),
(27, 'CSC 311', 'CSC 201', 1, 'N', 'N'),
(28, 'ITE 422', 'CSC 305', 1, 'N', 'N'),
(29, 'CIS 401', 'CSC 302', 1, 'N', 'N'),
(30, 'ITE 409', 'Senior Level', 1, 'N', 'N'),
(31, 'CIS 401', 'CSC 302', 1, 'N', 'N'),
(32, 'CSC 401', 'ITE 305', 1, 'N', 'N'),
(33, 'ITE 409', NULL, 1, 'N', 'N'),
(34, 'ITE 408', 'CSC 305', 1, 'N', 'N')
Schedule
(`semester`, `courseid`, `coursecode`, `section`, `date`, `time`, ?, `sch_id`) VALUES
('fall', 9, 'ENG 100', 51, 'MoWe', '1:45PM-3:15PM', 'staff', 1),
('fall', 16, 'CSC202', 51, 'Mo-We', '1:45PM-3:15PM', 'staff', 1),
('fall', 26, 'ITE402', 51, 'Tu', '10:30-12pm', 'staff', 1),
('fall', 6, 'MTT 202', 51, 'Su-Tu', '12:00-2:00PM', 'staff', 1),
('fall', 8, 'MTT 102', 51, 'SuTu', '12:00-2:00PM', 'staff', 1),
('fall', 12, 'CSC 302', 51, 'Mo-We', '10:00-12:00PM', 'staff', 1),
('fall', 15, 'CSC 201', 52, 'Mo-We', '10:00-12:00PM', 'staff', 1),
('fall', 21, 'ITE 390', 51, 'Su-Tu', '12:00-2:00PM', 'staff', 1),
('fall', 5, 'MTT 101', 51, 'Su', '4:00PM-7:00PM', 'staff', 1),
('fall', 28, 'ITE 422', 51, 'Su-Tu', '12:00-2:00PM', 'staff', 1);
prerequsit
(`courseid`, `preid`) VALUES
(5, 1),
(6, 2),
(8, 3),
(9, 4),
(11, 5),
(12, 6),
(13, 7),
(14, 8),
(15, 9),
(16, 10),
(17, 11),
(18, 12),
(19, 13),
(20, 14),
(21, 21),
(22, 22),
(23, 23),
(24, 24),
(25, 20),
(26, 22),
(27, 25),
(28, 22),
(29, 12),
(30, 30),
(32, 23),
(34, 22),
(35, 12),
(36, 22),
(37, 3);
Your query contains schedule22 c twice in the from clause. That's an error. There may be more.
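A minimal repair of the FROM clause might look like this (a sketch: it assumes studyplan has a courseid column, as the dump suggests, and fixes only the syntax, not the prerequisite/level/time-conflict logic):
SELECT c.*
FROM std t
INNER JOIN studyplan sp ON t.std_id = sp.std_id
LEFT OUTER JOIN prerequsit p ON p.courseid = sp.courseid
LEFT OUTER JOIN schedule22 c ON c.courseid = sp.courseid
WHERE t.std_id = 1
  AND sp.completed = 'No'
  AND sp.passed = 'No';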

Group results by day and month (php timestamp) showing total revenue per day

Using MySQL, how can I group together by day and month, showing the total revenue?
E.g. (not based on below data)
day month revenue
1 01 10.97
2 01 3.57
3 01 0
etc.
Here's an example of my data:
CREATE TABLE IF NOT EXISTS `sales` (
`id` bigint(255) NOT NULL AUTO_INCREMENT,
`timestamp` int(12) NOT NULL,
`product` int(5) NOT NULL,
`publisher` int(5) NOT NULL,
`market` int(5) NOT NULL,
`revenue` float NOT NULL,
`Units` int(5) NOT NULL,
`Downloads` int(11) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=138 ;
--
-- Dumping data for table `sales`
--
INSERT INTO `sales` (`id`, `timestamp`, `revenue`) VALUES
(1, 1394150400, 3.65),
(2, 1394064000, 0),
(4, 1393977600, 0),
(5, 1393891200, 7.42),
(6, 1393804800, 0),
(7, 1393718400, 0),
(8, 1393632000, 0),
(9, 1393545600, 0),
(10, 1393459200, 0),
(11, 1393372800, 0),
(12, 1393286400, 3.65),
(13, 1393200000, 3.65),
(14, 1393177032, 0),
(15, 1393090632, 3.65),
(16, 1393004232, 0),
(17, 1392917832, 0),
(18, 1392831432, 0),
(19, 1392745032, 0),
(20, 1392658632, 0),
(21, 1392572232, 0),
(24, 1391881032, 0),
(23, 1392485832, 0),
(25, 1392336000, 0),
(26, 1392249600, 0),
(27, 1392163200, 0),
(28, 1392076800, 0),
(29, 1391990400, 3.81),
(30, 1391904000, 0),
(31, 1391817600, 0),
(32, 1391731200, 3.65),
(33, 1391644800, 3.58),
(34, 1391558400, 3.58),
(35, 1391472000, 0),
(36, 1391385600, 0),
(37, 1391299200, 0),
(38, 1391212800, 7.23),
(39, 1391126400, 0),
(40, 1391040000, 0),
(41, 1390953600, 3.81),
(42, 1390867200, 4.52),
(43, 1390780800, 0),
(44, 1390694400, 3.65),
(45, 1390608000, 3.81),
(46, 1390585032, 0),
(47, 1390435200, 0),
(48, 1390348800, 3.58),
(49, 1390262400, 0),
(50, 1390176000, 0),
(51, 1390089600, 0),
(52, 1390003200, 0),
(53, 1389916800, 3.58),
(54, 1389893832, 0),
(55, 1389744000, 0),
(56, 1389657600, 0),
(57, 1389571200, 0),
(58, 1389484800, 0),
(59, 1389398400, 3.65),
(60, 1389312000, 3.18),
(61, 1389225600, 0),
(62, 1389139200, 0),
(63, 1389052800, 0),
(64, 1389052800, 0),
(65, 1388966400, 3.65),
(66, 1388880000, 4.05),
(67, 1388793600, 0),
(68, 1388707200, 3.65),
(69, 1388620800, 0),
(70, 1388534400, 0),
(71, 1394236800, 0),
(72, 1394236800, 2.51),
(73, 1394236800, 0),
(74, 1394150400, 5.02),
(75, 1394150400, 2.76),
(76, 1394064000, 7.5),
(77, 1394064000, 8.28),
(78, 1393977600, 0),
(79, 1393977600, 0),
(80, 1393891200, 7.5),
(81, 1393891200, 2.36),
(82, 1393804800, 0),
(83, 1393804800, 0),
(84, 1393718400, 2.76),
(85, 1393718400, 0),
(86, 1393632000, 0),
(87, 1393545600, 0),
(88, 1393545600, 2.76),
(89, 1393459200, 2.51),
(90, 1393459200, 2.51),
(91, 1393433613, 2.51),
(92, 1393433613, 0),
(93, 1393286400, 2.54),
(94, 1393286400, 2.76),
(95, 1393200000, 2.52),
(96, 1393200000, 5.51),
(97, 1394323200, 0),
(98, 1394323200, 5.01),
(99, 1394323200, 5.52),
(100, 1394409600, 0),
(101, 1394409600, 2.05),
(102, 1394409600, 5.27),
(103, 1393113600, 5.08),
(104, 1393027200, 5.09),
(105, 1392854400, 5.32),
(106, 1392854400, 7.63),
(107, 1392940800, 0),
(108, 1392595200, 0),
(109, 1392508800, 7.64),
(110, 1392422400, 0),
(111, 1392336000, 2.58),
(112, 1392163200, 5.57),
(113, 1391990400, 0),
(114, 1391817600, 0),
(115, 1391731200, 15.99),
(116, 1391472000, 10.66),
(117, 1391385600, 2.54),
(118, 1391299200, 2.54),
(119, 1391212800, 5.34),
(120, 1391040000, 0),
(121, 1390953600, 2.55),
(122, 1390780800, 10.9),
(123, 1390608000, 12.72),
(124, 1390435200, 7.64),
(125, 1390262400, 2.55),
(126, 1390089600, 9.92),
(127, 1389916800, 2.55),
(128, 1389744000, 2.55),
(129, 1389571200, 5.1),
(130, 1389398400, 2.55),
(131, 1389225600, 5.1),
(132, 1389052800, 7.65),
(133, 1388880000, 5.1),
(134, 1388793600, 9.99),
(135, 1388620800, 0),
(136, 1394582400, 4.14),
(137, 1394582400, 2.76);
SELECT DATE_FORMAT(FROM_UNIXTIME(`timestamp`),'%d') DAY, DATE_FORMAT(FROM_UNIXTIME(`timestamp`),'%m') MONTH, SUM(`revenue`)
FROM sales
GROUP BY DAY,MONTH
ORDER BY MONTH,DAY
Check the FROM_UNIXTIME function in the MySQL manual.
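Note that grouping by day and month alone will merge the same calendar day across different years. If that's not wanted, here is a variant (a sketch) that groups by the full date instead:
SELECT DATE(FROM_UNIXTIME(`timestamp`)) AS sale_date,
       SUM(`revenue`) AS total_revenue
FROM sales
GROUP BY sale_date
ORDER BY sale_date;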