├── complex_interview_problems.md ├── Top45.md ├── README.md ├── SQL_Medium_Complex_Interview_Problems.md └── sql_tips_ankit_bansal.md /complex_interview_problems.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Complex SQL 2 | find new and repeat customers | SQL Interview Questions 4 | 5 | ```sql 6 | 7 | create table customer_orders ( 8 | order_id integer, 9 | customer_id integer, 10 | order_date date, 11 | order_amount integer 12 | ); 13 | 14 | insert into 15 | customer_orders values 16 | (1,100,cast('2022-01-01' as date),2000), 17 | (2,200,cast('2022-01-01' as date),2500), 18 | (3,300,cast('2022-01-01' as date),2100), 19 | (4,100,cast('2022-01-02' as date),2000), 20 | (5,400,cast('2022-01-02' as date),2200), 21 | (6,500,cast('2022-01-02' as date),2700), 22 | (7,100,cast('2022-01-03' as date),3000), 23 | (8,400,cast('2022-01-03' as date),1000), 24 | (9,600,cast('2022-01-03' as date),3000) 25 | ; 26 | 27 | select * from customer_orders; 28 | 29 | ``` 30 | 31 | -- Find New and Repeat Customers on each order date 32 | 33 | ```sql 34 | 35 | with first_order_table as 36 | (select customer_id, min(order_date) as first_order_date 37 | from customer_orders 38 | group by customer_id) 39 | 40 | select co.order_date, 41 | sum(case when co.order_date = fot.first_order_date then co.order_amount else 0 end) as order_amount_by_new_customer, 42 | sum(case when co.order_date <> fot.first_order_date then co.order_amount else 0 end) as order_amount_by_repeat_customer 43 | from customer_orders co 44 | join first_order_table fot on fot.customer_id = co.customer_id 45 | group by co.order_date 46 | order by 1; 47 | 48 | ``` 49 | 50 | # USING ROW_NUMBER FUNCTION -- 51 | 52 | ```sql 53 | 54 | with order_rnk as ( 55 | 56 | select order_date , row_number() over(partition by customer_id order by order_id) as rnk from customer_orders 57 | 58 | ) 59 | 60 | select order_date , 61 | sum(case when rnk = 1 then 1 else 0 end ) as new_customer, 62 | 
63 | sum(case when rnk > 1 then 1 else 0 end ) as repeat_customer 64 | 65 | from order_rnk 66 | 67 | group by 1 68 | 69 | 70 | ``` 71 | 72 | --- 73 | --- 74 | 75 | 76 | # Complex SQL 3 | Scenario based Interviews Question for Product companies 77 | 78 | ```sql 79 | 80 | create table entries ( 81 | name varchar(20), 82 | address varchar(20), 83 | email varchar(20), 84 | floor int, 85 | resources varchar(10)); 86 | 87 | insert into entries 88 | values ('A','Bangalore','A@gmail.com',1,'CPU'),('A','Bangalore','A1@gmail.com',1,'CPU'),('A','Bangalore','A2@gmail.com',2,'DESKTOP') 89 | ,('B','Bangalore','B@gmail.com',2,'DESKTOP'),('B','Bangalore','B1@gmail.com',2,'DESKTOP'),('B','Bangalore','B2@gmail.com',1,'MONITOR') 90 | 91 | ; 92 | select * from entries; 93 | 94 | SELECT name, count(*) as number_of_visit, group_concat(distinct resources) as used_resources FROM entries group by name; 95 | 96 | ``` 97 | 98 | -------------------------------------------------------------------------------- /Top45.md: -------------------------------------------------------------------------------- 1 | # LEETCODE 45 MOST ASKED 2 | 3 | ## 175. Combine Two Tables 4 | 5 | select t1.firstName , t1.lastName , t2.city , t2.state from Person t1 6 | 7 | LEFT JOIN Address t2 8 | 9 | on t1.personId = t2.personId ; 10 | 11 | 12 | ## 176. Second Highest Salary 13 | 14 | SELECT MAX(SALARY) FROM employees 15 | WHERE SALARY < (SELECT MAX(SALARY) FROM employees); 16 | 17 | OR 18 | 19 | SELECT DISTINCT 20 | (SELECT DISTINCT Salary 21 | FROM Employee 22 | ORDER BY Salary DESC 23 | LIMIT 1 OFFSET 1) AS SecondHighestSalary; 24 | 25 | 26 | ## 177. Nth Highest Salary 27 | 28 | CREATE FUNCTION getNthHighestSalary(N INT) RETURNS INT 29 | BEGIN 30 | SET N = N - 1; 31 | RETURN ( 32 | # Write your MySQL query statement below. 33 | 34 | SELECT DISTINCT Salary 35 | FROM Employee 36 | ORDER BY Salary DESC 37 | LIMIT 1 OFFSET N 38 | 39 | ); 40 | END 41 | 42 | 43 | 44 | ## 178. 
Rank Scores 45 | 46 | select score , dense_rank() over( order by score desc ) as 'Rank' from Scores; 47 | 48 | 49 | ## 180. Consecutive Numbers 50 | 51 | 52 | with temp(num, next_num,prev_num) as 53 | 54 | (select num , LEAD(num,1,0) over(order by id) as next_num , LAG(num ,1, 0) over(order by id) as prev_num from LOGS) 55 | 56 | select distinct num as ConsecutiveNums from temp 57 | where num= next_num and num = prev_num; 58 | 59 | 60 | 61 | ## 181. Employees Earning More Than Their Managers 62 | 63 | select e1.name AS Employee 64 | from Employee e1 65 | join Employee e2 66 | on e1.managerId = e2.id 67 | where e1.salary > e2.salary 68 | 69 | ## 182. Duplicate Emails 70 | 71 | select email as Email from person 72 | group by email having count(email) > 1; 73 | 74 | 75 | ## 183. Customers Who Never Order 76 | 77 | 78 | SELECT c.name AS Customers 79 | FROM Customers c LEFT JOIN Orders o 80 | ON c.id=o.customerId 81 | WHERE o.customerId IS NULL; 82 | 83 | 84 | 85 | ## 184. Department Highest Salary 86 | 87 | 88 | SELECT 89 | Department.name AS Department , 90 | Employee.name AS Employee, 91 | Employee.salary 92 | 93 | FROM Department 94 | 95 | Inner JOIN Employee ON Employee.departmentId=Department.id 96 | 97 | WHERE(departmentId, salary) IN 98 | (SELECT departmentId,MAX(salary) FROM Employee GROUP BY departmentId) ; 99 | 100 | 101 | 102 | ## 185. Department Top Three Salaries 103 | 104 | 105 | 106 | SELECT Department, employee, salary FROM ( 107 | SELECT d.name AS Department 108 | , e.name AS employee 109 | , e.salary 110 | , DENSE_RANK() OVER (PARTITION BY d.name ORDER BY e.salary DESC) AS drk 111 | FROM Employee e JOIN Department d ON e.DepartmentId= d.Id 112 | ) t WHERE t.drk <= 3 113 | 114 | -- 196. Delete Duplicate Emails 115 | 116 | DELETE p1 from person p1 , person p2 where p1.email = p2.email and p1.id > p2.id 117 | 118 | 119 | 120 | 121 | 122 | 123 | ## 584. 
Find Customer Referee 124 | 125 | 126 | Select name from Customer where referee_id != 2 or referee_id is null; 127 | 128 | ## 595. Big Countries 129 | 130 | 131 | Select name, population ,area from World where area >= 3000000 or population >= 25000000; 132 | 133 | 134 | ## 586. Customer Placing the Largest no. of Orders 135 | 136 | select customer_number from orders 137 | 138 | group by customer_number order by Count(customer_number) desc limit 1; 139 | 140 | 141 | 142 | ## 596. Classes More Than 5 Students 143 | 144 | select class from courses 145 | group by class 146 | having count(class) >=5 147 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Complete Sql 2023 2 | 3 | # Advanced SQL Mastery :crystal_ball: 4 | 5 | Welcome to my repository where I delve into advanced SQL topics. This project is a hub for discovering the intricacies of SQL through practical examples and straightforward explanations. 6 | 7 | ## What's Covered? 8 | 9 | In this repository, you'll find basic yet insightful demonstrations of various advanced SQL concepts. From window functions to other advanced operations, we'll walk through fundamental techniques step by step. 10 | 11 | Let's embark on this SQL journey together! :rocket: 12 | 13 | 14 | # WINDOW FUNCTIONS ~ 15 | 16 | # row_number() :star: 17 | 18 | 19 | 20 | ```sql 21 | 22 | -- Ever wondered how to conjure the first two employees from each department who embarked on the company adventure? 
Behold the `ROW_NUMBER()` spell: 23 | 24 | SELECT * 25 | FROM ( 26 | SELECT e.*, 27 | ROW_NUMBER() OVER (PARTITION BY dept_name ORDER BY emp_id) AS rn 28 | FROM employees e 29 | ) AS x 30 | WHERE x.rn < 3; 31 | 32 | ``` 33 | 34 | ------- 35 | 36 | # Rank() :star: 37 | 38 | 39 | ```sql 40 | 41 | -- Ascend to greatness by summoning the top 3 earners in each department through the `RANK()` enchantment: 42 | 43 | SELECT * 44 | FROM ( 45 | SELECT e.*, 46 | RANK() OVER (PARTITION BY dept_name ORDER BY salary DESC) AS rnk 47 | FROM employees e 48 | ) AS x 49 | WHERE x.rnk < 4; 50 | 51 | ``` 52 | 53 | ------- 54 | 55 | # Lead() & Lag() :star: 56 | 57 | 58 | ```sql 59 | 60 | -- Peer into the past and future with the mystic `LEAD()` and `LAG()` incantations, unlocking the wisdom of adjacent records: 61 | 62 | SELECT e.*, 63 | LAG(salary) OVER (PARTITION BY dept_name ORDER BY emp_id) AS prev_emp_salary, 64 | LEAD(salary) OVER (PARTITION BY dept_name ORDER BY emp_id) AS next_emp_salary 65 | FROM employees e; 66 | 67 | ``` 68 | 69 | 70 | # Common Table Expressions ~ (With Clause or SubQuery Factoring) 71 | 72 | 73 | ```sql 74 | 75 | -- select * from products; 76 | 77 | -- Find the stores whose sales are greater than the average sales across all stores 78 | 79 | -- METHOD 1 - SUB - QUERIES 80 | 81 | -- 1.) Find the total sales per store 82 | 83 | 84 | SELECT store_id, SUM(cost_in_cents) AS total_sales_per_store 85 | FROM products 86 | GROUP BY store_id; 87 | 88 | -- 2.) Find the average sales across all stores 89 | 90 | 91 | SELECT AVG(total_sales_per_store) AS avg_sales_for_all_stores 92 | FROM ( 93 | SELECT store_id, SUM(cost_in_cents) AS total_sales_per_store 94 | FROM products 95 | GROUP BY store_id 96 | ) AS x; 97 | 98 | -- 3.) 
Find the stores where total sales > average sales of all stores 99 | 100 | 101 | SELECT total_sales.store_id, total_sales.total_sales_per_store 102 | FROM ( 103 | SELECT store_id, SUM(cost_in_cents) AS total_sales_per_store 104 | FROM products 105 | GROUP BY store_id 106 | ) AS total_sales 107 | JOIN ( 108 | SELECT AVG(total_sales_per_store) AS avg_sales_for_all_stores 109 | FROM ( 110 | SELECT store_id, SUM(cost_in_cents) AS total_sales_per_store 111 | FROM products 112 | GROUP BY store_id 113 | ) AS x 114 | ) AS avg_sales 115 | ON total_sales.total_sales_per_store > avg_sales.avg_sales_for_all_stores; 116 | 117 | ``` 118 | 119 | ```sql 120 | 121 | 122 | -- METHOD 2 : Using the WITH Clause 123 | 124 | with Total_Sales(store_id ,total_sales_per_store) as 125 | 126 | (select s.store_id , sum(cost_in_cents) as total_sales_per_store from products s group by store_id) , 127 | 128 | Avg_Sales(avg_sales_for_all_stores) as 129 | 130 | (select avg(total_sales_per_store) as avg_sales_for_all_stores from Total_Sales) 131 | 132 | select * from Total_Sales t1 133 | join Avg_Sales t2 134 | on t1.total_sales_per_store > t2.avg_sales_for_all_stores; 135 | 136 | ``` 137 | 138 | --- 139 | --- 140 | 141 | ## Referring More advance topics such as Views , Stored Procedures , Indexes , Query Optimisations from here - [Don't Click here](https://roadmap.sh/sql) 142 | 143 | --- 144 | --- 145 | 146 | ## Stored Procedures - A SQL stored procedure is a set of SQL code that can be saved and reused. In other words, it’s a precompiled object because it’s compiled at a time when it’s created on the database. 
147 | 148 | * Setup : 149 | 150 | ```sql 151 | 152 | 153 | create table mobile_product( product_id int , product_code varchar(5) ,product_name varchar(10), 154 | price int, quantity_remain int , quantity_sold int); 155 | 156 | insert into mobile_product values (1, "P1" ,"Iphone", 20000 ,4,195); 157 | 158 | 159 | create table sales(order_id int, order_date varchar(10) , product_code varchar(10) , quantity_ordered int , sales_price int); 160 | 161 | -- Inserting the first row 162 | INSERT INTO sales (order_id, order_date, product_code, quantity_ordered, sales_price) 163 | VALUES (1, '2023-09-06', 'P1', 10, 50); 164 | 165 | -- Inserting the second row 166 | INSERT INTO sales (order_id, order_date, product_code, quantity_ordered, sales_price) 167 | VALUES (2, '2023-09-07', 'P1', 15, 75); 168 | 169 | -- Inserting the third row 170 | INSERT INTO sales (order_id, order_date, product_code, quantity_ordered, sales_price) 171 | VALUES (3, '2023-09-08', 'P1', 8, 40); 172 | 173 | 174 | select * from mobile_product; 175 | select * from sales; 176 | 177 | 178 | call retail(); 179 | 180 | ``` 181 | 182 | 183 | * Non parameterized Procedure a Sample Example on sales and product table ~ (MySql RDMS Version) -- 184 | 185 | 186 | ```sql 187 | 188 | DELIMITER $$ 189 | DROP PROCEDURE IF EXISTS retail$$ 190 | 191 | CREATE PROCEDURE retail() 192 | BEGIN 193 | DECLARE t_product_code VARCHAR(10); 194 | DECLARE t_product_price FLOAT; 195 | 196 | SELECT product_code, price 197 | INTO t_product_code, t_product_price 198 | FROM mobile_product 199 | WHERE product_name = 'Iphone'; 200 | 201 | INSERT INTO sales (order_id, order_date, product_code, quantity_ordered, sales_price) 202 | VALUES (1, CURDATE(), t_product_code, 1, (t_product_price * 1)); 203 | 204 | SET SQL_SAFE_UPDATES=0; 205 | 206 | UPDATE mobile_product 207 | SET quantity_remain = (quantity_remain - 1), quantity_sold = (quantity_sold + 1) 208 | WHERE product_code = t_product_code; 209 | 210 | SET SQL_SAFE_UPDATES=1; 211 | 212 | 
SELECT 'Product Sold !'; 213 | END$$ 214 | 215 | DELIMITER ; 216 | 217 | ``` 218 | 219 | 220 | ## Please Make sure to checkout other readme's for Interview Problems , content is regularly updating so stay tuned !! 221 | 222 | 223 | 224 | -------------------------------------------------------------------------------- /SQL_Medium_Complex_Interview_Problems.md: -------------------------------------------------------------------------------- 1 | ## 1 - Olympic Gold Medals Problem | SQL Online Interview Question | Data Analytics | 2 Solutions 2 | 3 | 4 | * Create the table -- 5 | 6 | ```sql 7 | 8 | CREATE TABLE events ( 9 | ID int, 10 | event varchar(255), 11 | YEAR INt, 12 | GOLD varchar(255), 13 | SILVER varchar(255), 14 | BRONZE varchar(255) 15 | ); 16 | 17 | delete from events; 18 | 19 | INSERT INTO events VALUES (1,'100m',2016, 'Amthhew Mcgarray','donald','barbara'); 20 | INSERT INTO events VALUES (2,'200m',2016, 'Nichole','Alvaro Eaton','janet Smith'); 21 | INSERT INTO events VALUES (3,'500m',2016, 'Charles','Nichole','Susana'); 22 | INSERT INTO events VALUES (4,'100m',2016, 'Ronald','maria','paula'); 23 | INSERT INTO events VALUES (5,'200m',2016, 'Alfred','carol','Steven'); 24 | INSERT INTO events VALUES (6,'500m',2016, 'Nichole','Alfred','Brandon'); 25 | INSERT INTO events VALUES (7,'100m',2016, 'Charles','Dennis','Susana'); 26 | INSERT INTO events VALUES (8,'200m',2016, 'Thomas','Dawn','catherine'); 27 | INSERT INTO events VALUES (9,'500m',2016, 'Thomas','Dennis','paula'); 28 | INSERT INTO events VALUES (10,'100m',2016, 'Charles','Dennis','Susana'); 29 | INSERT INTO events VALUES (11,'200m',2016, 'jessica','Donald','Stefeney'); 30 | INSERT INTO events VALUES (12,'500m',2016,'Thomas','Steven','Catherine'); 31 | 32 | ``` 33 | 34 | ## -- Write a query to find no of gold medal per swimmer for swimmers who won only gold medals 35 | 36 | 37 | 38 | 39 | * Method 1 - using subquery - 40 | 41 | ```sql 42 | 43 | select Gold , count(Gold) as cnt_gold_medal_per_swimmer 
from events 44 | where Gold not in (select silver from events union all select Bronze from events) 45 | group by Gold; 46 | 47 | ``` 48 | 49 | * Method 2 - Group By having + CTE 50 | 51 | ```sql 52 | 53 | with cte as ( 54 | SELECT GOLD, count(*) as total_medals 55 | FROM events 56 | GROUP BY GOLD ) 57 | 58 | select 59 | GOLD, total_medals 60 | from cte 61 | where GOLD NOT IN (select distinct SILVER from events union all select distinct BRONZE from events) 62 | 63 | 64 | ``` 65 | 66 | --- 67 | --- 68 | 69 | ## 2 - Solving a REAL Business Use Case Using SQL | Business Days Excluding Weekends and Public Holidays - MYSQL 70 | 71 | * Create Required Tables - 72 | 73 | ```sql 74 | create table tickets 75 | ( 76 | ticket_id varchar(10), 77 | create_date date, 78 | resolved_date date 79 | ); 80 | insert into tickets values 81 | (1,'2022-08-01','2022-08-03') 82 | ,(2,'2022-08-01','2022-08-12') 83 | ,(3,'2022-08-01','2022-08-16'); 84 | create table holidays 85 | ( 86 | holiday_date date 87 | ,reason varchar(100) 88 | ); 89 | insert into holidays values 90 | ('2022-08-11','Rakhi'),('2022-08-15','Independence day'); 91 | 92 | 93 | select * from tickets; 94 | select * from holidays; 95 | 96 | ``` 97 | 98 | * Query to Calculate no of days between two days and then difference of weeks between two days --- 99 | 100 | ```sql 101 | 102 | select * , datediff(resolved_date ,create_date) as no_of_days , 103 | week(create_date) as start_week , week(resolved_date) as end_date , FLOOR( datediff(resolved_date ,create_date) / 7) AS 104 | weeks_difference 105 | from tickets ; 106 | 107 | ``` 108 | 109 | 110 | * Query gives no of business days excluding weekend between two dates and the public holidays 111 | 112 | 113 | ```sql 114 | 115 | select * , DATEDIFF(resolved_date , create_date) as actual_days,DATEDIFF(resolved_date, create_date) - 2 * FLOOR(DATEDIFF(resolved_date, create_date) / 7) as actual_after_weekend_excluson , 116 | DATEDIFF(resolved_date, create_date) - 2 * 
FLOOR(DATEDIFF(resolved_date, create_date) / 7) - no_of_public_holidays AS actual_after_weekend_and_public_holiday_exclusin 117 | from ( 118 | SELECT ticket_id , create_date , resolved_date , count(holiday_date) as no_of_public_holidays 119 | FROM 120 | tickets left join holidays on holiday_date between create_date and resolved_date group by ticket_id , create_date , resolved_date 121 | ) as a; 122 | 123 | ``` 124 | 125 | 126 | 127 | ## 3 - Amazon SQL Interview Question for Data Analyst Position [2-3 Year Of Experience ] | Data Analytics 128 | 129 | * Create the Required Tables - 130 | 131 | ```sql 132 | 133 | create table hospital ( emp_id int 134 | , action varchar(10) 135 | , time datetime); 136 | 137 | insert into hospital values ('1', 'in', '2019-12-22 09:00:00'); 138 | insert into hospital values ('1', 'out', '2019-12-22 09:15:00'); 139 | insert into hospital values ('2', 'in', '2019-12-22 09:00:00'); 140 | insert into hospital values ('2', 'out', '2019-12-22 09:15:00'); 141 | insert into hospital values ('2', 'in', '2019-12-22 09:30:00'); 142 | insert into hospital values ('3', 'out', '2019-12-22 09:00:00'); 143 | insert into hospital values ('3', 'in', '2019-12-22 09:15:00'); 144 | insert into hospital values ('3', 'out', '2019-12-22 09:30:00'); 145 | insert into hospital values ('3', 'in', '2019-12-22 09:45:00'); 146 | insert into hospital values ('4', 'in', '2019-12-22 09:45:00'); 147 | insert into hospital values ('5', 'out', '2019-12-22 09:40:00'); 148 | 149 | select * from hospital; 150 | 151 | 152 | ``` 153 | 154 | 155 | # Write a sql to find the total number of people present inside the hospital 156 | 157 | 158 | * Method 1 - Using CTE or having clause simply 159 | 160 | * Logic - calculate maximum intime and outtime for each employee then you can compare if that max(intime) > max(outtime) this means person is inside hospital 161 | 162 | 163 | ```sql 164 | 165 | with cte as ( 166 | select emp_id ,max( case when action = 'in' then time end ) as intime 
, max(case when action = 'out' then time end )as outtime 167 | 168 | from hospital group by emp_id) 169 | 170 | select * from cte where intime > outtime or outtime is null; 171 | 172 | ``` 173 | 174 | 175 | * or 176 | 177 | ```sql 178 | 179 | select emp_id ,max( case when action = 'in' then time end ) as intime , max(case when action = 'out' then time end )as outtime 180 | 181 | from hospital group by emp_id having max( case when action = 'in' then time end ) > max(case when action = 'out' then time end ) 182 | 183 | or 184 | 185 | max(case when action = 'out' then time end ) is null ; 186 | 187 | ``` 188 | 189 | 190 | 191 | * Using JOINS - Separate both the In times and Out times 192 | 193 | ```sql 194 | 195 | with intime as ( 196 | 197 | select emp_id , max( time ) as latest_intime from hospital where action = 'in' group by emp_id 198 | ), 199 | 200 | outime as ( 201 | select emp_id , max( time ) as latest_outtime from hospital where action = 'out' group by emp_id 202 | 203 | ) 204 | 205 | select * from intime left join outime on intime.emp_id = outime.emp_id 206 | where latest_intime > latest_outtime or latest_outtime is null; 207 | 208 | ``` 209 | 210 | 211 | 212 | * Magic solution -- we can check whether the maximum time considering both in and out = maximum time when he/she was in; if so, the person is 213 | inside right now 214 | 215 | ```sql 216 | 217 | with latest_time as ( 218 | 219 | select emp_id , max(time) as max_latest_time from hospital group by emp_id 220 | ), 221 | 222 | latest_in_time as ( 223 | 224 | select emp_id , max(time) as max_in_time from hospital where action = 'in' group by emp_id 225 | 226 | ) 227 | 228 | select * from latest_time lt inner join latest_in_time mt on lt.emp_id = mt.emp_id 229 | where max_latest_time = max_in_time; 230 | 231 | ``` 232 | 233 | --- 234 | --- 235 | 236 | ## 4 - Airbnb SQL Interview Question | Convert Comma Separated Values into Rows | Data Analytics 237 | 238 | * Create Required Tables - 239 | 240 | ```sql 241 | create table 
airbnb_searches 242 | ( 243 | user_id int, 244 | date_searched date, 245 | filter_room_types varchar(200) 246 | ); 247 | insert into airbnb_searches values 248 | (1,'2022-01-01','entire home,private room') 249 | ,(2,'2022-01-02','entire home,shared room') 250 | ,(3,'2022-01-02','private room,shared room') 251 | ,(4,'2022-01-03','private room') 252 | ; 253 | 254 | /*Find the room types that are searched most no of times. 255 | Output the room type alongside the number of searches for it. 256 | If the filter for room types has more than one room type, 257 | consider each unique room type as a separate row. 258 | Sort the result based on the number of searches in descending order. 259 | */ 260 | 261 | select * from airbnb_searches; 262 | 263 | ``` 264 | 265 | * Query using Cte's and Like Clause ~~ MYSQL 266 | 267 | ```sql 268 | 269 | with room1 as ( 270 | select sum(case when filter_room_types like '%entire%' then 1 else 0 end) as s1 from airbnb_searches 271 | 272 | ), 273 | room2 as ( select sum(case when filter_room_types like '%private%' then 1 else 0 end) as s2 from airbnb_searches 274 | ), 275 | room3 as ( select sum(case when filter_room_types like '%shared%' then 1 else 0 end) as s3 from airbnb_searches) 276 | 277 | , final_output as ( 278 | select 'entire home' as value , s1 as cnt from room1 279 | union all 280 | select 'private room' as value , s2 as cnt from room2 281 | union all 282 | select 'shared room' as value , s3 as cnt from room3 283 | 284 | 285 | ) 286 | 287 | select * from final_output order by cnt desc; 288 | 289 | ``` 290 | 291 | --- 292 | --- 293 | 294 | 295 | 296 | 297 | 298 | ## 5 - SQL Interview Question for Senior Data Engineer Position in Poland | Data Engineering 299 | 300 | 301 | * Table Script - 302 | 303 | ```sql 304 | 305 | CREATE TABLE emp_salary 306 | ( 307 | emp_id INTEGER NOT NULL, 308 | name NVARCHAR(20) NOT NULL, 309 | salary NVARCHAR(30), 310 | dept_id INTEGER 311 | ); 312 | 313 | 314 | INSERT INTO emp_salary 315 | (emp_id, 
name, salary, dept_id) 316 | VALUES(101, 'sohan', '3000', '11'), 317 | (102, 'rohan', '4000', '12'), 318 | (103, 'mohan', '5000', '13'), 319 | (104, 'cat', '3000', '11'), 320 | (105, 'suresh', '4000', '12'), 321 | (109, 'mahesh', '7000', '12'), 322 | (108, 'kamal', '8000', '11'); 323 | 324 | select * from emp_salary; 325 | 326 | ``` 327 | 328 | * Write a SQL to return all employee whose salary is same in same department 329 | 330 | 331 | * This is Using Inner Join ~ 332 | 333 | ```sql 334 | 335 | with cte as( 336 | select dept_id , salary from emp_salary group by dept_id , salary having count(1) > 1 ) 337 | 338 | select t1.* from emp_salary t1 inner join cte on t1.dept_id = cte.dept_id where t1.salary = cte.salary; 339 | 340 | 341 | ``` 342 | 343 | ** Alternate using Left Join 344 | 345 | ```sql 346 | 347 | with sal_dep as( 348 | select dept_id, salary 349 | from emp_salary 350 | group by dept_id, salary 351 | having count(1)=1) 352 | select es. * from 353 | emp_salary es 354 | LEFT join sal_dep sd on es.dept_id=sd.dept_id and es.salary=sd.salary 355 | where sd.dept_id is null 356 | 357 | 358 | ``` 359 | --- 360 | --- 361 | 362 | ## 6 & 7 ~ Premium Questions on Datalemur.com 363 | 364 | --- 365 | --- 366 | 367 | # 8 - Most Asked Join Based SQL Problem Part -2 | Asked in Amazon for Data Engineer Position 368 | 369 | 370 | /*There are 2 tables, first table has 5 records and second table has 10 records. 371 | you can assume any values in each of the tables. 
how many maximum and minimum records possible in case of 372 | inner join, left join, right join and full outer join */ 373 | 374 | 375 | ```sql 376 | 377 | create table t1 ( val1 int ); 378 | create table t2 (val2 int); 379 | insert into t1 values(1), (1), (1), (1), (1); 380 | insert into t2 values(1), (1), (1), (1), (1) , (1), (1), (1), (1), (1); 381 | 382 | create table t3(val3 int); 383 | insert into t3 values(2), (2), (2), (2), (2) , (2), (2), (2), (2), (2); 384 | 385 | 386 | ``` 387 | 388 | 389 | * calculation of t1 with t2 produces maximum records in each case of join 390 | * calculation of t3 with t2 produces minimum records in each case of join 391 | 392 | * the logic: to produce the maximum number of records, maximise the cartesian product by using the same values in both tables 393 | * while to achieve the minimum, use dissimilar values 394 | 395 | ```sql 396 | 397 | select * from t1; 398 | select * from t2; 399 | select * from t3; 400 | 401 | select * from t1 inner join t2 on t1.val1 = t2.val2; 402 | select * from t1 left join t2 on t1.val1 = t2.val2; 403 | select * from t1 right join t2 on t1.val1 = t2.val2; 404 | 405 | select * from t1 left join t2 on t1.val1 = t2.val2 406 | union all 407 | select * from t1 right join t2 on t1.val1 = t2.val2; 408 | 409 | 410 | select * from t3 inner join t2 on t3.val3 = t2.val2; -- 0 records 411 | select * from t3 left join t2 on t3.val3= t2.val2; -- 10 records from t3 412 | select * from t3 right join t2 on t3.val3 = t2.val2; -- 10 records from t2 413 | 414 | 415 | select * from t3 left join t2 on t3.val3 = t2.val2 416 | union all 417 | select * from t3 right join t2 on t3.val3 = t2.val2; 418 | 419 | ``` 420 | 421 | --- 422 | --- 423 | 424 | ## Tricky SQL Interview Problem with a Simple Solution | Data Analytics 425 | 426 | * Create Table as required 427 | 428 | ```sql 429 | 430 | create table purchases(user_id int,product_id int,quantity int,purchase_date datetime); 431 | insert into purchases values(536, 3223, 6, '2022-01-11 
12:33:44'); 432 | insert into purchases values(827, 3585, 35, '2022-02-20 14:05:26'); 433 | insert into purchases values(536, 3223, 5, '2022-03-02 09:33:28'); 434 | insert into purchases values(536, 1435, 10, '2022-03-02 08:40:00'); 435 | insert into purchases values(827, 2452, 45, '2022-03-02 00:00:00'); 436 | insert into purchases values(333, 1122, 9, '2022-02-06 01:00:00'); 437 | insert into purchases values(333, 1122, 10, '2022-02-06 02:00:00'); 438 | 439 | select * from purchases; 440 | 441 | 442 | ``` 443 | 444 | 445 | ## 9 - Write a sql query to give information of that users who made purchases of same product on different dates 446 | 447 | ```sql 448 | 449 | select user_id, product_id , count(distinct Date(purchase_date)) as p_cnt from purchases 450 | group by user_id, product_id having count(distinct Date(purchase_date)) > 1 451 | 452 | ``` 453 | 454 | --- 455 | --- 456 | 457 | 458 | 459 | 460 | ## Top 5 Artists [Spotify SQL Interview Question] 461 | 462 | 463 | ```sql 464 | 465 | /* This CTE1 retrieves the count of song appearances for each artist in the top 10 of the global_song_rank table */ 466 | WITH TopArtists AS ( 467 | SELECT 468 | a.artist_name, 469 | COUNT(*) AS song_appearances 470 | FROM 471 | artists a 472 | JOIN 473 | songs s ON a.artist_id = s.artist_id 474 | JOIN 475 | global_song_rank gsr ON s.song_id = gsr.song_id 476 | WHERE 477 | gsr.rank <= 10 478 | GROUP BY 479 | a.artist_name 480 | ), 481 | /* This CTE Assigns ranks to the artists based on their song appearances */ 482 | RankedArtists AS ( 483 | SELECT 484 | artist_name, 485 | song_appearances, 486 | DENSE_RANK() OVER (ORDER BY song_appearances DESC) AS artist_rank 487 | FROM 488 | TopArtists 489 | ) 490 | /* Final SELECT statement filters and retrieves the top 5 ranked artists along with their ranks.*/ 491 | SELECT 492 | artist_name, 493 | artist_rank 494 | FROM 495 | RankedArtists 496 | WHERE 497 | artist_rank <= 5 498 | ORDER BY 499 | artist_rank; 500 | 501 | ``` 502 | 503 | # 
Game Analysis I-1V 504 | 505 | ```sql 506 | 507 | create table activity ( 508 | 509 | player_id int , 510 | device_id int , 511 | event_date date , 512 | games_played int 513 | ); 514 | 515 | insert into activity values (1,2,'2016-03-01',5 ),(1,2,'2016-03-02',6 ),(2,3,'2017-06-25',1 ) 516 | ,(3,1,'2016-03-02',0 ),(3,4,'2018-07-03',5 ); 517 | 518 | select * from activity; 519 | 520 | ``` 521 | 522 | 523 | # Questions: 524 | 525 | ```sql 526 | 527 | Game Play Analysis 528 | 529 | q1: Write an SQL query that reports the first login date for each player 530 | 531 | q2: Write a SQL query that reports the device that is first logged in for each player 532 | 533 | q3: Write an SQL query that reports for each player and date, how many games played so far by the player. 534 | That is, the total number of games played by the player until that date. 535 | 536 | q4: Write an SQL query that reports the fraction of players that logged in again 537 | on the day after the day they first logged in, rounded to 2 decimal places 538 | 539 | ``` 540 | 541 | 542 | * Solution 1 - 543 | 544 | ```sql 545 | 546 | select player_id , min(event_date) as f_login from activity group by player_id; 547 | 548 | ``` 549 | 550 | * Solution 2 - 551 | 552 | ```sql 553 | 554 | select player_id , min(device_id) as f_device from activity group by player_id ; 555 | 556 | ``` 557 | 558 | * Solution 3 559 | 560 | ```sql 561 | 562 | select player_id , sum(games_played) over(partition by player_id order by event_date) as total_till_date , event_date from activity ; 563 | 564 | ``` 565 | 566 | 567 | * Solution 4 568 | 569 | ```sql 570 | 571 | 572 | WITH PlayerFirstEvent AS ( 573 | SELECT player_id, MIN(event_date) AS event_date 574 | FROM Activity 575 | GROUP BY player_id 576 | ) 577 | 578 | SELECT ROUND(COUNT(DISTINCT b.player_id) / COUNT(DISTINCT a.player_id), 2) AS fraction 579 | FROM PlayerFirstEvent a 580 | LEFT JOIN Activity b ON a.player_id = b.player_id AND DATEDIFF(b.event_date, a.event_date) = 1; 581 
| 582 | ``` 583 | 584 | -------------------------------------------------------------------------------- /sql_tips_ankit_bansal.md: -------------------------------------------------------------------------------- 1 | # SQL TIPS & TRICKS BY ANKIT BANSAL 2 | 3 | 4 | # 1 - Where Clause vs Having Clause 5 | 6 | ## Let's first Create an Employee Table 7 | 8 | ```sql 9 | 10 | CREATE TABLE employee ( 11 | emp_id INT PRIMARY KEY AUTO_INCREMENT, 12 | department_id INT, 13 | salary DECIMAL(10, 2), 14 | emp_name VARCHAR(100), 15 | manager_id INT, 16 | CONSTRAINT fk_manager 17 | FOREIGN KEY (manager_id) 18 | REFERENCES employee(emp_id) 19 | ); 20 | 21 | INSERT INTO employee (department_id, salary, emp_name, manager_id) 22 | VALUES 23 | (1, 50000.00, 'John Doe', NULL), 24 | (1, 60000.00, 'Jane Smith', 1), 25 | (2, 55000.00, 'Michael Johnson', NULL), 26 | (2, 70000.00, 'Emily Brown', 3), 27 | (1, 45000.00, 'Robert Lee', 1), 28 | (3, 80000.00, 'Jessica Davis', NULL), 29 | (3, 75000.00, 'William Wilson', 6), 30 | (2, 60000.00, 'Linda Anderson', 3), 31 | (1, 48000.00, 'James White', 1), 32 | (3, 72000.00, 'Karen Martinez', 6); 33 | 34 | ``` 35 | 36 | * Where clause use to filter the data row by row - which means the keyword iteratively check for each row according to the filter condition. 37 | 38 | ```sql 39 | 40 | select * from employee 41 | where salary > 60000 ; 42 | 43 | ``` 44 | --- 45 | 46 | * Having clause is used on aggregated values 47 | 48 | ```sql 49 | 50 | 51 | select department_id, avg(salary) from employee 52 | group by department_id having avg(salary) > 30000; 53 | 54 | ``` 55 | 56 | --- 57 | 58 | * while using both keep in mind where should be used first then do having ! 
59 | 60 | ```sql 61 | 62 | 63 | select department_id ,avg(salary) from employee 64 | where salary > 60000 group by department_id having avg(salary)>30000; 65 | 66 | ``` 67 | 68 | --- 69 | --- 70 | 71 | # 2 - SQL Convert Rows to Columns and Columns to Rows without using Pivot Functions 72 | 73 | ## Create Table ~ 74 | 75 | ```sql 76 | create table emp_compensation ( 77 | emp_id int, 78 | salary_component_type varchar(20), 79 | val int 80 | ); 81 | insert into emp_compensation 82 | values (1,'salary',10000),(1,'bonus',5000),(1,'hike_percent',10) 83 | , (2,'salary',15000),(2,'bonus',7000),(2,'hike_percent',8) 84 | , (3,'salary',12000),(3,'bonus',6000),(3,'hike_percent',7); 85 | 86 | 87 | select * from emp_compensation; 88 | 89 | ``` 90 | 91 | ## Converting rows to columns, also known as pivoting 92 | 93 | ```sql 94 | select emp_id, 95 | sum(case when salary_component_type = 'salary' then val end ) as salary, 96 | sum(case when salary_component_type = 'hike_percent' then val end) as hike_percent, 97 | sum(case when salary_component_type = 'bonus' then val end ) as bonus 98 | from emp_compensation 99 | group by emp_id; 100 | 101 | ``` 102 | 103 | ## Making a new table generated using above query 104 | 105 | ``` sql 106 | create table emp_compensation_unpivot as 107 | select emp_id, 108 | sum(case when salary_component_type = 'salary' then val end ) as salary, 109 | sum(case when salary_component_type = 'hike_percent' then val end) as hike_percent, 110 | sum(case when salary_component_type = 'bonus' then val end ) as bonus 111 | from emp_compensation 112 | group by emp_id; 113 | 114 | ``` 115 | 116 | ## Converting columns to rows, also known as unpivoting 117 | 118 | ```sql 119 | 120 | select * from( 121 | select emp_id, 'salary' as salary_component_type, salary as val from emp_compensation_unpivot 122 | union all 123 | select emp_id, 'hike_percent' as salary_component_type, hike_percent as val from emp_compensation_unpivot 124 | union all 125 | select emp_id, 'bonus' as 
salary_component_type, bonus as val from emp_compensation_unpivot 126 | 127 | ) as temp order by emp_id; 128 | 129 | ``` 130 | 131 | --- 132 | --- 133 | # 3 - Top 10 SQL interview Questions and Answers | Frequently asked SQL interview questions. 134 | 135 | ## Create the 'emp' table 136 | 137 | ```sql 138 | CREATE TABLE sampletable ( 139 | emp_id INT, 140 | emp_name VARCHAR(20), 141 | department_id INT, 142 | salary INT, 143 | manager_id INT, 144 | emp_age INT 145 | ); 146 | ``` 147 | 148 | ## Insert data into the 'emp' table 149 | 150 | ```sql 151 | 152 | INSERT INTO sampletable (emp_id, emp_name, department_id, salary, manager_id, emp_age) 153 | VALUES 154 | (1, 'Ankit', 100, 10000, 4, 39), 155 | (2, 'Mohit', 100, 15000, 5, 48), 156 | (3, 'Vikas', 100, 10000, 4, 37), 157 | (4, 'Rohit', 100, 5000, 2, 16), 158 | (5, 'Mudit', 200, 12000, 6, 55), 159 | (6, 'Agam', 200, 12000, 2, 14), 160 | (7, 'Sanjay', 200, 9000, 2, 13), 161 | (8, 'Ashish', 200, 5000, 2, 12), 162 | (9, 'Mukesh', 300, 6000, 6, 51), 163 | (10, 'Rakesh', 300, 7000, 6, 50); 164 | 165 | ``` 166 | 167 | ## inserting a duplicate 168 | 169 | ``` sql 170 | 171 | insert into sampletable(emp_id, emp_name, department_id, salary, manager_id, emp_age) 172 | values( 1, 'Aman', 400, 5000, 2, 50); 173 | 174 | ``` 175 | 176 | ## table structure 177 | 178 | ```sql 179 | 180 | select * from sampletable; 181 | 182 | ``` 183 | 184 | ## Q1. How to find duplicates in a given table 185 | 186 | ```sql 187 | 188 | select emp_id , count(1) from sampletable group by emp_id having count(1) > 1 ; 189 | 190 | ``` 191 | 192 | ## Q2. How to delete duplicates ~ (Works in SQL Server; MySQL does not allow DELETE from a CTE) 193 | 194 | ```sql 195 | 196 | with cte as (select *, row_number() over(partition by emp_id order by emp_id) as rn from sampletable) 197 | delete from cte where rn >1; 198 | 199 | ``` 200 | 201 | ## Q3.
Difference between union and union all 202 | 203 | * when we do "union all" - between two queries it will give summation of both results , kind of merging the tables 204 | * whereas , when "union" is used - it will remove duplicates and give unique entries as a final result 205 | 206 | 207 | 208 | ## Q4. Difference rank , row_number and dense_rank 209 | 210 | * Checkout previous readme's of mine for explanation ! 211 | 212 | 213 | ## Q5. Employees who are not present in department table 214 | 215 | 216 | * let's us first create our department table 217 | 218 | ```sql 219 | 220 | -- Create the department table 221 | 222 | CREATE TABLE department ( 223 | dept_id INT PRIMARY KEY, 224 | dept_name VARCHAR(255) 225 | ); 226 | ``` 227 | * Insert two records 228 | 229 | ```sql 230 | INSERT INTO department (dept_id, dept_name) VALUES (100, 'Analytics'); 231 | INSERT INTO department (dept_id, dept_name) VALUES (300, 'IT'); 232 | 233 | 234 | select * from sampletable; 235 | select * from department; 236 | select * from sampletable where department_id not in ( select dept_id from department); 237 | 238 | ``` 239 | 240 | * or we can use Left join as to optimise the above sub query 241 | 242 | ```sql 243 | select sampletable.* , department.dept_id , department.dept_name from sampletable 244 | left join department on sampletable.department_id = department.dept_id where department.dept_name is null; 245 | 246 | ``` 247 | 248 | ## Q6. Second highest salary in each department 249 | 250 | 251 | ```sql 252 | 253 | select * from( 254 | select sampletable.*, dense_rank() over(partition by department_id order by salary desc) as rn from sampletable 255 | 256 | ) as temp where temp.rn = 2; 257 | 258 | ``` 259 | 260 | 261 | ## Q7. 
Find all transaction done by shilpa 262 | 263 | 264 | ```sql 265 | 266 | create orders table - 267 | CREATE TABLE orders ( 268 | order_id INT AUTO_INCREMENT PRIMARY KEY, 269 | customer_name VARCHAR(255), 270 | order_date DATE, 271 | order_amount DECIMAL(10, 2), 272 | customer_gender ENUM('Male', 'Female', 'Other') 273 | ); 274 | 275 | ``` 276 | 277 | ```sql 278 | 279 | INSERT INTO orders (customer_name, order_date, order_amount, customer_gender) 280 | VALUES 281 | ('Shilpa', '2020-01-01', 10000, 'Male'), 282 | ('Rahul', '2020-01-02', 12000, 'Female'), 283 | ('SHILPA', '2020-01-02', 12000, 'Male'), 284 | ('Rohit', '2020-01-03', 15000, 'Female'), 285 | ('shilpa', '2020-01-03', 14000, 'Male'); 286 | 287 | ``` 288 | 289 | ```sql 290 | 291 | 292 | select * from orders where UPPER(customer_name) = 'SHILPA'; 293 | 294 | 295 | ``` 296 | 297 | ## Q8. self join , manager salary > emp salary 298 | 299 | ```sql 300 | 301 | 302 | select * from sampletable; 303 | 304 | select t1.emp_name , t1.salary from sampletable t1 JOIN 305 | sampletable t2 on t1.manager_id = t2.emp_id where t1.salary > t2.salary; 306 | 307 | 308 | ``` 309 | 310 | ## Q9. Joins left Join/ Inner Join 311 | 312 | * Inner Join: Combines matching rows from both tables based on a condition, excluding non-matching rows. 313 | * Left Join: Combines all rows from the left table with matching rows from the right table; non-matching rows from the left table have null values. 314 | * Right Join: Combines all rows from the right table with matching rows from the left table; non-matching rows from the right table have null values. 315 | 316 | 317 | ## Q10. 
Update query to swap gender 318 | 319 | 320 | ```sql 321 | 322 | SET SQL_SAFE_UPDATES = 0; 323 | 324 | UPDATE orders 325 | SET customer_gender = CASE 326 | WHEN customer_gender = 'Male' THEN 'Female' 327 | WHEN customer_gender = 'Female' THEN 'Male' 328 | ELSE customer_gender -- Include this line if you want to keep other values as they are 329 | END; 330 | 331 | select * from orders; 332 | 333 | ``` 334 | 335 | --- 336 | --- 337 | 338 | # 4 - SQL Self Join Concept | Most Asked Interview Question | Employee Salary More than Manager's Salary 339 | 340 | 341 | ```sql 342 | 343 | select * from sampletable; 344 | 345 | select t1.emp_name , t1.salary from sampletable t1 JOIN 346 | sampletable t2 on t1.manager_id = t2.emp_id where t1.salary > t2.salary; 347 | 348 | ``` 349 | 350 | # 5 - How to Practice SQLs Without Creating Tables In Your Database 351 | 352 | 353 | ```sql 354 | 355 | 356 | with emp1 as 357 | ( 358 | select 1 as emp_id, 1000 as emp_salary, 1 as dep_id 359 | union all select 2 as emp_id, 2000 as emp_salary, 2 as dep_id 360 | union all select 3 as emp_id ,3000 as emp_salary, 3 as dep_id 361 | union all select 4 as emp_id ,4000 as emp_salary, 4 as dep_id 362 | ), 363 | dep as 364 | ( 365 | select 1 as dep_id ,'d1' as dep_name 366 | union all select 2 as dep_id, 'd1' as dep_name 367 | union all select 3 as dep_id, 'd2' as dep_name 368 | union all select 4 as dep_id, 'd3' as dep_name 369 | ) 370 | select * from emp1; 371 | 372 | ``` 373 | --- 374 | --- 375 | 376 | # 6 - SQL Cross Join | Use Cases | Master Data | Performance Data 377 | 378 | 379 | ## Create Required Tables used in the video - 380 | 381 | ```sql 382 | 383 | create table products ( 384 | id int, 385 | name varchar(10) 386 | ); 387 | insert into products VALUES 388 | (1, 'A'), 389 | (2, 'B'), 390 | (3, 'C'), 391 | (4, 'D'), 392 | (5, 'E'); 393 | 394 | 395 | create table colors ( 396 | color_id int, 397 | color varchar(50) 398 | ); 399 | insert into colors values (1,'Blue'),(2,'Green'),(3,'Orange');
400 | 401 | 402 | create table sizes 403 | ( 404 | size_id int, 405 | size varchar(10) 406 | ); 407 | 408 | insert into sizes values (1,'M'),(2,'L'),(3,'XL'); 409 | 410 | 411 | create table transactions 412 | ( 413 | order_id int, 414 | product_name varchar(20), 415 | color varchar(10), 416 | size varchar(10), 417 | amount int 418 | ); 419 | insert into transactions values (1,'A','Blue','L',300),(2,'B','Blue','XL',150),(3,'B','Green','L',250),(4,'C','Blue','L',250), 420 | (5,'E','Green','L',270),(6,'D','Orange','L',200),(7,'D','Green','M',250); 421 | 422 | ``` 423 | 424 | * First use case is to produce master data for a fact table 425 | 426 | ```sql 427 | 428 | 429 | select * from transactions; 430 | 431 | select product_name, color, size, sum(amount) as totalamount 432 | from transactions 433 | group by product_name, color, size; 434 | 435 | with master_data as (select p.name as product_name , c.color , s.size from products p , colors c , sizes s) 436 | , sales as (select product_name, color, size , sum(amount) as totalamount 437 | from transactions 438 | group by product_name, color, size) 439 | select md.product_name, md.color, md.size , ifnull(s.totalamount,0) as totalamount from master_data md 440 | LEFT join sales s on md.product_name=s.product_name and md.color=s.color and md.size = s.size order by totalamount; 441 | 442 | ``` 443 | 444 | * Second use case is when you want to generate large no of records for performance testing. 445 | * Code shown below is just a example like how we can join table with large records to make a large dataset and manipulate calculations 446 | 447 | 448 | ```sql 449 | 450 | 451 | select row_number() over (order by t.order_id) as order_id, t.product_name, t. 
color, 452 | case when row_number() over (order by t.order_id) %3=0 then 'L' else 'XL'end size 453 | ,t.amount from transactions t; 454 | 455 | ``` 456 | 457 | --- 458 | --- 459 | 460 | # 7 - Most Asked SQL JOIN based Interview Question | # of Records after 4 types of JOINs 461 | 462 | ## interview question: no of records with diffrent kinds of joins when there are duplicate key values 463 | 464 | ```sql 465 | 466 | create table t1 ( a INT ) ; 467 | create table t2 ( b INT ) ; 468 | 469 | Insert into t1 values(1); 470 | Insert into t1 values(1); 471 | 472 | Insert into t2 values(1); 473 | Insert into t2 values(1); 474 | Insert into t2 values(1); 475 | 476 | select * from t1; 477 | select * from t2; 478 | 479 | ``` 480 | 481 | * Inner join 482 | 483 | ```sql 484 | 485 | select * from t1 inner join t2 on t1.a = t2.b; 486 | 487 | ``` 488 | * Left Join 489 | 490 | ```sql 491 | 492 | select * from t1 left join t2 on t1.a = t2.b; 493 | 494 | ``` 495 | * Right Join 496 | 497 | ```sql 498 | 499 | select * from t1 right join t2 on t1.a = t2.b; 500 | 501 | ``` 502 | * Full Outer Join 503 | 504 | ```sql 505 | 506 | select * from t1 full outer join t2 on t1.a = t2.b; 507 | 508 | ``` 509 | 510 | * point to be noted null != null , we can't join on this condition 511 | 512 | --- 513 | --- 514 | 515 | # 8 - How to Calculate Mode in SQL | How to Find Most Frequent Value in a Column 516 | 517 | ```sql 518 | 519 | create table modes ( temp INT); 520 | insert into modes values(1); 521 | insert into modes values(2); 522 | insert into modes values(2); 523 | insert into modes values(2); 524 | insert into modes values(3); 525 | insert into modes values(4); 526 | insert into modes values(5); 527 | insert into modes values(5); 528 | insert into modes values(7); 529 | insert into modes values(7); 530 | insert into modes values(8); 531 | 532 | select * from modes; 533 | 534 | ``` 535 | 536 | # Query to find mode - 537 | 538 | * Method 1 - Using CTE - 539 | 540 | ```sql 541 | 542 | with 
freq_cte as ( 543 | select temp , count(*) as freq from modes group by temp) 544 | select * from freq_cte where freq = (select max(freq) from freq_cte); 545 | 546 | ``` 547 | 548 | * Method 2 - Using Rank Function - 549 | 550 | ```sql 551 | 552 | select temp , row_number() over(partition by temp order by temp desc) from modes; 553 | 554 | with freq_cte as ( select temp , count(*) as freq from modes group by temp) , 555 | rnk_cte as (select * , rank() over(order by freq desc) as rn from freq_cte) 556 | select * from rnk_cte where rn= 1; 557 | 558 | ``` 559 | 560 | 561 | --- 562 | --- 563 | 564 | # 9 - SQL Interview Question Based on Full Outer Join | Asked in Deloitte 565 | 566 | 567 | # Create required tables 568 | 569 | ```sql 570 | 571 | create table emp_2020 572 | ( 573 | emp_id int, 574 | designation varchar(20) 575 | ); 576 | 577 | create table emp_2021 578 | ( 579 | emp_id int, 580 | designation varchar(20) 581 | ); 582 | 583 | insert into emp_2020 values (1,'Trainee'), (2,'Developer'),(3,'Senior Developer'),(4,'Manager'); 584 | insert into emp_2021 values (1,'Developer'), (2,'Developer'),(3,'Manager'),(5,'Trainee'); 585 | 586 | select * from emp_2020 ; 587 | select * from emp_2021; 588 | 589 | ``` 590 | 591 | * Full outer join doesn't work in MySQL, so make a union between left and right join 592 | 593 | ```sql 594 | 595 | select e20.* , e21.* from emp_2020 e20 596 | left join emp_2021 e21 on e20.emp_id = e21.emp_id 597 | 598 | union 599 | 600 | select e20.* , e21.* from emp_2020 e20 601 | right join emp_2021 e21 on e20.emp_id = e21.emp_id; 602 | 603 | ``` 604 | 605 | * Query - ( Used a combination of joins as full outer join doesn't work in MySQL) 606 | 607 | ``` sql 608 | 609 | select ifnull(e20.emp_id , e21.emp_id) as emp_id , case 610 | when e21.designation != e20.designation then 'Promoted' 611 | when e21.designation is null then 'Resigned' 612 | else 'New' end 613 | as comment 614 | 615 | from emp_2020 e20 616 | 617 | left join emp_2021 e21 on e20.emp_id =
e21.emp_id where ifnull(e20.designation, 'xxx') != ifnull(e21.designation, 'yyy') 618 | 619 | union 620 | 621 | select ifnull(e20.emp_id , e21.emp_id) as emp_id , case 622 | when e21.designation != e20.designation then 'Promoted' 623 | when e21.designation is null then 'Resigned' 624 | else 'New' end 625 | as comment 626 | 627 | from emp_2020 e20 628 | 629 | right join emp_2021 e21 on e20.emp_id = e21.emp_id where ifnull(e20.designation, 'xxx') != ifnull(e21.designation, 'yyy'); 630 | 631 | ``` 632 | 633 | --- 634 | --- 635 | 636 | 637 | # 10 - A Simple and Tricky SQL Question | Rank Only Duplicates | SQL Interview Questions 638 | 639 | ``` sql 640 | 641 | create table list (id varchar(5)); 642 | insert into list values ('a'); 643 | insert into list values ('a'); 644 | insert into list values ('b'); 645 | insert into list values ('c'); 646 | insert into list values ('c'); 647 | insert into list values ('c'); 648 | insert into list values ('d'); 649 | insert into list values ('d'); 650 | insert into list values ('e'); 651 | 652 | select * from list; 653 | 654 | ``` 655 | 656 | ``` sql 657 | with cte_duplicates as ( 658 | select * from list group by id having count(1)>1 ), 659 | cte_rank as ( 660 | select id,rank() over(order by id) as rnk from cte_duplicates ) 661 | select l.id, cr.rnk from list l left join cte_rank cr on l.id = cr.id 662 | 663 | ``` 664 | 665 | --- 666 | --- 667 | 668 | 669 | # 11 - Master SQL UPDATE Statement | SQL UPDATE A-Z Tutorial | SQL Update with JOIN 670 | 671 | 672 | ```sql 673 | create database ankit_sql; 674 | 675 | CREATE TABLE EmployeeData ( 676 | emp_id INT PRIMARY KEY, 677 | emp_name VARCHAR(255), 678 | salary DECIMAL(10, 2), 679 | manager_id INT, 680 | emp_age INT, 681 | dept_id INT 682 | ); 683 | 684 | INSERT INTO EmployeeData (emp_id, emp_name, salary, manager_id, emp_age, dept_id) 685 | VALUES 686 | (1, 'John Doe', 60000.00, NULL, 30, 101), 687 | (2, 'Jane Smith', 55000.00, 1, 28, 101), 688 | (3, 'Michael Johnson', 70000.00, 1, 
32, 102), 689 | (4, 'Emily Davis', 62000.00, 1, 29, 102), 690 | (5, 'William Brown', 58000.00, 3, 31, 103), 691 | (6, 'Olivia Wilson', 56000.00, 3, 27, 103), 692 | (7, 'James Taylor', 75000.00, 1, 35, 104), 693 | (8, 'Sophia Martinez', 63000.00, 7, 30, 104), 694 | (9, 'Alexander Anderson', 60000.00, 7, 29, 105), 695 | (10, 'Ava Rodriguez', 57000.00, 7, 28, 105); 696 | 697 | 698 | CREATE TABLE DepartmentData ( 699 | dept_id INT PRIMARY KEY, 700 | dept_name VARCHAR(255) 701 | ); 702 | 703 | INSERT INTO DepartmentData (dept_id, dept_name) 704 | VALUES 705 | (101, 'Human Resources'), 706 | (102, 'Marketing'), 707 | (103, 'Engineering'), 708 | (104, 'Finance'), 709 | (105, 'Sales'); 710 | 711 | 712 | select * from EmployeeData; 713 | select * from DepartmentData; 714 | 715 | ``` 716 | 717 | 718 | 719 | # Update syntax for single value update 720 | 721 | 722 | ```sql 723 | 724 | SET SQL_SAFE_UPDATES = 0; 725 | 726 | update EmployeeData set salary = 10000 ; 727 | 728 | ``` 729 | 730 | 731 | # Update syntax with where clause 732 | 733 | 734 | ```sql 735 | 736 | 737 | update EmployeeData set salary = 12000 where emp_age> 30 ; 738 | 739 | 740 | ``` 741 | 742 | # Update multiple values 743 | 744 | ```sql 745 | 746 | update EmployeeData set salary = 12000 , dept_id = 104 where emp_id = 32 ; 747 | 748 | 749 | ``` 750 | 751 | # Update col with constant values or derived calculations - ( aggregations / case when statement ) 752 | 753 | ```sql 754 | 755 | 756 | update EmployeeData set salary = case when dept_id = 101 then salary*1.1 when dept_id = 104 then salary*1.2 else salary end; 757 | 758 | 759 | 760 | ``` 761 | 762 | 763 | # Update statement using join - here we will see how to join dept name from DepartmentData table to EmployeeData ( Workbench - MySql) 764 | 765 | 766 | * Adding a new column dept_name in employee table - initially all values will be null 767 | 768 | ```sql 769 | 770 | alter table EmployeeData add dept_name varchar(20); 771 | 772 | ``` 773 | 774 | # Update 
using join 775 | 776 | ```sql 777 | 778 | update EmployeeData e 779 | inner join DepartmentData d on e.dept_id=d.dept_id set e.dept_name=d.dept_name; 780 | 781 | 782 | ``` 783 | 784 | # Interview question on swapping the genders - 785 | 786 | * let's first add a gender column in EmployeeData Table 787 | 788 | ```sql 789 | 790 | alter table EmployeeData add gender varchar(15); 791 | 792 | ``` 793 | 794 | ```sql 795 | 796 | update employeedata set gender = case when dept_id = 101 then 'Male' when dept_id = 103 then 'Female' else 'Male' end; 797 | select * from employeedata; 798 | 799 | ``` 800 | 801 | 802 | # Swap gender ~ 803 | 804 | ```sql 805 | 806 | 807 | update employeedata set gender = case when gender = 'Male' then 'Female' when gender = 'Female' then 'Male' end; 808 | 809 | 810 | ``` 811 | 812 | * note - here we can't use two separate update statements, as a single case when is necessary for the desired output 813 | 814 | 815 | 816 | # Steps to check before updating the database - convert it into a select statement to get a run time overview - 817 | 818 | ```sql 819 | 820 | select * , case when gender = 'Male' then 'Female' when gender = 'Female' then 'Male' end as updated_gender from employeedata; 821 | 822 | ``` 823 | 824 | --- 825 | --- 826 | 827 | 828 | # 12 - Slowly Changing Dimensions In Data warehousing with iPhone 11 Example | SCD 1/2/3 829 | 830 | * Watch video for explanation - [Link](https://youtu.be/ejdIgYPfcV4?si=Ro5YAye2fWMO06mb) 831 | 832 | --- 833 | --- 834 | 835 | # 13 - Custom Sort Trick in SQL | Sorting Happiness Index Data with India on Top | SQL Interview Question 836 | 837 | 838 | ```sql 839 | 840 | /* CREATE TABLE */ 841 | CREATE TABLE IF NOT EXISTS TABLE_NAME( 842 | Ranks INT(11), 843 | Country VARCHAR( 100 ), 844 | Happiness_2021  DECIMAL( 10 , 2 ), 845 | Happiness_2020 DECIMAL( 10 , 2 ), 846 | 2022_Population INT(11) 847 | ); 848 | 849 | /* INSERT QUERY */ 850 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021
,Happiness_2020,2022_Population ) 851 | VALUES 852 | ( 853 | 1,'Finland',7.842,7.809,5554960 854 | ); 855 | 856 | /* INSERT QUERY */ 857 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 858 | VALUES 859 | ( 860 | 2,'Denmark',7.62,7.646,5834950 861 | ); 862 | 863 | /* INSERT QUERY */ 864 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 865 | VALUES 866 | ( 867 | 3,'Switzerland',7.571,7.56,8773637 868 | ); 869 | 870 | /* INSERT QUERY */ 871 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 872 | VALUES 873 | ( 874 | 4,'Iceland',7.554,7.504,345393 875 | ); 876 | 877 | /* INSERT QUERY */ 878 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 879 | VALUES 880 | ( 881 | 5,'Netherlands',7.464,7.449,17211447 882 | ); 883 | 884 | /* INSERT QUERY */ 885 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 886 | VALUES 887 | ( 888 | 6,'Norway',7.392,7.488,5511370 889 | ); 890 | 891 | /* INSERT QUERY */ 892 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 893 | VALUES 894 | ( 895 | 7,'Sweden',7.363,7.353,10218971 896 | ); 897 | 898 | /* INSERT QUERY */ 899 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 900 | VALUES 901 | ( 902 | 8,'Luxembourg',7.324,7.238,642371 903 | ); 904 | 905 | /* INSERT QUERY */ 906 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 907 | VALUES 908 | ( 909 | 9,'New Zealand',7.277,7.3,4898203 910 | ); 911 | 912 | /* INSERT QUERY */ 913 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 914 | VALUES 915 | ( 916 | 10,'Austria',7.268,7.294,9066710 917 | ); 918 | 919 | /* INSERT QUERY */ 920 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 921 | VALUES 922 | ( 923 | 
11,'Australia',7.183,7.223,26068792 924 | ); 925 | 926 | /* INSERT QUERY */ 927 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 928 | VALUES 929 | ( 930 | 12,'Israel',7.157,7.129,8922892 931 | ); 932 | 933 | /* INSERT QUERY */ 934 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 935 | VALUES 936 | ( 937 | 13,'Germany',7.155,7.076,83883596 938 | ); 939 | 940 | /* INSERT QUERY */ 941 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 942 | VALUES 943 | ( 944 | 14,'Canada',7.103,7.232,38388419 945 | ); 946 | 947 | /* INSERT QUERY */ 948 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 949 | VALUES 950 | ( 951 | 15,'Ireland',7.085,7.129,5020199 952 | ); 953 | 954 | /* INSERT QUERY */ 955 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 956 | VALUES 957 | ( 958 | 16,'Costa Rica',7.069,7.121,5182354 959 | ); 960 | 961 | /* INSERT QUERY */ 962 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 963 | VALUES 964 | ( 965 | 17,'United Kingdom',7.064,7.165,68497907 966 | ); 967 | 968 | /* INSERT QUERY */ 969 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 970 | VALUES 971 | ( 972 | 18,'Czech Republic',6.965,6.911,10736784 973 | ); 974 | 975 | /* INSERT QUERY */ 976 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 977 | VALUES 978 | ( 979 | 19,'United States',6.951,6.94,334805269 980 | ); 981 | 982 | /* INSERT QUERY */ 983 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 984 | VALUES 985 | ( 986 | 20,'Belgium',6.834,6.864,11668278 987 | ); 988 | 989 | /* INSERT QUERY */ 990 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 991 | VALUES 992 | ( 993 | 21,'France',6.69,6.664,65584518 994 | 
); 995 | 996 | /* INSERT QUERY */ 997 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 998 | VALUES 999 | ( 1000 | 22,'Bahrain',6.647,6.227,1783983 1001 | ); 1002 | 1003 | /* INSERT QUERY */ 1004 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1005 | VALUES 1006 | ( 1007 | 23,'Malta',6.602,6.773,444033 1008 | ); 1009 | 1010 | /* INSERT QUERY */ 1011 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1012 | VALUES 1013 | ( 1014 | 24,'Taiwan',6.584,6.455,23888595 1015 | ); 1016 | 1017 | /* INSERT QUERY */ 1018 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1019 | VALUES 1020 | ( 1021 | 25,'United Arab Emirates',6.561,6.791,10081785 1022 | ); 1023 | 1024 | /* INSERT QUERY */ 1025 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1026 | VALUES 1027 | ( 1028 | 26,'Saudi Arabia',6.494,6.406,35844909 1029 | ); 1030 | 1031 | /* INSERT QUERY */ 1032 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1033 | VALUES 1034 | ( 1035 | 27,'Spain',6.491,6.401,46719142 1036 | ); 1037 | 1038 | /* INSERT QUERY */ 1039 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1040 | VALUES 1041 | ( 1042 | 28,'Italy',6.483,6.387,60262770 1043 | ); 1044 | 1045 | /* INSERT QUERY */ 1046 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1047 | VALUES 1048 | ( 1049 | 29,'Slovenia',6.461,6.363,2078034 1050 | ); 1051 | 1052 | /* INSERT QUERY */ 1053 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1054 | VALUES 1055 | ( 1056 | 30,'Guatemala',6.435,6.399,18584039 1057 | ); 1058 | 1059 | /* INSERT QUERY */ 1060 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1061 | VALUES 1062 | ( 1063 | 
31,'Uruguay',6.431,6.44,3496016 1064 | ); 1065 | 1066 | /* INSERT QUERY */ 1067 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1068 | VALUES 1069 | ( 1070 | 32,'Singapore',6.377,6.377,5943546 1071 | ); 1072 | 1073 | /* INSERT QUERY */ 1074 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1075 | VALUES 1076 | ( 1077 | 33,'Slovakia',6.331,6.281,5460193 1078 | ); 1079 | 1080 | /* INSERT QUERY */ 1081 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1082 | VALUES 1083 | ( 1084 | 34,'Brazil',6.33,6.376,215353593 1085 | ); 1086 | 1087 | /* INSERT QUERY */ 1088 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1089 | VALUES 1090 | ( 1091 | 35,'Mexico',6.317,6.465,131562772 1092 | ); 1093 | 1094 | /* INSERT QUERY */ 1095 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1096 | VALUES 1097 | ( 1098 | 36,'Jamaica',6.309,5.89,2985094 1099 | ); 1100 | 1101 | /* INSERT QUERY */ 1102 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1103 | VALUES 1104 | ( 1105 | 37,'Lithuania',6.255,6.215,2661708 1106 | ); 1107 | 1108 | /* INSERT QUERY */ 1109 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1110 | VALUES 1111 | ( 1112 | 38,'Cyprus',6.223,6.159,1223387 1113 | ); 1114 | 1115 | /* INSERT QUERY */ 1116 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1117 | VALUES 1118 | ( 1119 | 39,'Estonia',6.189,6.022,1321910 1120 | ); 1121 | 1122 | /* INSERT QUERY */ 1123 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1124 | VALUES 1125 | ( 1126 | 40,'Panama',6.18,6.305,4446964 1127 | ); 1128 | 1129 | /* INSERT QUERY */ 1130 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1131 | VALUES 1132 | ( 1133 
| 41,'Uzbekistan',6.179,6.258,34382084 1134 | ); 1135 | 1136 | /* INSERT QUERY */ 1137 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1138 | VALUES 1139 | ( 1140 | 42,'Chile',6.172,6.228,19250195 1141 | ); 1142 | 1143 | /* INSERT QUERY */ 1144 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1145 | VALUES 1146 | ( 1147 | 43,'Poland',6.166,6.186,37739785 1148 | ); 1149 | 1150 | /* INSERT QUERY */ 1151 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1152 | VALUES 1153 | ( 1154 | 44,'Kazakhstan',6.152,6.058,19205043 1155 | ); 1156 | 1157 | /* INSERT QUERY */ 1158 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1159 | VALUES 1160 | ( 1161 | 45,'Romania',6.14,6.124,19031335 1162 | ); 1163 | 1164 | /* INSERT QUERY */ 1165 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1166 | VALUES 1167 | ( 1168 | 46,'Kuwait',6.106,6.102,4380326 1169 | ); 1170 | 1171 | /* INSERT QUERY */ 1172 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1173 | VALUES 1174 | ( 1175 | 47,'Serbia',6.078,5.778,8653016 1176 | ); 1177 | 1178 | /* INSERT QUERY */ 1179 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1180 | VALUES 1181 | ( 1182 | 48,'El Salvador',6.061,6.348,6550389 1183 | ); 1184 | 1185 | /* INSERT QUERY */ 1186 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1187 | VALUES 1188 | ( 1189 | 49,'Mauritius',6.049,6.101,1274727 1190 | ); 1191 | 1192 | /* INSERT QUERY */ 1193 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1194 | VALUES 1195 | ( 1196 | 50,'Latvia',6.032,5.95,1848837 1197 | ); 1198 | 1199 | /* INSERT QUERY */ 1200 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1201 | VALUES 1202 
| ( 1203 | 51,'Colombia',6.012,6.163,51512762 1204 | ); 1205 | 1206 | /* INSERT QUERY */ 1207 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1208 | VALUES 1209 | ( 1210 | 52,'Hungary',5.992,6,9606259 1211 | ); 1212 | 1213 | /* INSERT QUERY */ 1214 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1215 | VALUES 1216 | ( 1217 | 53,'Thailand',5.985,5.999,70078203 1218 | ); 1219 | 1220 | /* INSERT QUERY */ 1221 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1222 | VALUES 1223 | ( 1224 | 54,'Nicaragua',5.972,6.137,6779100 1225 | ); 1226 | 1227 | /* INSERT QUERY */ 1228 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1229 | VALUES 1230 | ( 1231 | 55,'Japan',5.94,5.871,125584838 1232 | ); 1233 | 1234 | /* INSERT QUERY */ 1235 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1236 | VALUES 1237 | ( 1238 | 57,'Portugal',5.929,5.911,10140570 1239 | ); 1240 | 1241 | /* INSERT QUERY */ 1242 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1243 | VALUES 1244 | ( 1245 | 56,'Argentina',5.929,5.975,46010234 1246 | ); 1247 | 1248 | /* INSERT QUERY */ 1249 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1250 | VALUES 1251 | ( 1252 | 58,'Honduras',5.919,5.953,10221247 1253 | ); 1254 | 1255 | /* INSERT QUERY */ 1256 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1257 | VALUES 1258 | ( 1259 | 59,'Croatia',5.882,5.505,4059286 1260 | ); 1261 | 1262 | /* INSERT QUERY */ 1263 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1264 | VALUES 1265 | ( 1266 | 60,'Philippines',5.88,6.006,112508994 1267 | ); 1268 | 1269 | /* INSERT QUERY */ 1270 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1271 
| VALUES 1272 | ( 1273 | 61,'South Korea',5.845,5.872,51329899 1274 | ); 1275 | 1276 | /* INSERT QUERY */ 1277 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1278 | VALUES 1279 | ( 1280 | 62,'Peru',5.84,5.797,33684208 1281 | ); 1282 | 1283 | /* INSERT QUERY */ 1284 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1285 | VALUES 1286 | ( 1287 | 63,'Bosnia And Herzegovina',5.813,5.674,3249317 1288 | ); 1289 | 1290 | /* INSERT QUERY */ 1291 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1292 | VALUES 1293 | ( 1294 | 64,'Moldova',5.766,5.608,4013171 1295 | ); 1296 | 1297 | /* INSERT QUERY */ 1298 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1299 | VALUES 1300 | ( 1301 | 65,'Ecuador',5.764,5.925,18113361 1302 | ); 1303 | 1304 | /* INSERT QUERY */ 1305 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1306 | VALUES 1307 | ( 1308 | 66,'Kyrgyzstan',5.744,5.542,6728271 1309 | ); 1310 | 1311 | /* INSERT QUERY */ 1312 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1313 | VALUES 1314 | ( 1315 | 67,'Greece',5.723,5.515,10316637 1316 | ); 1317 | 1318 | /* INSERT QUERY */ 1319 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1320 | VALUES 1321 | ( 1322 | 68,'Bolivia',5.716,5.747,11992656 1323 | ); 1324 | 1325 | /* INSERT QUERY */ 1326 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1327 | VALUES 1328 | ( 1329 | 69,'Mongolia',5.677,5.456,3378078 1330 | ); 1331 | 1332 | /* INSERT QUERY */ 1333 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1334 | VALUES 1335 | ( 1336 | 70,'Paraguay',5.653,5.692,7305843 1337 | ); 1338 | 1339 | /* INSERT QUERY */ 1340 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 
,Happiness_2020,2022_Population ) 1341 | VALUES 1342 | ( 1343 | 71,'Montenegro',5.581,5.546,627950 1344 | ); 1345 | 1346 | /* INSERT QUERY */ 1347 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1348 | VALUES 1349 | ( 1350 | 72,'Dominican Republic',5.545,5.689,11056370 1351 | ); 1352 | 1353 | /* INSERT QUERY */ 1354 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1355 | VALUES 1356 | ( 1357 | 73,'Belarus',5.534,5.54,9432800 1358 | ); 1359 | 1360 | /* INSERT QUERY */ 1361 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1362 | VALUES 1363 | ( 1364 | 75,'Hong Kong',5.477,5.51,7604299 1365 | ); 1366 | 1367 | /* INSERT QUERY */ 1368 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1369 | VALUES 1370 | ( 1371 | 74,'Russia',5.477,5.546,145805947 1372 | ); 1373 | 1374 | /* INSERT QUERY */ 1375 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1376 | VALUES 1377 | ( 1378 | 76,'Tajikistan',5.466,5.556,9957464 1379 | ); 1380 | 1381 | /* INSERT QUERY */ 1382 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1383 | VALUES 1384 | ( 1385 | 77,'Vietnam',5.411,5.353,98953541 1386 | ); 1387 | 1388 | /* INSERT QUERY */ 1389 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1390 | VALUES 1391 | ( 1392 | 78,'Libya',5.41,5.489,7040745 1393 | ); 1394 | 1395 | /* INSERT QUERY */ 1396 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1397 | VALUES 1398 | ( 1399 | 79,'Malaysia',5.384,5.384,33181072 1400 | ); 1401 | 1402 | /* INSERT QUERY */ 1403 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1404 | VALUES 1405 | ( 1406 | 80,'Indonesia',5.345,5.286,279134505 1407 | ); 1408 | 1409 | /* INSERT QUERY */ 1410 | INSERT INTO TABLE_NAME( 
Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1411 | VALUES 1412 | ( 1413 | 81,'Republic of the Congo',5.342,5.194,5797805 1414 | ); 1415 | 1416 | /* INSERT QUERY */ 1417 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1418 | VALUES 1419 | ( 1420 | 82,'China',5.339,5.124,1448471400 1421 | ); 1422 | 1423 | /* INSERT QUERY */ 1424 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1425 | VALUES 1426 | ( 1427 | 83,'Ivory Coast',5.306,5.233,27742298 1428 | ); 1429 | 1430 | /* INSERT QUERY */ 1431 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1432 | VALUES 1433 | ( 1434 | 84,'Armenia',5.283,4.677,2971966 1435 | ); 1436 | 1437 | /* INSERT QUERY */ 1438 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1439 | VALUES 1440 | ( 1441 | 85,'Nepal',5.269,5.137,30225582 1442 | ); 1443 | 1444 | /* INSERT QUERY */ 1445 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1446 | VALUES 1447 | ( 1448 | 86,'Bulgaria',5.266,5.102,6844597 1449 | ); 1450 | 1451 | /* INSERT QUERY */ 1452 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1453 | VALUES 1454 | ( 1455 | 87,'Maldives',5.198,5.198,540985 1456 | ); 1457 | 1458 | /* INSERT QUERY */ 1459 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1460 | VALUES 1461 | ( 1462 | 88,'Azerbaijan',5.171,5.165,10300205 1463 | ); 1464 | 1465 | /* INSERT QUERY */ 1466 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1467 | VALUES 1468 | ( 1469 | 89,'Cameroon',5.142,5.085,27911548 1470 | ); 1471 | 1472 | /* INSERT QUERY */ 1473 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1474 | VALUES 1475 | ( 1476 | 90,'Senegal',5.132,4.981,17653671 1477 | ); 1478 | 1479 | /* INSERT QUERY */ 
1480 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1481 | VALUES 1482 | ( 1483 | 91,'Albania',5.117,4.883,2866374 1484 | ); 1485 | 1486 | /* INSERT QUERY */ 1487 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1488 | VALUES 1489 | ( 1490 | 92,'North Macedonia',5.101,5.16,2081304 1491 | ); 1492 | 1493 | /* INSERT QUERY */ 1494 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1495 | VALUES 1496 | ( 1497 | 93,'Ghana',5.088,5.148,32395450 1498 | ); 1499 | 1500 | /* INSERT QUERY */ 1501 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1502 | VALUES 1503 | ( 1504 | 94,'Niger',5.074,4.91,26083660 1505 | ); 1506 | 1507 | /* INSERT QUERY */ 1508 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1509 | VALUES 1510 | ( 1511 | 95,'Turkmenistan',5.066,5.119,6201943 1512 | ); 1513 | 1514 | /* INSERT QUERY */ 1515 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1516 | VALUES 1517 | ( 1518 | 96,'Gambia',5.051,4.751,2558482 1519 | ); 1520 | 1521 | /* INSERT QUERY */ 1522 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1523 | VALUES 1524 | ( 1525 | 97,'Benin',5.045,5.216,12784726 1526 | ); 1527 | 1528 | /* INSERT QUERY */ 1529 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1530 | VALUES 1531 | ( 1532 | 98,'Laos',5.03,4.889,7481023 1533 | ); 1534 | 1535 | /* INSERT QUERY */ 1536 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1537 | VALUES 1538 | ( 1539 | 99,'Bangladesh',5.025,4.833,167885689 1540 | ); 1541 | 1542 | /* INSERT QUERY */ 1543 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1544 | VALUES 1545 | ( 1546 | 100,'Guinea',4.984,4.949,13865691 1547 | ); 1548 | 1549 | /* INSERT 
QUERY */ 1550 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1551 | VALUES 1552 | ( 1553 | 101,'South Africa',4.956,4.814,60756135 1554 | ); 1555 | 1556 | /* INSERT QUERY */ 1557 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1558 | VALUES 1559 | ( 1560 | 102,'Turkey',4.948,5.132,85561976 1561 | ); 1562 | 1563 | /* INSERT QUERY */ 1564 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1565 | VALUES 1566 | ( 1567 | 103,'Pakistan',4.934,5.693,229488994 1568 | ); 1569 | 1570 | /* INSERT QUERY */ 1571 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1572 | VALUES 1573 | ( 1574 | 104,'Morocco',4.918,5.095,37772756 1575 | ); 1576 | 1577 | /* INSERT QUERY */ 1578 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1579 | VALUES 1580 | ( 1581 | 105,'Venezuela',4.892,5.053,29266991 1582 | ); 1583 | 1584 | /* INSERT QUERY */ 1585 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1586 | VALUES 1587 | ( 1588 | 106,'Georgia',4.891,4.673,3968738 1589 | ); 1590 | 1591 | /* INSERT QUERY */ 1592 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1593 | VALUES 1594 | ( 1595 | 107,'Algeria',4.887,5.005,45350148 1596 | ); 1597 | 1598 | /* INSERT QUERY */ 1599 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1600 | VALUES 1601 | ( 1602 | 108,'Ukraine',4.875,4.561,43192122 1603 | ); 1604 | 1605 | /* INSERT QUERY */ 1606 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1607 | VALUES 1608 | ( 1609 | 109,'Iraq',4.854,4.785,42164965 1610 | ); 1611 | 1612 | /* INSERT QUERY */ 1613 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1614 | VALUES 1615 | ( 1616 | 110,'Gabon',4.852,4.829,2331533 1617 | ); 
1618 | 1619 | /* INSERT QUERY */ 1620 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1621 | VALUES 1622 | ( 1623 | 111,'Burkina Faso',4.834,4.769,22102838 1624 | ); 1625 | 1626 | /* INSERT QUERY */ 1627 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1628 | VALUES 1629 | ( 1630 | 112,'Cambodia',4.83,4.848,17168639 1631 | ); 1632 | 1633 | /* INSERT QUERY */ 1634 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1635 | VALUES 1636 | ( 1637 | 113,'Mozambique',4.794,4.624,33089461 1638 | ); 1639 | 1640 | /* INSERT QUERY */ 1641 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1642 | VALUES 1643 | ( 1644 | 114,'Nigeria',4.759,4.724,216746934 1645 | ); 1646 | 1647 | /* INSERT QUERY */ 1648 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1649 | VALUES 1650 | ( 1651 | 115,'Mali',4.723,4.729,21473764 1652 | ); 1653 | 1654 | /* INSERT QUERY */ 1655 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1656 | VALUES 1657 | ( 1658 | 116,'Iran',4.721,4.672,86022837 1659 | ); 1660 | 1661 | /* INSERT QUERY */ 1662 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1663 | VALUES 1664 | ( 1665 | 117,'Uganda',4.636,4.432,48432863 1666 | ); 1667 | 1668 | /* INSERT QUERY */ 1669 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1670 | VALUES 1671 | ( 1672 | 118,'Liberia',4.625,4.558,5305117 1673 | ); 1674 | 1675 | /* INSERT QUERY */ 1676 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1677 | VALUES 1678 | ( 1679 | 119,'Kenya',4.607,4.583,56215221 1680 | ); 1681 | 1682 | /* INSERT QUERY */ 1683 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1684 | VALUES 1685 | ( 1686 | 
120,'Tunisia',4.596,4.392,12046656 1687 | ); 1688 | 1689 | /* INSERT QUERY */ 1690 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1691 | VALUES 1692 | ( 1693 | 121,'Lebanon',4.584,4.772,6684849 1694 | ); 1695 | 1696 | /* INSERT QUERY */ 1697 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1698 | VALUES 1699 | ( 1700 | 122,'Namibia',4.574,4.571,2633874 1701 | ); 1702 | 1703 | /* INSERT QUERY */ 1704 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1705 | VALUES 1706 | ( 1707 | 123,'Myanmar',4.426,4.308,55227143 1708 | ); 1709 | 1710 | /* INSERT QUERY */ 1711 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1712 | VALUES 1713 | ( 1714 | 124,'Jordan',4.395,4.633,10300869 1715 | ); 1716 | 1717 | /* INSERT QUERY */ 1718 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1719 | VALUES 1720 | ( 1721 | 125,'Chad',4.355,4.423,17413580 1722 | ); 1723 | 1724 | /* INSERT QUERY */ 1725 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1726 | VALUES 1727 | ( 1728 | 126,'Sri Lanka',4.325,4.327,21575842 1729 | ); 1730 | 1731 | /* INSERT QUERY */ 1732 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1733 | VALUES 1734 | ( 1735 | 127,'Eswatini',4.308,4.308,1184817 1736 | ); 1737 | 1738 | /* INSERT QUERY */ 1739 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1740 | VALUES 1741 | ( 1742 | 128,'Comoros',4.289,4.289,907419 1743 | ); 1744 | 1745 | /* INSERT QUERY */ 1746 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1747 | VALUES 1748 | ( 1749 | 129,'Egypt',4.283,4.151,106156692 1750 | ); 1751 | 1752 | /* INSERT QUERY */ 1753 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1754 | VALUES 
1755 | ( 1756 | 130,'Ethiopia',4.275,4.186,120812698 1757 | ); 1758 | 1759 | /* INSERT QUERY */ 1760 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1761 | VALUES 1762 | ( 1763 | 131,'Mauritania',4.227,4.375,4901981 1764 | ); 1765 | 1766 | /* INSERT QUERY */ 1767 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1768 | VALUES 1769 | ( 1770 | 132,'Madagascar',4.208,4.166,29178077 1771 | ); 1772 | 1773 | /* INSERT QUERY */ 1774 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1775 | VALUES 1776 | ( 1777 | 133,'Togo',4.107,4.187,8680837 1778 | ); 1779 | 1780 | /* INSERT QUERY */ 1781 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1782 | VALUES 1783 | ( 1784 | 134,'Zambia',4.073,3.759,19470234 1785 | ); 1786 | 1787 | /* INSERT QUERY */ 1788 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1789 | VALUES 1790 | ( 1791 | 135,'Sierra Leone',3.849,3.926,8306436 1792 | ); 1793 | 1794 | /* INSERT QUERY */ 1795 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1796 | VALUES 1797 | ( 1798 | 136,'India',3.819,3.573,1406631776 1799 | ); 1800 | 1801 | /* INSERT QUERY */ 1802 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1803 | VALUES 1804 | ( 1805 | 137,'Burundi',3.775,3.775,12624840 1806 | ); 1807 | 1808 | /* INSERT QUERY */ 1809 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1810 | VALUES 1811 | ( 1812 | 138,'Yemen',3.658,3.527,31154867 1813 | ); 1814 | 1815 | /* INSERT QUERY */ 1816 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1817 | VALUES 1818 | ( 1819 | 139,'Tanzania',3.623,3.476,63298550 1820 | ); 1821 | 1822 | /* INSERT QUERY */ 1823 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 
,Happiness_2020,2022_Population ) 1824 | VALUES 1825 | ( 1826 | 140,'Haiti',3.615,3.721,11680283 1827 | ); 1828 | 1829 | /* INSERT QUERY */ 1830 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1831 | VALUES 1832 | ( 1833 | 141,'Malawi',3.6,3.538,20180839 1834 | ); 1835 | 1836 | /* INSERT QUERY */ 1837 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1838 | VALUES 1839 | ( 1840 | 142,'Lesotho',3.512,3.653,2175699 1841 | ); 1842 | 1843 | /* INSERT QUERY */ 1844 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1845 | VALUES 1846 | ( 1847 | 143,'Botswana',3.467,3.479,2441162 1848 | ); 1849 | 1850 | /* INSERT QUERY */ 1851 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1852 | VALUES 1853 | ( 1854 | 144,'Rwanda',3.415,3.312,13600464 1855 | ); 1856 | 1857 | /* INSERT QUERY */ 1858 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1859 | VALUES 1860 | ( 1861 | 145,'Zimbabwe',3.145,3.299,15331428 1862 | 1863 | 1864 | ); 1865 | 1866 | /* INSERT QUERY */ 1867 | INSERT INTO TABLE_NAME( Ranks,Country,Happiness_2021 ,Happiness_2020,2022_Population ) 1868 | VALUES 1869 | ( 1870 | 146,'Afghanistan',2.523,2.567,40754388 1871 | ); 1872 | 1873 | ``` 1874 | 1875 | ```sql 1876 | 1877 | 1878 | select * from table_name; 1879 | 1880 | ``` 1881 | 1882 | ```sql 1883 | 1884 | SELECT * 1885 | FROM ( 1886 | SELECT *, 1887 | CASE 1888 | WHEN country = 'India' THEN 1 1889 | WHEN country = 'Pakistan' THEN 2 1890 | WHEN country = 'Sri Lanka' THEN 3 1891 | ELSE 0 1892 | END AS country_derived 1893 | FROM table_name 1894 | ) temp 1895 | ORDER BY country_derived DESC, Happiness_2020 DESC; 1896 | 1897 | ``` 1898 | 1899 | 1900 | # Also we can directly give in order by rather than subquery 1901 | 1902 | 1903 | ```sql 1904 | 1905 | select * from table_name 1906 | 1907 | order by CASE 1908 | WHEN country = 
'India' THEN 1 1909 | WHEN country = 'Pakistan' THEN 2 1910 | WHEN country = 'Sri Lanka' THEN 3 1911 | ELSE 0 1912 | END desc , Happiness_2020 desc; 1913 | 1914 | ``` 1915 | 1916 | 1917 | # 14 - Problem with Running SUM in SQL | Watch it to Avoid The Mistake 1918 | 1919 | 1920 | ```sql 1921 | create table products (product_id Varchar(2) , cost INT); 1922 | 1923 | insert into products values('P1' , 200); 1924 | insert into products values('P2' , 300); 1925 | insert into products values('P3' , 300); 1926 | insert into products values('P4' , 500); 1927 | insert into products values('P5' , 600); 1928 | 1929 | select * from products; 1930 | 1931 | ``` 1932 | 1933 | 1934 | ```sql 1935 | 1936 | 1937 | select *, sum(cost) over(order by cost) as running_sum from products; 1938 | 1939 | ``` 1940 | 1941 | 1942 | * the output of this gives the same running sum for both rows with the value 300, because they are duplicates (the default RANGE frame treats tied rows as one group) 1943 | 1944 | * to avoid this and get the correct result, there are two methods below -- 1945 | 1946 | * 1 - First, specify an additional tie-breaking column in the order by to make the ordering unique, such as product_id in this case 1947 | 1948 | ```sql 1949 | 1950 | select *, sum(cost) over(order by cost, product_id) as running_sum from products; 1951 | 1952 | ``` 1953 | 1954 | * 2 - or use the - rows between unbounded preceding and current row - clause 1955 | 1956 | ```sql 1957 | 1958 | select *, sum(cost) over(order by cost asc rows between unbounded preceding and current row) as running_sum from products; 1959 | 1960 | 1961 | ``` 1962 | 1963 | # 15 - Difference Between count(*) ,count(0),count(-1),count(col),count('youtube') | SQL Interview question 1964 | 1965 | ```sql 1966 | 1967 | count(*) - gives the count of the total no. of rows in the table 1968 | count(0) , count(-1) , count('jaspreet') ... anything like this counts that constant value once per row, which is the same as count(*) 1969 | 1970 | count(dept_name) .. means count over a column name .. 
gives the count of rows except the null ones 1971 | 1972 | ``` 1973 | --- 1974 | --- 1975 | 1976 | 1977 | ## 16 - SQL to Count Occurrence of a Character/Word in a String 1978 | 1979 | ```sql 1980 | create table strings (name varchar(50)); 1981 | delete from strings; 1982 | insert into strings values ('Ankit Bansal'),('Ram Kumar Verma'),('Akshay Kumar Ak k'),('Rahul'); 1983 | 1984 | 1985 | select * from strings; 1986 | ``` 1987 | 1988 | * Query to count the no. of spaces in the string -- 1989 | 1990 | ```sql 1991 | select name , replace( name , ' ' , '') as rep_name , length(name) - length(replace( name , ' ' , '')) as cnt from strings; 1992 | 1993 | ``` 1994 | 1995 | * Query to count the no. of occurrences of 'Ak' in the strings -- this is a sample use case 1996 | 1997 | ```sql 1998 | 1999 | SELECT 2000 | name, 2001 | REPLACE(name, 'Ak', '') AS rep_name, 2002 | ROUND((LENGTH(name) - LENGTH(REPLACE(name, 'Ak', ''))) / LENGTH('Ak')) AS cnt 2003 | FROM strings; 2004 | 2005 | ``` 2006 | 2007 | --- 2008 | --- 2009 | --------------------------------------------------------------------------------