├── 01.sql ├── 02.sql ├── 04.sql ├── 05.sql ├── 08.sql ├── 09.sql ├── 10.sql ├── 11.sql ├── 12.sql ├── 13.sql ├── 14.sql ├── 16.sql ├── 17.sql ├── 18.sql ├── 19.sql ├── 20.sql ├── 21.sql ├── 22.sql ├── 23.sql ├── 24.sql ├── 25 v1.sql ├── 26.sql ├── 27.sql ├── 28.sql ├── 29.sql ├── 30.sql ├── 31.sql ├── 32.sql ├── 33.sql ├── 34.sql ├── 35.sql ├── 36.sql ├── 37.sql ├── 38.sql ├── 39.sql ├── 40.sql ├── 41.sql ├── 42.sql ├── 43.sql ├── 44.sql ├── 45.sql ├── 46.sql ├── 47.sql ├── 48.sql ├── 49.sql ├── 50.sql ├── README.md ├── SQL 15 .sql ├── day 03.sql ├── day 06.sql └── day 07.sql /01.sql:
--------------------------------------------------------------------------------
-- 01/50 Days SQL challenge

-- Create the employees table
CREATE TABLE employees (
    employee_id SERIAL PRIMARY KEY,
    name VARCHAR(100),
    department VARCHAR(50),
    salary DECIMAL(10, 2)
);

-- Insert records for three departments
INSERT INTO employees (name, department, salary) VALUES
('John Doe', 'Engineering', 63000),
('Jane Smith', 'Engineering', 55000),
('Michael Johnson', 'Engineering', 64000),
('Emily Davis', 'Marketing', 58000),
('Chris Brown', 'Marketing', 56000),
('Emma Wilson', 'Marketing', 59000),
('Alex Lee', 'Sales', 58000),
('Sarah Adams', 'Sales', 58000),
('Ryan Clark', 'Sales', 61000);

/*

Write the SQL query to find the second highest salary

*/

-- -------------------------
-- My Solution
-- -------------------------

-- Approach 1
-- OFFSET skips the single highest-paid row; note this picks one row
-- and ignores ties on the top salary.
SELECT * FROM employees
ORDER BY salary DESC
LIMIT 1 OFFSET 1;

-- ADDED new records
-- Fixed: list the columns and let SERIAL assign employee_id.
-- The previous "VALUES (11, 'zara', 'it', 63000)" wrote an explicit id
-- without advancing the sequence, so a later auto-generated id would
-- eventually collide with 11. Also added the missing terminator.
INSERT INTO employees (name, department, salary)
VALUES ('zara', 'it', 63000);

-- Approach 2
-- Window function dense_rank (handles ties on salary correctly)
SELECT *
FROM
( SELECT *,
      DENSE_RANK() OVER( ORDER BY salary DESC) drn
  FROM employees
) as subquery
WHERE drn = 2;

-- Your Task

-- Question: Get the details of the employee with the second-highest salary from each department

/*
Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/
Follow me in insta :: https://www.instagram.com/zero_analyst/
Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst
*/

--------------------------------------------------------------------------------
/02.sql:
--------------------------------------------------------------------------------
-- Day 02/50

DROP TABLE IF EXISTS Orders;

CREATE TABLE Orders (
    OrderID INT PRIMARY KEY,
    OrderDate DATE,
    TotalAmount DECIMAL(10, 2)
);

DROP TABLE IF EXISTS Returns;
CREATE TABLE Returns (
    ReturnID INT PRIMARY KEY,
    OrderID INT,
    FOREIGN KEY (OrderID) REFERENCES Orders(OrderID)
);

INSERT INTO Orders (OrderID, OrderDate, TotalAmount) VALUES
(1, '2023-01-15', 150.50),
(2, '2023-02-20', 200.75),
(3, '2023-02-28', 300.25),
(4, '2023-03-10', 180.00),
(5, '2023-04-05', 250.80);

INSERT INTO Returns (ReturnID, OrderID) VALUES
(101, 2),
(102, 4),
(103, 5),
(104, 1),
(105, 3);

/*

Given the Orders table with columns OrderID,
OrderDate, and TotalAmount, and the
Returns table with columns ReturnID and OrderID,

write an SQL query to calculate the total
numbers of returned orders for each month

*/
-- total numbers of returns
-- group by month orders
-- LEFT JOIN

SELECT * FROM orders;
SELECT * FROM returns;

-- ------------------------------------
-- My Solution
-- ------------------------------------

-- Fixed: ORDER BY month sorted the 'M-YYYY' label as TEXT, giving
-- '1-2023', '10-2023', '11-2023', '12-2023', '2-2023', ... Sort on the
-- truncated date instead so months come out chronologically.
SELECT
    EXTRACT(MONTH FROM o.orderdate) || '-' || EXTRACT(YEAR FROM o.orderdate) as month,
    COUNT(r.returnid) as total_return
FROM returns as r
LEFT JOIN
orders as o
ON r.orderid = o.orderid
GROUP BY month, DATE_TRUNC('month', o.orderdate)
ORDER BY DATE_TRUNC('month', o.orderdate);

/*
Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/
Follow me in insta :: https://www.instagram.com/zero_analyst/
Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst
*/

--------------------------------------------------------------------------------
/04.sql:
--------------------------------------------------------------------------------
-- Day 04/50

-- Fixed: 02.sql already created a table named "orders" with a different
-- schema; drop it first so this CREATE does not fail when the files are
-- run in sequence against the same database.
DROP TABLE IF EXISTS orders;

create table orders(
    category varchar(20),
    product varchar(20),
    user_id int ,
    spend decimal(10, 2),  -- was INT: values like 246.00 / 299.99 were silently truncated
    transaction_date DATE
);

Insert into orders values
('appliance','refrigerator',165,246.00,'2021/12/26'),
('appliance','refrigerator',123,299.99,'2022/03/02'),
('appliance','washingmachine',123,219.80,'2022/03/02'),
('electronics','vacuum',178,152.00,'2022/04/05'),
('electronics','wirelessheadset',156, 249.90,'2022/07/08'),
('electronics','TV',145,189.00,'2022/07/15'),
('Television','TV',165,129.00,'2022/07/15'),
('Television','TV',163,129.00,'2022/07/15'),
('Television','TV',141,129.00,'2022/07/15'),
('toys','Ben10',145,189.00,'2022/07/15'),
('toys','Ben10',145,189.00,'2022/07/15'),
('toys','yoyo',165,129.00,'2022/07/15'),
('toys','yoyo',163,129.00,'2022/07/15'),
('toys','yoyo',141,129.00,'2022/07/15'),
('toys','yoyo',145,189.00,'2022/07/15'),
('electronics','vacuum',145,189.00,'2022/07/15');

/*
Find the top 2 products in the top 2 categories based on spend amount?
35 | */ 36 | 37 | -- top 2 category based on spend 38 | -- top 2 product in above best 2 category 39 | 40 | 41 | SELECT * FROM orders; 42 | 43 | 44 | WITH ranked_category 45 | AS 46 | ( 47 | SELECT 48 | category, 49 | total_spend_category 50 | FROM 51 | ( SELECT 52 | category, 53 | SUM(spend) as total_spend_category, 54 | DENSE_RANK() OVER( ORDER BY SUM(spend) DESC) drn 55 | FROM orders 56 | GROUP BY category 57 | ) as subquery 58 | WHERE drn <= 2 59 | ) 60 | 61 | SELECT 62 | category, 63 | product, 64 | total_spend_by_product 65 | FROM ( 66 | SELECT 67 | o.category, 68 | o.product, 69 | SUM(o.spend) as total_spend_by_product, 70 | DENSE_RANK() OVER(PARTITION BY o.category ORDER BY SUM(o.spend) DESC ) as pdrn 71 | FROM orders as o 72 | JOIN ranked_category as rc 73 | ON rc.category = o.category 74 | GROUP BY o.category, o.product 75 | ) subquery2 76 | WHERE pdrn <= 2 77 | 78 | 79 | 80 | -- Your Task Find top category and product that has least spend amount 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | /* 94 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 95 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 96 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 97 | */ 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | -------------------------------------------------------------------------------- /05.sql: -------------------------------------------------------------------------------- 1 | -- Day 05/30 SQL Challenge 2 | 3 | 4 | 5 | DROP TABLE IF EXISTS Employees; 6 | -- Create the Employee table 7 | CREATE TABLE Employees ( 8 | EmployeeID INT PRIMARY KEY, 9 | Name VARCHAR(50), 10 | Department VARCHAR(50), 11 | Salary DECIMAL(10, 2), 12 | HireDate DATE 13 | ); 14 | 15 | -- Insert sample records into the Employee table 16 | INSERT INTO Employees (EmployeeID, Name, Department, Salary, HireDate) VALUES 17 | (101, 'John Smith', 'Sales', 60000.00, '2022-01-15'), 18 | (102, 
'Jane Doe', 'Marketing', 55000.00, '2022-02-20'), 19 | (103, 'Michael Johnson', 'Finance', 70000.00, '2021-12-10'), 20 | (104, 'Emily Brown', 'Sales', 62000.00, '2022-03-05'), 21 | (106, 'Sam Brown', 'IT', 62000.00, '2022-03-05'), 22 | (105, 'Chris Wilson', 'Marketing', 58000.00, '2022-01-30'); 23 | 24 | 25 | /* 26 | 27 | Write a SQL query to retrieve the 28 | third highest salary from the Employee table. 29 | 30 | */ 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -- ----------------------- 42 | -- My Solutions 43 | -- ----------------------- 44 | 45 | 46 | SELECT 47 | salary as third_highest_salary 48 | FROM 49 | ( SELECT 50 | *, 51 | DENSE_RANK() OVER( ORDER BY salary desc) drn 52 | FROM employees 53 | ) as subquery 54 | WHERE drn = 3 55 | 56 | 57 | 58 | 59 | 60 | 61 | -- Your task 62 | -- Find the employee details who has highest salary from each department 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | SELECT 85 | salary 86 | FROM 87 | ( 88 | SELECT *, 89 | DENSE_RANK() OVER(ORDER BY salary) as drn 90 | FROM employees 91 | ) 92 | WHERE drn = 3 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | /* 104 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 105 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 106 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 107 | */ 108 | 109 | 110 | -------------------------------------------------------------------------------- /08.sql: -------------------------------------------------------------------------------- 1 | -- Day 08/50 2 | 3 | 4 | 5 | -- Create Product table 6 | DROP TABLE IF EXISTS Products; 7 | CREATE TABLE Products ( 8 | product_id SERIAL PRIMARY KEY, 9 | product_name VARCHAR(100), 10 | category VARCHAR(50), 11 | price DECIMAL(10, 2) 12 | ); 13 | 14 | -- Insert sample records into Product table 15 | INSERT INTO Products (product_name, category, price) VALUES 16 | ('Product A', 
'Category 1', 10.00),
('Product B', 'Category 2', 15.00),
('Product C', 'Category 1', 20.00),
('Product D', 'Category 3', 25.00);

-- Create Sales table
DROP TABLE IF EXISTS Sales;
CREATE TABLE Sales (
    sale_id SERIAL PRIMARY KEY,
    product_id INT,
    sale_date DATE,
    quantity INT,
    -- Fixed: the parent table is named "Products"; referencing "Product"
    -- made this CREATE TABLE fail outright.
    FOREIGN KEY (product_id) REFERENCES Products(product_id)
);

-- Insert sample records into Sales table
INSERT INTO Sales (product_id, sale_date, quantity) VALUES
(1, '2023-09-15', 5),
(2, '2023-10-20', 3),
(1, '2024-01-05', 2),
(3, '2024-02-10', 4),
(4, '2023-12-03', 1);

/*
Question

Write a SQL query to find all products that
haven't been sold in the last six months.

Return the product_id, product_name, category,
and price of these products.

*/

SELECT * FROM products;
SELECT * FROM sales;

-- everything from product table
-- there shouldn't be any sale in last 6 month
-- no sale
-- join

-- Fixed: the old LEFT JOIN with
--   WHERE s.sale_date IS NULL OR s.sale_date < CURRENT_DATE - INTERVAL '6 month'
-- tested each sale row individually, so a product sold both 8 months ago
-- AND last week still appeared (once per old sale, with duplicates).
-- NOT EXISTS judges the product as a whole: keep it only when it has no
-- sale at all within the last six months.
SELECT
    p.product_id,
    p.product_name,
    p.category,
    p.price
FROM products as p
WHERE NOT EXISTS (
    SELECT 1
    FROM sales as s
    WHERE s.product_id = p.product_id
      AND s.sale_date >= CURRENT_DATE - INTERVAL '6 month'
);

-- quick sanity check of the cutoff date
SELECT CURRENT_DATE - INTERVAL '6 month';

-- Your Task select all product which has not received any sale in current year;

/*
Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/
Follow me in insta ::
https://www.instagram.com/zero_analyst/ 152 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 153 | */ 154 | 155 | 156 | 157 | 158 | 159 | -------------------------------------------------------------------------------- /09.sql: -------------------------------------------------------------------------------- 1 | -- Day 09/50 2 | 3 | 4 | 5 | -- Create Customers table 6 | DROP TABLE IF EXISTS customers; 7 | CREATE TABLE Customers ( 8 | CustomerID INT, 9 | CustomerName VARCHAR(50) 10 | ); 11 | 12 | -- Create Purchases table 13 | DROP TABLE IF EXISTS purchases; 14 | CREATE TABLE Purchases ( 15 | PurchaseID INT, 16 | CustomerID INT, 17 | ProductName VARCHAR(50), 18 | PurchaseDate DATE 19 | ); 20 | 21 | -- Insert sample data into Customers table 22 | INSERT INTO Customers (CustomerID, CustomerName) VALUES 23 | (1, 'John'), 24 | (2, 'Emma'), 25 | (3, 'Michael'), 26 | (4, 'Ben'), 27 | (5, 'John') ; 28 | 29 | -- Insert sample data into Purchases table 30 | INSERT INTO Purchases (PurchaseID, CustomerID, ProductName, PurchaseDate) VALUES 31 | (100, 1, 'iPhone', '2024-01-01'), 32 | (101, 1, 'MacBook', '2024-01-20'), 33 | (102, 1, 'Airpods', '2024-03-10'), 34 | (103, 2, 'iPad', '2024-03-05'), 35 | (104, 2, 'iPhone', '2024-03-15'), 36 | (105, 3, 'MacBook', '2024-03-20'), 37 | (106, 3, 'Airpods', '2024-03-25'), 38 | (107, 4, 'iPhone', '2024-03-22'), 39 | (108, 4, 'Airpods', '2024-03-29'), 40 | (110, 5, 'Airpods', '2024-02-29'), 41 | (109, 5, 'iPhone', '2024-03-22'); 42 | 43 | 44 | /* 45 | Apple data analyst interview question 46 | 47 | Given two tables - Customers and Purchases, 48 | where Customers contains information about 49 | customers and Purchases contains information 50 | about their purchases, 51 | 52 | write a SQL query to find customers who 53 | bought Airpods after purchasing an iPhone. 
54 | 55 | */ 56 | 57 | -- Find out all customers who bought iPhone 58 | -- All customers who bought Airpods 59 | -- Customer has to buy Airpods after purchasing the iPhone 60 | 61 | 62 | 63 | 64 | 65 | SELECT * FROM customers; 66 | SELECT * FROM purchases; 67 | 68 | 69 | 70 | 71 | SELECT 72 | DISTINCT c.* 73 | FROM customers as c 74 | JOIN purchases as p1 75 | ON c.customerid = p1.customerid 76 | JOIN purchases p2 77 | ON c.customerid = p2.customerid 78 | WHERE p1.productname = 'iPhone' 79 | AND 80 | p2.productname = 'Airpods' 81 | AND 82 | p1.purchasedate < p2.purchasedate 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | /* 97 | -- Your task 98 | Find out what is the % of chance is there that the 99 | customer who bought MacBook will buy an Airpods 100 | */ 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | /* 114 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 115 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 116 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 117 | */ 118 | 119 | -------------------------------------------------------------------------------- /10.sql: -------------------------------------------------------------------------------- 1 | -- Day 10/50 SQL Challenge 2 | 3 | 4 | 5 | -- Create Employee table 6 | DROP TABLE IF EXISTS employees; 7 | 8 | CREATE TABLE employees ( 9 | EmployeeID INT PRIMARY KEY, 10 | FirstName VARCHAR(50), 11 | LastName VARCHAR(50), 12 | Department VARCHAR(50), 13 | Salary NUMERIC(10, 2) 14 | ); 15 | 16 | -- Insert sample records into Employee table 17 | INSERT INTO employees (EmployeeID, FirstName, LastName, Department, Salary) VALUES 18 | (1, 'John', 'Doe', 'Finance', 75000.00), 19 | (2, 'Jane', 'Smith', 'HR', 60000.00), 20 | (3, 'Michael', 'Johnson', 'IT', 45000.00), 21 | (4, 'Emily', 'Brown', 'Marketing', 55000.00), 22 | (5, 'David', 'Williams', 'Finance', 80000.00), 23 | (6, 'Sarah', 'Jones', 'HR', 48000.00), 24 
| (7, 'Chris', 'Taylor', 'IT', 72000.00), 25 | (8, 'Jessica', 'Wilson', 'Marketing', 49000.00); 26 | 27 | 28 | /* 29 | 30 | Write a SQL query to classify employees into three categories based on their salary: 31 | 32 | "High" - Salary greater than $70,000 33 | "Medium" - Salary between $50,000 and $70,000 (inclusive) 34 | "Low" - Salary less than $50,000 35 | 36 | Your query should return the EmployeeID, FirstName, LastName, Department, Salary, and 37 | a new column SalaryCategory indicating the category to which each employee belongs. 38 | 39 | */ 40 | 41 | SELECT *, 42 | CASE 43 | WHEN salary > 70000 THEN 'High' 44 | WHEN salary BETWEEN 50000 AND 70000 THEN 'Medium' 45 | ELSE 'Low' 46 | END as salary_category 47 | FROM Employees; 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | -- Your Task is to find out count of 82 | -- employee for each salary category? 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | SELECT *, 124 | CASE 125 | WHEN salary > 70000 THEN 'High' 126 | WHEN salary BETWEEN 50000 AND 70000 THEN 'Medium' 127 | ELSE 'Low' 128 | END as salary_category 129 | FROM employees; 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | -------------------------------------------------------------------------------- /11.sql: -------------------------------------------------------------------------------- 1 | -- Day 11/50 2 | 3 | 4 | DROP TABLE IF EXISTS orders; 5 | DROP TABLE IF EXISTS returns; 6 | 7 | 8 | -- Create the orders table 9 | CREATE TABLE orders ( 10 | order_id VARCHAR(10), 11 | customer_id VARCHAR(10), 12 | order_date DATE, 13 | product_id VARCHAR(10), 14 | quantity INT 15 | ); 16 | 17 | -- Create the returns table 18 | CREATE TABLE returns ( 19 | return_id 
VARCHAR(10), 20 | order_id VARCHAR(10) 21 | ); 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | -- Insert sample records into the orders table 31 | INSERT INTO orders (order_id, customer_id, order_date, product_id, quantity) 32 | VALUES 33 | ('1001', 'C001', '2023-01-15', 'P001', 4), 34 | ('1002', 'C001', '2023-02-20', 'P002', 3), 35 | ('1003', 'C002', '2023-03-10', 'P003', 8), 36 | ('1004', 'C003', '2023-04-05', 'P004', 2), 37 | ('1005', 'C004', '2023-05-20', 'P005', 3), 38 | ('1006', 'C002', '2023-06-15', 'P001', 6), 39 | ('1007', 'C003', '2023-07-20', 'P002', 1), 40 | ('1008', 'C004', '2023-08-10', 'P003', 2), 41 | ('1009', 'C005', '2023-09-05', 'P002', 3), 42 | ('1010', 'C001', '2023-10-20', 'P002', 1); 43 | 44 | -- Insert sample records into the returns table 45 | INSERT INTO returns (return_id, order_id) 46 | VALUES 47 | ('R001', '1001'), 48 | ('R002', '1002'), 49 | ('R003', '1005'), 50 | ('R004', '1008'), 51 | ('R005', '1007'); 52 | 53 | 54 | 55 | /* 56 | 57 | Identify returning customers based on their order history. 58 | Categorize customers as "Returning" if they have placed more than one return, 59 | and as "New" otherwise. 
60 | 61 | Considering you have two table orders has information about sale 62 | and returns has information about returns 63 | 64 | */ 65 | -- no of return for each cx 66 | -- orders and return 67 | -- CASE cnt > 1 then Returning else new 68 | 69 | 70 | 71 | SELECT * FROM orders; 72 | SELECT * FROM returns; 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | SELECT 90 | o.customer_id, 91 | COUNT(o.order_id) as total_orders, 92 | COUNT(return_id) as total_returns, 93 | CASE 94 | WHEN COUNT(return_id) > 1 THEN 'Returning' 95 | ELSE 'New' 96 | END as customer_category 97 | FROM orders as o 98 | LEFT JOIN returns as r 99 | ON o.order_id = r.order_id 100 | GROUP BY customer_id 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | /* 135 | Task: 136 | Task: 137 | Categorize products based on their quantity sold into three categories: 138 | 139 | "Low Demand": Quantity sold less than or equal to 5. 140 | "Medium Demand": Quantity sold between 6 and 10 (inclusive). 141 | "High Demand": Quantity sold greater than 10. 
142 | Expected Output: 143 | 144 | Product ID 145 | Product Name 146 | Quantity Sold 147 | Demand Category 148 | 149 | 150 | */ 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | /* 167 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 168 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 169 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 170 | */ -------------------------------------------------------------------------------- /12.sql: -------------------------------------------------------------------------------- 1 | -- Day 12/50 Days sql Challenge 2 | 3 | 4 | DROP TABLE IF EXISTS Employees; 5 | -- Create Employees table 6 | CREATE TABLE Employees ( 7 | id INT PRIMARY KEY, 8 | name VARCHAR(255) 9 | ); 10 | 11 | -- Insert sample data into Employees table 12 | INSERT INTO Employees (id, name) VALUES 13 | (1, 'Alice'), 14 | (7, 'Bob'), 15 | (11, 'Meir'), 16 | (90, 'Winston'), 17 | (3, 'Jonathan'); 18 | 19 | 20 | DROP TABLE IF EXISTS EmployeeUNI; 21 | -- Create EmployeeUNI table 22 | CREATE TABLE EmployeeUNI ( 23 | id INT PRIMARY KEY, 24 | unique_id INT 25 | ); 26 | 27 | -- Insert sample data into EmployeeUNI table 28 | INSERT INTO EmployeeUNI (id, unique_id) VALUES 29 | (3, 1), 30 | (11, 2), 31 | (90, 3); 32 | 33 | 34 | 35 | /* 36 | 37 | Write a solution to show the unique ID of each user, 38 | If a user does not have a unique ID replace just show null. 39 | 40 | Return employee name and their unique_id. 41 | 42 | 43 | 44 | Table: Employees 45 | 46 | +---------------+---------+ 47 | | Column Name | Type | 48 | +---------------+---------+ 49 | | id | int | 50 | | name | varchar | 51 | +---------------+---------+ 52 | id is the primary key (column with unique values) for this table. 53 | Each row of this table contains the id and the name of an employee in a company. 
54 | 55 | 56 | Table: EmployeeUNI 57 | 58 | +---------------+---------+ 59 | | Column Name | Type | 60 | +---------------+---------+ 61 | | id | int | 62 | | unique_id | int | 63 | +---------------+---------+ 64 | (id, unique_id) is the primary key (combination of columns with unique values) for this table. 65 | Each row of this table contains the id and the corresponding unique id of an employee in the company. 66 | 67 | */ 68 | 69 | SELECT * FROM Employees; 70 | SELECT * FROM EmployeeUNI; 71 | 72 | 73 | SELECT 74 | e.name, 75 | eu.unique_id 76 | FROM employees as e 77 | LEFT JOIN 78 | employeeuni as eu 79 | ON eu.id = e.id 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | -- Your task to replace null values to 0 for the employee who doesn't have unique id 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | /* 97 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 98 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 99 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 100 | */ 101 | 102 | 103 | -------------------------------------------------------------------------------- /13.sql: -------------------------------------------------------------------------------- 1 | -- SQL Challenge Day 13/50 2 | 3 | DROP TABLE IF EXISTS employees; 4 | CREATE TABLE employees ( 5 | emp_id INT PRIMARY KEY, 6 | name VARCHAR(100), 7 | manager_id INT, 8 | FOREIGN KEY (manager_id) REFERENCES employees(emp_id) 9 | ); 10 | 11 | INSERT INTO employees (emp_id, name, manager_id) VALUES 12 | (1, 'John Doe', NULL), -- John Doe is not a manager 13 | (2, 'Jane Smith', 1), -- Jane Smith's manager is John Doe 14 | (3, 'Alice Johnson', 1), -- Alice Johnson's manager is John Doe 15 | (4, 'Bob Brown', 3), -- Bob Brown's manager is Alice Johnson 16 | (5, 'Emily White', NULL), -- Emily White is not a manager 17 | (6, 'Michael Lee', 3), -- Michael Lee's manager is Alice Johnson 18 | (7, 'David Clark', NULL), -- David Clark is not a manager 19 | (8, 'Sarah Davis', 2), -- 
Sarah Davis's manager is Jane Smith
(9, 'Kevin Wilson', 2),     -- Kevin Wilson's manager is Jane Smith
(10, 'Laura Martinez', 4);  -- Laura Martinez's manager is Bob Brown

/*
You have a table named employees containing information about employees,
including their emp_id, name, and manager_id.
The manager_id refers to the emp_id of the employee's manager.

write a SQL query to retrieve all employees'
details along with their manager's names based on the manager ID

*/

SELECT * FROM employees;

-- -----------
-- My Solution
-- -----------

-- Fixed: the original CROSS JOIN + WHERE acted as an implicit INNER join,
-- which silently dropped every employee whose manager_id is NULL
-- (John Doe, Emily White, David Clark) even though the question asks for
-- ALL employees. A LEFT self-join keeps them, with a NULL manager_name.
SELECT
    e1.emp_id,
    e1.name,
    e1.manager_id,
    e2.name as manager_name
FROM employees as e1
LEFT JOIN
employees as e2
ON e1.manager_id = e2.emp_id;

-- Your Task
-- Write a SQL query to find the names of all employees who are also managers.
-- In other words, retrieve the names of employees who appear as managers in the manager_id column.
79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | /* 88 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 89 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 90 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 91 | */ 92 | 93 | -------------------------------------------------------------------------------- /14.sql: -------------------------------------------------------------------------------- 1 | -- SQL Challenge Day 14/50 2 | 3 | DROP TABLE IF EXISTS customers; 4 | CREATE TABLE customers ( 5 | customer_id INT PRIMARY KEY, 6 | customer_name VARCHAR(100), 7 | customer_email VARCHAR(100) 8 | ); 9 | 10 | DROP TABLE IF EXISTS orders; 11 | CREATE TABLE orders ( 12 | order_id INT PRIMARY KEY, 13 | customer_id INT, 14 | order_date DATE, 15 | order_amount DECIMAL(10, 2), 16 | FOREIGN KEY (customer_id) REFERENCES customers(customer_id) 17 | ); 18 | 19 | INSERT INTO customers (customer_id, customer_name, customer_email) VALUES 20 | (1, 'John Doe', 'john@example.com'), 21 | (2, 'Jane Smith', 'jane@example.com'), 22 | (3, 'Alice Johnson', 'alice@example.com'), 23 | (4, 'Bob Brown', 'bob@example.com'); 24 | 25 | INSERT INTO orders (order_id, customer_id, order_date, order_amount) VALUES 26 | (1, 1, '2024-01-03', 50.00), 27 | (2, 2, '2024-01-05', 75.00), 28 | (3, 1, '2024-01-10', 25.00), 29 | (4, 3, '2024-01-15', 60.00), 30 | (5, 2, '2024-01-20', 50.00), 31 | (6, 1, '2024-02-01', 100.00), 32 | (7, 2, '2024-02-05', 25.00), 33 | (8, 3, '2024-02-10', 90.00), 34 | (9, 1, '2024-02-15', 50.00), 35 | (10, 2, '2024-02-20', 75.00); 36 | 37 | 38 | 39 | /* 40 | 41 | 42 | You are given two tables: orders and customers. 43 | The orders table contains information about orders placed by customers, including the order_id, customer_id, order_date, and order_amount. 44 | 45 | The customers table contains information about customers, 46 | including the customer_id, customer_name, and customer_email. 
47 | 48 | -- Find the top 2 customers who have spent the most money across all their orders. 49 | Return their names, emails, and total amounts spent. 50 | 51 | */ 52 | -- customer_name 53 | -- customer_email 54 | -- total_amt from orders 55 | -- join based cx id form both table 56 | -- order by total amt desc 57 | -- limit 2 58 | 59 | SELECT * FROM customers; 60 | SELECT * FROM orders; 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | SELECT 80 | c.customer_name, 81 | c.customer_email, 82 | SUM(o.order_amount) as total_spent 83 | FROM customers as c 84 | JOIN 85 | orders as o 86 | ON c.customer_id = o.customer_id 87 | GROUP BY c.customer_id, c.customer_name, c.customer_email 88 | ORDER BY total_spent DESC 89 | LIMIT 2 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | -- Your Task 104 | -- Find out customers details who has placed highest orders and total count of orders and total order amount 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | /* 119 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 120 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 121 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 122 | */ 123 | 124 | 125 | -------------------------------------------------------------------------------- /16.sql: -------------------------------------------------------------------------------- 1 | -- Day 06/50 SQL Challenge 2 | 3 | 4 | DROP TABLE IF EXISTS employees; 5 | CREATE TABLE Employees ( 6 | id INT PRIMARY KEY, 7 | name VARCHAR(255), 8 | department VARCHAR(255), 9 | managerId INT 10 | ); 11 | 12 | INSERT INTO Employees (id, name, department, managerId) VALUES 13 | (101, 'John', 'A', NULL), 14 | (102, 'Dan', 'A', 101), 15 | (103, 'James', 'A', 101), 16 | (104, 'Amy', 'A', 101), 17 | (105, 'Anne', 'A', 101), 18 | (106, 'Ron', 'B', 101), 19 | (107, 'Michael', 'C', NULL), 20 | (108, 'Sarah', 'C', 107), 
(109, 'Emily', 'C', 107),
(110, 'Brian', 'C', 107);

/*

Given a table named employees with the following columns:
id, name, department, managerId

Write a SQL query to find the names of
managers who have at least five direct reports.
Return the result table in any order.

Ensure that no employee is their own manager.

The result format should include only the names
of the managers meeting the criteria.
*/

SELECT * FROM employees;

-- find manager name based on manager id
-- count of emp who is reporting to this id
-- having count >= 5

SELECT
    e2.name as manager_name
FROM employees as e1
JOIN
employees as e2
ON e1.managerid = e2.id
   -- Fixed: the question requires that no employee counts as their own
   -- manager, but the original join never excluded self-references.
   AND e1.id <> e2.id
-- Fixed: group on the manager's unique id rather than the reporters'
-- managerid column, so the count is per manager.
GROUP BY e2.id, e2.name
HAVING COUNT(e1.id) >= 5;

-- Your Task is to find out the total employees who doesn't have any managers!
77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | /* 85 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 86 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 87 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 88 | */ 89 | 90 | -------------------------------------------------------------------------------- /17.sql: -------------------------------------------------------------------------------- 1 | -- Day 17/50 2 | 3 | DROP TABLE IF EXISTS customers; 4 | -- Creating the Customers table 5 | CREATE TABLE Customers ( 6 | customer_id INT PRIMARY KEY, 7 | customer_name VARCHAR(50) 8 | ); 9 | 10 | 11 | DROP TABLE IF EXISTS purchases; 12 | -- Creating the Purchases table 13 | CREATE TABLE Purchases ( 14 | purchase_id INT PRIMARY KEY, 15 | customer_id INT, 16 | product_category VARCHAR(50), 17 | FOREIGN KEY (customer_id) REFERENCES Customers(customer_id) 18 | ); 19 | 20 | -- Inserting sample data into Customers table 21 | INSERT INTO Customers (customer_id, customer_name) VALUES 22 | (1, 'Alice'), 23 | (2, 'Bob'), 24 | (3, 'Charlie'), 25 | (4, 'David'), 26 | (5, 'Emma'); 27 | 28 | -- Inserting sample data into Purchases table 29 | INSERT INTO Purchases (purchase_id, customer_id, product_category) VALUES 30 | (101, 1, 'Electronics'), 31 | (102, 1, 'Books'), 32 | (103, 1, 'Clothing'), 33 | (104, 1, 'Electronics'), 34 | (105, 2, 'Clothing'), 35 | (106, 1, 'Beauty'), 36 | (107, 3, 'Electronics'), 37 | (108, 3, 'Books'), 38 | (109, 4, 'Books'), 39 | (110, 4, 'Clothing'), 40 | (111, 4, 'Beauty'), 41 | (112, 5, 'Electronics'), 42 | (113, 5, 'Books'); 43 | 44 | 45 | 46 | /* 47 | 48 | Question: 49 | Write an SQL query to find customers who have made 50 | purchases in all product categories. 

Tables:
Customers: customer_id (INT), customer_name (VARCHAR)

Purchases: purchase_id (INT), customer_id (INT),
product_category (VARCHAR)

Your query should return the customer_id and
customer_name of these customers.

*/

SELECT * FROM customers;
SELECT * FROM purchases;

-- cx_id, cx_name
-- find total distinct category
-- how many distinct category each cx purchase from
-- join both

-- Relational-division pattern: a customer qualifies when the number of
-- distinct categories they bought from equals the total number of
-- distinct categories that exist in purchases.
SELECT
    c.customer_id,
    c.customer_name,  -- Fixed: the comma after customer_name was missing, a syntax error
    COUNT(DISTINCT p.product_category) as categories_purchased
FROM customers as c
JOIN
purchases as p
ON p.customer_id = c.customer_id
GROUP BY c.customer_id, c.customer_name
HAVING COUNT(DISTINCT p.product_category) =
    (SELECT COUNT(DISTINCT product_category) FROM purchases);

/*
Task:
Write an SQL query to identify customers who have not made any purchases
in Electronics categories.
113 | */ 114 | 115 | 116 | -------------------------------------------------------------------------------- /18.sql: -------------------------------------------------------------------------------- 1 | -- Day 18/50 2 | 3 | 4 | -- Creating the hotel_bookings table 5 | CREATE TABLE hotel_bookings ( 6 | booking_id SERIAL PRIMARY KEY, 7 | booking_date DATE, 8 | hotel_name VARCHAR(100), 9 | total_guests INT, 10 | total_nights INT, 11 | total_price DECIMAL(10, 2) 12 | ); 13 | 14 | -- Inserting sample data for hotel bookings for 2023 and 2022 15 | INSERT INTO hotel_bookings (booking_date, hotel_name, total_guests, total_nights, total_price) VALUES 16 | ('2023-01-05', 'Hotel A', 2, 3, 300.00), 17 | ('2023-02-10', 'Hotel B', 3, 5, 600.00), 18 | ('2023-03-15', 'Hotel A', 4, 2, 400.00), 19 | ('2023-04-20', 'Hotel B', 2, 4, 500.00), 20 | ('2023-05-25', 'Hotel A', 3, 3, 450.00), 21 | ('2023-06-30', 'Hotel B', 5, 2, 350.00), 22 | ('2023-07-05', 'Hotel A', 2, 5, 550.00), 23 | ('2023-08-10', 'Hotel B', 3, 3, 450.00), 24 | ('2023-09-15', 'Hotel A', 4, 4, 500.00), 25 | ('2023-10-20', 'Hotel B', 2, 3, 300.00), 26 | ('2023-11-25', 'Hotel A', 3, 2, 350.00), 27 | ('2023-12-30', 'Hotel B', 5, 4, 600.00), 28 | ('2022-01-05', 'Hotel A', 2, 3, 300.00), 29 | ('2022-02-10', 'Hotel B', 3, 5, 600.00), 30 | ('2022-03-15', 'Hotel A', 4, 2, 400.00), 31 | ('2022-04-20', 'Hotel B', 2, 4, 500.00), 32 | ('2022-05-25', 'Hotel A', 3, 3, 450.00), 33 | ('2022-06-30', 'Hotel B', 5, 2, 350.00), 34 | ('2022-07-05', 'Hotel A', 2, 5, 550.00), 35 | ('2022-08-10', 'Hotel B', 3, 3, 450.00), 36 | ('2022-09-15', 'Hotel A', 4, 4, 500.00), 37 | ('2022-10-20', 'Hotel B', 2, 3, 300.00), 38 | ('2022-11-25', 'Hotel A', 3, 2, 350.00), 39 | ('2022-12-30', 'Hotel B', 5, 4, 600.00); 40 | 41 | 42 | /* 43 | -- Write a SQL query to find out each hotal best 44 | performing months based on revenue 45 | */ 46 | -- hotel_name, revenue for each month -- group by 47 | -- window function ranking 48 | 49 | 50 | 51 | 52 | SELECT * FROM 
hotel_bookings; 53 | 54 | SELECT 55 | * 56 | FROM ( 57 | SELECT 58 | year, 59 | month, 60 | hotel_name, 61 | revenue, 62 | RANK() OVER(PARTITION BY year, hotel_name ORDER BY revenue DESC) as rn 63 | FROM 64 | ( 65 | SELECT 66 | EXTRACT(YEAR FROM booking_date) as year, 67 | EXTRACT(MONTH FROM booking_date) as month, 68 | hotel_name, 69 | SUM(total_price) as revenue 70 | FROM hotel_bookings 71 | GROUP BY 1, 2, 3 72 | ORDER BY year ASC, revenue DESC 73 | ) as monthly_revenue 74 | ) as subquery 75 | WHERE rn = 1 76 | 77 | 78 | -------------------------------------------------------------------------------- /19.sql: -------------------------------------------------------------------------------- 1 | -- Day 19/50 SQL Challenge 2 | 3 | DROP TABLE IF EXISTS employees; 4 | -- Creating the employees table 5 | CREATE TABLE employees ( 6 | employee_id SERIAL PRIMARY KEY, 7 | employee_name VARCHAR(100), 8 | department VARCHAR(50), 9 | salary DECIMAL(10, 2) 10 | ); 11 | 12 | -- Inserting sample data for employees 13 | INSERT INTO employees (employee_name, department, salary) 14 | VALUES 15 | ('John Doe', 'HR', 50000.00), 16 | ('Jane Smith', 'HR', 55000.00), 17 | ('Michael Johnson', 'HR', 60000.00), 18 | ('Emily Davis', 'IT', 60000.00), 19 | ('David Brown', 'IT', 65000.00), 20 | ('Sarah Wilson', 'Finance', 70000.00), 21 | ('Robert Taylor', 'Finance', 75000.00), 22 | ('Jennifer Martinez', 'Finance', 80000.00); 23 | 24 | /* 25 | -- Q. 26 | You have a table with below COLUMNS 27 | emp_id employee_name, department, salary 28 | 29 | 30 | Find the details of employees whose salary is greater 31 | than the average salary across the entire company. 
32 | */ 33 | 34 | SELECT * FROM employees; 35 | 36 | -- Find avg salary - 64375 37 | -- select * from employees use where salary > Find avg salary 38 | -- 39 | 40 | 41 | SELECT * FROM employees 42 | WHERE salary > (SELECT AVG(salary) from employees) ; 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | /* 61 | -- Your Task: 62 | Question: 63 | Find the average salary of employees in each department, 64 | along with the total number of employees in that department. 65 | */ 66 | 67 | 68 | -------------------------------------------------------------------------------- /20.sql: -------------------------------------------------------------------------------- 1 | -- Day 20/50 Days SQL Challenge 2 | 3 | 4 | DROP TABLE IF EXISTS products; 5 | CREATE TABLE products ( 6 | product_id INT, 7 | product_name VARCHAR(100), 8 | supplier_name VARCHAR(50) 9 | ); 10 | 11 | INSERT INTO products (product_id, product_name, supplier_name) VALUES 12 | (1, 'Product 1', 'Supplier A'), 13 | (1, 'Product 1', 'Supplier B'), 14 | (3, 'Product 3', 'Supplier A'), 15 | (3, 'Product 3', 'Supplier A'), 16 | (5, 'Product 5', 'Supplier A'), 17 | (5, 'Product 5', 'Supplier B'), 18 | (7, 'Product 7', 'Supplier C'), 19 | (8, 'Product 8', 'Supplier A'), 20 | (7, 'Product 7', 'Supplier B'), 21 | (7, 'Product 7', 'Supplier A'), 22 | (9, 'Product 9', 'Supplier B'), 23 | (9, 'Product 9', 'Supplier C'), 24 | (10, 'Product 10', 'Supplier C'), 25 | (11, 'Product 11', 'Supplier C'), 26 | (10, 'Product 10', 'Supplier A') 27 | 28 | ; 29 | 30 | /* 31 | -- Write a query to find products that are sold by 32 | both Supplier A and Supplier B, 33 | excluding products sold by only one supplier. 
34 | 35 | */ 36 | 37 | -- product_id, product_name 38 | -- sold by supplier a and B where 39 | 40 | 41 | 42 | 43 | SELECT 44 | product_id, 45 | product_name, 46 | COUNT(supplier_name) as cnt_sellers 47 | FROM products 48 | WHERE supplier_name IN ('Supplier A', 'Supplier B') 49 | GROUP BY product_id, product_name 50 | HAVING COUNT(DISTINCT supplier_name) = 2 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | -- Your Task 65 | -- Find the product that are selling by Supplier C and Supplier B but not Supplier A 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | /* 77 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 78 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 79 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 80 | */ 81 | 82 | 83 | 84 | 85 | -------------------------------------------------------------------------------- /21.sql: -------------------------------------------------------------------------------- 1 | -- Day 21/50 Days SQL Challenge 2 | 3 | 4 | DROP TABLE IF EXISTS products; 5 | -- Creating the products table 6 | CREATE TABLE products ( 7 | product_id INT PRIMARY KEY, 8 | product_name VARCHAR(100), 9 | price DECIMAL(10, 2), 10 | quantity_sold INT 11 | ); 12 | 13 | -- Inserting sample data for products 14 | INSERT INTO products (product_id, product_name, price, quantity_sold) VALUES 15 | (1, 'iPhone', 899.00, 600), 16 | (2, 'iMac', 1299.00, 150), 17 | (3, 'MacBook Pro', 1499.00, 500), 18 | (4, 'AirPods', 499.00, 800), 19 | (5, 'Accessories', 199.00, 300); 20 | 21 | 22 | 23 | /* 24 | -- Question 25 | You have a table called products with below columns 26 | product_id, product_name, price, qty 27 | 28 | Calculate the percentage contribution of each product 29 | to total revenue? 
30 | 31 | Round the result into 2 decimal 32 | */ 33 | 34 | 35 | SELECT * FROM products; 36 | 37 | -- total revenue 38 | -- sales by each product 39 | -- sales by product/total revenue * 100 40 | 41 | 42 | -- total revenue 43 | SELECT SUM(price * quantity_sold) from products; 44 | 45 | 46 | SELECT 47 | product_id, 48 | product_name, 49 | price * quantity_sold as revenue_by_product, 50 | ROUND(price * quantity_sold/(SELECT SUM(price * quantity_sold) from products) * 100, 2) as contribution 51 | FROM products 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | /* 69 | -- Your Task 70 | 71 | Find what is the contribution of MacBook Pro and iPhone 72 | Round the result in two DECIMAL 73 | 74 | */ 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | /* 85 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 86 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 87 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 88 | */ 89 | 90 | -------------------------------------------------------------------------------- /22.sql: -------------------------------------------------------------------------------- 1 | -- Day 22/50 SQL Challenge 2 | 3 | DROP TABLE IF EXISTS delivery; 4 | -- Create the Delivery table 5 | CREATE TABLE Delivery ( 6 | delivery_id SERIAL PRIMARY KEY, 7 | customer_id INT, 8 | order_date DATE, 9 | customer_pref_delivery_date DATE 10 | ); 11 | 12 | -- Insert data into the Delivery table 13 | INSERT INTO Delivery (customer_id, order_date, customer_pref_delivery_date) VALUES 14 | (1, '2019-08-01', '2019-08-02'), 15 | (2, '2019-08-02', '2019-08-02'), 16 | (1, '2019-08-11', '2019-08-12'), 17 | (3, '2019-08-24', '2019-08-24'), 18 | (3, '2019-08-21', '2019-08-22'), 19 | (2, '2019-08-11', '2019-08-13'), 20 | (4, '2019-08-09', '2019-08-09'), 21 | (5, '2019-08-09', '2019-08-10'), 22 | (4, '2019-08-10', '2019-08-12'), 23 | (6, '2019-08-09', '2019-08-11'), 24 | (7, '2019-08-12', '2019-08-13'), 
25 | (8, '2019-08-13', '2019-08-13'), 26 | (9, '2019-08-11', '2019-08-12'); 27 | 28 | 29 | 30 | /* 31 | -- Question 32 | 33 | You have dataset of a food delivery company 34 | with columns order_id, customer_id, order_date, 35 | pref_delivery_date 36 | 37 | 38 | If the customer's preferred delivery date is 39 | the same as the order date, then the order is 40 | called immediate; otherwise, it is called scheduled. 41 | 42 | 43 | Write a solution to find the percentage of immediate 44 | orders in the first orders of all customers, 45 | rounded to 2 decimal places. 46 | 47 | */ 48 | -- find first orders of each cx 49 | -- total cnt of first orders 50 | -- case immediate or scheduled 51 | -- total immediate orders / total cnt of first orders * 100 52 | -- round 2 53 | 54 | SELECT * FROM Delivery; 55 | 56 | SELECT 57 | ROUND( 58 | SUM( 59 | CASE 60 | WHEN 61 | order_date = cpdd THEN 1 62 | ELSE 0 63 | END::numeric 64 | )/COUNT(*)::numeric * 100, 2) as imd_del_percentage 65 | FROM 66 | ( 67 | SELECT 68 | DISTINCT ON(customer_id) 69 | customer_id, 70 | order_date, 71 | customer_pref_delivery_date as cpdd 72 | FROM Delivery 73 | ORDER BY customer_id, order_date 74 | ) as first_orders -- FIX: PostgreSQL requires an alias on a derived table; without one this query errors 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | /* 95 | -- Your task 96 | Your Challenge: 97 | Write an SQL query to determine the percentage 98 | of orders where customers select next day delivery. 99 | We're excited to see your solution! 100 | 101 | -- Next Day Delivery is Order Date + 1 102 | 103 | */ 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | /* 112 | -- Your Task: 113 | Question: 114 | Find the average salary of employees in each department, 115 | along with the total number of employees in that department.
116 | */ 117 | 118 | -------------------------------------------------------------------------------- /23.sql: -------------------------------------------------------------------------------- 1 | -- Day 23/50 SQL Challenge 2 | 3 | DROP TABLE IF EXISTS amazon_transactions; 4 | CREATE TABLE amazon_transactions ( 5 | id SERIAL PRIMARY KEY, 6 | user_id INT, 7 | item VARCHAR(255), 8 | purchase_date DATE, 9 | revenue NUMERIC 10 | ); 11 | 12 | INSERT INTO amazon_transactions (user_id, item, purchase_date, revenue) VALUES 13 | (109, 'milk', '2020-03-03', 123), 14 | (139, 'biscuit', '2020-03-18', 421), 15 | (120, 'milk', '2020-03-18', 176), 16 | (108, 'banana', '2020-03-18', 862), 17 | (130, 'milk', '2020-03-28', 333), 18 | (103, 'bread', '2020-03-29', 862), 19 | (122, 'banana', '2020-03-07', 952), 20 | (125, 'bread', '2020-03-13', 317), 21 | (139, 'bread', '2020-03-30', 929), 22 | (141, 'banana', '2020-03-17', 812), 23 | (116, 'bread', '2020-03-31', 226), 24 | (128, 'bread', '2020-03-04', 112), 25 | (146, 'biscuit', '2020-03-04', 362), 26 | (119, 'banana', '2020-03-28', 127), 27 | (142, 'bread', '2020-03-09', 503), 28 | (122, 'bread', '2020-03-06', 593), 29 | (128, 'biscuit', '2020-03-24', 160), 30 | (112, 'banana', '2020-03-24', 262), 31 | (149, 'banana', '2020-03-29', 382), 32 | (100, 'banana', '2020-03-18', 599), 33 | (130, 'milk', '2020-03-16', 604), 34 | (103, 'milk', '2020-03-31', 290), 35 | (112, 'banana', '2020-03-23', 523), 36 | (102, 'bread', '2020-03-25', 325), 37 | (120, 'biscuit', '2020-03-21', 858), 38 | (109, 'bread', '2020-03-22', 432), 39 | (101, 'milk', '2020-03-01', 449), 40 | (138, 'milk', '2020-03-19', 961), 41 | (100, 'milk', '2020-03-29', 410), 42 | (129, 'milk', '2020-03-02', 771), 43 | (123, 'milk', '2020-03-31', 434), 44 | (104, 'biscuit', '2020-03-31', 957), 45 | (110, 'bread', '2020-03-13', 210), 46 | (143, 'bread', '2020-03-27', 870), 47 | (130, 'milk', '2020-03-12', 176), 48 | (128, 'milk', '2020-03-28', 498), 49 | (133, 'banana', 
'2020-03-21', 837), 50 | (150, 'banana', '2020-03-20', 927), 51 | (120, 'milk', '2020-03-27', 793), 52 | (109, 'bread', '2020-03-02', 362), 53 | (110, 'bread', '2020-03-13', 262), 54 | (140, 'milk', '2020-03-09', 468), 55 | (112, 'banana', '2020-03-04', 381), 56 | (117, 'biscuit', '2020-03-19', 831), 57 | (137, 'banana', '2020-03-23', 490), 58 | (130, 'bread', '2020-03-09', 149), 59 | (133, 'bread', '2020-03-08', 658), 60 | (143, 'milk', '2020-03-11', 317), 61 | (111, 'biscuit', '2020-03-23', 204), 62 | (150, 'banana', '2020-03-04', 299), 63 | (131, 'bread', '2020-03-10', 155), 64 | (140, 'biscuit', '2020-03-17', 810), 65 | (147, 'banana', '2020-03-22', 702), 66 | (119, 'biscuit', '2020-03-15', 355), 67 | (116, 'milk', '2020-03-12', 468), 68 | (141, 'milk', '2020-03-14', 254), 69 | (143, 'bread', '2020-03-16', 647), 70 | (105, 'bread', '2020-03-21', 562), 71 | (149, 'biscuit', '2020-03-11', 827), 72 | (117, 'banana', '2020-03-22', 249), 73 | (150, 'banana', '2020-03-21', 450), 74 | (134, 'bread', '2020-03-08', 981), 75 | (133, 'banana', '2020-03-26', 353), 76 | (127, 'milk', '2020-03-27', 300), 77 | (101, 'milk', '2020-03-26', 740), 78 | (137, 'biscuit', '2020-03-12', 473), 79 | (113, 'biscuit', '2020-03-21', 278), 80 | (141, 'bread', '2020-03-21', 118), 81 | (112, 'biscuit', '2020-03-14', 334), 82 | (118, 'milk', '2020-03-30', 603), 83 | (111, 'milk', '2020-03-19', 205), 84 | (146, 'biscuit', '2020-03-13', 599), 85 | (148, 'banana', '2020-03-14', 530), 86 | (100, 'banana', '2020-03-13', 175), 87 | (105, 'banana', '2020-03-05', 815), 88 | (129, 'milk', '2020-03-02', 489), 89 | (121, 'milk', '2020-03-16', 476), 90 | (117, 'bread', '2020-03-11', 270), 91 | (133, 'milk', '2020-03-12', 446), 92 | (124, 'bread', '2020-03-31', 937), 93 | (145, 'bread', '2020-03-07', 821), 94 | (105, 'banana', '2020-03-09', 972), 95 | (131, 'milk', '2020-03-09', 808), 96 | (114, 'biscuit', '2020-03-31', 202), 97 | (120, 'milk', '2020-03-06', 898), 98 | (130, 'milk', '2020-03-06', 581), 99 
| (141, 'biscuit', '2020-03-11', 749), 100 | (147, 'bread', '2020-03-14', 262), 101 | (118, 'milk', '2020-03-15', 735), 102 | (136, 'biscuit', '2020-03-22', 410), 103 | (132, 'bread', '2020-03-06', 161), 104 | (137, 'biscuit', '2020-03-31', 427), 105 | (107, 'bread', '2020-03-01', 701), 106 | (111, 'biscuit', '2020-03-18', 218), 107 | (100, 'bread', '2020-03-07', 410), 108 | (106, 'milk', '2020-03-21', 379), 109 | (114, 'banana', '2020-03-25', 705), 110 | (110, 'bread', '2020-03-27', 225), 111 | (130, 'milk', '2020-03-16', 494), 112 | (117, 'bread', '2020-03-10', 209); 113 | 114 | 115 | /* 116 | 117 | -- Amazon Data Analyst Interview Question 118 | 119 | Write a query that'll identify returning active users. 120 | 121 | A returning active user is a user that has made a 122 | second purchase within 7 days of their first purchase 123 | 124 | Output a list of user_ids of these returning active users. 125 | 126 | */ 127 | -- find out first purchase 128 | -- second purchase >= 7 129 | -- join both table 130 | -- DISTINCT user 131 | 132 | 133 | 134 | 135 | SELECT * FROM amazon_transactions 136 | 137 | 138 | SELECT 139 | DISTINCT fp.user_id as active_users 140 | -- FIX: the left side must be each user's FIRST purchase only; self-joining on any 141 | -- earlier purchase also flags users whose actual first-to-second gap exceeds 7 days 142 | FROM (SELECT user_id, MIN(purchase_date) as first_purchase 143 | FROM amazon_transactions GROUP BY user_id) fp -- first purchase per user 144 | JOIN amazon_transactions a2 -- second purchase table 145 | ON fp.user_id = a2.user_id 146 | AND fp.first_purchase < a2.purchase_date 147 | AND a2.purchase_date - fp.first_purchase <= 7 -- date - date yields whole days in Postgres 148 | ORDER BY 1 149 | 150 | -- 19/04 - 15/04 = 4 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | /* 168 | -- Your TASK 169 | Find the user_id who has not purchased anything for 7 days 170 | after first purchase but they have done second purchase after 7 days 171 | */ 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | /* 186 | -- Your Task: 187 | Question:
188 | Find the average salary of employees in each department, 189 | along with the total number of employees in that department. 190 | */ -------------------------------------------------------------------------------- /24.sql: -------------------------------------------------------------------------------- 1 | -- Day 24/50 Days 2 | 3 | -- Customer Revenue In March 4 | 5 | DROP TABLE IF EXISTS orders; 6 | 7 | CREATE TABLE orders ( 8 | id INT, 9 | cust_id INT, 10 | order_date DATE, 11 | order_details VARCHAR(50), 12 | total_order_cost INT 13 | ); 14 | 15 | INSERT INTO orders (id, cust_id, order_date, order_details, total_order_cost) VALUES 16 | (1, 7, '2019-03-04', 'Coat', 100), 17 | (2, 7, '2019-03-01', 'Shoes', 80), 18 | (3, 3, '2019-03-07', 'Skirt', 30), 19 | (4, 7, '2019-02-01', 'Coat', 25), 20 | (5, 7, '2019-03-10', 'Shoes', 80), 21 | (6, 1, '2019-02-01', 'Boats', 100), 22 | (7, 2, '2019-01-11', 'Shirts', 60), 23 | (8, 1, '2019-03-11', 'Slipper', 20), 24 | (9, 15, '2019-03-01', 'Jeans', 80), 25 | (10, 15, '2019-03-09', 'Shirts', 50), 26 | (11, 5, '2019-02-01', 'Shoes', 80), 27 | (12, 12, '2019-01-11', 'Shirts', 60), 28 | (13, 1, '2019-03-11', 'Slipper', 20), 29 | (14, 4, '2019-02-01', 'Shoes', 80), 30 | (15, 4, '2019-01-11', 'Shirts', 60), 31 | (16, 3, '2019-04-19', 'Shirts', 50), 32 | (17, 7, '2019-04-19', 'Suit', 150), 33 | (18, 15, '2019-04-19', 'Skirt', 30), 34 | (19, 15, '2019-04-20', 'Dresses', 200), 35 | (20, 12, '2019-01-11', 'Coat', 125), 36 | (21, 7, '2019-04-01', 'Suit', 50), 37 | (22, 3, '2019-04-02', 'Skirt', 30), 38 | (23, 4, '2019-04-03', 'Dresses', 50), 39 | (24, 2, '2019-04-04', 'Coat', 25), 40 | (25, 7, '2019-04-19', 'Coat', 125); 41 | 42 | 43 | 44 | /* 45 | -- Calculate the total revenue from 46 | each customer in March 2019. 47 | 48 | Include only customers who 49 | were active in March 2019. 50 | 51 | Output the revenue along with the 52 | customer id and sort the results based 53 | on the revenue in descending order. 
54 | */ 55 | -- cx_id and their revenue SUM(total order cost) 56 | -- filter march 2019 57 | 58 | 59 | SELECT 60 | cust_id, 61 | SUM(total_order_cost) total_revenue 62 | FROM orders 63 | WHERE order_date BETWEEN '2019-03-01' 64 | AND '2019-03-31' 65 | GROUP BY cust_id 66 | ORDER BY total_revenue DESC; 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | /* 100 | -- Your Task 101 | Find the customers who purchased from both 102 | March and April of 2019 and their total revenue 103 | */ 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | /* 116 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 117 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 118 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 119 | */ -------------------------------------------------------------------------------- /25 v1.sql: -------------------------------------------------------------------------------- 1 | -- Day 26/50 days sql challenge 2 | 3 | DROP TABLE customers; 4 | -- Creating the customers table 5 | CREATE TABLE customers ( 6 | id INT PRIMARY KEY, 7 | first_name VARCHAR(50), 8 | last_name VARCHAR(50), 9 | city VARCHAR(50), 10 | address VARCHAR(100), 11 | phone_number VARCHAR(20) 12 | ); 13 | 14 | -- Inserting sample data into the customers table 15 | INSERT INTO customers (id, first_name, last_name, city, address, phone_number) VALUES 16 | (8, 'John', 'Joseph', 'San Francisco', NULL, '928868164'), 17 | (7, 'Jill', 'Michael', 'Austin', NULL, '8130567692'), 18 | (4, 'William', 'Daniel', 'Denver', NULL, '813155200'), 19 | (5, 'Henry', 'Jackson', 'Miami', NULL, '8084557513'), 20 | (13, 'Emma', 'Isaac', 'Miami', NULL, '808690201'), 21 | (14, 'Liam', 'Samuel', 'Miami', NULL, '808555201'), 22 | (15, 'Mia', 'Owen', 'Miami', NULL, '806405201'), 23 | (1, 'Mark', 'Thomas', 'Arizona', '4476 Parkway Drive', 
'602325916'), 24 | (12, 'Eva', 'Lucas', 'Arizona', '4379 Skips Lane', '3019509805'), 25 | (6, 'Jack', 'Aiden', 'Arizona', '4833 Coplin Avenue', '480230527'), 26 | (2, 'Mona', 'Adrian', 'Los Angeles', '1958 Peck Court', '714939432'), 27 | (10, 'Lili', 'Oliver', 'Los Angeles', '3832 Euclid Avenue', '5306951180'), 28 | (3, 'Farida', 'Joseph', 'San Francisco', '3153 Rhapsody Street', '8133681200'), 29 | (9, 'Justin', 'Alexander', 'Denver', '4470 McKinley Avenue', '9704337589'), 30 | (11, 'Frank', 'Jacob', 'Miami', '1299 Randall Drive', '8085905201'); 31 | 32 | 33 | 34 | 35 | 36 | -- Creating the orders table 37 | CREATE TABLE orders ( 38 | id INT PRIMARY KEY, 39 | cust_id INT, 40 | order_date DATE, 41 | order_details VARCHAR(100), 42 | total_order_cost INT 43 | ); 44 | 45 | -- Inserting sample data into the orders table 46 | INSERT INTO orders (id, cust_id, order_date, order_details, total_order_cost) VALUES 47 | (1, 3, '2019-03-04', 'Coat', 100), 48 | (2, 3, '2019-03-01', 'Shoes', 80), 49 | (3, 3, '2019-03-07', 'Skirt', 30), 50 | (4, 7, '2019-02-01', 'Coat', 25), 51 | (5, 7, '2019-03-10', 'Shoes', 80), 52 | (6, 15, '2019-02-01', 'Boats', 100), 53 | (7, 15, '2019-01-11', 'Shirts', 60), 54 | (8, 15, '2019-03-11', 'Slipper', 20), 55 | (9, 15, '2019-03-01', 'Jeans', 80), 56 | (10, 15, '2019-03-09', 'Shirts', 50), 57 | (11, 5, '2019-02-01', 'Shoes', 80), 58 | (12, 12, '2019-01-11', 'Shirts', 60), 59 | (13, 12, '2019-03-11', 'Slipper', 20), 60 | (14, 4, '2019-02-01', 'Shoes', 80), 61 | (15, 4, '2019-01-11', 'Shirts', 60), 62 | (16, 3, '2019-04-19', 'Shirts', 50), 63 | (17, 7, '2019-04-19', 'Suit', 150), 64 | (18, 15, '2019-04-19', 'Skirt', 30), 65 | (19, 15, '2019-04-20', 'Dresses', 200), 66 | (20, 12, '2019-01-11', 'Coat', 125), 67 | (21, 7, '2019-04-01', 'Suit', 50), 68 | (22, 7, '2019-04-02', 'Skirt', 30), 69 | (23, 7, '2019-04-03', 'Dresses', 50), 70 | (24, 7, '2019-04-04', 'Coat', 25), 71 | (25, 7, '2019-04-19', 'Coat', 125); 72 | 73 | 74 | 75 | 76 | 77 | /* 78 | 79 | 
You have given two tables customers with columns (id, name phone 80 | address) and orders table columns(order_id, cxid order_date and cost) 81 | 82 | Find the percentage of shipable orders. 83 | Consider an order is shipable if the customer's address is known. 84 | */ 85 | 86 | -- 10 5/10*100 87 | -- find total orders 88 | -- total shipable orders where address is not NULL 89 | -- shipable orders/total orders * 100 90 | 91 | 92 | 93 | 94 | SELECT * FROM customers; 95 | SELECT * FROM orders; 96 | 97 | SELECT COUNT(*) FROM orders; 98 | SELECT 99 | ROUND( SUM( 100 | CASE 101 | WHEN c.address IS NOT NULL THEN 1 102 | ELSE 0 103 | END 104 | )::numeric/ COUNT(*)::numeric * 100,2) as shipable_per_orders 105 | FROM orders as o 106 | LEFT JOIN 107 | customers as c 108 | ON o.cust_id = c.id 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | /* 136 | 137 | -- Your Task 138 | Find out the percentage of orders where customer 139 | doesn't have valid phone numbers 140 | 141 | -- Note 142 | The Length of valid phone no is 10 character 143 | */ 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | -------------------------------------------------------------------------------- /26.sql: -------------------------------------------------------------------------------- 1 | -- SQL Challenge 26/50 2 | 3 | 4 | CREATE TABLE employees ( 5 | employee_id INT PRIMARY KEY, 6 | employee_name VARCHAR(100), 7 | department VARCHAR(100), 8 | salary DECIMAL(10, 2), 9 | manager_id INT 10 | ); 11 | 12 | INSERT INTO employees (employee_id, employee_name, department, salary, manager_id) 13 | VALUES 14 | (1, 'John Doe', 'HR', 50000.00, NULL), 15 | (2, 'Jane Smith', 'HR', 55000.00, 1), 16 | (3, 'Michael Johnson', 'HR', 60000.00, 1), 17 | (4, 'Emily Davis', 'IT', 60000.00, NULL), 18 | (5, 'David Brown', 'IT', 65000.00, 4), 19 | (6, 'Sarah 
Wilson', 'Finance', 70000.00, NULL), 20 | (7, 'Robert Taylor', 'Finance', 75000.00, 6), 21 | (8, 'Jennifer Martinez', 'Finance', 80000.00, 6); 22 | 23 | 24 | 25 | /* 26 | -- Question 27 | You have a employees table with columns emp_id, emp_name, 28 | department, salary, manager_id (manager is also emp in the table)) 29 | 30 | Identify employees who have a higher salary than their manager. 31 | */ 32 | 33 | 34 | 35 | SELECT 36 | e.employee_id, 37 | e.employee_name, 38 | e.department, 39 | e.salary, 40 | e.manager_id, 41 | m.employee_name as manager_name, 42 | m.salary as manager_salary 43 | from employees as e 44 | JOIN 45 | employees as m 46 | ON e.manager_id = m.employee_id 47 | WHERE e.salary > m.salary 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | -- Your task 83 | -- Find all the employee who has salary greater than average salary? 84 | 85 | 86 | 87 | 88 | 89 | /* 90 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 91 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 92 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 93 | */ -------------------------------------------------------------------------------- /27.sql: -------------------------------------------------------------------------------- 1 | -- Day 27/50 SQL challenge 2 | 3 | 4 | DROP TABLE IF EXISTS walmart_eu; 5 | -- Create the online_retail table 6 | CREATE TABLE walmart_eu ( 7 | invoiceno VARCHAR(255), 8 | stockcode VARCHAR(255), 9 | description VARCHAR(255), 10 | quantity INT, 11 | invoicedate DATE, 12 | unitprice FLOAT, 13 | customerid FLOAT, 14 | country VARCHAR(255) 15 | ); 16 | 17 | -- Insert the provided data into the online_retail table 18 | INSERT INTO walmart_eu (invoiceno, stockcode, description, quantity, invoicedate, unitprice, customerid, country) VALUES 19 | ('544586', '21890', 'S/6 WOODEN SKITTLES IN 
COTTON BAG', 3, '2011-02-21', 2.95, 17338, 'United Kingdom'), 20 | ('541104', '84509G', 'SET OF 4 FAIRY CAKE PLACEMATS', 3, '2011-01-13', 3.29, NULL, 'United Kingdom'), 21 | ('560772', '22499', 'WOODEN UNION JACK BUNTING', 3, '2011-07-20', 4.96, NULL, 'United Kingdom'), 22 | ('555150', '22488', 'NATURAL SLATE RECTANGLE CHALKBOARD', 5, '2011-05-31', 3.29, NULL, 'United Kingdom'), 23 | ('570521', '21625', 'VINTAGE UNION JACK APRON', 3, '2011-10-11', 6.95, 12371, 'Switzerland'), 24 | ('547053', '22087', 'PAPER BUNTING WHITE LACE', 40, '2011-03-20', 2.55, 13001, 'United Kingdom'), 25 | ('573360', '22591', 'CARDHOLDER GINGHAM CHRISTMAS TREE', 6, '2011-10-30', 3.25, 15748, 'United Kingdom'), 26 | ('571039', '84536A', 'ENGLISH ROSE NOTEBOOK A7 SIZE', 1, '2011-10-13', 0.42, 16121, 'United Kingdom'), 27 | ('578936', '20723', 'STRAWBERRY CHARLOTTE BAG', 10, '2011-11-27', 0.85, 16923, 'United Kingdom'), 28 | ('559338', '21391', 'FRENCH LAVENDER SCENT HEART', 1, '2011-07-07', 1.63, NULL, 'United Kingdom'), 29 | ('568134', '23171', 'REGENCY TEA PLATE GREEN', 1, '2011-09-23', 3.29, NULL, 'United Kingdom'), 30 | ('552061', '21876', 'POTTERING MUG', 12, '2011-05-06', 1.25, 13001, 'United Kingdom'), 31 | ('543179', '22531', 'MAGIC DRAWING SLATE CIRCUS PARADE', 1, '2011-02-04', 0.42, 12754, 'Japan'), 32 | ('540954', '22381', 'TOY TIDY PINK POLKADOT', 4, '2011-01-12', 2.1, 14606, 'United Kingdom'), 33 | ('572703', '21818', 'GLITTER HEART DECORATION', 13, '2011-10-25', 0.39, 16110, 'United Kingdom'), 34 | ('578757', '23009', 'I LOVE LONDON BABY GIFT SET', 1, '2011-11-25', 16.95, 12748, 'United Kingdom'), 35 | ('542616', '22505', 'MEMO BOARD COTTAGE DESIGN', 4, '2011-01-30', 4.95, 16816, 'United Kingdom'), 36 | ('554694', '22921', 'HERB MARKER CHIVES', 1, '2011-05-25', 1.63, NULL, 'United Kingdom'), 37 | ('569545', '21906', 'PHARMACIE FIRST AID TIN', 1, '2011-10-04', 13.29, NULL, 'United Kingdom'), 38 | ('549562', '21169', 'YOU''RE CONFUSING ME METAL SIGN', 1, '2011-04-10', 1.69, 
13232, 'United Kingdom'), 39 | ('580610', '21945', 'STRAWBERRIES DESIGN FLANNEL', 1, '2011-12-05', 1.63, NULL, 'United Kingdom'), 40 | ('558066', 'gift_0001_50', 'Dotcomgiftshop Gift Voucher £50.00', 1, '2011-06-24', 41.67, NULL, 'United Kingdom'), 41 | ('538349', '21985', 'PACK OF 12 HEARTS DESIGN TISSUES', 1, '2010-12-10', 0.85, NULL, 'United Kingdom'), 42 | ('537685', '22737', 'RIBBON REEL CHRISTMAS PRESENT', 15, '2010-12-08', 1.65, 18077, 'United Kingdom'), 43 | ('545906', '22614', 'PACK OF 12 SPACEBOY TISSUES', 24, '2011-03-08', 0.29, 15764, 'United Kingdom'), 44 | ('550997', '22629', 'SPACEBOY LUNCH BOX', 12, '2011-04-26', 1.95, 17735, 'United Kingdom'), 45 | ('558763', '22960', 'JAM MAKING SET WITH JARS', 3, '2011-07-03', 4.25, 12841, 'United Kingdom'), 46 | ('562688', '22918', 'HERB MARKER PARSLEY', 12, '2011-08-08', 0.65, 13869, 'United Kingdom'), 47 | ('541424', '84520B', 'PACK 20 ENGLISH ROSE PAPER NAPKINS', 9, '2011-01-17', 1.63, NULL, 'United Kingdom'), 48 | ('581405', '20996', 'JAZZ HEARTS ADDRESS BOOK', 1, '2011-12-08', 0.19, 13521, 'United Kingdom'), 49 | ('571053', '23256', 'CHILDRENS CUTLERY SPACEBOY', 4, '2011-10-13', 4.15, 12631, 'Finland'), 50 | ('563333', '23012', 'GLASS APOTHECARY BOTTLE PERFUME', 1, '2011-08-15', 3.95, 15996, 'United Kingdom'), 51 | ('568054', '47559B', 'TEA TIME OVEN GLOVE', 4, '2011-09-23', 1.25, 16978, 'United Kingdom'), 52 | ('574262', '22561', 'WOODEN SCHOOL COLOURING SET', 12, '2011-11-03', 1.65, 13721, 'United Kingdom'), 53 | ('569360', '23198', 'PANTRY MAGNETIC SHOPPING LIST', 6, '2011-10-03', 1.45, 14653, 'United Kingdom'), 54 | ('570210', '22980', 'PANTRY SCRUBBING BRUSH', 2, '2011-10-09', 1.65, 13259, 'United Kingdom'), 55 | ('576599', '22847', 'BREAD BIN DINER STYLE IVORY', 1, '2011-11-15', 16.95, 14544, 'United Kingdom'), 56 | ('579777', '22356', 'CHARLOTTE BAG PINK POLKADOT', 4, '2011-11-30', 1.63, NULL, 'United Kingdom'), 57 | ('566060', '21106', 'CREAM SLICE FLANNEL CHOCOLATE SPOT', 1, '2011-09-08', 5.79, 
NULL, 'United Kingdom'), 58 | ('550514', '22489', 'PACK OF 12 TRADITIONAL CRAYONS', 24, '2011-04-18', 0.42, 14631, 'United Kingdom'), 59 | ('569898', '23437', '50''S CHRISTMAS GIFT BAG LARGE', 2, '2011-10-06', 2.46, NULL, 'United Kingdom'), 60 | ('563566', '23548', 'WRAP MAGIC FOREST', 25, '2011-08-17', 0.42, 13655, 'United Kingdom'), 61 | ('559693', '21169', 'YOU''RE CONFUSING ME METAL SIGN', 1, '2011-07-11', 4.13, NULL, 'United Kingdom'), 62 | ('573386', '22112', 'CHOCOLATE HOT WATER BOTTLE', 24, '2011-10-30', 4.25, 17183, 'United Kingdom'), 63 | ('576920', '23312', 'VINTAGE CHRISTMAS GIFT SACK', 4, '2011-11-17', 4.15, 13871, 'United Kingdom'), 64 | ('564473', '22384', 'LUNCH BAG PINK POLKADOT', 10, '2011-08-25', 1.65, 16722, 'United Kingdom'), 65 | ('562264', '23321', 'SMALL WHITE HEART OF WICKER', 3, '2011-08-03', 3.29, NULL, 'United Kingdom'), 66 | ('542541', '79030D', 'TUMBLER, BAROQUE', 1, '2011-01-28', 12.46, NULL, 'United Kingdom'), 67 | ('579937', '22090', 'PAPER BUNTING RETROSPOT', 12, '2011-12-01', 2.95, 13509, 'United Kingdom'), 68 | ('574076', '22483', 'RED GINGHAM TEDDY BEAR', 1, '2011-11-02', 5.79, NULL, 'United Kingdom'), 69 | ('579187', '20665', 'RED RETROSPOT PURSE', 1, '2011-11-28', 5.79, NULL, 'United Kingdom'), 70 | ('542922', '22423', 'REGENCY CAKESTAND 3 TIER', 3, '2011-02-02', 12.75, 12682, 'France'), 71 | ('570677', '23008', 'DOLLY GIRL BABY GIFT SET', 2, '2011-10-11', 16.95, 12836, 'United Kingdom'), 72 | ('577182', '21930', 'JUMBO STORAGE BAG SKULLS', 10, '2011-11-18', 2.08, 16945, 'United Kingdom'), 73 | ('576686', '20992', 'JAZZ HEARTS PURSE NOTEBOOK', 1, '2011-11-16', 0.39, 16916, 'United Kingdom'), 74 | ('553844', '22569', 'FELTCRAFT CUSHION BUTTERFLY', 4, '2011-05-19', 3.75, 13450, 'United Kingdom'), 75 | ('580689', '23150', 'IVORY SWEETHEART SOAP DISH', 6, '2011-12-05', 2.49, 12994, 'United Kingdom'), 76 | ('545000', '85206A', 'CREAM FELT EASTER EGG BASKET', 6, '2011-02-25', 1.65, 15281, 'United Kingdom'), 77 | ('541975', '22382', 
'LUNCH BAG SPACEBOY DESIGN', 40, '2011-01-24', 1.65, NULL, 'Hong Kong'), 78 | ('544942', '22551', 'PLASTERS IN TIN SPACEBOY', 12, '2011-02-25', 1.65, 15544, 'United Kingdom'), 79 | ('543177', '22667', 'RECIPE BOX RETROSPOT', 6, '2011-02-04', 2.95, 14466, 'United Kingdom'), 80 | ('574587', '23356', 'LOVE HOT WATER BOTTLE', 4, '2011-11-06', 5.95, 14936, 'Channel Islands'), 81 | ('543451', '22774', 'RED DRAWER KNOB ACRYLIC EDWARDIAN', 1, '2011-02-08', 2.46, NULL, 'United Kingdom'), 82 | ('578270', '22579', 'WOODEN TREE CHRISTMAS SCANDINAVIAN', 1, '2011-11-23', 1.63, 14096, 'United Kingdom'), 83 | ('551413', '84970L', 'SINGLE HEART ZINC T-LIGHT HOLDER', 12, '2011-04-28', 0.95, 16227, 'United Kingdom'), 84 | ('567666', '22900', 'SET 2 TEA TOWELS I LOVE LONDON', 6, '2011-09-21', 3.25, 12520, 'Germany'), 85 | ('571544', '22810', 'SET OF 6 T-LIGHTS SNOWMEN', 2, '2011-10-17', 2.95, 17757, 'United Kingdom'), 86 | ('558368', '23249', 'VINTAGE RED ENAMEL TRIM PLATE', 12, '2011-06-28', 1.65, 14329, 'United Kingdom'), 87 | ('546430', '22284', 'HEN HOUSE DECORATION', 2, '2011-03-13', 1.65, 15918, 'United Kingdom'), 88 | ('565233', '23000', 'TRAVEL CARD WALLET TRANSPORT', 1, '2011-09-02', 0.83, NULL, 'United Kingdom'), 89 | ('559984', '16012', 'FOOD/DRINK SPONGE STICKERS', 50, '2011-07-14', 0.21, 16657, 'United Kingdom'), 90 | ('576920', '23312', 'VINTAGE CHRISTMAS GIFT SACK', -4, '2011-11-17', 4.15, 13871, 'United Kingdom'), 91 | ('564473', '22384', 'LUNCH BAG PINK POLKADOT', 10, '2011-08-25', 1.65, 16722, 'United Kingdom'), 92 | ('562264', '23321', 'SMALL WHITE HEART OF WICKER', 3, '2011-08-03', 3.29, NULL, 'United Kingdom'), 93 | ('542541', '79030D', 'TUMBLER, BAROQUE', 1, '2011-01-28', 12.46, NULL, 'United Kingdom'), 94 | ('579937', '22090', 'PAPER BUNTING RETROSPOT', 12, '2011-12-01', 2.95, 13509, 'United Kingdom'), 95 | ('574076', '22483', 'RED GINGHAM TEDDY BEAR', 1, '2011-11-02', 5.79, NULL, 'United Kingdom'), 96 | ('579187', '20665', 'RED RETROSPOT PURSE', 1, 
'2011-11-28', 5.79, NULL, 'United Kingdom'), 97 | ('542922', '22423', 'REGENCY CAKESTAND 3 TIER', 3, '2011-02-02', 12.75, 12682, 'France'), 98 | ('570677', '23008', 'DOLLY GIRL BABY GIFT SET', 2, '2011-10-11', 16.95, 12836, 'United Kingdom'), 99 | ('577182', '21930', 'JUMBO STORAGE BAG SKULLS', 10, '2011-11-18', 2.08, 16945, 'United Kingdom'), 100 | ('576686', '20992', 'JAZZ HEARTS PURSE NOTEBOOK', 1, '2011-11-16', 0.39, 16916, 'United Kingdom'), 101 | ('553844', '22569', 'FELTCRAFT CUSHION BUTTERFLY', 4, '2011-05-19', 3.75, 13450, 'United Kingdom'), 102 | ('580689', '23150', 'IVORY SWEETHEART SOAP DISH', 6, '2011-12-05', 2.49, 12994, 'United Kingdom'), 103 | ('545000', '85206A', 'CREAM FELT EASTER EGG BASKET', 6, '2011-02-25', 1.65, 15281, 'United Kingdom'), 104 | ('541975', '22382', 'LUNCH BAG SPACEBOY DESIGN', 40, '2011-01-24', 1.65, NULL, 'Hong Kong'), 105 | ('544942', '22551', 'PLASTERS IN TIN SPACEBOY', 12, '2011-02-25', 1.65, 15544, 'United Kingdom'), 106 | ('543177', '22667', 'RECIPE BOX RETROSPOT', 6, '2011-02-04', 2.95, 14466, 'United Kingdom'), 107 | ('574587', '23356', 'LOVE HOT WATER BOTTLE', 4, '2011-11-06', 5.95, 14936, 'Channel Islands'), 108 | ('543451', '22774', 'RED DRAWER KNOB ACRYLIC EDWARDIAN', 1, '2011-02-08', 2.46, NULL, 'United Kingdom'), 109 | ('578270', '22579', 'WOODEN TREE CHRISTMAS SCANDINAVIAN', 1, '2011-11-23', 1.63, 14096, 'United Kingdom'), 110 | ('551413', '84970L', 'SINGLE HEART ZINC T-LIGHT HOLDER', 12, '2011-04-28', 0.95, 16227, 'United Kingdom'), 111 | ('567666', '22900', 'SET 2 TEA TOWELS I LOVE LONDON', 6, '2011-09-21', 3.25, 12520, 'Germany'), 112 | ('571544', '22810', 'SET OF 6 T-LIGHTS SNOWMEN', 2, '2011-10-17', 2.95, 17757, 'United Kingdom'), 113 | ('558368', '23249', 'VINTAGE RED ENAMEL TRIM PLATE', 12, '2011-06-28', 1.65, 14329, 'United Kingdom'), 114 | ('546430', '22284', 'HEN HOUSE DECORATION', 2, '2011-03-13', 1.65, 15918, 'United Kingdom'), 115 | ('565233', '23000', 'TRAVEL CARD WALLET TRANSPORT', 1, '2011-09-02', 
0.83, NULL, 'United Kingdom'), 116 | ('559984', '16012', 'FOOD/DRINK SPONGE STICKERS', 50, '2011-07-14', 0.21, 16657, 'United Kingdom'); 117 | 118 | 119 | 120 | /* 121 | Find the best selling item for each month 122 | (no need to separate months by year) 123 | where the biggest total invoice was paid. 124 | 125 | The best selling item is calculated using the formula 126 | (unitprice * quantity). 127 | Output the month, the description of the 128 | item along with the amount paid. 129 | */ 130 | -- month invoice data 131 | -- group by product desc 132 | -- revenue price * qty 133 | -- rank 134 | -- subquery 135 | 136 | 137 | SELECT 138 | month, 139 | description, 140 | total_sale 141 | FROM 142 | ( 143 | SELECT 144 | EXTRACT(MONTH FROM invoicedate) as month, 145 | description, 146 | SUM(unitprice * quantity) as total_sale, 147 | RANK() OVER( PARTITION BY EXTRACT(MONTH FROM invoicedate) 148 | ORDER BY SUM(unitprice * quantity) DESC) as rn 149 | FROM walmart_eu 150 | GROUP BY month, description 151 | ) as subquery 152 | WHERE rn= 1 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | -- Your Task 193 | -- Find Customer of the month from each MONTH one customer who has spent the highest amount (price * quantity) as total amount may include multiple purchase 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | /* 204 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 205 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 206 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 207 | */ 208 | -------------------------------------------------------------------------------- /28.sql: -------------------------------------------------------------------------------- 1 | -- day 28/50 days SQL Challenge 2 | 3 | 4 | 5 | 6 | 7 | 8 | SELECT * 
FROM walmart_eu; 9 | 10 | /* 11 | --Question 12 | Write a query to find the highest-selling 13 | product for each customer 14 | 15 | Return cx id, product description, 16 | and total count of purchase. 17 | 18 | */ 19 | -- cx all product they purchased and their total orders 20 | -- order by number of purchase desc 21 | -- 1 product that has highest purchase 22 | -- rank 23 | 24 | 25 | SELECT * 26 | FROM 27 | ( 28 | SELECT 29 | customerid, 30 | description, 31 | COUNT(*) as total_purchase, 32 | RANK() OVER(PARTITION BY customerid 33 | ORDER BY COUNT(*) DESC) as rn 34 | FROM walmart_eu 35 | GROUP BY customerid, description 36 | ORDER BY customerid, total_purchase DESC 37 | ) as subquery -- PostgreSQL requires an alias on a derived table in FROM; without it this query errors out 38 | WHERE rn = 1 -- keep only each customer's top-ranked (most purchased) product 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | /* 68 | -- Your Task 69 | Find each country and best selling product 70 | Return country_name, description, total count of sale 71 | */ -------------------------------------------------------------------------------- /29.sql: -------------------------------------------------------------------------------- 1 | -- 29/50 days SQL challenge 2 | 3 | 4 | CREATE TABLE bookings 5 | ( 6 | id INT, 7 | hotel_name VARCHAR(15), 8 | booking_date date, 9 | cust_id INT, 10 | adult INT, 11 | payment_type VARCHAR(10) 12 | ); 13 | 14 | -- inserting records 15 | 16 | INSERT INTO bookings (id, hotel_name, booking_date, cust_id, adult, payment_type) VALUES 17 | (1, 'Hotel A', '2022-05-06', 1001, 2, 'Credit'), 18 | (2, 'Hotel B', '2022-05-06', 1002, 1, 'Cash'), 19 | (3, 'Hotel C', '2022-05-07', 1003, 3, 'Credit'), 20 | (4, 'Hotel D', '2022-05-07', 1004, 2, 'Cash'), 21 | (5, 'Hotel E', '2022-05-05', 1005, 1, 'Credit'), 22 | (6, 'Hotel A', '2022-05-07', 1006, 2, 'Cash'), 23 | (7, 'Hotel B', '2022-05-06', 1007, 3, 'Credit'), 24 | (8, 'Hotel C', '2022-05-08', 1008, 1, 'Cash'), 25 | (9, 'Hotel D', '2022-05-09', 1009, 2, 'Credit'), 26 | (10, 'Hotel E',
'2022-05-10', 1010, 3, 'Cash'), 27 | (11, 'Hotel A', '2022-05-14', 1011, 1, 'Credit'), 28 | (12, 'Hotel B', '2022-05-21', 1012, 2, 'Cash'), 29 | (13, 'Hotel C', '2022-05-13', 1013, 3, 'Credit'), 30 | (14, 'Hotel D', '2022-05-14', 1014, 1, 'Cash'), 31 | (15, 'Hotel E', '2022-05-15', 1015, 2, 'Credit'), 32 | (16, 'Hotel A', '2022-05-21', 1016, 3, 'Cash'), 33 | (17, 'Hotel B', '2022-05-17', 1017, 1, 'Credit'), 34 | (18, 'Hotel C', '2022-05-18', 1018, 2, 'Cash'), 35 | (19, 'Hotel D', '2022-05-19', 1019, 3, 'Credit'), 36 | (20, 'Hotel E', '2022-05-20', 1020, 1, 'Cash'), 37 | (21, 'Hotel A', '2022-05-28', 1021, 2, 'Credit'), 38 | (22, 'Hotel B', '2022-05-22', 1022, 3, 'Cash'), 39 | (23, 'Hotel C', '2022-05-23', 1023, 1, 'Credit'), 40 | (24, 'Hotel D', '2022-05-24', 1024, 2, 'Cash'), 41 | (25, 'Hotel E', '2022-05-25', 1025, 3, 'Credit'), 42 | (26, 'Hotel A', '2022-06-04', 1026, 1, 'Cash'), 43 | (27, 'Hotel B', '2022-06-04', 1027, 2, 'Credit'), 44 | (28, 'Hotel C', '2022-05-28', 1028, 3, 'Cash'), 45 | (29, 'Hotel D', '2022-05-29', 1029, 1, 'Credit'), 46 | (30, 'Hotel E', '2022-06-25', 1030, 2, 'Cash'), 47 | (31, 'Hotel A', '2022-06-18', 1031, 3, 'Credit'), 48 | (32, 'Hotel B', '2022-06-02', 1032, 1, 'Cash'), 49 | (33, 'Hotel C', '2022-06-03', 1033, 2, 'Credit'), 50 | (34, 'Hotel D', '2022-06-04', 1034, 3, 'Cash'), 51 | (35, 'Hotel E', '2022-06-05', 1035, 1, 'Credit'), 52 | (36, 'Hotel A', '2022-07-09', 1036, 2, 'Cash'), 53 | (37, 'Hotel B', '2022-06-06', 1037, 3, 'Credit'), 54 | (38, 'Hotel C', '2022-06-08', 1038, 1, 'Cash'), 55 | (39, 'Hotel D', '2022-06-09', 1039, 2, 'Credit'), 56 | (40, 'Hotel E', '2022-06-10', 1040, 3, 'Cash'), 57 | (41, 'Hotel A', '2022-07-23', 1041, 1, 'Credit'), 58 | (42, 'Hotel B', '2022-06-12', 1042, 2, 'Cash'), 59 | (43, 'Hotel C', '2022-06-13', 1043, 3, 'Credit'), 60 | (44, 'Hotel D', '2022-06-14', 1044, 1, 'Cash'), 61 | (45, 'Hotel E', '2022-06-15', 1045, 2, 'Credit'), 62 | (46, 'Hotel A', '2022-06-24', 1046, 3, 'Cash'), 63 | (47, 'Hotel B', 
'2022-06-24', 1047, 1, 'Credit'), 64 | (48, 'Hotel C', '2022-06-18', 1048, 2, 'Cash'), 65 | (49, 'Hotel D', '2022-06-19', 1049, 3, 'Credit'), 66 | (50, 'Hotel E', '2022-06-20', 1050, 1, 'Cash'); 67 | 68 | 69 | /* 70 | -- Question 71 | Find the hotel name and their total numbers 72 | of weekends bookings 73 | sort the data higher number first! 74 | */ 75 | 76 | -- hotel_name, 77 | -- total no of bookings which basically for weekends 78 | -- Group by hotel_name 79 | -- order by total booking 80 | 81 | 82 | SELECT 83 | hotel_name, 84 | SUM(CASE 85 | WHEN EXTRACT(DOW FROM booking_date) IN (0, 6) -- Postgres DOW is 0=Sunday..6=Saturday; (6, 7) never matched Sundays because 7 does not occur 86 | THEN 1 87 | ELSE 0 88 | END) as total_w_bookings 89 | 90 | FROM bookings 91 | GROUP BY hotel_name 92 | ORDER BY total_w_bookings DESC 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | SELECT EXTRACT(DOW FROM current_date); 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | -- Your Task 129 | -- Find out hotel_name and their total number of booking by credit card and cash 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | -------------------------------------------------------------------------------- /30.sql: -------------------------------------------------------------------------------- 1 | -- Day 30/50 days SQL challenge 2 | 3 | -- Creating table 4 | DROP TABLE IF EXISTS orders; 5 | CREATE TABLE orders ( 6 | order_id INT PRIMARY KEY, 7 | order_date DATE, 8 | quantity INT 9 | ); 10 | 11 | 12 | INSERT INTO orders 13 | (order_id, order_date, quantity) 14 | VALUES 15 | (1, '2023-01-02', 5), 16 | (2, '2023-02-05', 3), 17 | (3, '2023-02-07', 2), 18 | (4, '2023-03-10', 6), 19 | (5, '2023-02-15', 4), 20 | (6, '2023-04-21', 8), 21 | (7, '2023-05-28', 7), 22 | (8, '2023-05-05', 3), 23 | (9, '2023-08-10', 5), 24 | (10, '2023-05-02', 6), 25 | (11, '2023-02-07', 4), 26 | (12, '2023-04-15', 9), 27 | (13, '2023-03-22', 7), 28 | (14,
'2023-04-30', 8), 29 | (15, '2023-04-05', 6), 30 | (16, '2023-02-02', 6), 31 | (17, '2023-01-07', 4), 32 | (18, '2023-05-15', 9), 33 | (19, '2023-05-22', 7), 34 | (20, '2023-06-30', 8), 35 | (21, '2023-07-05', 6); 36 | 37 | 38 | /* 39 | -- Question 40 | You have amazon orders data 41 | 42 | For each week, find the total number 43 | of orders. 44 | Include only the orders that are 45 | from the first quarter of 2023. 46 | 47 | The output should contain 'week' 48 | and 'quantity'. 49 | 50 | */ 51 | -- week no from order date 52 | -- SUM(qty) 53 | -- where order 1st quarter 2023 54 | -- group by week 55 | 56 | SELECT 57 | EXTRACT(WEEK FROM order_date) as week, 58 | -- WEEK(order_date) as week, 59 | SUM(quantity) as total_qty_sold 60 | FROM orders 61 | WHERE 62 | EXTRACT(YEAR FROM order_date) = 2023 63 | AND 64 | EXTRACT(QUARTER FROM order_date) = 1 65 | GROUP BY week; 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | -- Your Task 88 | -- Find each quarter and their total qty sale 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | /* 98 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 99 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 100 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 101 | */ 102 | 103 | -------------------------------------------------------------------------------- /31.sql: -------------------------------------------------------------------------------- 1 | -- day 31/50 SQL challenge 2 | 3 | 4 | CREATE TABLE sales_data ( 5 | seller_id VARCHAR(10), 6 | total_sales NUMERIC, 7 | product_category VARCHAR(20), 8 | market_place VARCHAR(10), 9 | month DATE 10 | ); 11 | 12 | 13 | 14 | INSERT INTO sales_data (seller_id, total_sales, product_category, market_place, month) 15 | VALUES 16 | ('s236', 36486.73, 'electronics', 'in', DATE '2024-01-01'), 17 | ('s918', 24286.4, 'books', 'uk', DATE '2024-01-01'), 18 | ('s163', 18846.34, 'electronics', 'us', 
DATE '2024-01-01'), 19 | ('s836', 35687.65, 'electronics', 'uk', DATE '2024-01-01'), 20 | ('s790', 31050.13, 'clothing', 'in', DATE '2024-01-01'), 21 | ('s195', 14299, 'books', 'de', DATE '2024-01-01'), 22 | ('s483', 49361.62, 'clothing', 'uk', DATE '2024-01-01'), 23 | ('s891', 48847.68, 'electronics', 'de', DATE '2024-01-01'), 24 | ('s272', 11324.61, 'toys', 'us', DATE '2024-01-01'), 25 | ('s712', 43739.86, 'toys', 'in', DATE '2024-01-01'), 26 | ('s968', 36042.66, 'electronics', 'jp', DATE '2024-01-01'), 27 | ('s728', 29158.51, 'books', 'us', DATE '2024-01-01'), 28 | ('s415', 24593.5, 'electronics', 'uk', DATE '2024-01-01'), 29 | ('s454', 35520.67, 'toys', 'in', DATE '2024-01-01'), 30 | ('s560', 27320.16, 'electronics', 'jp', DATE '2024-01-01'), 31 | ('s486', 37009.18, 'electronics', 'us', DATE '2024-01-01'), 32 | ('s749', 36277.83, 'toys', 'de', DATE '2024-01-01'), 33 | ('s798', 31162.45, 'electronics', 'in', DATE '2024-01-01'), 34 | ('s515', 26372.16, 'toys', 'in', DATE '2024-01-01'), 35 | ('s662', 22157.87, 'books', 'in', DATE '2024-01-01'), 36 | ('s919', 24963.97, 'toys', 'de', DATE '2024-01-01'), 37 | ('s863', 46652.67, 'electronics', 'us', DATE '2024-01-01'), 38 | ('s375', 18107.08, 'clothing', 'de', DATE '2024-01-01'), 39 | ('s583', 20268.34, 'toys', 'jp', DATE '2024-01-01'), 40 | ('s778', 19962.89, 'electronics', 'in', DATE '2024-01-01'), 41 | ('s694', 36519.05, 'electronics', 'in', DATE '2024-01-01'), 42 | ('s214', 18948.55, 'electronics', 'de', DATE '2024-01-01'), 43 | ('s830', 39169.01, 'toys', 'us', DATE '2024-01-01'), 44 | ('s383', 12310.73, 'books', 'in', DATE '2024-01-01'), 45 | ('s195', 45633.35, 'books', 'de', DATE '2024-01-01'), 46 | ('s196', 13643.27, 'books', 'jp', DATE '2024-01-01'), 47 | ('s796', 19637.44, 'electronics', 'jp', DATE '2024-01-01'), 48 | ('s334', 11999.1, 'clothing', 'de', DATE '2024-01-01'), 49 | ('s217', 23481.03, 'books', 'in', DATE '2024-01-01'), 50 | ('s123', 36277.83, 'toys', 'uk', DATE '2024-01-01'), 51 | ('s383', 
17337.392, 'electronics', 'de', DATE '2024-02-01'), 52 | ('s515', 13998.997, 'electronics', 'jp', DATE '2024-02-01'), 53 | ('s583', 36035.539, 'books', 'jp', DATE '2024-02-01'), 54 | ('s195', 18493.564, 'toys', 'de', DATE '2024-02-01'), 55 | ('s728', 34466.126, 'electronics', 'de', DATE '2024-02-01'), 56 | ('s830', 48950.221, 'electronics', 'us', DATE '2024-02-01'), 57 | ('s483', 16820.965, 'electronics', 'uk', DATE '2024-02-01'), 58 | ('s778', 48625.281, 'toys', 'in', DATE '2024-02-01'), 59 | ('s918', 37369.321, 'clothing', 'de', DATE '2024-02-01'), 60 | ('s123', 46372.816, 'electronics', 'uk', DATE '2024-02-01'), 61 | ('s195', 18317.667, 'electronics', 'in', DATE '2024-02-01'), 62 | ('s798', 41005.313, 'books', 'in', DATE '2024-02-01'), 63 | ('s454', 39090.88, 'electronics', 'de', DATE '2024-02-01'), 64 | ('s454', 17839.314, 'toys', 'us', DATE '2024-02-01'), 65 | ('s798', 31587.685, 'toys', 'in', DATE '2024-02-01'), 66 | ('s778', 21237.38, 'books', 'jp', DATE '2024-02-01'), 67 | ('s236', 10625.456, 'toys', 'jp', DATE '2024-02-01'), 68 | ('s236', 17948.627, 'toys', 'jp', DATE '2024-02-01'), 69 | ('s749', 38453.678, 'toys', 'de', DATE '2024-02-01'), 70 | ('s790', 47052.035, 'toys', 'uk', DATE '2024-02-01'), 71 | ('s272', 34931.925, 'books', 'de', DATE '2024-02-01'), 72 | ('s375', 36753.65, 'toys', 'us', DATE '2024-02-01'), 73 | ('s214', 32449.737, 'toys', 'in', DATE '2024-02-01'), 74 | ('s163', 40431.402, 'electronics', 'in', DATE '2024-02-01'), 75 | ('s214', 30909.313, 'electronics', 'in', DATE '2024-02-01'), 76 | ('s415', 18068.768, 'electronics', 'jp', DATE '2024-02-01'), 77 | ('s836', 46302.659, 'clothing', 'jp', DATE '2024-02-01'), 78 | ('s383', 19151.927, 'electronics', 'uk', DATE '2024-02-01'), 79 | ('s863', 45218.714, 'books', 'us', DATE '2024-02-01'), 80 | ('s830', 18737.617, 'books', 'de', DATE '2024-02-01'), 81 | ('s968', 22973.801, 'toys', 'in', DATE '2024-02-01'), 82 | ('s334', 20885.29, 'electronics', 'uk', DATE '2024-02-01'), 83 | ('s163', 10278.085, 
'electronics', 'de', DATE '2024-02-01'), 84 | ('s272', 29393.199, 'clothing', 'jp', DATE '2024-02-01'), 85 | ('s560', 16731.642, 'electronics', 'jp', DATE '2024-02-01'), 86 | ('s583', 38120.758, 'books', 'uk', DATE '2024-03-01'), 87 | ('s163', 22035.132, 'toys', 'uk', DATE '2024-03-01'), 88 | ('s918', 26441.481, 'clothing', 'jp', DATE '2024-03-01'), 89 | ('s334', 35374.054, 'books', 'in', DATE '2024-03-01'), 90 | ('s796', 32115.724, 'electronics', 'jp', DATE '2024-03-01'), 91 | ('s749', 39128.654, 'toys', 'in', DATE '2024-03-01'), 92 | ('s217', 35341.188, 'electronics', 'us', DATE '2024-03-01'), 93 | ('s334', 16028.702, 'books', 'us', DATE '2024-03-01'), 94 | ('s383', 44334.352, 'toys', 'in', DATE '2024-03-01'), 95 | ('s163', 42380.042, 'books', 'jp', DATE '2024-03-01'), 96 | ('s483', 16974.657, 'clothing', 'in', DATE '2024-03-01'), 97 | ('s236', 37027.605, 'electronics', 'de', DATE '2024-03-01'), 98 | ('s196', 45093.574, 'toys', 'uk', DATE '2024-03-01'), 99 | ('s486', 42688.888, 'books', 'in', DATE '2024-03-01'), 100 | ('s728', 32331.738, 'electronics', 'us', DATE '2024-03-01'), 101 | ('s123', 38014.313, 'electronics', 'us', DATE '2024-03-01'), 102 | ('s662', 45483.457, 'clothing', 'jp', DATE '2024-03-01'), 103 | ('s968', 47425.4, 'books', 'uk', DATE '2024-03-01'), 104 | ('s778', 36540.071, 'books', 'in', DATE '2024-03-01'), 105 | ('s798', 29424.55, 'toys', 'us', DATE '2024-03-01'), 106 | ('s334', 10723.015, 'toys', 'de', DATE '2024-03-01'), 107 | ('s662', 24658.751, 'electronics', 'uk', DATE '2024-03-01'), 108 | ('s163', 36304.516, 'clothing', 'us', DATE '2024-03-01'), 109 | ('s863', 20608.095, 'books', 'de', DATE '2024-03-01'), 110 | ('s214', 27375.775, 'toys', 'de', DATE '2024-03-01'), 111 | ('s334', 33076.155, 'clothing', 'in', DATE '2024-03-01'), 112 | ('s515', 32880.168, 'toys', 'us', DATE '2024-03-01'), 113 | ('s195', 48157.143, 'books', 'uk', DATE '2024-03-01'), 114 | ('s583', 23230.012, 'books', 'uk', DATE '2024-03-01'), 115 | ('s334', 13013.85, 'toys', 
'jp', DATE '2024-03-01'), 116 | ('s375', 20738.994, 'electronics', 'in', DATE '2024-03-01'), 117 | ('s778', 25787.659, 'electronics', 'jp', DATE '2024-03-01'), 118 | ('s796', 36845.741, 'clothing', 'uk', DATE '2024-03-01'), 119 | ('s214', 21811.624, 'electronics', 'de', DATE '2024-03-01'), 120 | ('s334', 15464.853, 'books', 'in', DATE '2024-03-01'); 121 | 122 | 123 | 124 | /* 125 | -- Amazon Data Analyst Interview 126 | -- Top Monthly Sellers 127 | 128 | You are provided with a transactional dataset from 129 | Amazon that contains detailed information about 130 | sales across different products and marketplaces. 131 | 132 | Your task is to list the top 3 sellers in each 133 | product category for January. 134 | 135 | The output should contain 'seller_id' , 136 | 'total_sales' ,'product_category' , 137 | 'market_place', and 'month'. 138 | 139 | */ 140 | 141 | -- seller_id TOTAL sale 142 | -- WHERE Jan 143 | -- Select top 3 seller from each p_c 144 | 145 | 146 | SELECT * FROM sales_data; 147 | 148 | 149 | SELECT 150 | product_category, 151 | seller_id, 152 | sales 153 | FROM 154 | ( SELECT 155 | product_category, 156 | seller_id, 157 | SUM(total_sales) as sales, 158 | DENSE_RANK() OVER(PARTITION BY product_category 159 | ORDER BY SUM(total_sales) DESC) dr 160 | FROM sales_data 161 | WHERE EXTRACT(MONTH FROM month) = 1 162 | GROUP BY product_category, seller_id 163 | ) as subquery 164 | WHERE dr <=3; 165 | 166 | -- ORDER BY product_category, sales DESC 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | -- Your Task 186 | -- Find out Each market place and their top 3 seller based on total sale 187 | 188 | 189 | 190 | 191 | 192 | /* 193 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 194 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 195 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 196 | */ 197 | 198 | 
-------------------------------------------------------------------------------- /32.sql: -------------------------------------------------------------------------------- 1 | -- Day 32/50 days SQL challenge 2 | 3 | 4 | -- Create the user_flags table 5 | CREATE TABLE user_flags ( 6 | user_firstname VARCHAR(50), 7 | user_lastname VARCHAR(50), 8 | video_id VARCHAR(20), 9 | flag_id VARCHAR(20) 10 | ); 11 | 12 | -- Insert the provided records into the user_flags table 13 | INSERT INTO user_flags (user_firstname, user_lastname, video_id, flag_id) VALUES 14 | ('Richard', 'Hasson', 'y6120QOlsfU', '0cazx3'), 15 | ('Mark', 'May', 'Ct6BUPvE2sM', '1cn76u'), 16 | ('Gina', 'Korman', 'dQw4w9WgXcQ', '1i43zk'), 17 | ('Mark', 'May', 'Ct6BUPvE2sM', '1n0vef'), 18 | ('Mark', 'May', 'jNQXAC9IVRw', '1sv6ib'), 19 | ('Gina', 'Korman', 'dQw4w9WgXcQ', '20xekb'), 20 | ('Mark', 'May', '5qap5aO4i9A', '4cvwuv'), 21 | ('Daniel', 'Bell', '5qap5aO4i9A', '4sd6dv'), 22 | ('Richard', 'Hasson', 'y6120QOlsfU', '6jjkvn'), 23 | ('Pauline', 'Wilks', 'jNQXAC9IVRw', '7ks264'), 24 | ('Courtney', '', 'dQw4w9WgXcQ', NULL), 25 | ('Helen', 'Hearn', 'dQw4w9WgXcQ', '8946nx'), 26 | ('Mark', 'Johnson', 'y6120QOlsfU', '8wwg0l'), 27 | ('Richard', 'Hasson', 'dQw4w9WgXcQ', 'arydfd'), 28 | ('Gina', 'Korman', '', NULL), 29 | ('Mark', 'Johnson', 'y6120QOlsfU', 'bl40qw'), 30 | ('Richard', 'Hasson', 'dQw4w9WgXcQ', 'ehn1pt'), 31 | ('Lopez', '', 'dQw4w9WgXcQ', 'hucyzx'), 32 | ('Greg', '', '5qap5aO4i9A', NULL), 33 | ('Pauline', 'Wilks', 'jNQXAC9IVRw', 'i2l3oo'), 34 | ('Richard', 'Hasson', 'jNQXAC9IVRw', 'i6336w'), 35 | ('Johnson', 'y6120QOlsfU', '', 'iey5vi'), 36 | ('William', 'Kwan', 'y6120QOlsfU', 'kktiwe'), 37 | ('', 'Ct6BUPvE2sM', '', NULL), 38 | ('Loretta', 'Crutcher', 'y6120QOlsfU', 'nkjgku'), 39 | ('Pauline', 'Wilks', 'jNQXAC9IVRw', 'ov5gd8'), 40 | ('Mary', 'Thompson', 'Ct6BUPvE2sM', 'qa16ua'), 41 | ('Daniel', 'Bell', '5qap5aO4i9A', 'xciyse'), 42 | ('Evelyn', 'Johnson', 'dQw4w9WgXcQ', 'xvhk6d'); 43 | 44 | /* 45 | Netflix 
Data Analyst Interview Question 46 | 47 | For each video, find how many unique users 48 | flagged it. 49 | A unique user can be identified using the 50 | combination of their first name and last name. 51 | 52 | Do not consider rows in which there is no flag ID. 53 | 54 | */ 55 | -- select video_id 56 | -- COUNT(unique users) 57 | -- DISTINCT first and last name 58 | -- filter the data for not null flagid 59 | -- GROUP BY 60 | 61 | SELECT * FROM user_flags; 62 | 63 | 64 | SELECT 65 | video_id, 66 | COUNT(DISTINCT CONCAT(user_firstname, ' ', user_lastname)) -- separator needed: without it ('Mark','Johnson') and ('MarkJ','ohnson') collapse to one user 67 | as cnt_users 68 | FROM user_flags 69 | WHERE flag_id is not null 70 | GROUP BY video_id 71 | ORDER BY cnt_users DESC 72 | 73 | 74 | 75 | 76 | 77 | /* 78 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 79 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 80 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 81 | */ 82 | 83 | -------------------------------------------------------------------------------- /33.sql: -------------------------------------------------------------------------------- 1 | -- Day 32/50 SQL challenge 2 | 3 | 4 | 5 | -- Create table fb_active_users 6 | CREATE TABLE fb_active_users ( 7 | user_id INT, 8 | name VARCHAR(50), 9 | status VARCHAR(10), 10 | country VARCHAR(50) 11 | ); 12 | 13 | -- Insert records into fb_active_users 14 | INSERT INTO fb_active_users (user_id, name, status, country) VALUES 15 | (33, 'Amanda Leon', 'open', 'Australia'), 16 | (27, 'Jessica Farrell', 'open', 'Luxembourg'), 17 | (18, 'Wanda Ramirez', 'open', 'USA'), 18 | (50, 'Samuel Miller', 'closed', 'Brazil'), 19 | (16, 'Jacob York', 'open', 'Australia'), 20 | (25, 'Natasha Bradford', 'closed', 'USA'), 21 | (34, 'Donald Ross', 'closed', 'China'), 22 | (52, 'Michelle Jimenez', 'open', 'USA'), 23 | (11, 'Theresa John', 'open', 'China'), 24 | (37, 'Michael Turner', 'closed', 'Australia'), 25 | (32, 'Catherine Hurst', 'closed', 'Mali'), 26 | (61, 'Tina Turner', 'open',
'Luxembourg'), 27 | (4, 'Ashley Sparks', 'open', 'China'), 28 | (82, 'Jacob York', 'closed', 'USA'), 29 | (87, 'David Taylor', 'closed', 'USA'), 30 | (78, 'Zachary Anderson', 'open', 'China'), 31 | (5, 'Tiger Leon', 'closed', 'China'), 32 | (56, 'Theresa Weaver', 'closed', 'Brazil'), 33 | (21, 'Tonya Johnson', 'closed', 'Mali'), 34 | (89, 'Kyle Curry', 'closed', 'Mali'), 35 | (7, 'Donald Jim', 'open', 'USA'), 36 | (22, 'Michael Bone', 'open', 'Canada'), 37 | (31, 'Sara Michaels', 'open', 'Denmark'); 38 | 39 | 40 | /* 41 | -- Meta Data Analyst Question 42 | 43 | You have meta table with columns 44 | user_id, name, status, country 45 | 46 | Output share of US users that are active. 47 | Active users are the ones with an 48 | "open" status in the table. 49 | 50 | Return total users and active users 51 | and active users share for US 52 | */ 53 | 54 | -- COUNT FILTER FOR US 55 | -- COUNT ACTIVE users in US 56 | -- active users/total users * 100 57 | 58 | 59 | SELECT * FROM fb_active_users; 60 | 61 | 62 | 63 | SELECT 64 | COUNT(user_id) as total_users, 65 | SUM( 66 | CASE 67 | WHEN status = 'open' THEN 1 68 | ELSE 0 69 | END 70 | ) as active_users, 71 | SUM( 72 | CASE 73 | WHEN status = 'open' THEN 1 74 | ELSE 0 75 | END 76 | )::numeric/COUNT(user_id)::numeric * 100 77 | as share_of_active_users_US 78 | 79 | FROM fb_active_users 80 | WHERE country = 'USA' 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | -- Your Task 97 | -- Find non_active users share for China 98 | 99 | -------------------------------------------------------------------------------- /34.sql: -------------------------------------------------------------------------------- 1 | -- Day 34/50 SQL Challenge 2 | 3 | 4 | -- Create table bank_transactions 5 | CREATE TABLE bank_transactions ( 6 | transaction_id SERIAL PRIMARY KEY, 7 | bank_id INT, 8 | customer_id INT, 9 | transaction_amount DECIMAL(10, 2), 10 | transaction_type VARCHAR(10), 11 | transaction_date DATE 12 | ); 13 | 
14 | -- Insert sample records into bank_transactions 15 | INSERT INTO bank_transactions (bank_id, customer_id, transaction_amount, transaction_type, transaction_date) VALUES 16 | (1, 101, 500.00, 'credit', '2024-01-01'), 17 | (1, 101, 200.00, 'debit', '2024-01-02'), 18 | (1, 101, 300.00, 'credit', '2024-01-05'), 19 | (1, 101, 150.00, 'debit', '2024-01-08'), 20 | (1, 102, 1000.00, 'credit', '2024-01-01'), 21 | (1, 102, 400.00, 'debit', '2024-01-03'), 22 | (1, 102, 600.00, 'credit', '2024-01-05'), 23 | (1, 102, 200.00, 'debit', '2024-01-09'); 24 | 25 | 26 | 27 | 28 | /* 29 | You are given a bank transaction data 30 | with columns bank_id, customer_id, 31 | amount_type(credit debit), 32 | transaction_amount and transaction_date 33 | 34 | 35 | 36 | -- Write a query to find starting and ending 37 | trans amount for each customer 38 | 39 | Return cx_id, their first_transaction_amt, 40 | last_transaction and these transaction_date 41 | 42 | */ 43 | -- 44 | 45 | 46 | SELECT * FROM bank_transactions; 47 | 48 | 49 | 50 | 51 | -- first trans details 52 | -- last trans details 53 | -- then join these 2 trans 54 | 55 | 56 | 57 | 58 | WITH CTE1 59 | AS 60 | ( 61 | SELECT *, 62 | ROW_NUMBER() OVER(PARTITION BY customer_id 63 | ORDER BY transaction_date) as rn 64 | FROM bank_transactions 65 | ), 66 | CTE2 -- first_trans_details 67 | AS 68 | ( 69 | SELECT 70 | customer_id, 71 | transaction_amount, 72 | transaction_date 73 | FROM CTE1 74 | WHERE rn = 1 -- row numbers restart at 1 per customer, so rn = 1 is each customer's first transaction 75 | ), 76 | CTE3 -- last_trans_details 77 | AS 78 | ( 79 | SELECT 80 | customer_id, 81 | transaction_amount, 82 | transaction_date 83 | FROM CTE1 84 | WHERE rn = (SELECT MAX(i.rn) FROM CTE1 i WHERE i.customer_id = CTE1.customer_id) -- per-customer max; a global MAX(rn) only matches customers with the most transactions 85 | ) 86 | 87 | SELECT 88 | CTE2.customer_id, 89 | CTE2.transaction_amount as first_trans_amt, 90 | CTE2.transaction_date as first_trans_date, 91 | CTE3.transaction_amount as last_trans_amt, 92 | CTE3.transaction_date as last_trans_date 93 | FROM CTE2 94 | INNER JOIN 95 | CTE3 96 | ON CTE2.customer_id = CTE3.customer_id 97 | 
98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | -- Your task 151 | -- Write a query to return each cx_id and their bank balance 152 | -- Note bank balance = Total Credit - Total_debit 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | -------------------------------------------------------------------------------- /35.sql: -------------------------------------------------------------------------------- 1 | -- Day 35/50 SQL Challenge 2 | 3 | DROP TABLE IF EXISTS Students; 4 | CREATE TABLE Students ( 5 | student_id INT PRIMARY KEY, 6 | student_name VARCHAR(50), 7 | marks INT, 8 | class VARCHAR(10) 9 | ); 10 | 11 | 12 | INSERT INTO Students (student_id, student_name, marks, class) VALUES 13 | (1, 'John Doe', 85, 'A'), 14 | (2, 'Jane Smith', 92, 'B'), 15 | (3, 'Michael Johnson', 78, 'A'), 16 | (4, 'Emily Brown', 59, 'C'), 17 | (5, 'David Lee', 88, 'B'), 18 | (6, 'Sarah Wilson', 59, 'A'), 19 | (7, 'Daniel Taylor', 90, 'C'), 20 | (8, 'Emma Martinez', 79, 'B'), 21 | (9, 'Christopher Anderson', 87, 'A'), 22 | (10, 'Olivia Garcia', 91, 'C'), 23 | (11, 'James Rodriguez', 83, 'B'), 24 | (12, 'Sophia Hernandez', 94, 'A'), 25 | (13, 'Matthew Martinez', 76, 'C'), 26 | (14, 'Isabella Lopez', 89, 'B'), 27 | (15, 'Ethan Gonzalez', 80, 'A'), 28 | (16, 'Amelia Perez', 93, 'C'), 29 | (17, 'Alexander Torres', 77, 'B'), 30 | (18, 'Mia Flores', 86, 'A'), 31 | (19, 'William Sanchez', 84, 'C'), 32 | (20, 'Ava Ramirez', 97, 'B'), 33 | (21, 'Daniel Taylor', 75, 'A'), 34 | (22, 'Chloe Cruz', 98, 'C'), 35 | (23, 'Benjamin Ortiz', 89, 'B'), 36 | (24, 'Harper 
Reyes', 99, 'A'), 37 | (25, 'Ryan Stewart', 99, 'C'); 38 | 39 | 40 | 41 | /* 42 | Data Analyst Interview Questions 43 | 44 | You have a students table with columns 45 | id, name, marks and class of students 46 | 47 | -- Write a query to fetch students 48 | with minmum marks and maximum marks 49 | 50 | 51 | */ 52 | 53 | 54 | 55 | -- Approach 1 56 | 57 | -- minimum marks 58 | -- maximum marks 59 | 60 | SELECT * FROM students; 61 | 62 | 63 | SELECT MIN(marks) FROM students; -- 59 64 | SELECT MAX(marks) FROM students; -- 99 65 | 66 | 67 | SELECT * FROM students 68 | WHERE 69 | marks = (SELECT MIN(marks) 70 | FROM students) 71 | OR 72 | marks = (SELECT MAX(marks) 73 | FROM students) 74 | 75 | 76 | 77 | 78 | -- Approach 2 79 | 80 | WITH CTE 81 | AS 82 | ( 83 | SELECT 84 | MIN(marks) as min_marks, 85 | MAX(marks) as max_marks 86 | FROM students 87 | ) 88 | SELECT 89 | s.* 90 | FROM students as s 91 | JOIN 92 | CTE ON s.marks = CTE.min_marks 93 | OR 94 | s.marks = CTE.max_marks 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | -- Your Task 107 | -- Write a SQL query to return students with maximum marks in each class 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | -------------------------------------------------------------------------------- /36.sql: -------------------------------------------------------------------------------- 1 | -- SQL Challenge Day 36 2 | 3 | 4 | DROP TABLE IF EXISTS Employees; 5 | CREATE TABLE Employees ( 6 | Emp_No DECIMAL(4,0) NOT NULL, 7 | Emp_Name VARCHAR(10), 8 | Job_Name VARCHAR(9), 9 | Manager_Id DECIMAL(4,0), 10 | HireDate DATE, 11 | Salary DECIMAL(7,2), 12 | Commission DECIMAL(7,2), 13 | DeptNo DECIMAL(2,0) NOT NULL 14 | ); 15 | 16 | INSERT INTO Employees (Emp_No, Emp_Name, Job_Name, Manager_Id, HireDate, Salary, Commission, DeptNo) VALUES 17 | (7839, 'KING', 'PRESIDENT', NULL, '1981-11-17', 5000, NULL, 10), 18 | 
(7698, 'BLAKE', 'MANAGER', 7839, '1981-05-01', 2850, NULL, 30), 19 | (7782, 'CLARK', 'MANAGER', 7839, '1981-06-09', 2450, NULL, 10), 20 | (7566, 'JONES', 'MANAGER', NULL, '1981-04-02', 2975, NULL, 20), 21 | (7788, 'SCOTT', 'ANALYST', 7566, '1987-07-29', 3000, NULL, 20), 22 | (7902, 'FORD', 'ANALYST', 7566, '1981-12-03', 3000, NULL, 20), 23 | (7369, 'SMITH', 'CLERK', 7902, '1980-12-17', 800, NULL, 20), 24 | (7499, 'ALLEN', 'SALESMAN', NULL, '1981-02-20', 1600, 300, 30), 25 | (7521, 'WARD', 'SALESMAN', 7698, '1981-02-22', 1250, 500, 30), 26 | (7654, 'MARTIN', 'SALESMAN', 7698, '1981-09-28', 1250, 1400, 30), 27 | (7844, 'TURNER', 'SALESMAN', 7698, '1981-09-08', 1500, 0, 30), 28 | (7876, 'ADAMS', 'CLERK', NULL, '1987-06-02', 1100, NULL, 20), 29 | (7900, 'JAMES', 'CLERK', 7698, '1981-12-03', 950, NULL, 30), 30 | (7934, 'MILLER', 'CLERK', 7782, '1982-01-23', 1300, NULL, 10); 31 | 32 | 33 | 34 | /* 35 | Question 36 | 37 | Write an SQL script to display the 38 | immediate manager of an employee. 39 | 40 | Given a table Employees with columns: 41 | Emp_No, Emp_Name, and Manager_Id. 42 | 43 | The script should take an input parameter 44 | Emp_No and return the employee's name 45 | along with their immediate manager's name. 46 | 47 | If an employee has no manager 48 | (i.e., Manager_Id is NULL), 49 | display "No Boss" for that employee. 
50 | 51 | */ 52 | 53 | SELECT * FROM employees; 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | SELECT 65 | e1.emp_name as employee_name, 66 | COALESCE(e2.emp_name, 'No Boss') as manager_name 67 | FROM employees as e1 68 | LEFT JOIN 69 | employees as e2 70 | ON e1.manager_id = e2.emp_no 71 | WHERE e1.emp_no = 7499 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | /* 88 | Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/ 89 | Follow me in insta :: https://www.instagram.com/zero_analyst/ 90 | Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst 91 | */ 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | -------------------------------------------------------------------------------- /37.sql: -------------------------------------------------------------------------------- 1 | -- 37/50 SQL challenge 2 | 3 | -- Create customers table 4 | CREATE TABLE customers ( 5 | customer_id INT PRIMARY KEY, 6 | customer_name VARCHAR(100) 7 | ); 8 | 9 | -- Create spending records table 10 | CREATE TABLE spending_records ( 11 | record_id INT PRIMARY KEY, 12 | customer_id INT, 13 | spending_amount DECIMAL(10, 2), 14 | spending_date DATE, 15 | FOREIGN KEY (customer_id) REFERENCES customers(customer_id) 16 | ); 17 | 18 | -- Insert example data into customers table 19 | INSERT INTO customers (customer_id, customer_name) VALUES 20 | (1, 'John'), 21 | (2, 'Alice'), 22 | (3, 'Bob'), 23 | (4, 'Charlie'); 24 | 25 | -- Insert example data into spending records table 26 | INSERT INTO spending_records (record_id, customer_id, spending_amount, spending_date) VALUES 27 | (9, 1, 120.00, '2024-03-25'), 28 | (10, 2, 80.00, '2024-03-25'), 29 | (11, 3, 150.00, '2024-03-25'), 30 | (12, 4, 70.00, '2024-03-25'), 31 | (13, 1, 90.00, '2024-03-02'), 32 | (14, 2, 100.00, '2024-04-02'), 33 | (15, 3, 
160.00, '2024-04-02'), 34 | (16, 4, 30.00, '2024-03-02'), 35 | (17, 1, 110.00, '2024-04-09'), 36 | (18, 2, 70.00, '2024-02-09'), 37 | (19, 3, 140.00, '2024-03-09'), 38 | (20, 4, 60.00, '2024-04-09'), 39 | (21, 1, 100.00, '2024-03-16'), 40 | (22, 2, 60.00, '2024-03-16'), 41 | (23, 3, 130.00, '2024-03-16'), 42 | (24, 4, 50.00, '2024-04-16'), 43 | (25, 1, 80.00, '2024-03-23'), 44 | (26, 2, 50.00, '2024-04-23'), 45 | (27, 3, 120.00, '2024-04-23'), 46 | (28, 4, 40.00, '2024-04-23'), 47 | (29, 1, 70.00, '2024-04-30'), 48 | (30, 2, 40.00, '2024-04-30'), 49 | (31, 3, 110.00, '2024-03-01'), 50 | (32, 4, 30.00, '2024-03-01'); 51 | 52 | /* 53 | 54 | -- Amazon Data Analyst Interview Question 55 | 56 | You are given two table of amazon 57 | spending_records and customers 58 | 59 | Write a SQL query to show all customers 60 | and their total spending show only those 61 | customers whos total spending has reduced 62 | compare to last month () 63 | 64 | Return customer_name, customer_id, 65 | last MONTH spend, current month spent 66 | 67 | -- Note consider last month as March 68 | Current Month as April 69 | */ 70 | 71 | -- each cx spend for march 72 | -- each cx spend for april 73 | -- compare both of these 74 | -- make logic to say lasmonth spend > curr spend 75 | 76 | SELECT * FROM spending_records; 77 | SELECT * FROM customers; 78 | 79 | 80 | WITH CTE1 -- march_spend 81 | AS 82 | ( 83 | SELECT 84 | customer_id, 85 | SUM(spending_amount) as total_spend 86 | FROM spending_records 87 | WHERE EXTRACT(MONTH FROM spending_date) = 3 88 | -- MONTH(spending_date) 89 | GROUP BY 1 90 | ), 91 | CTE2 -- april_spend 92 | AS 93 | (SELECT 94 | customer_id, 95 | SUM(spending_amount) as total_spend 96 | FROM spending_records 97 | WHERE EXTRACT(MONTH FROM spending_date) = 4 98 | -- MONTH(spending_date) 99 | GROUP BY 1) 100 | 101 | SELECT 102 | CTE1.customer_id, 103 | c.customer_name, 104 | CTE1.total_spend as lastmonth_total_spend, 105 | CTE2.total_spend as currentmonth_total_spend 106 | FROM 
CTE1 107 | JOIN 108 | CTE2 109 | ON CTE1.customer_id = CTE2.customer_id 110 | JOIN customers as c 111 | ON CTE1.customer_id = c.customer_id 112 | WHERE CTE1.total_spend > CTE2.total_spend 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | -- WITH CTE1 160 | -- AS 161 | -- ( 162 | -- SELECT 163 | -- customer_id, 164 | -- SUM(spending_amount) as total_sepnd 165 | -- FROM spending_records 166 | -- WHERE EXTRACT(MONTH FROM spending_date) = 3 167 | -- GROUP BY 1 168 | -- ), 169 | -- CTE2 170 | -- AS 171 | -- ( 172 | -- SELECT 173 | -- customer_id, 174 | -- SUM(spending_amount) as total_sepnd 175 | -- FROM spending_records 176 | -- WHERE EXTRACT(MONTH FROM spending_date) = 4 177 | -- GROUP BY 1 178 | -- ) 179 | 180 | -- SELECT 181 | -- CTE1.customer_id, 182 | -- CTE1.total_sepnd, 183 | -- CTE2.total_sepnd 184 | -- FROM CTE1 185 | -- JOIN 186 | -- CTE2 187 | -- ON CTE1.customer_id = CTE2.customer_id 188 | -- WHERE 189 | -- CTE1.total_sepnd > CTE2.total_sepnd 190 | 191 | 192 | 193 | 194 | -------------------------------------------------------------------------------- /38.sql: -------------------------------------------------------------------------------- 1 | -- SQL challenge 39/50 2 | 3 | DROP TABLE IF EXISTS Employees; 4 | 5 | CREATE TABLE Employees ( 6 | Emp_No DECIMAL(4,0) NOT NULL, 7 | Emp_Name VARCHAR(10), 8 | Job_Name VARCHAR(9), 9 | Manager_Id DECIMAL(4,0), 10 | HireDate DATE, 11 | Salary DECIMAL(7,2), 12 | Commission DECIMAL(7,2), 13 | Department VARCHAR(20) -- Changed from DeptNo to Department 14 | ); 15 | 16 | INSERT INTO Employees (Emp_No, Emp_Name, Job_Name, Manager_Id, HireDate, Salary, Commission, Department) VALUES 17 | (7839, 'KING', 'PRESIDENT', NULL, '1981-11-17', 5000, NULL, 'IT'), 18 | (7698, 'BLAKE', 'MANAGER', 7839, 
'1981-05-01', 2850, NULL, 'HR'), 19 | (7782, 'CLARK', 'MANAGER', 7839, '1981-06-09', 2450, NULL, 'Marketing'), 20 | (7566, 'JONES', 'MANAGER', 7839, '1981-04-02', 2975, NULL, 'Operations'), 21 | (7788, 'SCOTT', 'ANALYST', 7566, '1987-07-29', 3000, NULL, 'Operations'), 22 | (7902, 'FORD', 'ANALYST', 7566, '1981-12-03', 3000, NULL, 'Operations'), 23 | (7369, 'SMITH', 'CLERK', 7902, '1980-12-17', 800, NULL, 'Operations'), 24 | (7499, 'ALLEN', 'SALESMAN', 7698, '1981-02-20', 1600, 300, 'HR'), 25 | (7521, 'WARD', 'SALESMAN', 7698, '1981-02-22', 1250, 500, 'HR'), 26 | (7654, 'MARTIN', 'SALESMAN', 7698, '1981-09-28', 1250, 1400, 'HR'), 27 | (7844, 'TURNER', 'SALESMAN', 7698, '1981-09-08', 1500, 0, 'HR'), 28 | (7876, 'ADAMS', 'CLERK', 7788, '1987-06-02', 1100, NULL, 'Operations'), 29 | (7900, 'JAMES', 'CLERK', 7698, '1981-12-03', 950, NULL, 'HR'), 30 | (7934, 'MILLER', 'CLERK', 7782, '1982-01-23', 1300, NULL, 'Marketing'), 31 | (7905, 'BROWN', 'SALESMAN', 7698, '1981-11-12', 1250, 1400, 'HR'), 32 | (7906, 'DAVIS', 'ANALYST', 7566, '1987-07-13', 3000, NULL, 'Operations'), 33 | (7907, 'GARCIA', 'MANAGER', 7839, '1981-08-12', 2975, NULL, 'IT'), 34 | (7908, 'HARRIS', 'SALESMAN', 7698, '1981-06-21', 1600, 300, 'HR'), 35 | (7909, 'JACKSON', 'CLERK', 7902, '1981-11-17', 800, NULL, 'Operations'), 36 | (7910, 'JOHNSON', 'MANAGER', 7839, '1981-04-02', 2850, NULL, 'Marketing'), 37 | (7911, 'LEE', 'ANALYST', 7566, '1981-09-28', 1250, 1400, 'Operations'), 38 | (7912, 'MARTINEZ', 'CLERK', 7902, '1981-12-03', 1250, NULL, 'Operations'), 39 | (7913, 'MILLER', 'MANAGER', 7839, '1981-01-23', 2450, NULL, 'HR'), 40 | (7914, 'RODRIGUEZ', 'SALESMAN', 7698, '1981-12-03', 1500, 0, 'Marketing'), 41 | (7915, 'SMITH', 'CLERK', 7902, '1980-12-17', 1100, NULL, 'IT'), 42 | (7916, 'TAYLOR', 'CLERK', 7902, '1981-02-20', 950, NULL, 'Marketing'), 43 | (7917, 'THOMAS', 'SALESMAN', 7698, '1981-02-22', 1250, 500, 'Operations'), 44 | (7918, 'WHITE', 'ANALYST', 7566, '1981-09-28', 1300, NULL, 'IT'), 45 | (7919, 
'WILLIAMS', 'MANAGER', 7839, '1981-11-17', 5000, NULL, 'Marketing'),
(7920, 'WILSON', 'SALESMAN', 7698, '1981-05-01', 2850, NULL, 'HR'),
(7921, 'YOUNG', 'CLERK', 7902, '1981-06-09', 2450, NULL, 'Operations'),
(7922, 'ADAMS', 'ANALYST', 7566, '1987-07-13', 3000, NULL, 'HR'),
(7923, 'BROWN', 'MANAGER', 7839, '1981-08-12', 2975, NULL, 'Marketing'),
(7924, 'DAVIS', 'SALESMAN', 7698, '1981-06-21', 1600, 300, 'Operations');


/*
Most Asked Data Analyst Interview Questions

Write an SQL query to retrieve employee details
from each department who have a salary greater
than the average salary in their department.
*/

-- Correlated subquery: the inner AVG() is re-evaluated for every outer
-- row, restricted to that row's own department.
SELECT
    e1.emp_name,
    e1.salary,
    e1.department
FROM employees AS e1
WHERE e1.salary > (SELECT AVG(e2.salary)
                   FROM employees AS e2
                   WHERE e2.department = e1.department);


-- Sanity checks: per-department averages.
-- (Was "WHERE department = 'HR' -1850": the "-1850" was a result note
-- missing its comment marker, and as written it subtracts 1850 from a
-- string literal, a type error in PostgreSQL.)
SELECT AVG(salary)
FROM employees
WHERE department = 'HR';   -- ~1850

SELECT AVG(salary)
FROM employees
WHERE department = 'IT';   -- ~2593


-- Your Task
-- Find the employees who have less than the average salary across the company.
137 | -------------------------------------------------------------------------------- /39.sql: -------------------------------------------------------------------------------- 1 | -- SQL Challenge Day 39/50 2 | DROP TABLE IF EXISTS amazon_products; 3 | CREATE TABLE amazon_products ( 4 | product_id SERIAL PRIMARY KEY, 5 | product_name VARCHAR(255), 6 | category VARCHAR(100), 7 | price DECIMAL(10, 2), 8 | country VARCHAR(50) 9 | ); 10 | 11 | -- Add 25+ records with real product names for the USA 12 | INSERT INTO amazon_products (product_name, category, price, country) VALUES 13 | ('iPhone 13 Pro Max', 'Smartphones', 1099.00, 'USA'), 14 | ('Samsung Galaxy S21 Ultra', 'Smartphones', 1199.99, 'USA'), 15 | ('Google Pixel 6 Pro', 'Smartphones', 899.00, 'USA'), 16 | ('Samsung QN90A Neo QLED TV', 'TVs', 2397.99, 'USA'), 17 | ('LG OLED C1 Series', 'TVs', 1996.99, 'USA'), 18 | ('Sony Bravia XR A90J', 'TVs', 2798.00, 'USA'), 19 | ('Apple MacBook Pro 16-inch', 'Laptops', 2399.00, 'USA'), 20 | ('Dell XPS 15', 'Laptops', 1899.99, 'USA'), 21 | ('Microsoft Surface Laptop 4', 'Laptops', 1299.99, 'USA'), 22 | ('Sony WH-1000XM4 Wireless Headphones', 'Headphones', 348.00, 'USA'), 23 | ('Bose Noise Cancelling Headphones 700', 'Headphones', 379.00, 'USA'), 24 | ('Apple AirPods Pro', 'Headphones', 249.00, 'USA'), 25 | ('Samsung Odyssey G9 Gaming Monitor', 'Monitors', 1399.99, 'USA'), 26 | ('Dell S2721QS 27-inch 4K Monitor', 'Monitors', 339.99, 'USA'), 27 | ('LG 27GN950-B UltraGear Gaming Monitor', 'Monitors', 1296.99, 'USA'), 28 | ('Canon EOS R5 Mirrorless Camera', 'Cameras', 3899.00, 'USA'), 29 | ('Sony Alpha a7 III Mirrorless Camera', 'Cameras', 1998.00, 'USA'), 30 | ('Nikon Z7 II Mirrorless Camera', 'Cameras', 2996.95, 'USA'), 31 | ('Nintendo Switch', 'Gaming Consoles', 299.99, 'USA'), 32 | ('PlayStation 5', 'Gaming Consoles', 499.99, 'USA'), 33 | ('Xbox Series X', 'Gaming Consoles', 499.99, 'USA'), 34 | ('Apple Watch Series 7', 'Smartwatches', 399.00, 'USA'), 35 | ('Samsung Galaxy 
Watch 4', 'Smartwatches', 249.99, 'USA'), 36 | ('Fitbit Sense', 'Smartwatches', 299.95, 'USA'), 37 | ('iPhone 13 Pro Max', 'Smartphones', 1099.00, 'USA'), 38 | ('Samsung Galaxy S21 Ultra', 'Smartphones', 1199.99, 'USA'), 39 | ('Google Pixel 6 Pro', 'Smartphones', 899.00, 'USA'), 40 | ('Samsung QN90A Neo QLED TV', 'TVs', 2397.99, 'USA'), 41 | ('LG OLED C1 Series', 'TVs', 1996.99, 'USA'), 42 | ('Sony Bravia XR A90J', 'TVs', 2798.00, 'USA'), 43 | ('Apple MacBook Pro 16-inch', 'Laptops', 2399.00, 'USA'), 44 | ('Dell XPS 15', 'Laptops', 1899.99, 'USA'), 45 | ('Microsoft Surface Laptop 4', 'Laptops', 1299.99, 'USA'), 46 | ('Sony WH-1000XM4 Wireless Headphones', 'Headphones', 348.00, 'USA'), 47 | ('Bose Noise Cancelling Headphones 700', 'Headphones', 379.00, 'USA'), 48 | ('Apple AirPods Pro', 'Headphones', 249.00, 'USA'), 49 | ('Samsung Odyssey G9 Gaming Monitor', 'Monitors', 1399.99, 'USA'), 50 | ('Dell S2721QS 27-inch 4K Monitor', 'Monitors', 339.99, 'USA'), 51 | ('LG 27GN950-B UltraGear Gaming Monitor', 'Monitors', 1296.99, 'USA'), 52 | ('Canon EOS R5 Mirrorless Camera', 'Cameras', 3899.00, 'USA'), 53 | ('Sony Alpha a7 III Mirrorless Camera', 'Cameras', 1998.00, 'USA'), 54 | ('Nikon Z7 II Mirrorless Camera', 'Cameras', 2996.95, 'USA'), 55 | ('Nintendo Switch', 'Gaming Consoles', 299.99, 'USA'), 56 | ('PlayStation 5', 'Gaming Consoles', 499.99, 'USA'), 57 | ('Xbox Series X', 'Gaming Consoles', 499.99, 'USA'), 58 | ('Apple Watch Series 7', 'Smartwatches', 399.00, 'USA'), 59 | ('Samsung Galaxy Watch 4', 'Smartwatches', 249.99, 'USA'), 60 | ('Fitbit Sense', 'Smartwatches', 299.95, 'USA'), 61 | ('iPhone 13 Pro Max', 'Smartphones', 1099.00, 'USA'), 62 | ('Samsung Galaxy S21 Ultra', 'Smartphones', 1199.99, 'USA'), 63 | ('Google Pixel 6 Pro', 'Smartphones', 899.00, 'USA'), 64 | ('Samsung QN90A Neo QLED TV', 'TVs', 2397.99, 'USA'), 65 | ('LG OLED C1 Series', 'TVs', 1996.99, 'USA'), 66 | ('Sony Bravia XR A90J', 'TVs', 2798.00, 'USA'), 67 | ('Apple MacBook Pro 16-inch', 
'Laptops', 2399.00, 'USA'), 68 | ('Dell XPS 15', 'Laptops', 1899.99, 'USA'), 69 | ('Microsoft Surface Laptop 4', 'Laptops', 1299.99, 'USA'), 70 | ('Sony WH-1000XM4 Wireless Headphones', 'Headphones', 348.00, 'USA'); 71 | 72 | 73 | DROP TABLE IF EXISTS return_records; 74 | CREATE TABLE return_records ( 75 | return_id SERIAL PRIMARY KEY, 76 | order_id INT, 77 | product_id INT, 78 | return_reason VARCHAR(255), 79 | return_date DATE 80 | ); 81 | 82 | -- Add 10 more return records 83 | INSERT INTO return_records (order_id, product_id, return_reason, return_date) VALUES 84 | (1006, 7, 'Defective product', '2024-04-27'), 85 | (1007, 9, 'Wrong color', '2024-04-29'), 86 | (1008, 8, 'Size too small', '2024-05-01'), 87 | (1009, 6, 'Not satisfied with quality', '2024-05-03'), 88 | (1010, 10, 'Received wrong item', '2024-05-05'), 89 | (1011, 12, 'Defective product', '2024-05-07'), 90 | (1012, 11, 'Changed mind', '2024-05-09'), 91 | (1013, 14, 'Item not needed', '2024-05-11'), 92 | (1014, 15, 'Damaged upon arrival', '2024-05-13'), 93 | (1015, 13, 'Wrong quantity', '2024-05-15'), 94 | (1016, 16, 'Defective product', '2024-05-17'), 95 | (1017, 17, 'Wrong size', '2024-05-19'), 96 | (1018, 18, 'Received damaged', '2024-05-21'), 97 | (1019, 19, 'Not as described', '2024-05-23'), 98 | (1020, 20, 'Changed mind', '2024-05-25'), 99 | (1021, 21, 'Item not needed', '2024-05-27'), 100 | (1022, 22, 'Defective product', '2024-05-29'), 101 | (1023, 23, 'Wrong color', '2024-05-31'), 102 | (1024, 24, 'Received wrong item', '2024-06-02'), 103 | (1025, 25, 'Size too small', '2024-06-04'), 104 | (1026, 26, 'Damaged upon arrival', '2024-06-06'), 105 | (1027, 27, 'Defective product', '2024-06-08'), 106 | (1028, 28, 'Not satisfied with quality', '2024-06-10'), 107 | (1029, 29, 'Wrong quantity', '2024-06-12'), 108 | (1030, 30, 'Changed mind', '2024-06-14'), 109 | (1031, 31, 'Item not needed', '2024-06-16'), 110 | (1032, 32, 'Defective product', '2024-06-18'), 111 | (1033, 33, 'Wrong size', 
'2024-06-20'),
(1034, 34, 'Received damaged', '2024-06-22'),
(1035, 35, 'Not as described', '2024-06-24'),
(1036, 36, 'Changed mind', '2024-06-26'),
(1037, 37, 'Item not needed', '2024-06-28'),
(1038, 38, 'Defective product', '2024-06-30'),
(1039, 39, 'Wrong color', '2024-07-02'),
(1040, 40, 'Received wrong item', '2024-07-04');


/*
Question:

Write a SQL query to show each product category
and its return percentage.

return percentage =
    total returns for the category
    /
    total overall returns * 100

Expected Output:

Category: Name of the product category.
Return Percentage: Percentage of returns
for each category.
*/

-- Plan:
-- 1. overall return count
-- 2. return count per category
-- 3. per-category count / overall count * 100

SELECT * FROM amazon_products;
SELECT * FROM return_records;


SELECT COUNT(*) FROM return_records;


-- The inner join keeps only returns that match a product, so COUNT(*)
-- counts returned rows per category; the scalar subquery supplies the
-- overall total. The ::numeric casts prevent integer division.
SELECT
    ap.category,
    COUNT(*)::numeric
        / (SELECT COUNT(*) FROM return_records)::numeric * 100
        AS percentage_return   -- was misspelled "percetage_return"
FROM amazon_products AS ap
JOIN return_records AS rr
    ON ap.product_id = rr.product_id
GROUP BY ap.category
ORDER BY percentage_return DESC;


-------------------------------------------------------------------------------- /40.sql: --------------------------------------------------------------------------------
-- SQL Challenge Day 40/50


DROP TABLE IF EXISTS order_data;
-- Create the table
CREATE TABLE order_data (
    order_id SERIAL PRIMARY KEY,
    order_time TIMESTAMP,
    customer_id INT,
    total_amount DECIMAL(10, 2)
);


-- Add records
INSERT INTO order_data
(order_time, customer_id, total_amount) VALUES 18 | ('2024-03-31 08:30:00', 1001, 25.50), 19 | ('2024-03-31 09:15:00', 1002, 32.75), 20 | ('2024-03-31 10:00:00', 1003, 20.00), 21 | ('2024-03-31 11:45:00', 1004, 18.50), 22 | ('2024-03-31 12:30:00', 1005, 27.80), 23 | ('2024-03-31 13:15:00', 1006, 35.20), 24 | ('2024-03-31 14:00:00', 1007, 40.00), 25 | ('2024-03-31 15:45:00', 1008, 22.90), 26 | ('2024-03-31 16:30:00', 1009, 28.75), 27 | ('2024-03-31 17:15:00', 1010, 30.60), 28 | ('2024-03-31 18:00:00', 1011, 24.95), 29 | ('2024-03-31 19:45:00', 1012, 38.25), 30 | ('2024-03-31 20:30:00', 1013, 42.80), 31 | ('2024-03-31 21:15:00', 1014, 26.40), 32 | ('2024-03-31 22:00:00', 1015, 33.10), 33 | ('2024-03-31 23:45:00', 1016, 20.50), 34 | ('2024-03-31 00:15:00', 1017, 28.75), 35 | ('2024-03-31 01:00:00', 1018, 18.90), 36 | ('2024-03-31 22:45:00', 1019, 23.25), 37 | ('2024-03-31 22:30:00', 1020, 30.00), 38 | ('2024-03-31 22:15:00', 1021, 35.80), 39 | ('2024-03-31 23:00:00', 1022, 38.50), 40 | ('2024-03-31 06:45:00', 1023, 21.20), 41 | ('2024-03-31 09:30:00', 1024, 27.95), 42 | ('2024-03-31 23:15:00', 1025, 32.70), 43 | ('2024-03-31 09:00:00', 1026, 25.45), 44 | ('2024-03-31 10:45:00', 1027, 37.80), 45 | ('2024-03-31 21:30:00', 1028, 40.90), 46 | ('2024-03-31 23:15:00', 1029, 24.60), 47 | ('2024-03-31 13:00:00', 1030, 31.75), 48 | ('2024-03-31 22:45:00', 1031, 22.50), 49 | ('2024-03-31 22:30:00', 1032, 30.25), 50 | ('2024-03-31 23:15:00', 1033, 19.80), 51 | ('2024-03-31 23:00:00', 1034, 24.75), 52 | ('2024-03-31 20:45:00', 1035, 32.50), 53 | ('2024-03-31 20:30:00', 1036, 38.20), 54 | ('2024-03-31 20:15:00', 1037, 41.75), 55 | ('2024-03-31 22:00:00', 1038, 23.80), 56 | ('2024-03-31 22:45:00', 1039, 29.95), 57 | ('2024-03-31 22:30:00', 1040, 31.60); 58 | 59 | 60 | 61 | /* 62 | Swiggy Data Analyst Interview Question: 63 | 64 | Write a SQL query to analyze the order patterns 65 | throughout the day, providing insights into each 66 | hour's total orders and their respective 67 | 
percentages of the total orders.

The output should include the hour, total orders,
and order percentage.
Order by % orders in descending order.

%orders = hourly orders / total_orders * 100
*/
-- Plan:
-- each hour and its total orders
-- each hour's orders / total orders * 100
-- ORDER BY the percentage, descending


SELECT * FROM order_data;

-- SELECT COUNT(*) FROM order_data;

-- Bucket orders by hour of day; the scalar subquery is the grand total,
-- and the ::numeric casts prevent integer division before ROUND.
SELECT
    EXTRACT(HOUR FROM order_time) AS hour,
    COUNT(1) AS cnt_orders,
    ROUND(COUNT(1)::numeric
          / (SELECT COUNT(*) FROM order_data)::numeric * 100, 2)
        AS order_percentage   -- was misspelled "order_percetage"
FROM order_data
GROUP BY 1
ORDER BY order_percentage DESC;


/*
-- Your Task
Create a new time category as Morning, After_noon, Evening and Night
And find the total orders falling into each category:
Morning   < 12 o'clock
Afternoon between 12 and 5
Evening   between 5 and 8
Night     > 8
*/


-------------------------------------------------------------------------------- /41.sql: --------------------------------------------------------------------------------
-- Day 41/50 SQL Challenge


-- Create the user_purchases table
CREATE TABLE user_purchases (
    user_id INT,
    date DATE,
    amount_spent FLOAT,
    day_name VARCHAR(20)
);

-- Insert records into the user_purchases table
INSERT INTO user_purchases (user_id, date, amount_spent, day_name) VALUES
(1047, '2023-01-01', 288, 'Sunday'),
(1099, '2023-01-04', 803, 'Wednesday'),
(1055, '2023-01-07', 546, 'Saturday'),
(1040, '2023-01-10', 680, 'Tuesday'),
(1052, '2023-01-13', 889, 'Friday'),
(1052, '2023-01-13', 596, 'Friday'),
(1016, '2023-01-16', 960, 'Monday'),
(1023, '2023-01-17', 861, 'Tuesday'),
(1010, '2023-01-19', 758, 'Thursday'),
(1013, '2023-01-19', 346,
'Thursday'), 24 | (1069, '2023-01-21', 541, 'Saturday'), 25 | (1030, '2023-01-22', 175, 'Sunday'), 26 | (1034, '2023-01-23', 707, 'Monday'), 27 | (1019, '2023-01-25', 253, 'Wednesday'), 28 | (1052, '2023-01-25', 868, 'Wednesday'), 29 | (1095, '2023-01-27', 424, 'Friday'), 30 | (1017, '2023-01-28', 755, 'Saturday'), 31 | (1010, '2023-01-29', 615, 'Sunday'), 32 | (1063, '2023-01-31', 534, 'Tuesday'), 33 | (1019, '2023-02-03', 185, 'Friday'), 34 | (1019, '2023-02-03', 995, 'Friday'), 35 | (1092, '2023-02-06', 796, 'Monday'), 36 | (1058, '2023-02-09', 384, 'Thursday'), 37 | (1055, '2023-02-12', 319, 'Sunday'), 38 | (1090, '2023-02-15', 168, 'Wednesday'), 39 | (1090, '2023-02-18', 146, 'Saturday'), 40 | (1062, '2023-02-21', 193, 'Tuesday'), 41 | (1023, '2023-02-24', 259, 'Friday'), 42 | (1023, '2023-02-24', 849, 'Friday'), 43 | (1009, '2023-02-27', 552, 'Monday'), 44 | (1012, '2023-03-02', 303, 'Thursday'), 45 | (1001, '2023-03-05', 317, 'Sunday'), 46 | (1058, '2023-03-08', 573, 'Wednesday'), 47 | (1001, '2023-03-11', 531, 'Saturday'), 48 | (1034, '2023-03-14', 440, 'Tuesday'), 49 | (1096, '2023-03-17', 650, 'Friday'), 50 | (1048, '2023-03-20', 711, 'Monday'), 51 | (1089, '2023-03-23', 388, 'Thursday'), 52 | (1001, '2023-03-26', 353, 'Sunday'), 53 | (1016, '2023-03-29', 833, 'Wednesday'); 54 | 55 | 56 | /* 57 | SQL Challenge: Friday Purchases 58 | 59 | Scenario: 60 | IBM wants to analyze user purchases for Fridays 61 | in the first quarter of the year. 62 | 63 | Calculate the average amount users spent 64 | per order for each Friday. 65 | 66 | Table: 67 | Table Name: user_purchases 68 | 69 | Columns: 70 | user_id (int) 71 | date (datetime) 72 | amount_spent (float) 73 | day_name (varchar) 74 | 75 | Question: 76 | Write an SQL query to find the average amount 77 | spent by users per order for each Friday 78 | in the first quarter of the year. 
79 | */ 80 | 81 | 82 | SELECT * FROM user_purchases; 83 | 84 | 85 | 86 | SELECT 87 | EXTRACT(WEEK FROM date) as week_num, 88 | AVG(amount_spent) as avg_spend_friday 89 | FROM user_purchases 90 | where 91 | EXTRACT(YEAR FROM date) = 2023 92 | AND 93 | EXTRACT(quarter FROM date) = 1 94 | AND 95 | EXTRACT(DOW FROM date) = 5 96 | GROUP BY 1 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | -------------------------------------------------------------------------------- /42.sql: -------------------------------------------------------------------------------- 1 | -- SQL Challenge 42/50 2 | 3 | -- UBER Interview Question 4 | 5 | -- Drop the table if it exists 6 | DROP TABLE IF EXISTS uber_ride; 7 | 8 | -- Create the Uber ride table 9 | CREATE TABLE uber_ride ( 10 | ride_id SERIAL PRIMARY KEY, 11 | ride_timestamp TIMESTAMP, 12 | ride_status VARCHAR(20) -- "ride_completed", "cancelled_by_driver" or "cancelled_by_user" 13 | ); 14 | 15 | -- Insert sample records 16 | INSERT INTO uber_ride (ride_timestamp, ride_status) 17 | VALUES 18 | ('2024-05-09 08:30:00', 'cancelled_by_driver'), 19 | ('2024-05-09 09:00:00', 'cancelled_by_user'), 20 | ('2024-05-09 10:00:00', 'ride_completed'), 21 | ('2024-05-09 11:00:00', 'cancelled_by_user'), 22 | ('2024-05-09 12:00:00', 'cancelled_by_driver'), 23 | ('2024-05-09 13:00:00', 'cancelled_by_user'), 24 | ('2024-05-09 14:00:00', 'cancelled_by_user'), 25 | ('2024-05-09 15:00:00', 'cancelled_by_user'), 26 | ('2024-05-09 16:00:00', 'ride_completed'), 27 | ('2024-05-09 17:00:00', 'cancelled_by_user'), 28 | ('2024-05-09 18:00:00', 'ride_completed'), 29 | ('2024-05-09 19:00:00', 'cancelled_by_user'), 30 | ('2024-05-09 20:00:00', 'cancelled_by_user'), 31 | ('2024-05-09 21:00:00', 'cancelled_by_user'), 32 | ('2024-05-09 22:00:00', 'cancelled_by_driver'), 33 | ('2024-05-09 13:00:00', 'cancelled_by_user'), 34 | ('2024-05-09 14:00:00', 'cancelled_by_user'), 35 | ('2024-05-09 15:00:00', 'cancelled_by_user'), 36 | ('2024-05-09 16:00:00', 'ride_completed'), 
37 | ('2024-05-09 17:00:00', 'cancelled_by_user'), 38 | ('2024-05-09 18:00:00', 'cancelled_by_driver'), 39 | ('2024-05-09 19:00:00', 'cancelled_by_user'), 40 | ('2024-05-09 20:00:00', 'cancelled_by_user'), 41 | ('2024-05-09 21:00:00', 'cancelled_by_user'), 42 | ('2024-05-09 22:00:00', 'cancelled_by_driver'); 43 | 44 | -- Check the records 45 | SELECT * FROM uber_ride; 46 | 47 | -- Check the records 48 | SELECT * FROM uber_ride; 49 | 50 | 51 | 52 | -- Check the records 53 | SELECT * FROM uber_ride; 54 | 55 | -- Check the records 56 | SELECT * FROM uber_ride; 57 | 58 | -- Check the records 59 | SELECT * FROM uber_ride; 60 | 61 | -- UBER Data Analyst Interview Question 62 | /* 63 | You are given a uber_ride table with columns 64 | ride_id, ride_time_stamp, ride_status. 65 | (which has information about the ride) 66 | 67 | 68 | Find out % of ride cancelled by uber_driver 69 | 70 | */ 71 | 72 | -- total cnt of cancelled ride 73 | -- total ride that was cancelled by driver 74 | -- 2/1 * 100 75 | 76 | SELECT * FROM uber_ride; 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | SELECT 118 | ROUND( 119 | SUM( 120 | CASE 121 | WHEN ride_status = 'cancelled_by_driver' 122 | THEN 1 123 | ELSE 0 124 | END 125 | )::numeric/(SELECT COUNT(1) FROM uber_ride 126 | WHERE ride_status <> 'ride_completed' )::numeric 127 | * 100,2) as percentage_ride_cancelled_driver 128 | FROM uber_ride 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | -- -- Your Task is to find out how how many ride were cancelled by user in the evening 152 | -- hour > 17 is considered as evening -------------------------------------------------------------------------------- /43.sql: -------------------------------------------------------------------------------- 
-- SQL Challenge 43/50

-- Forbes Global ranking snapshot: one row per (company, rank) entry.
CREATE TABLE forbes_global (
    company VARCHAR(100),
    sector VARCHAR(100),
    industry VARCHAR(100),
    country VARCHAR(100),
    sales FLOAT,
    profits FLOAT,
    rank INT  -- NOTE(review): "rank" collides with the RANK() keyword; quote it if the engine complains
);

-- Inserting the data
-- FIX: explicit column list so the INSERT survives a column reorder.
INSERT INTO forbes_global (company, sector, industry, country, sales, profits, rank)
VALUES
('Walmart', 'Consumer Discretionary', 'General Merchandisers', 'United States', 482130.0, 14694.0, 1),
('Sinopec-China Petroleum', 'Energy', 'Oil & Gas Operations', 'China', 448452.0, 7840.0, 2),
('Royal Dutch Shell', 'Energy', 'Oil & Gas Operations', 'Netherlands', 396556.0, 15340.0, 3),
('China National Petroleum', 'Energy', 'Oil & Gas Operations', 'China', 392976.0, 2837.0, 4),
('State Grid', 'Utilities', 'Electric Utilities', 'China', 387056.0, 9573.0, 5),
('Saudi Aramco', 'Energy', 'Oil & Gas Operations', 'Saudi Arabia', 355905.0, 11002.0, 6),
('Volkswagen', 'Consumer Discretionary', 'Auto & Truck Manufacturers', 'Germany', 283565.0, 17742.4, 7),
('BP', 'Energy', 'Oil & Gas Operations', 'United Kingdom', 282616.0, 3591.0, 8),
('Amazon.com', 'Consumer Discretionary', 'Internet Services and Retailing', 'United States', 280522.0, 5362.0, 9),
('Toyota Motor', 'Consumer Discretionary', 'Auto & Truck Manufacturers', 'Japan', 275288.0, 18499.3, 10),
('Apple', 'Information Technology', 'Computers, Office Equipment', 'United States', 265595.0, 55256.0, 11),
('Exxon Mobil', 'Energy', 'Oil & Gas Operations', 'United States', 263910.0, 15850.0, 12),
('Berkshire Hathaway', 'Financials', 'Diversified Financials', 'United States', 247837.0, 8971.0, 13),
('Samsung Electronics', 'Information Technology', 'Electronics', 'South Korea', 245898.0, 19783.3, 14),
('McKesson', 'Health Care', 'Health Care: Pharmacy and Other Services', 'United States', 231091.0, 5070.0, 15),
('Glencore', 'Materials', 'Diversified Metals & Mining', 'Switzerland', 219754.0, 5436.0, 16),
('UnitedHealth Group', 'Health Care', 'Health Care: Insurance and Managed Care', 'United States', 201159.0, 13650.0, 17),
('Daimler', 'Consumer Discretionary', 'Auto & Truck Manufacturers', 'Germany', 197515.0, 8245.1, 18),
('CVS Health', 'Health Care', 'Health Care: Pharmacy and Other Services', 'United States', 194579.0, 6634.0, 19),
('AT&T', 'Telecommunication Services', 'Telecommunications', 'United States', 181193.0, 13906.0, 20),
('Foxconn', 'Technology', 'Electronics', 'Taiwan', 175617.0, 4103.4, 21),
('General Motors', 'Consumer Discretionary', 'Auto & Truck Manufacturers', 'United States', 174049.0, 6710.0, 22),
('Verizon Communications', 'Telecommunication Services', 'Telecommunications', 'United States', 170756.0, 19225.0, 23),
('Total', 'Energy', 'Oil & Gas Operations', 'France', 149769.0, 7480.0, 24),
('IBM', 'Information Technology', 'Information Technology Services', 'United States', 141682.0, 6606.0, 25),
('Ford Motor', 'Consumer Discretionary', 'Auto & Truck Manufacturers', 'United States', 140545.0, 6471.0, 26),
('Hon Hai Precision Industry', 'Technology', 'Electronics', 'Taiwan', 135129.0, 4493.3, 27),
('Trafigura Group', 'Energy', 'Trading', 'Singapore', 131638.0, 975.0, 28),
('General Electric', 'Industrials', 'Diversified Industrials', 'United States', 126661.0, 5136.0, 29),
('AmerisourceBergen', 'Health Care', 'Wholesalers: Health Care', 'United States', 122848.0, 1605.5, 30),
('Fannie Mae', 'Financials', 'Diversified Financials', 'United States', 120472.0, 18418.0, 31),
('Trafigura Group', 'Energy', 'Trading', 'Switzerland', 120438.0, 975.0, 32),
('Koch Industries', 'Diversified', 'Diversified', 'United States', 115095.0, 5142.0, 33),
('Cardinal Health', 'Health Care', 'Wholesalers: Health Care', 'United States', 113982.0, 1377.0, 34),
('Alphabet', 'Technology', 'Internet Services and Retailing', 'United States', 110855.0, 18616.0, 35),
('Chevron', 'Energy', 'Oil & Gas Operations', 'United States', 110360.0, 5520.0, 36),
('Costco Wholesale', 'Consumer Discretionary', 'General Merchandisers', 'United States', 110215.0, 2115.0, 37),
('Cardinal Health', 'Health Care', 'Health Care: Pharmacy and Other Services', 'United States', 109838.0, 1718.0, 38),
('Ping An Insurance Group', 'Financials', 'Insurance', 'China', 109254.0, 2047.4, 39),
('Walgreens Boots Alliance', 'Consumer Staples', 'Food and Drug Stores', 'United States', 109026.0, 4563.0, 40),
('Costco Wholesale', 'Consumer Discretionary', 'Retailing', 'United States', 105156.0, 2115.0, 41),
('JPMorgan Chase', 'Financials', 'Diversified Financials', 'United States', 105153.0, 30615.0, 42),
('Verizon Communications', 'Telecommunication Services', 'Telecommunications', 'United States', 104887.0, 13568.0, 43),
('China Construction Bank', 'Financials', 'Banks', 'China', 104693.0, 38369.0, 44),
('China Construction Bank', 'Financials', 'Major Banks', 'China', 104692.9, 38369.2, 45),
('Trafigura Group', 'Energy', 'Trading', 'Netherlands', 103752.0, 975.0, 46),
('Exor Group', 'Financials', 'Diversified Financials', 'Netherlands', 103606.6, -611.2, 47),
('Anheuser-Busch InBev', 'Consumer Staples', 'Beverages', 'Belgium', 101541.0, 9536.0, 48),
('Bank of America', 'Financials', 'Banks', 'United States', 100264.0, 18724.0, 49),
('Bank of China', 'Financials', 'Banks', 'China', 99237.3, 28202.1, 50),
('Trafigura Group', 'Energy', 'Trading', 'Switzerland', 97296.0, 975.0, 51),
('Dell Technologies', 'Technology', 'Computers, Office Equipment', 'United States', 94477.0, 2743.0, 52),
('CVS Health', 'Health Care', 'Health Care: Insurance and Managed Care', 'United States', 94005.0, 6239.0, 53),
('Trafigura Group', 'Energy', 'Trading', 'United Kingdom', 90345.0, 975.0, 54),
('Trafigura Group', 'Energy', 'Trading', 'Switzerland', 88265.0, 975.0, 55),
('Trafigura Group', 'Energy', 'Trading', 'Netherlands', 88111.0, 975.0, 56),
('Trafigura Group', 'Energy', 'Trading', 'Switzerland', 87044.0, 975.0, 57),
('Trafigura Group', 'Energy', 'Trading', 'Switzerland', 84795.0, 975.0, 58),
('Trafigura Group', 'Energy', 'Trading', 'Switzerland', 84361.0, 975.0, 59),
('Trafigura Group', 'Energy', 'Trading', 'Switzerland', 83156.0, 975.0, 60),
('Trafigura Group', 'Energy', 'Trading', 'Switzerland', 82276.0, 975.0, 61);

/*
-- Most Profitable Companies
You are given a table called
forbes_global with columns
company, sector, industry, country,
sales, profits, rank

Find out each country's most
profitable company details
*/

SELECT * FROM forbes_global;

-- FIX: the derived table must carry an alias (PostgreSQL rejects an
-- unaliased subquery in FROM — the original was a syntax error) and the
-- statement needs a terminating semicolon.  RANK() keeps ties, so a country
-- whose two companies share the top profit returns both rows.
SELECT *
FROM (
    SELECT *,
        RANK() OVER (PARTITION BY country ORDER BY profits DESC) AS rn
    FROM forbes_global
) AS ranked
WHERE rn = 1;

-- -- Your Task
-- Find out each sector top 2 most profitable company details

-- ------------------------------ /44.sql: ------------------------------
-- SQL Challenge 44/50

-- Create the table
CREATE TABLE house_price (
    id INT,
    state VARCHAR(255),
    city VARCHAR(255),
    street_address VARCHAR(255),
    mkt_price INT
);
-- Insert all the records
INSERT INTO house_price (id, state, city, street_address, mkt_price) VALUES
(1, 'NY', 'New York City', '66 Trout Drive', 449761),
(2, 'NY', 'New York City', 'Atwater', 277527),
(3, 'NY', 'New York City', '58 Gates Street', 268394),
(4, 'NY', 'New York City', 'Norcross', 279929),
(5, 'NY', 'New York City', '337 Shore Ave.', 151592),
(6, 'NY', 'New York City', 'Plainfield', 624531),
(7, 'NY', 'New York City', '84 Central Street', 267345),
(8, 'NY', 'New York City', 'Passaic', 88504),
(9, 'NY', 'New York City', '951 Fulton Road', 270476),
(10, 'NY', 'New York City', 'Oxon Hill', 118112),
(11, 'CA', 'Los Angeles', '692 Redwood Court', 150707),
(12, 'CA', 'Los Angeles', 'Lewiston', 463180),
(13, 'CA', 'Los Angeles', '8368 West Acacia Ave.', 538865),
(14, 'CA', 'Los Angeles', 'Pearl', 390896),
(15, 'CA', 'Los Angeles', '8206 Old Riverview Rd.', 117754),
(16, 'CA', 'Los Angeles', 'Seattle', 424588),
(17, 'CA', 'Los Angeles', '7227 Joy Ridge Rd.', 156850),
(18, 'CA', 'Los Angeles', 'Battle Ground', 643454),
(19, 'CA', 'Los Angeles', '233 Bedford Ave.', 713841),
(20, 'CA', 'Los Angeles', 'Saint Albans', 295852),
(21, 'IL', 'Chicago', '8830 Baker St.', 12944),
(22, 'IL', 'Chicago', 'Watertown', 410766),
(23, 'IL', 'Chicago', '632 Princeton St.', 160696),
(24, 'IL', 'Chicago', 'Waxhaw', 464144),
(25, 'IL', 'Chicago', '7773 Tailwater Drive', 129393),
(26, 'IL', 'Chicago', 'Bonita Springs', 174886),
(27, 'IL', 'Chicago', '31 Summerhouse Rd.', 296008),
(28, 'IL', 'Chicago', 'Middleburg', 279000),
(29, 'IL', 'Chicago', '273 Windfall Avenue', 424846),
(30, 'IL', 'Chicago', 'Graham', 592268),
(31, 'TX', 'Houston', '91 Canterbury Dr.', 632014),
(32, 'TX', 'Houston', 'Dallas', 68868),
(33, 'TX', 'Houston', '503 Elmwood St.', 454184),
(34, 'TX', 'Houston', 'Kennewick', 186280),
(35, 'TX', 'Houston', '739 Chapel Street', 334474),
(36, 'TX', 'Houston', 'San Angelo', 204460),
(37, 'TX', 'Houston', '572 Parker Dr.', 678443),
(38, 'TX', 'Houston', 'Bellmore', 401090),
(39, 'TX', 'Houston', '8653 South Oxford Street', 482214),
(40, 'TX', 'Houston', 'Butler', 330868),
(41, 'AZ', 'Phoenix', '8667 S. Joy Ridge Court', 316291),
(42, 'AZ', 'Phoenix', 'Torrance', 210392),
(43, 'AZ', 'Phoenix', '35 Harvard St.', 167502),
(44, 'AZ', 'Phoenix', 'Nutley', 327554),
(45, 'AZ', 'Phoenix', '7313 Vermont St.', 285135),
(46, 'AZ', 'Phoenix', 'Lemont', 577667),
(47, 'AZ', 'Phoenix', '8905 Buttonwood Dr.', 212301),
(48, 'AZ', 'Phoenix', 'Lafayette', 317504);

/*
You are given a table of New York housing
price called house_transactions with columns
id, state, city, street_address, mkt_price

Identify properties where the mkt_price of the house
exceeds the city's average mkt_price.
*/

SELECT * FROM house_price;

-- Correlated subquery: compare each house against the average price of its
-- own city.  FIX: added street_address (the task asks for property details)
-- and terminating semicolons on every statement.
SELECT
    h1.id,
    h1.state,
    h1.city,
    h1.street_address,
    h1.mkt_price
FROM house_price h1
WHERE h1.mkt_price > (
    SELECT AVG(h2.mkt_price)
    FROM house_price h2
    WHERE h2.city = h1.city
);

-- Sanity check: average price for one city (Chicago, ~294k)
SELECT AVG(h2.mkt_price)
FROM house_price h2
WHERE h2.city = 'Chicago';

-- Your Task
-- Write a query to find the property that has house mkt_price greater
-- than average of the city's average price but less than nation's average price

-- ------------------------------ /45.sql: ------------------------------
-- SQL Challenge 45/50

DROP TABLE IF EXISTS orders;
-- Create Orders table
CREATE TABLE Orders (
    Order_id INT PRIMARY KEY,
    Customer_id INT,
    Order_Date DATE,
    Amount DECIMAL(10, 2)
);

DROP TABLE IF EXISTS Customers;
-- Create Customers table
CREATE TABLE Customers (
    Customer_id INT PRIMARY KEY,
    Customer_Name VARCHAR(50),
    Join_Date DATE
);

-- Insert records into Orders table
INSERT INTO Orders (Order_id, Customer_id, Order_Date, Amount)
VALUES
(1, 1, '2024-05-01', 100),
(2, 2, '2024-05-02', 150),
(3, 3, '2023-12-15', 200),
(4, 1, '2024-05-03', 120),
(5, 2, '2024-01-20', 180),
(6, 4, '2024-03-10', 90);

-- Insert records into Customers table
INSERT INTO Customers (Customer_id, Customer_Name, Join_Date)
VALUES
(1, 'Alice', '2024-01-15'),
(2, 'Bob', '2024-02-20'),
(3, 'Charlie', '2023-12-01'),
(4, 'David', '2024-03-01');

/*
-- Amazon Data Analyst interview
questions for exp 1-3 year!

You have two tables: Orders and Customers.

- Orders Table Columns:
Order_id, Customer_id, Order_Date, Amount

- Customers Table Columns:
Customer_id, Customer_Name, Join_Date

Write an SQL query to calculate the total
order amount for each customer who joined
in the current year.

The output should contain Customer_Name and
the total amount.
*/
-- plan: join both tables on customer_id,
-- keep customers who joined in the current year,
-- sum the amount per customer

SELECT * FROM Customers;
SELECT * FROM orders;

-- FIX: group on the customer key, not on position 1 (customer_name) — two
-- customers sharing a name would have been silently merged into one row.
-- Also aliased the aggregate and terminated the statement.
SELECT
    c.customer_name,
    SUM(o.amount) AS total_amount
FROM orders AS o
JOIN customers AS c
    ON c.customer_id = o.customer_id
WHERE EXTRACT(YEAR FROM c.join_date) = EXTRACT(YEAR FROM CURRENT_DATE)
GROUP BY c.customer_id, c.customer_name;

-- Alternative with an explicit year window (only valid while it is 2024);
-- half-open range keeps it index-friendly and unambiguous.
SELECT
    c.customer_name,
    SUM(o.amount) AS total_amt
FROM orders AS o
JOIN customers AS c
    ON c.customer_id = o.customer_id
WHERE c.join_date >= DATE '2024-01-01'
  AND c.join_date <  DATE '2025-01-01'
GROUP BY c.customer_id, c.customer_name;

-- Your Task
-- Write a SQL query to return each month and total orders for current year
-- return month_number, total orders

-- ------------------------------ /46.sql: ------------------------------
-- SQL Challenge 46/50

DROP TABLE IF EXISTS orders;
-- Create table
CREATE TABLE orders (
    order_id SERIAL PRIMARY KEY,
    customer_id INT,
    order_date DATE,
    total_amount FLOAT
);

-- Insert records
INSERT INTO orders (customer_id, order_date, total_amount)
VALUES
(1001, '2024-01-01', 120.25),
(1002, '2024-01-03', 80.99),
(1003, '2024-01-05', 160.00),
(1004, '2024-01-07', 95.50),
(1001, '2024-02-09', 70.75),
(1002, '2024-02-11', 220.00),
(1003, '2024-02-13', 130.50),
(1004, '2024-02-15', 70.25),
(1001, '2024-02-17', 60.75),
(1002, '2024-03-19', 180.99),
(1003, '2024-03-21', 140.00),
(1004, '2024-03-23', 110.50),
(1001, '2024-03-25', 90.25),
(1002, '2024-03-27', 200.00),
(1003, '2024-03-29', 160.50),
(1004, '2024-03-31', 120.75),
(1001, '2024-03-02', 130.25),
(1002, '2024-03-04', 90.99),
(1003, '2024-03-06', 170.00),
(1004, '2024-04-08', 105.50),
(1001, '2024-04-10', 80.75),
(1002, '2024-04-12', 240.00),
(1003, '2024-04-14', 150.50),
(1004, '2024-04-16', 80.25),
(1001, '2024-04-18', 70.75);

/*
-- Amazon Data Analyst Interview Question
You have orders table with columns
order_id, customer_id, order_date, total_amount

Calculate the running total of orders for each
customer.

Return the customer ID, order date,
total amount of each order, and the
cumulative total of orders for each customer
sorted by customer ID and order date.
*/

-- FIX: the question requires the output sorted by customer ID and order
-- date — the ORDER BY was commented out.  The explicit ROWS frame with an
-- order_id tie-break gives a true row-by-row cumulative sum (the default
-- RANGE frame would lump together orders sharing the same date).
SELECT *,
    SUM(total_amount) OVER (
        PARTITION BY customer_id
        ORDER BY order_date, order_id
        ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW
    ) AS running_total
FROM orders
ORDER BY customer_id, order_date;

-- Find each customer_id and revenue collected from them in each month

-- ------------------------------ /47.sql: ------------------------------
-- SQL Challenge 47/50

DROP TABLE IF EXISTS inventory;
CREATE TABLE inventory (
    product_id SERIAL PRIMARY KEY,
    product_name VARCHAR(100),
    quantity INT,
    price_per_unit FLOAT
);

INSERT INTO inventory (product_name, quantity, price_per_unit)
VALUES
('Laptop', 20, 999.99),
('Smartphone', 15, 699.99),
('Tablet', 8, 399.99),
('Headphones', 25, 149.99),
('Mouse', 30, 29.99),
('Wireless Earbuds', 12, 79.99),
('Portable Charger', 10, 49.99),
('Bluetooth Speaker', 18, 129.99),
('Fitness Tracker', 7, 89.99),
('External Hard Drive', 9, 149.99),
('Gaming Mouse', 14, 59.99),
('USB-C Cable', 22, 19.99),
('Smart Watch', 6, 199.99),
('Desk Lamp', 11, 34.99),
('Power Bank', 16, 39.99),
('Wireless Mouse', 13, 29.99),
('Bluetooth Headset', 20, 59.99),
('MicroSD Card', 5, 24.99),
('USB Flash Drive', 8, 14.99),
('HDMI Cable', 17, 9.99);

/*
Question:
Write an SQL query to display inventory details
including the product name, quantity in stock,
remaining stock level ('Medium' if quantity is
more than 10, 'Low' otherwise), and supplier ID.

Assume each product has a unique
supplier ID associated with it.
*/
-- product name, quantity in stock, stock level
-- qty > 10 -> 'Medium', otherwise 'Low'
-- supplier ID (product_id doubles as the unique supplier id here)

-- FIX: the question specifies the literal 'Low' — the original returned
-- the lowercase 'low', which breaks any case-sensitive comparison.
SELECT *,
    CASE
        WHEN quantity > 10 THEN 'Medium'
        ELSE 'Low'
    END AS stock_level
FROM inventory;

-- ------------------------------ /48.sql: ------------------------------
-- SQL Challenge 48/50

-- FIX: drop Orders BEFORE Customers — Orders carries the FK to Customers,
-- so the original drop order fails on re-runs while Orders still exists.
DROP TABLE IF EXISTS Orders;
DROP TABLE IF EXISTS Customers;

CREATE TABLE Customers (
    CustomerID INT PRIMARY KEY,
    CustomerName VARCHAR(50),
    City VARCHAR(50),
    Country VARCHAR(50)
);

CREATE TABLE Orders (
    OrderID INT PRIMARY KEY,
    CustomerID INT,
    OrderDate DATE,
    TotalAmount DECIMAL(10,2),
    FOREIGN KEY (CustomerID) REFERENCES Customers(CustomerID)
);

-- Insert records into the 'Customers' table
INSERT INTO Customers (CustomerID, CustomerName, City, Country)
VALUES
(1, 'John Doe', 'New York', 'USA'),
(2, 'Jane Smith', 'Los Angeles', 'USA'),
(3, 'Michael Johnson', 'Chicago', 'USA'),
(4, 'Emily Brown', 'Houston', 'USA');

-- Insert records into the 'Orders' table
INSERT INTO Orders (OrderID, CustomerID, OrderDate, TotalAmount)
VALUES
(101, 1, '2024-05-10', 150.00),
(102, 2, '2024-05-11', 200.00),
(103, 1, '2024-05-12', 100.00),
(104, 3, '2024-05-13', 300.00);

/*
-- Question
You are given customers and orders table with
columns
customers has columns
cx_id, cx_name, city, country

orders table has COLUMNS
order_id, cx_id, order_date, totalamount

Write an SQL query to retrieve
customer details along with their total order amounts
(if any).

Include the customer's name, city, country, and total order amount.

If a customer hasn't placed any orders,
display 'NULL' for the total order amount."
*/

SELECT * FROM customers;
SELECT * FROM orders;

-- LEFT JOIN keeps order-less customers (their SUM is NULL, as required).
-- FIX: "LEFt" typo, named GROUP BY key (the PK, so c.* is functionally
-- dependent on it in PostgreSQL) instead of a positional reference, alias
-- and terminating semicolon.
SELECT
    c.*,
    SUM(o.totalamount) AS total_orders
FROM customers AS c
LEFT JOIN orders AS o
    ON o.customerid = c.customerid
GROUP BY c.customerid;

-- ------------------------------ /49.sql: ------------------------------
-- SQL Challenge 49/50

DROP TABLE IF EXISTS orders;
-- Create table for orders
CREATE TABLE orders (
    order_id SERIAL PRIMARY KEY,
    order_date DATE
);

-- Insert sample records for orders
INSERT INTO orders (order_date)
VALUES
('2024-05-01'),
('2024-05-01'),
('2024-05-01'),
('2024-05-02'),
('2024-05-02'),
('2024-05-02'),
('2024-05-03'),
('2024-05-03'),
('2024-05-03'),
('2024-05-03'),
('2024-05-03'),
('2024-05-04'),
('2024-05-04'),
('2024-05-04'),
('2024-05-04'),
('2024-05-04'),
('2024-05-05'),
('2024-05-05'),
('2024-05-05'),
('2024-05-05'),
('2024-05-06'),
('2024-05-06'),
('2024-05-06'),
('2024-05-06'),
('2024-05-06');

-- Display the records to verify

/*
-- Question:
You are given an orders table with
columns order_id, order_date

Identify the busiest day for orders along
with the total number of orders placed on that day.
*/

SELECT * FROM orders;

-- FIX: named GROUP BY / ORDER BY keys instead of positions, and an alias
-- on the count.  LIMIT 1 returns one busiest day; if several days can tie,
-- switch LIMIT 1 to FETCH FIRST 1 ROWS WITH TIES (PostgreSQL 13+).
SELECT
    order_date,
    COUNT(*) AS total_orders
FROM orders
GROUP BY order_date
ORDER BY total_orders DESC
LIMIT 1;

-- ------------------------------ /50.sql: ------------------------------
-- SQL Challenge 50/50

-- FIX: drop orders BEFORE sellers — orders references sellers(seller_id),
-- so the original drop order fails on re-runs while orders still exists.
DROP TABLE IF EXISTS orders;
DROP TABLE IF EXISTS sellers;
-- Create table for sellers
CREATE TABLE sellers (
    seller_id SERIAL PRIMARY KEY,
    seller_name VARCHAR(100) NOT NULL
);

-- Insert sample records into the sellers table
INSERT INTO sellers (seller_name)
VALUES
('Seller A'),
('Seller B'),
('Seller C');

-- Create table for orders
CREATE TABLE orders (
    order_id SERIAL PRIMARY KEY,
    seller_id INT REFERENCES sellers(seller_id),
    product_id INT,
    category VARCHAR(50),
    quantity INT,
    price_per_unit FLOAT
);

-- Insert sample records into the orders table
INSERT INTO orders (seller_id, product_id, category, quantity, price_per_unit)
VALUES
(1, 1, 'Electronics', 2, 999.99),
(1, 2, 'Electronics', 3, 699.99),
(2, 3, 'Home & Kitchen', 1, 49.99),
(2, 4, 'Home & Kitchen', 2, 79.99),
(2, 5, 'Electronics', 1, 29.99),
(3, 1, 'Electronics', 2, 999.99),
(3, 4, 'Home & Kitchen', 1, 79.99),
(1, 3, 'Home & Kitchen', 2, 49.99),
(2, 1, 'Electronics', 1, 999.99),
(3, 2, 'Electronics', 1, 699.99),
(1, 4, 'Home & Kitchen', 3, 79.99),
(2, 2, 'Electronics', 2, 699.99),
(3, 3, 'Home & Kitchen', 1, 49.99),
(1, 5, 'Electronics', 2, 29.99),
(2, 4, 'Home & Kitchen', 1, 79.99),
(3, 1, 'Electronics', 1, 999.99),
(1, 2, 'Electronics', 1, 699.99),
(2, 3, 'Home & Kitchen', 2, 49.99),
(3, 5, 'Electronics', 1, 29.99),
(1, 3, 'Home & Kitchen', 1, 49.99),
(2, 1, 'Electronics', 3, 999.99),
(3, 2, 'Electronics', 2, 699.99),
(1, 4, 'Home & Kitchen', 1, 79.99),
(2, 2, 'Electronics', 1, 699.99),
(3, 3, 'Home & Kitchen', 3, 49.99),
(1, 5, 'Electronics', 1, 29.99);

/*
-- You have two tables, sellers and orders.

sellers table columns are
seller_id, seller_name

orders table has columns
seller_id, product_id, category,
quantity, price_per_unit

Write an SQL query to find each seller's
revenue from each category and each product.

return seller_name, total_revenue in each
product inside each category
*/

-- seller_name comes from the sellers table
-- category and product_id come from the orders table
-- revenue = quantity * price_per_unit

SELECT * FROM sellers;
SELECT * FROM orders;

-- Revenue per seller / category / product.
-- FIX: named GROUP BY and ORDER BY keys instead of positional references.
SELECT
    s.seller_name,
    o.category,
    o.product_id,
    SUM(o.price_per_unit * o.quantity) AS total_revenue
FROM orders AS o
JOIN sellers AS s
    ON s.seller_id = o.seller_id
GROUP BY s.seller_name, o.category, o.product_id
ORDER BY s.seller_name, o.product_id;

-- ------------------------------ /README.md: ------------------------------
-- # 50_days_sql_challenge

-- ------------------------------ /SQL 15 .sql: ------------------------------
-- Day 15/50 SQL Challenge

-- Creating the orders table
DROP TABLE IF EXISTS orders;
CREATE TABLE orders (
    order_id SERIAL PRIMARY KEY,
    order_date DATE,
    product_id INT,
    quantity INT,
    price DECIMAL(10, 2)
);

-- Inserting records for the current month
INSERT INTO orders (order_date, product_id, quantity, price) VALUES
('2024-04-01', 1, 10, 50.00),
('2024-04-02', 2, 8, 40.00),
('2024-04-03', 3, 15, 30.00),
('2024-04-04', 4, 12, 25.00),
('2024-04-05', 5, 5, 60.00),
('2024-04-06', 6, 20, 20.00),
('2024-04-07', 7, 18, 35.00),
('2024-04-08', 8, 14, 45.00),
('2024-04-09', 1, 10, 50.00),
('2024-04-10', 2, 8, 40.00);

-- Inserting records for the last month
INSERT INTO orders (order_date, product_id, quantity, price) VALUES
('2024-03-01', 1, 12, 50.00),
('2024-03-02', 2, 10, 40.00),
('2024-03-03', 3, 18, 30.00),
('2024-03-04', 4, 14, 25.00),
('2024-03-05', 5, 7, 60.00),
('2024-03-06', 6, 22, 20.00),
('2024-03-07', 7, 20, 35.00),
('2024-03-08', 8, 16, 45.00),
('2024-03-09', 1, 12, 50.00),
('2024-03-10', 2, 10, 40.00);

-- Inserting records for the previous month
INSERT INTO orders (order_date, product_id, quantity, price) VALUES
('2024-02-01', 1, 15, 50.00),
('2024-02-02', 2, 12, 40.00),
('2024-02-03', 3, 20, 30.00),
('2024-02-04', 4, 16, 25.00),
('2024-02-05', 5, 9, 60.00),
('2024-02-06', 6, 25, 20.00),
('2024-02-07', 7, 22, 35.00),
('2024-02-08', 8, 18, 45.00),
('2024-02-09', 1, 15, 50.00),
('2024-02-10', 2, 12, 40.00);

/*
Write an SQL query to retrieve the product details for items whose revenue
decreased compared to the previous month.

Display the product ID, quantity sold,
and revenue for both the current and previous months.
*/
-- ---------------
-- MY Solution
-- ---------------

-- per product: total quantity and revenue for the current month,
-- then the same for the previous month, and compare the two

SELECT * FROM orders;

-- FIX: comparing only EXTRACT(MONTH ...) breaks in January
-- (EXTRACT(MONTH ...) - 1 = 0 never matches) and collides the same month
-- of different years; truncating to the month handles both correctly.
WITH current_month_revenue AS (
    SELECT
        product_id,
        SUM(quantity) AS qty_sold,
        SUM(price * quantity) AS current_month_rev
    FROM orders
    WHERE DATE_TRUNC('month', order_date) = DATE_TRUNC('month', CURRENT_DATE)
    GROUP BY product_id
),
prev_month_revenue AS (
    SELECT
        product_id,
        SUM(quantity) AS qty_sold,
        SUM(price * quantity) AS prev_month_rev
    FROM orders
    WHERE DATE_TRUNC('month', order_date) = DATE_TRUNC('month', CURRENT_DATE) - INTERVAL '1 month'
    GROUP BY product_id
)
SELECT
    cr.product_id,
    cr.qty_sold AS cr_month_qty,
    pr.qty_sold AS pr_month_qty,
    cr.current_month_rev,
    pr.prev_month_rev
FROM current_month_revenue AS cr
JOIN prev_month_revenue AS pr
    ON cr.product_id = pr.product_id
WHERE cr.current_month_rev < pr.prev_month_rev;

-- Task: Write a SQL query to find the products whose total revenue has decreased by more than 10% from the previous month to the current month.
/*
Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/
Follow me in insta :: https://www.instagram.com/zero_analyst/
Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst
*/

-- ------------------------------ /day 03.sql: ------------------------------
-- Day 04/50  (NOTE(review): header says 04 but the file is "day 03.sql" — confirm intended numbering)

DROP TABLE IF EXISTS products;

-- Step 1: Create the products table
CREATE TABLE products (
    product_id INT PRIMARY KEY,
    product_name VARCHAR(50),
    category VARCHAR(50),
    quantity_sold INT
);

-- Step 2: Insert sample records into the products table
INSERT INTO products (product_id, product_name, category, quantity_sold) VALUES
(1, 'Samsung Galaxy S20', 'Electronics', 100),
(2, 'Apple iPhone 12 Pro', 'Electronics', 150),
(3, 'Sony PlayStation 5', 'Electronics', 80),
(4, 'Nike Air Max 270', 'Clothing', 200),
(5, 'Adidas Ultraboost 20', 'Clothing', 200),
(6, 'Levis Mens 501 Jeans', 'Clothing', 90),
(7, 'Instant Pot Duo 7-in-1', 'Home & Kitchen', 180),
(8, 'Keurig K-Classic Coffee Maker', 'Home & Kitchen', 130),
(9, 'iRobot Roomba 675 Robot Vacuum', 'Home & Kitchen', 130),
(10, 'Breville Compact Smart Oven', 'Home & Kitchen', 90),
(11, 'Dyson V11 Animal Cordless Vacuum', 'Home & Kitchen', 90);

/*
Questions :
Write SQL query to find the top-selling products in each category

assuming products table has column
product_id, product_name, category, quantity_sold
*/
-- 1 product from each category, based on highest qty sold, via rank

SELECT * FROM products;

-- FIX: removed the redundant ORDER BY inside the derived table (a derived
-- table's order is not guaranteed to survive into the outer query), fixed
-- the "FROm" typos and added terminating semicolons.  RANK() keeps ties,
-- so tied top sellers in a category all appear.
SELECT *
FROM (
    SELECT *,
        RANK() OVER (PARTITION BY category ORDER BY quantity_sold DESC) AS ranks
    FROM products
) AS subquery
WHERE ranks = 1;

-- your task
-- Write SQL query to find the least-selling products in each category

/*
Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/
Follow me in insta :: https://www.instagram.com/zero_analyst/
Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst
*/

-- ------------------------------ /day 06.sql: ------------------------------
-- 06/50 Days SQL Challenge

DROP TABLE IF EXISTS customers;
CREATE TABLE customers (
    customer_id INT,
    name VARCHAR(100),
    email VARCHAR(100)
);

DROP TABLE IF EXISTS orders;
CREATE TABLE orders (
    order_id INT,
    customer_id INT,
    order_date DATE,
    amount DECIMAL(10, 2)
);

-- Inserting sample customers
INSERT INTO customers (customer_id, name, email) VALUES
(1, 'John Doe', 'john@example.com'),
(2, 'Jane Smith', 'jane@example.com'),
(3, 'Alice Johnson', 'alice@example.com'),
(4, 'Sam B', 'sb@example.com'),
(5, 'John Smith', 'j@example.com');

-- Inserting sample orders
INSERT INTO orders (order_id, customer_id, order_date, amount) VALUES
(1, 1, '2024-03-05', 50.00),
(2, 2, '2024-03-10', 75.00),
(5, 4, '2024-04-02', 45.00),
(5, 2, '2024-04-02', 45.00),
(3, 4, '2024-04-15', 100.00),
(4, 1, '2024-04-01', 60.00),
(5, 5, '2024-04-02', 45.00);

/*
Given tables customers (columns: customer_id,
name, email) and orders (columns: order_id,
customer_id, order_date, amount),

Write an SQL query to find customers who
haven't made any purchases in the last month,
assuming today's date is April 2, 2024.
*/

-- customer details for those with no purchase in the previous calendar month

SELECT * FROM customers;
SELECT * FROM orders;

-- My Solutions

-- FIX: NOT EXISTS instead of NOT IN — NOT IN returns zero rows if the
-- subquery ever yields a NULL customer_id (the column is nullable here).
-- FIX: month arithmetic via DATE_TRUNC, so January correctly looks back at
-- December of the previous year (EXTRACT(MONTH ...) - 1 would be 0).
SELECT *
FROM customers AS c
WHERE NOT EXISTS (
    SELECT 1
    FROM orders AS o
    WHERE o.customer_id = c.customer_id
      AND DATE_TRUNC('month', o.order_date) =
          DATE_TRUNC('month', CURRENT_DATE) - INTERVAL '1 month'
);

-- Your task: Find customer who has done purchase this month and also last month

/*
Follow me in LinkedIn :: https://www.linkedin.com/in/najirr/
Follow me in insta :: https://www.instagram.com/zero_analyst/
Subscribe to our youtube channel :: https://www.youtube.com/@zero_analyst
*/

-- ------------------------------ /day 07.sql: ------------------------------
-- Day 07/50 days sql challenge

DROP TABLE IF EXISTS employees;

-- Create the employee_salary table
CREATE TABLE employees (
    emp_id INT,
    name VARCHAR(100),
    department VARCHAR(50),
    salary DECIMAL(10, 2)
);

-- Insert all records again to simulate duplicates
INSERT INTO employees (emp_id, name, department, salary) VALUES
(1, 'John Doe', 'Finance', 60000.00),
(2, 'Jane Smith', 'Finance', 65000.00),
(2, 'Jane Smith', 'Finance', 65000.00), -- Duplicate
(9, 'Lisa Anderson', 'Sales', 63000.00),
(9, 'Lisa Anderson', 'Sales', 63000.00), -- Duplicate
(9, 'Lisa Anderson', 'Sales', 63000.00), -- Duplicate
(10, 'Kevin Martinez', 'Sales', 61000.00);

/*
Question:

How would you identify duplicate entries in
a SQL in given table employees columns are
emp_id, name, department, salary
*/

-- ------------------------
-- My Solution
-- ------------------------

SELECT * FROM employees;

-- Approach 1: one row per duplicated (emp_id, name) with its frequency.
SELECT
    emp_id,
    name,
    COUNT(1) AS total_frequency
FROM employees
GROUP BY emp_id, name
HAVING COUNT(1) > 1;

-- Approach 2: the extra copies themselves.
-- FIX: partition by every column that defines a duplicate, not just the
-- name — two different employees who merely share a name must not be
-- flagged.  The deterministic ORDER BY replaces the meaningless
-- "ORDER BY name" inside a name-partition.
SELECT *
FROM (
    SELECT *,
        ROW_NUMBER() OVER (
            PARTITION BY emp_id, name, department, salary
            ORDER BY emp_id
        ) AS rn
    FROM employees
) AS subquery
WHERE rn > 1;

-- Your task
-- Identify employee details who is appearing more than twice in the table employees
-- --------------------------------------------------------------------------------