├── .gitignore
├── Attribute_Data_Analysis_CSV Upload.sql
├── BANKDATASET_STOREPROCEDURES.sql
├── CTE.sql
├── Commands_SQL.txt
├── Constrain_sql.sql
├── CreateTable_Insert_Update_Delete_Drop_Operations.txt
├── Data
│   ├── AttributeDataSet.csv
│   ├── bank.csv
│   └── sales_data_final.csv
├── Indexing SQL.txt
├── Indexing_Union_Recursive.txt
├── Insert _Null_Data_Table.txt
├── LICENSE
├── Ntile.sql
├── PIVOT_UNPIVOT_SNOWFLAKE.txt
├── Partition11.txt
├── Pets Orders Operations.txt
├── Primary_Foreign_Key.sql
├── README.md
├── REGEX_1.txt
├── REGEX_2.txt
├── REGX.txt
├── SNOWFLAKE TEXT OPERATIONS.txt
├── SNOWFLAKE TIME TRAVEL.txt
├── SNOWFLAKE_ANALYSIS_JOINS.txt
├── SNOWFLAKE_DATE_TIME_FUNCTIONS.txt
├── SNOWFLAKE_PIVOT_UNPIVOT.txt
├── SNOWFLAKE_PIVOT_UNPIVOT_OPERATIONS.txt
├── SNOWFLAKE_ROLLUP_CUBE.txt
├── SNOWFLAKE_SQL_TEST_OPERATIONS.txt
├── SQL-LECTURE-1.sql
├── SQL
│   ├── Sql_Connection_Python.ipynb
│   └── glass.data
├── SQL_BASICS_SAKILA_WORLD.txt
├── SQL_Basics.txt
├── SQL_Operation_JOINS.txt
├── SQL_Python Connectivity
│   ├── Sql_Connection_Python.ipynb
│   └── glass.data
├── Sales_Data_Year_Operation.txt
├── Store Producer With Employee Table.sql
├── Store_Procedures_Case.tex
├── Update_Operation.txt
├── User Define Functions.sql
├── User_Define_Functions_Loop_IfElse.txt
├── Window_Function_RowNumber_Rank_DenseRank.txt
└── trigger.txt

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

--------------------------------------------------------------------------------
/Attribute_Data_Analysis_CSV Upload.sql:
--------------------------------------------------------------------------------
create database dress_data12;
use dress_data12;
create table if not exists dress12(
`Dress_ID` varchar(30),
`Style` varchar(30),
`Price` varchar(30),
`Rating` varchar(30),
`Size` varchar(30),
`Season` varchar(30),
`NeckLine` varchar(30),
`SleeveLength` varchar(30),
`waiseline` varchar(30),
`Material` varchar(30),
`FabricType` varchar(30),
`Decoration` varchar(30),
`Pattern Type` varchar(30),
`Recommendation` varchar(30));

LOAD DATA INFILE
'D:/AttributeDataSet.csv'
into table dress12
FIELDS TERMINATED by ','
ENCLOSED by '"'
lines terminated by '\n'
IGNORE 1 ROWS;

show databases;

## secure-file-priv: if LOAD DATA INFILE is rejected, the file must live under the server's secure_file_priv directory

create table if not exists test (
test_id int auto_increment,
test_name varchar(30) ,
test_mailid varchar(30),
test_adress varchar(30),
primary key (test_id));

create table if not exists test3 (
test_id int ,
test_name varchar(30) ,
test_mailid varchar(30),
test_adress varchar(30),
test_salary int check(test_salary > 10000));

create table if not exists test8(
test_id int NOT NULL auto_increment ,
test_name varchar(30) NOT NULL default 'unknown' ,
test_mailid varchar(30) unique NOT NULL,
test_adress varchar(30) check (test_adress= 'bengalore') NOT NULL,
test_salary int check(test_salary > 10000) NOT NULL,
primary key (test_id))

select * from test8;

-- this insert is rejected: 'baroda' and 5000 violate the address and salary CHECK constraints
insert into test8 ( test_id , test_name , test_mailid , test_adress,test_salary) values (100 , 'jal','jalpa@gmail.com','baroda' , 5000);

--------------------------------------------------------------------------------
/BANKDATASET_STOREPROCEDURES.sql:
--------------------------------------------------------------------------------
/* bank dataset*/
create database bank;
create database if not exists bank;
use bank;
create table if not exists bank_details(age int,
job varchar(30),
marital varchar(30),
education varchar(30),
`default` varchar(30),
balance int,
housing varchar(30),
loan varchar(30),
contact varchar(30),
`day` int,
`month` varchar(30),
duration int,
campaign int,
pdays int,
previous int,
poutcome varchar(30),
y varchar(30));
select * from bank_details;
show tables;
insert into bank_details VALUES(30,"unemployed","married","primary","no",1787,"no","no","cellular",19,"oct",79,1,-1,0,"unknown","no");
insert into bank_details values(44,"technician","single","secondary","no",29,"yes","no","unknown",5,"may",151,1,-1,0,"unknown","no"),
(33,"entrepreneur","married","secondary","no",2,"yes","yes","unknown",5,"may",76,1,-1,0,"unknown","no"), 27 | (47,"blue-collar","married","unknown","no",1506,"yes","no","unknown",5,"may",92,1,-1,0,"unknown","no"), 28 | (33,"unknown","single","unknown","no",1,"no","no","unknown",5,"may",198,1,-1,0,"unknown","no"), 29 | (35,"management","married","tertiary","no",231,"yes","no","unknown",5,"may",139,1,-1,0,"unknown","no"), 30 | (28,"management","single","tertiary","no",447,"yes","yes","unknown",5,"may",217,1,-1,0,"unknown","no"), 31 | (42,"entrepreneur","divorced","tertiary","yes",2,"yes","no","unknown",5,"may",380,1,-1,0,"unknown","no"), 32 | (58,"retired","married","primary","no",121,"yes","no","unknown",5,"may",50,1,-1,0,"unknown","no"), 33 | (43,"technician","single","secondary","no",593,"yes","no","unknown",5,"may",55,1,-1,0,"unknown","no"), 34 | (41,"admin.","divorced","secondary","no",270,"yes","no","unknown",5,"may",222,1,-1,0,"unknown","no"), 35 | (29,"admin.","single","secondary","no",390,"yes","no","unknown",5,"may",137,1,-1,0,"unknown","no"), 36 | (53,"technician","married","secondary","no",6,"yes","no","unknown",5,"may",517,1,-1,0,"unknown","no"), 37 | (58,"technician","married","unknown","no",71,"yes","no","unknown",5,"may",71,1,-1,0,"unknown","no"), 38 | (57,"services","married","secondary","no",162,"yes","no","unknown",5,"may",174,1,-1,0,"unknown","no"), 39 | (51,"retired","married","primary","no",229,"yes","no","unknown",5,"may",353,1,-1,0,"unknown","no"), 40 | (45,"admin.","single","unknown","no",13,"yes","no","unknown",5,"may",98,1,-1,0,"unknown","no"), 41 | (57,"blue-collar","married","primary","no",52,"yes","no","unknown",5,"may",38,1,-1,0,"unknown","no"), 42 | (60,"retired","married","primary","no",60,"yes","no","unknown",5,"may",219,1,-1,0,"unknown","no"), 43 | (33,"services","married","secondary","no",0,"yes","no","unknown",5,"may",54,1,-1,0,"unknown","no"), 44 | (28,"blue-collar","married","secondary","no",723,"yes","yes","unknown",5,"may",262,1,-1,0,"unknown","no"), 45 | (56,"management","married","tertiary","no",779,"yes","no","unknown",5,"may",164,1,-1,0,"unknown","no"), 46 | (32,"blue-collar","single","primary","no",23,"yes","yes","unknown",5,"may",160,1,-1,0,"unknown","no"), 47 | (25,"services","married","secondary","no",50,"yes","no","unknown",5,"may",342,1,-1,0,"unknown","no"), 48 | (40,"retired","married","primary","no",0,"yes","yes","unknown",5,"may",181,1,-1,0,"unknown","no"), 49 | (44,"admin.","married","secondary","no",-372,"yes","no","unknown",5,"may",172,1,-1,0,"unknown","no"), 50 | (39,"management","single","tertiary","no",255,"yes","no","unknown",5,"may",296,1,-1,0,"unknown","no"), 51 | (52,"entrepreneur","married","secondary","no",113,"yes","yes","unknown",5,"may",127,1,-1,0,"unknown","no"), 52 | (46,"management","single","secondary","no",-246,"yes","no","unknown",5,"may",255,2,-1,0,"unknown","no"), 53 | (36,"technician","single","secondary","no",265,"yes","yes","unknown",5,"may",348,1,-1,0,"unknown","no"), 54 | (57,"technician","married","secondary","no",839,"no","yes","unknown",5,"may",225,1,-1,0,"unknown","no"), 55 | (49,"management","married","tertiary","no",378,"yes","no","unknown",5,"may",230,1,-1,0,"unknown","no"), 56 | (60,"admin.","married","secondary","no",39,"yes","yes","unknown",5,"may",208,1,-1,0,"unknown","no"), 57 | (59,"blue-collar","married","secondary","no",0,"yes","no","unknown",5,"may",226,1,-1,0,"unknown","no"), 58 | (51,"management","married","tertiary","no",10635,"yes","no","unknown",5,"may",336,1,-1,0,"unknown","no"), 59 | 
(57,"technician","divorced","secondary","no",63,"yes","no","unknown",5,"may",242,1,-1,0,"unknown","no"), 60 | (25,"blue-collar","married","secondary","no",-7,"yes","no","unknown",5,"may",365,1,-1,0,"unknown","no"), 61 | (53,"technician","married","secondary","no",-3,"no","no","unknown",5,"may",1666,1,-1,0,"unknown","no"), 62 | (36,"admin.","divorced","secondary","no",506,"yes","no","unknown",5,"may",577,1,-1,0,"unknown","no"), 63 | (37,"admin.","single","secondary","no",0,"yes","no","unknown",5,"may",137,1,-1,0,"unknown","no"), 64 | (44,"services","divorced","secondary","no",2586,"yes","no","unknown",5,"may",160,1,-1,0,"unknown","no"), 65 | (50,"management","married","secondary","no",49,"yes","no","unknown",5,"may",180,2,-1,0,"unknown","no"), 66 | (60,"blue-collar","married","unknown","no",104,"yes","no","unknown",5,"may",22,1,-1,0,"unknown","no"), 67 | (54,"retired","married","secondary","no",529,"yes","no","unknown",5,"may",1492,1,-1,0,"unknown","no"), 68 | (58,"retired","married","unknown","no",96,"yes","no","unknown",5,"may",616,1,-1,0,"unknown","no"), 69 | (36,"admin.","single","primary","no",-171,"yes","no","unknown",5,"may",242,1,-1,0,"unknown","no"), 70 | (58,"self-employed","married","tertiary","no",-364,"yes","no","unknown",5,"may",355,1,-1,0,"unknown","no"), 71 | (44,"technician","married","secondary","no",0,"yes","no","unknown",5,"may",225,2,-1,0,"unknown","no"), 72 | (55,"technician","divorced","secondary","no",0,"no","no","unknown",5,"may",160,1,-1,0,"unknown","no"), 73 | (29,"management","single","tertiary","no",0,"yes","no","unknown",5,"may",363,1,-1,0,"unknown","no"), 74 | (54,"blue-collar","married","secondary","no",1291,"yes","no","unknown",5,"may",266,1,-1,0,"unknown","no"), 75 | (48,"management","divorced","tertiary","no",-244,"yes","no","unknown",5,"may",253,1,-1,0,"unknown","no"), 76 | (32,"management","married","tertiary","no",0,"yes","no","unknown",5,"may",179,1,-1,0,"unknown","no"), 77 | (42,"admin.","single","secondary","no",-76,"yes","no","unknown",5,"may",787,1,-1,0,"unknown","no"), 78 | (24,"technician","single","secondary","no",-103,"yes","yes","unknown",5,"may",145,1,-1,0,"unknown","no"), 79 | (38,"entrepreneur","single","tertiary","no",243,"no","yes","unknown",5,"may",174,1,-1,0,"unknown","no"), 80 | (38,"management","single","tertiary","no",424,"yes","no","unknown",5,"may",104,1,-1,0,"unknown","no"), 81 | (47,"blue-collar","married","unknown","no",306,"yes","no","unknown",5,"may",13,1,-1,0,"unknown","no"), 82 | (40,"blue-collar","single","unknown","no",24,"yes","no","unknown",5,"may",185,1,-1,0,"unknown","no"), 83 | (46,"services","married","primary","no",179,"yes","no","unknown",5,"may",1778,1,-1,0,"unknown","no"), 84 | (32,"admin.","married","tertiary","no",0,"yes","no","unknown",5,"may",138,1,-1,0,"unknown","no"), 85 | (53,"technician","divorced","secondary","no",989,"yes","no","unknown",5,"may",812,1,-1,0,"unknown","no"), 86 | (57,"blue-collar","married","primary","no",249,"yes","no","unknown",5,"may",164,1,-1,0,"unknown","no"), 87 | (33,"services","married","secondary","no",790,"yes","no","unknown",5,"may",391,1,-1,0,"unknown","no"), 88 | (49,"blue-collar","married","unknown","no",154,"yes","no","unknown",5,"may",357,1,-1,0,"unknown","no"), 89 | (51,"management","married","tertiary","no",6530,"yes","no","unknown",5,"may",91,1,-1,0,"unknown","no"), 90 | (60,"retired","married","tertiary","no",100,"no","no","unknown",5,"may",528,1,-1,0,"unknown","no"), 91 | 
(59,"management","divorced","tertiary","no",59,"yes","no","unknown",5,"may",273,1,-1,0,"unknown","no"), 92 | (55,"technician","married","secondary","no",1205,"yes","no","unknown",5,"may",158,2,-1,0,"unknown","no"), 93 | (35,"blue-collar","single","secondary","no",12223,"yes","yes","unknown",5,"may",177,1,-1,0,"unknown","no"), 94 | (57,"blue-collar","married","secondary","no",5935,"yes","yes","unknown",5,"may",258,1,-1,0,"unknown","no"), 95 | (31,"services","married","secondary","no",25,"yes","yes","unknown",5,"may",172,1,-1,0,"unknown","no"), 96 | (54,"management","married","secondary","no",282,"yes","yes","unknown",5,"may",154,1,-1,0,"unknown","no"), 97 | (55,"blue-collar","married","primary","no",23,"yes","no","unknown",5,"may",291,1,-1,0,"unknown","no"), 98 | (43,"technician","married","secondary","no",1937,"yes","no","unknown",5,"may",181,1,-1,0,"unknown","no"), 99 | (53,"technician","married","secondary","no",384,"yes","no","unknown",5,"may",176,1,-1,0,"unknown","no"), 100 | (44,"blue-collar","married","secondary","no",582,"no","yes","unknown",5,"may",211,1,-1,0,"unknown","no"), 101 | (55,"services","divorced","secondary","no",91,"no","no","unknown",5,"may",349,1,-1,0,"unknown","no"), 102 | (49,"services","divorced","secondary","no",0,"yes","yes","unknown",5,"may",272,1,-1,0,"unknown","no"), 103 | (55,"services","divorced","secondary","yes",1,"yes","no","unknown",5,"may",208,1,-1,0,"unknown","no"), 104 | (45,"admin.","single","secondary","no",206,"yes","no","unknown",5,"may",193,1,-1,0,"unknown","no"), 105 | (47,"services","divorced","secondary","no",164,"no","no","unknown",5,"may",212,1,-1,0,"unknown","no"), 106 | (42,"technician","single","secondary","no",690,"yes","no","unknown",5,"may",20,1,-1,0,"unknown","no"), 107 | (59,"admin.","married","secondary","no",2343,"yes","no","unknown",5,"may",1042,1,-1,0,"unknown","yes"), 108 | (46,"self-employed","married","tertiary","no",137,"yes","yes","unknown",5,"may",246,1,-1,0,"unknown","no"), 109 | (51,"blue-collar","married","primary","no",173,"yes","no","unknown",5,"may",529,2,-1,0,"unknown","no"), 110 | (56,"admin.","married","secondary","no",45,"no","no","unknown",5,"may",1467,1,-1,0,"unknown","yes"), 111 | (41,"technician","married","secondary","no",1270,"yes","no","unknown",5,"may",1389,1,-1,0,"unknown","yes"), 112 | (46,"management","divorced","secondary","no",16,"yes","yes","unknown",5,"may",188,2,-1,0,"unknown","no"), 113 | (57,"retired","married","secondary","no",486,"yes","no","unknown",5,"may",180,2,-1,0,"unknown","no"), 114 | (42,"management","single","secondary","no",50,"no","no","unknown",5,"may",48,1,-1,0,"unknown","no"), 115 | (30,"technician","married","secondary","no",152,"yes","yes","unknown",5,"may",213,2,-1,0,"unknown","no"), 116 | (60,"admin.","married","secondary","no",290,"yes","no","unknown",5,"may",583,1,-1,0,"unknown","no"); 117 | select * from bank_details; 118 | select count(*) from bank_details; 119 | select * from bank_details; 120 | select age, loan, job from bank_details; 121 | select `default` from bank_details; 122 | select * from bank_details where age = 60; 123 | select * from bank_details where age =33; 124 | select * from bank_details where age=33 and marital= 'married'; 125 | select * from bank_details where age=40 and marital = 'single'; 126 | select * from bank_details where age=60 and job = 'retired'; 127 | select count(*) from bank_details where education ='unkhown' or marital ='single'; 128 | select * from bank_details where education ='unkhown' or marital='single' or balance<500; 129 | select * 
from bank_details where education ='unknown' or marital='married' or balance<500;
/* to find unique values */
select distinct job from bank_details;
select distinct marital from bank_details;
select distinct `default` from bank_details;
select distinct duration from bank_details;
select * from bank_details;
/* order by ascending */
select * from bank_details order by age;
/* order by descending */
select * from bank_details order by age desc;
/* avg balance */
select avg(balance) from bank_details;
/* min balance */
select min(balance) from bank_details;
/* max balance */
select max(balance) from bank_details;
/* find persons who have a loan */
select * from bank_details where loan = 'yes';
/* find the avg balance of those whose job role is admin */
select avg(balance) from bank_details where job='admin.';
/* find records of the unemployed whose age is below 45 */
select * from bank_details where job ='unemployed' and age<45;
/* find records where education is primary and the person is jobless */
select * from bank_details where education="primary" and job = 'unemployed';
/* find records whose balance is negative */
select * from bank_details where balance<0;
/* find out who does not have a house, along with their balance */
select balance,housing from bank_details where housing='no';
/* fetching the full row of the minimum balance */
-- note: mixing * with a bare aggregate only runs with ONLY_FULL_GROUP_BY disabled; the subquery form below is the safe one
select *, min(balance) from bank_details;
select * from bank_details where balance = (select min(balance) from bank_details);
select * from bank_details order by balance asc limit 1;

/* DELIMITERS */

DELIMITER $$
CREATE PROCEDURE MINIMUM()
BEGIN
select *,
min(balance)
from bank_details;
end$$
DELIMITER ;
call MINIMUM();

/* delimiter for max*/
DELIMITER $$
CREATE PROCEDURE MAXIMUM()
BEGIN
select *,max(balance)
from bank_details;
end$$
DELIMITER ;
CALL MAXIMUM();

/* delimiter for avg*/
DELIMITER $$
CREATE PROCEDURE AVERAGE()
BEGIN
select *,avg(balance)
from bank_details;
end$$
DELIMITER ;
CALL AVERAGE();

/* delimiter for min*/
DELIMITER $$
CREATE PROCEDURE MINIMUM1()
BEGIN
select *,min(balance)
from bank_details;
end$$
DELIMITER ;
call minimum1();

/* drop procedure */
drop procedure minimum;


/* Create a view */
create view cust as
select age,balance,marital
from bank_details;

select * from cust;
select * from cust where age=(select min(age) from cust);
--------------------------------------------------------------------------------
/CTE.sql:
--------------------------------------------------------------------------------
/* One of the most powerful features added in MySQL is common table expressions, which allow for the construction of
temporary result sets within a single SQL query. In day-to-day queries we use common table expressions often,
and they make our work easier.
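A CTE is introduced with the WITH keyword, can be referenced like a table in the single statement
that immediately follows its definition, and disappears once that statement finishes, for example:
WITH t AS (SELECT ...) SELECT * FROM t;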
In this article, we will understand common table expressions through examples and learn how to use the WITH statement.*/
CREATE DATABASE CTE;
USE CTE;
CREATE TABLE employees (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(50),
    department VARCHAR(50),
    salary DECIMAL(10, 2)
);
INSERT INTO employees (name, department, salary) VALUES
('John Doe', 'Sales', 55000.00),
('Jane Smith', 'Sales', 60000.00),
('Jim Brown', 'Sales', 65000.00),
('Jake White', 'Engineering', 75000.00),
('Jill Green', 'Engineering', 80000.00),
('Jenny Black', 'Engineering', 85000.00),
('James Gray', 'Marketing', 50000.00),
('Janet Blue', 'Marketing', 52000.00),
('Joan Pink', 'Marketing', 54000.00);

WITH department_salaries AS (
    SELECT department,
           SUM(salary) AS total_salary,
           AVG(salary) AS average_salary
    FROM employees
    GROUP BY department
)
SELECT department, total_salary, average_salary
FROM department_salaries
WHERE average_salary > 60000;


/* Recursive CTE for Hierarchical Data
Suppose the table categories represents a hierarchical category structure with a self-referencing foreign key parent_id. */

CREATE TABLE categories (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(50),
    parent_id INT,
    FOREIGN KEY (parent_id) REFERENCES categories(id)
);
INSERT INTO categories (name, parent_id) VALUES
('Electronics', NULL),
('Computers', 1),
('Laptops', 2),
('Desktops', 2),
('Smartphones', 1),
('Accessories', 1),
('Chargers', 6),
('Cables', 6);

WITH RECURSIVE category_hierarchy AS (
    SELECT id, name, parent_id, 1 AS level
    FROM categories
    WHERE parent_id IS NULL
    UNION ALL
    SELECT c.id, c.name, c.parent_id, ch.level + 1
    FROM categories c
    JOIN category_hierarchy ch ON c.parent_id = ch.id
)
SELECT id, name, parent_id, level
FROM category_hierarchy;

/* Temporary Aggregation
Let there be a table sales; you need to calculate the total sales for each salesperson, then keep only those who achieved sales above a given cutoff value. */

CREATE TABLE sales (
    id INT AUTO_INCREMENT PRIMARY KEY,
    salesperson_id INT,
    sales_amount DECIMAL(10, 2)
);

INSERT INTO sales (salesperson_id, sales_amount) VALUES
(1, 3000.00),
(1, 2500.00),
(1, 1500.00),
(2, 4000.00),
(2, 2000.00),
(3, 1000.00),
(3, 2000.00),
(4, 7000.00),
(5, 3000.00),
(5, 2500.00);

WITH total_sales AS (
    SELECT salesperson_id, SUM(sales_amount) AS total_sales
    FROM sales
    GROUP BY salesperson_id
)
SELECT salesperson_id, total_sales
FROM total_sales
WHERE total_sales > 5000;
--------------------------------------------------------------------------------
/Commands_SQL.txt:
--------------------------------------------------------------------------------
SELECT - extracts data from a database
UPDATE - updates data in a database
DELETE - deletes data from a database
INSERT INTO - inserts new data into a database
CREATE DATABASE - creates a new database
ALTER DATABASE - modifies a database
CREATE TABLE - creates a new table
ALTER TABLE - modifies a table
DROP TABLE - deletes a table
CREATE INDEX - creates an index (search key)
DROP INDEX - deletes an index
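A minimal sketch tying most of these commands together; the shop_demo database and the items
table are made-up names used only for illustration:

CREATE DATABASE shop_demo;
USE shop_demo;
CREATE TABLE items (id INT PRIMARY KEY, name VARCHAR(30), price INT);
ALTER TABLE items ADD stock INT;              -- modify the table
CREATE INDEX idx_items_name ON items (name);  -- search key on name
INSERT INTO items VALUES (1, 'pen', 10, 100); -- insert a row
SELECT * FROM items;                          -- extract data
UPDATE items SET price = 12 WHERE id = 1;     -- change data
DELETE FROM items WHERE id = 1;               -- remove data
DROP INDEX idx_items_name ON items;
DROP TABLE items;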
--------------------------------------------------------------------------------
/Constrain_sql.sql:
--------------------------------------------------------------------------------
show databases;
create database bit_aa;
use bit_aa;

create table student_info(
Student_name VARCHAR (30),
Last_name VARCHAR(30),
Student_Id INT(10),
Student_city VARCHAR(30));

show tables;
insert into bit_aa.student_info values ("jalpa", "patel", 40, "anand");
insert into bit_aa.student_info values ("visahl", "barod", 41, "baroda");
insert into bit_aa.student_info values ("mehul", "Sirohi", 44, "baroda");

select * from bit_aa.student_info;
insert into student_info values
("alpa", "patel1", 20, "anand"),
("parag", "buch", 20, "anand"),
("pooja", "patel3", 10, "anand"),
("parth", "patel", 20, "anand"),
("xyz", "patel", 30, "anand"),
("dev", "darji", 400, "anand");

insert into student_info values
("alpa", "patel1", 20, "anand"),
("alpa", "patel1", 20, "anand"),
("alpa", "patel1", 20, "anand");

create table if not exists test12(
test_id int auto_increment,
test_name varchar(30),
test_mailid varchar(30),
test_adress varchar(30)
);

create table if not exists test(
test_id int auto_increment,
test_name varchar(30) ,
test_mailid varchar(30),
test_adress varchar(30),
primary key (test_id)
);
insert into test values (102,'jalpa','jalpa@gmail','abad'),
(2,'jalpa','jalpa@gmail','abad'),
(3,'jalpa','jalpa@gmail','abad'),
(4,'jalpa','jalpa@gmail','abad');

insert into test(test_name , test_mailid , test_adress)
values ('jal','jalpa@gmail.com','baroda'),
('jal','jalpa@gmail.com','baroda'),
('jal','jalpa@gmail.com','baroda'),
('jal','jalpa@gmail.com','baroda');

select * from test;

-- this create is a no-op because table test already exists
create table if not exists test (
test_id int auto_increment,
test_name varchar(30) ,
test_mailid varchar(30),
test_adress varchar(30),
primary key (test_id));

create table if not exists test5 (
test_id int ,
test_name varchar(30) ,
test_mailid varchar(30),
test_adress varchar(30),
test_salary int check(test_salary < 10000));

-- this insert is rejected: 100000 violates the CHECK(test_salary < 10000) constraint
insert into test5 values
(1,'jalpa','jalpa@gmail.com','anand',100000);


create table if not exists test3 (
test_id int ,
test_name varchar(30) ,
test_mailid varchar(30),
test_adress varchar(30) check (test_adress= 'bengalore'),
test_salary int);

-- this insert is rejected: 'baroda' violates the address CHECK constraint
insert into test3 ( test_id , test_name , test_mailid , test_adress,test_salary)
values (100 , 'jal','jalpa@gmail.com','baroda' , 5000);

create table if not exists test8(
test_id int NOT NULL auto_increment ,
test_name varchar(30) NOT NULL default 'unknown' ,
test_mailid varchar(30) unique NOT NULL,
test_adress varchar(30) check (test_adress= 'bengalore') NOT NULL,
test_salary int check(test_salary > 10000) NOT NULL,
primary key (test_id));

select * from test8;

insert into test8 ( test_id , test_name , test_mailid ,
test_adress,test_salary)
values (100 , 'jal','jalpa@gmail.com','bengalore' , 500000);
--------------------------------------------------------------------------------
/CreateTable_Insert_Update_Delete_Drop_Operations.txt:
--------------------------------------------------------------------------------
# Session 1
/*
1. SQL Create Database
2. SQL Drop Database
3. SQL Create Table
4. SQL Insert into table
5. SQL Drop Table
*/


create database customers;
show databases;
use customers;
# drop database customers;
# create table in the database

create table customer_info1(Id Integer(10), first_name varchar(10), last_name varchar(10));
show databases;
show tables;
select * from customer_info1;
Insert into customer_info1(id, first_name, last_name) values(1,'Jalpa','Patel');
Insert into customer_info1(id, first_name, last_name) values(2,'xyz','Patel');
select * from customer_info1;
UPDATE customer_info1 SET first_name = 'Alfred' WHERE Id = 1;


create database customer;
show databases;
use customer;
create table customer(
id integer auto_increment,
first_name varchar(25),
last_name varchar(25),
salary integer,
primary key(id)
);
select * from customer;
## insert records in table
insert into customer(first_name,last_name,salary)
values
('jalpa','patel',5000),
('xyz','xx1',6000),
('xyz1','xx2',7000),
('xyz2','xx3',8000),
('xyz3','xx4',9000),
('xyz4','xx4',null);

select * from customer;
select * from customer where salary is null;
select * from customer where salary is not null;

## sql update statement to replace null values
update customer set salary=5000 where id = 6;
select * from customer;

## sql delete statement

delete from customer where id=5;

## sql alter table
## add columns in existing table

alter table customer add email varchar(25);

## add a dob column, then change its type to year
alter table customer add dob date;
alter table customer modify dob year;
## alter table to drop column
alter table customer drop column email;
--------------------------------------------------------------------------------
/Indexing SQL.txt:
--------------------------------------------------------------------------------
create database index1;
use index1;
create table if not exists course1(
course_id varchar(40),
course_name varchar(40),
course_desc varchar(40),
course_tag varchar(40),
index(course_id)
);
show index from course1;

insert into course1 values(101,'fsda','full stack data analytics','analytics'),
(102,'fsds','full stack data analytics','analytics'),
(103,'fsds','full stack data analytics','DS'),
(104,'big data','full stack big data','BD'),
(105,'mern','web dev','mern'),
(106,'blockchain','full stack blockchain','BC'),
(101,'java','full stack java','java'),
(102,'testing','full testing','testing'),
(105,'cybersecurity','full stack cybersecurity','cybersedcurity'),
(109,'c','c language','c'),
(108,'c++','c++ language','c++');

create table if not exists course2(
course_id varchar(40),
course_name varchar(40),
course_desc varchar(40),
course_tag varchar(40),
index(course_id,course_name,course_tag)
);
insert into course2 values(101,'fsda','full stack data analytics','analytics'),
(102,'fsds','full stack data analytics','analytics'),
(103,'fsds','full stack data analytics','DS'),
(104,'big data','full stack big data','BD'),
(105,'mern','web dev','mern'),
(106,'blockchain','full stack blockchain','BC'),
(101,'java','full stack java','java'),
(102,'testing','full testing','testing'),
(105,'cybersecurity','full stack cybersecurity','cybersedcurity'), 40 | (109,'c','c language','c'), 41 | (108,'c++','c++ language','c++'); 42 | 43 | explain analyze select * from course2 where course_id = 106 or course_name = 'fsds'; 44 | 45 | analyze table course2; 46 | describe course2; 47 | 48 | create table if not exists student1( 49 | student_id varchar(40), 50 | student_name varchar(40), 51 | student_desc varchar(40), 52 | student_tag varchar(40) 53 | ); 54 | insert into student1 values(01,'fs','full ','ana'), 55 | (02,'ds','full ','ana'), 56 | (03,'sd','data ','DA'), 57 | (04,'big ','f data','DB'), 58 | (05,'mern','dev','N'), 59 | (06,'chain',' blockchain','CB'), 60 | (01,'QS',' java','SQ'), 61 | (02,'ing','fulling','test'), 62 | (05,'cyber','security','cybers'), 63 | (09,'me',' language','cZ'), 64 | (08,'c+','c+ language','c+'); 65 | 66 | select course_id ,course_name from course1 67 | union 68 | select student_id, student_name from student1; 69 | 70 | with sample_student as( 71 | select * from course1 where course_id in (101,102,106)) 72 | select * from sample_student where course_tag = 'java'; 73 | 74 | with out_cross as ( select c.course_id ,c.course_name, s.student_id,s.student_name from course1 c 75 | cross join student1 s) select course_id,course_name,student_id from out_cross where student_id =02; 76 | 77 | with recursive cte1 as 78 | (select 1 as n union all select n+1 from cte1 where n<5) 79 | select * from cte1; 80 | 81 | with recursive cte as ( 82 | select 1 as n,1 as p, -1 as q 83 | union all 84 | select n+1,p+2,q+4 from cte where n<5) 85 | select * from cte; -------------------------------------------------------------------------------- /Indexing_Union_Recursive.txt: -------------------------------------------------------------------------------- 1 | Create database index1 2 | use index1 3 | 4 | 5 | create table if not exists course1 ( 6 | course_id int , 7 | course_name varchar(50), 8 | course_desc varchar(60), 9 | course_tag varchar(50), 10 | index(course_id) 11 | ) 12 | 13 | show index from course1 14 | 15 | insert into course1 values(101 , 'fsda' , 'full stack data analytics' , 'Analytics'), 16 | (102 , 'fsds' , 'full stack data analytics' , 'Analytics'), 17 | (103 , 'fsds' , 'full stack data science' , 'DS'), 18 | (104 , 'big data' , 'full stack big data' , 'BD'), 19 | (105 , 'mern' , 'web dev' , 'mern'), 20 | (106 , 'blockchain' , 'full stack blockchain' , 'BC'), 21 | (101 , 'java' , 'full stack java' , 'java'), 22 | (102 , 'testing' , 'full testing ' , 'testing '), 23 | (105 , 'cybersecurity' , 'full stack cybersecurity' , 'cybersecurity'), 24 | (109 , 'c' , 'c language' , 'c'), 25 | (108 , 'c++' , 'C++ language' , 'language') 26 | 27 | 28 | create table if not exists course2 ( 29 | course_id int , 30 | course_name varchar(50), 31 | course_desc varchar(60), 32 | course_tag varchar(50), 33 | index(course_id,course_name) 34 | ) 35 | 36 | show index from course2 37 | 38 | 39 | create table if not exists course3( 40 | course_id int , 41 | course_name varchar(50), 42 | course_desc varchar(60), 43 | course_tag varchar(50), 44 | index(course_desc,course_name,course_tag) 45 | ) 46 | 47 | show index from course4 48 | 49 | create table if not exists course4 ( 50 | course_id int , 51 | course_name varchar(50), 52 | course_desc varchar(60), 53 | course_tag varchar(50), 54 | index(course_desc,course_name,course_tag) 55 | ) 56 | 57 | insert into course4 values(101 , 'fsda' , 'full stack data analytics' , 'Analytics'), 58 | (102 , 'fsds' , 'full stack data analytics' , 
'Analytics'), 59 | (103 , 'fsds' , 'full stack data science' , 'DS'), 60 | (104 , 'big data' , 'full stack big data' , 'BD'), 61 | (105 , 'mern' , 'web dev' , 'mern'), 62 | (106 , 'blockchain' , 'full stack blockchain' , 'BC'), 63 | (101 , 'java' , 'full stack java' , 'java'), 64 | (102 , 'testing' , 'full testing ' , 'testing '), 65 | (105 , 'cybersecurity' , 'full stack cybersecurity' , 'cybersecurity'), 66 | (109 , 'c' , 'c language' , 'c'), 67 | (108 , 'c++' , 'C++ language' , 'language') 68 | 69 | show index from course4 70 | 71 | EXPLAIN ANALYZE select * from course4 where course_id = 106 or course_name = 'fsds' 72 | 73 | explain select * from course4 where course_id = 106 74 | 75 | analyze table course4 76 | 77 | describe course4 78 | 79 | create table if not exists course5( 80 | course_id int , 81 | course_name varchar(50), 82 | course_desc varchar(60), 83 | course_tag varchar(50), 84 | unique index(course_desc,course_name) 85 | ) 86 | show index from course5 87 | 88 | select course_id , course_name from course 89 | union 90 | select student_id , student_name from student 91 | 92 | with sample_students as ( 93 | select * from course1 where course_id in (101,102,106)) 94 | select * from sample_students where course_tag = 'java' 95 | 96 | with outcoume_corss as (select c.course_id , c.course_name , c.course_desc ,s.student_id,s.student_name ,s.student_course_id from course1 c 97 | cross join student s ) select course_id , course_name ,student_id from outcoume_corss where student_id = 301 98 | 99 | 100 | with recursive cte1 as 101 | (select 1 as n union all select n+1 from cte1 where n<5 ) 102 | select * from cte1 103 | 104 | with recursive cte as ( 105 | select 1 as n, 1 as p, -1 as q 106 | union all 107 | select n+1, p+2, q+4 from cte where n<5 ) 108 | select * from cte 109 | -------------------------------------------------------------------------------- /Insert _Null_Data_Table.txt: -------------------------------------------------------------------------------- 1 | show databases; 2 | use customer; 3 | create table customer( 4 | id integer auto_increment, 5 | first_name varchar(25), 6 | last_name varchar(25), 7 | salary integer, 8 | primary key(id) 9 | ); 10 | 11 | select * from customer; 12 | ## insert records in table 13 | 14 | insert into customer(first_name,last_name,salary) 15 | values 16 | ('jalpa','patel',5000), 17 | ('xyz','xx1',6000), 18 | ('xyz1','xx2',7000), 19 | ('xyz2','xx3',8000), 20 | ('xyz3','xx4',9000), 21 | ('xyz4','xx4',null); -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Jalpa Patel Desai 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/Ntile.sql:
--------------------------------------------------------------------------------
/*-----------------------------NTILE-------------------------------*/

/* NTILE is a ranking function (available in SQL Server and in MySQL 8.0+) that distributes rows into a specified
number of groups and assigns a number to each group in incremental order, starting with one.
NTILE works with an ORDER BY clause, which sorts the rows in the specified order before splitting them into groups.

Assume we have 10 rows in a table and use the NTILE(5) function.
Since 10 divided by 5 is exactly 2, the rows split evenly and each group gets 2 rows.
NTILE distributes the 2 rows into each group and assigns each group a number from 1 to 5.

With an odd number of rows the split is uneven. Assume we have 11 rows and use NTILE(5).
NTILE determines the number of rows in the table before splitting them into groups.
When the row count does not divide evenly by the number of groups, the groups end up with
unequal sizes, and groups with more rows are listed ahead of groups with fewer rows.

So 11 rows divided by NTILE(5) gives 3 rows in the first group followed by 2 rows in
each of the remaining four groups. */

CREATE DATABASE NTILE_PRACTICE;
USE NTILE_PRACTICE;

/* 1. NTILE on an even number of rows in a table

In the following example,
we have created a Student table with 10 rows having Id, Name and Percentage columns.
The NTILE(5) function splits the 10 rows into 5 groups, ordering the result by the Percentage column.
With 10 rows and 5 groups, NTILE puts 2 rows in each group, because 10 divided by
NTILE(5) is 2 rows in each group. */

CREATE TABLE Student(
ID INT PRIMARY KEY, NAME VARCHAR(255) ,Percentage INT );

INSERT INTO Student
VALUES (1,'Atul',90),
(2,'Vishal',91),
(3,'Shailesh',3),
(4,'Niraj',92),
(5,'Chetan',89),
(6,'Sangram',87),
(7,'Rohit',87),
(8,'Prashant',93),
(9,'Ravi' ,91),
(10,'Akansha',94);

SELECT * FROM STUDENT;
# Creates 5 groups with an even number of rows in each group:

SELECT ID, Name, Percentage, NTILE(5) OVER (ORDER BY Percentage DESC) as
NTILEGROUP FROM Student;

/* 2. NTILE on an odd number of rows in a table

In the following example,
we have created a Student1 table with 11 rows having Id, Name and Percentage columns.
Using the NTILE(5) function on 11 rows results in 3 rows in the first group and 2 rows in each of the remaining 4 groups.
Groups with more rows are listed ahead of groups with fewer rows.
We've ordered the result by the Percentage column in the NTILE function.
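As a quick check of the arithmetic: 11 rows with NTILE(5) yields group sizes 3, 2, 2, 2, 2,
while 14 rows with NTILE(4) would yield 4, 4, 3, 3; the remainder is spread one extra row
at a time across the leading groups.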
*/ 58 | 59 | CREATE TABLE Student1( 60 | ID INT PRIMARY KEY, NAME VARCHAR(255) ,Percentage INT ); 61 | 62 | INSERT INTO Student1 63 | VALUES (1,'Atul',90), 64 | (2,'Vishal',91), 65 | (3,'Shailesh',3), 66 | (4,'Niraj',92), 67 | (5,'Chetan',89), 68 | (6,'Sangram',87), 69 | (7,'Rohit',87), 70 | (8,'Prashant',93), 71 | (9,'Ravi' ,91), 72 | (10,'Akansha',94), 73 | (11,'Falak',86); 74 | 75 | # Creates first group with 3 rows and four group with 2 rows each. 76 | 77 | SELECT ID, Name, Percentage, NTILE(5) OVER (ORDER BY Percentage DESC) as 78 | NTILEGROUP1 FROM Student1; 79 | 80 | /* 3. NTILE With Partition By Clause 81 | 82 | Following example demonstrates the use of NTILE function with partition by clause. 83 | Most importantly this example uses partition by to divide the rows into different groups 84 | based on Subject column. 85 | Later applying NTILE(2) function to create groups inside each partition. 86 | Therefore we have created two partitions by Subject, "English" and "Math" then applying 87 | NTILE function to each partition dividing 6 rows into 2 different groups of 3 rows using NTILE(2) function. */ 88 | 89 | CREATE TABLE Student2( 90 | ID INT,Name VARCHAR (255),Subject VARCHAR(20),Marks INT); 91 | 92 | INSERT INTO Student2 VALUES 93 | (1,'Atul','English',90), 94 | (2,'Vishal','English',91), 95 | (3,'Shailesh', 'English',97), 96 | (4,'Niraj','English',92), 97 | (5,'Chetan', 'English',89), 98 | (6,'Sangram', 'English',87), 99 | (7,'Rohit','Math' ,87), 100 | (8,'Prashant','Math' ,93), 101 | (9,'Ravi' ,'Math' ,91), 102 | (10,'Akansha','Math' ,94), 103 | (11,'Falak','Math' ,86), 104 | (12,'Avni','Math' ,73); 105 | 106 | SELECT ID, Name,SUBJECT,MARKS, NTILE(2) OVER 107 | (PARTITION BY SUBJECT ORDER BY MARKS DESC) as 108 | NTILEGROUP2 FROM Student2; 109 | 110 | /* 4. NTILE With Partition By Clause With Uneven Rows 111 | 112 | We've created two partitions using Subject "English" and "Math" having 7 rows in each partition. 113 | Besides we apply NTILE(2) function to each individual partition of an odd number of rows, 114 | therefore, it creates two unequal groups. 115 | The first group has 4 rows and the second group has 3 rows in both the partition as per the rule, 116 | the bigger number of rows are placed in the first group followed by the lower number of rows in later groups. */ 117 | 118 | CREATE TABLE Student3( 119 | ID INT,Name VARCHAR (255),Subject VARCHAR(20),Marks INT); 120 | 121 | INSERT INTO Student3 VALUES 122 | (1,'Atul','English',90), 123 | (2,'Vishal','English',91), 124 | (3,'Shailesh', 'English',97), 125 | (4,'Niraj','English',92), 126 | (5,'Chetan', 'English',89), 127 | (6,'Sangram', 'English',87), 128 | (7,'Rohit','Math' ,87), 129 | (8,'Prashant','Math' ,93), 130 | (9,'Ravi' ,'Math' ,91), 131 | (10,'Akansha','Math' ,94), 132 | (11,'Falak','Math' ,86), 133 | (12,'Avni','Math' ,73), 134 | (13,'Aadesh','English' ,83), 135 | (14,'Ranjana','Math' ,94); 136 | 137 | SELECT ID, Name,SUBJECT,MARKS, NTILE(2) OVER (PARTITION BY SUBJECT ORDER BY MARKS DESC) as 138 | NTILEGROUP3 FROM Student3; 139 | 140 | /*----------------- TRY DIFFERENT VALUES OF 'N' FOR BETTER UNDERSTANDING-----------------------*/ -------------------------------------------------------------------------------- /PIVOT_UNPIVOT_SNOWFLAKE.txt: -------------------------------------------------------------------------------- 1 | /* You can use the PIVOT and UNPIVOT relational operators to 2 | change a table-valued expression into another table. 
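In a pivot, the distinct values of one column become new column headers and an aggregate fills
the cells; UNPIVOT performs the reverse rotation, turning columns back into rows.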
*/

create database if not exists pivote;
use pivote;

create table order_table12 (
orderid int ,
employeeid int ,
vendorid int );

insert into order_table12 values (1, 258, 1580),
(2, 254, 1496),
(3, 257, 1494),
(4, 261, 1650),
(5, 251, 1654),
(6, 253, 1664);

select * from order_table12 ;

-- manual pivot in MySQL using IF()
select orderid,
if(employeeid = 254,1,NULL) as emp254 ,
if(employeeid = 257,1,NULL) as emp257 ,
if(employeeid = 261,1,NULL) as emp261 ,
if(employeeid = 251,1,NULL) as emp251 ,
if(employeeid = 253,1,NULL) as emp253 from
order_table12 ;

-- The PIVOT and UNPIVOT operators below work on SQL Server or Snowflake, not on MySQL

Create Table order_table13
(
CourseName varchar(50),
CourseCategory varchar(50),
Price int
);

Insert into order_table13 values('C', 'PROGRAMMING', 5000);
Insert into order_table13 values('JAVA', 'PROGRAMMING', 6000);
Insert into order_table13 values('PYTHON', 'PROGRAMMING', 8000);
Insert into order_table13 values('PLACEMENT 100', 'INTERVIEWPREPARATION', 5000);

SELECT * FROM order_table13;
-- Pivot
SELECT CourseName, PROGRAMMING, INTERVIEWPREPARATION
FROM order_table13
PIVOT
(
SUM(Price) FOR CourseCategory IN (PROGRAMMING, INTERVIEWPREPARATION )
) AS PivotTable

-- Unpivot
SELECT CourseName, CourseCategory, Price
FROM
(
SELECT CourseName, PROGRAMMING, INTERVIEWPREPARATION FROM order_table13
PIVOT
(
SUM(Price) FOR CourseCategory IN (PROGRAMMING, INTERVIEWPREPARATION)
) AS PivotTable
) P
UNPIVOT
(
Price FOR CourseCategory IN (PROGRAMMING, INTERVIEWPREPARATION)
)
AS UnpivotTable
--------------------------------------------------------------------------------
/Partition11.txt:
--------------------------------------------------------------------------------
create database if not exists bit_partition;
use bit_partition;

create table bit_course(
course_name varchar(50) ,
course_id int(10) ,
course_title varchar(60),
course_desc varchar(60),
launch_date date,
course_fee int,
course_mentor varchar(60),
course_launch_year int);

select * from bit_course ;

insert into bit_course values('machine_learning' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) ,
('aiops' , 101 , 'ML', "this is aiops course" ,'2019-07-07',3540,'jalpa',2019) ,
('dlcvnlp' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
('aws cloud' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
('blockchain' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) ,
('RL' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) ,
('Dl' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) ,
('interview prep' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) ,
('big data' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) ,
('data analytics' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) ,
('fsds' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) ,
('fsda' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) ,
('fabe' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) ,
('java' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
('MERN' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019);

select * from bit_course where course_launch_year = 2020;

-- drop table bit_courses1
create table bit_courses11(
course_name varchar(50),
course_id int(10),
course_title varchar(60),
course_desc varchar(80),
launch_date date,
course_fee int,
course_mentor varchar(60),
course_launch_year int)
partition by range(course_launch_year)(
partition p0 values less than (2019),
partition p1 values less than (2020),
partition p2 values less than (2021),
partition p3 values less than (2022),
partition p4 values less than (2023));

insert into bit_courses11 values('machine_learning' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) ,
('aiops' , 101 , 'ML', "this is aiops course" ,'2019-07-07',3540,'jalpa',2019) ,
('dlcvnlp' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
('aws cloud' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
('blockchain' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) ,
('RL' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) ,
('Dl' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) ,
('interview prep' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) ,
('big data' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
('data analytics' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) ,
('fsds' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) ,
('fsda' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) ,
('fabe' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) ,
('java' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
('MERN' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019);

select * from bit_courses11 where course_launch_year = 2020;

select * from bit_course where course_launch_year = 2020;

select partition_name , table_name , table_rows from information_schema.partitions where table_name = 'bit_courses11';



create table bit_courses2(
course_name varchar(50),
course_id int(10),
course_title varchar(60),
course_desc varchar(80),
launch_date date,
course_fee int,
course_mentor varchar(60),
course_launch_year int)
partition by hash(course_launch_year)
partitions 5;

select partition_name , table_name , table_rows from information_schema.partitions where table_name = 'bit_courses2';

create table bit_courses33(
course_name varchar(50),
course_id int(10),
course_title varchar(60),
course_desc varchar(80),
launch_date date,
course_fee int,
course_mentor varchar(60),
course_launch_year int)
partition by hash(course_launch_year)
partitions 10;

select partition_name , table_name , table_rows from information_schema.partitions where table_name = 'bit_courses33';


insert into bit_courses33 values('machine_learning' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) ,
('aiops' , 101 , 'ML', "this is aiops course" ,'2019-07-07',3540,'jalpa',2019) ,
('dlcvnlp' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
('aws cloud' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) ,
111 | ('blockchain' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 112 | ('RL' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 113 | ('Dl' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 114 | ('interview prep' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) , 115 | ('big data' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 116 | ('data analytics' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 117 | ('fsds' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 118 | ('fsda' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 119 | ('fabe' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 120 | ('java' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 121 | ('MERN' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) 122 | 123 | create table bit_courses4( 124 | course_name varchar(50), 125 | course_id int(10) primary key , 126 | course_title varchar(60), 127 | course_desc varchar(80), 128 | launch_date date, 129 | course_fee int, 130 | course_mentor varchar(60), 131 | course_launch_year int) 132 | partition by key() 133 | partitions 10; 134 | 135 | 136 | 137 | select partition_name , table_name , table_rows from information_schema.partitions where table_name = 'bit_courses4' 138 | 139 | 140 | insert into bit_courses4 values('machine_learning' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'sudhanshu',2019) , 141 | 142 | ('aiops' , 102 , 'ML', "this is aiops course" ,'2019-07-07',3540,'sudhanshu',2019) , 143 | ('dlcvnlp' , 103 , 'ML', "this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) , 144 | ('aws cloud' , 104 , 'ML', "this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) , 145 | ('blockchain' , 105, 'ML', "this is ML course" ,'2021-07-07',3540,'sudhanshu',2021) , 146 | ('RL' , 106 , 'ML', "this is ML course" ,'2022-07-07',3540,'sudhanshu',2022) , 147 | ('Dl' , 107 , 'ML', "this is ML course" ,'2022-07-07',3540,'sudhanshu',2022) , 148 | ('interview prep' , 108 , 'ML', "this is ML course" ,'2019-07-07',3540,'sudhanshu',2019) , 149 | ('big data' , 109 , 'ML', "this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) , 150 | ('data analytics' , 110 , 'ML', "this is ML course" ,'2021-07-07',3540,'sudhanshu',2021) , 151 | ('fsds' , 1011 , 'ML', "this is ML course" ,'2022-07-07',3540,'sudhanshu',2022) , 152 | ('fsda' , 1012 , 'ML', "this is ML course" ,'2021-07-07',3540,'sudhanshu',2021) , 153 | ('fabe' , 1013 , 'ML', "this is ML course" ,'2022-07-07',3540,'sudhanshu',2022) , 154 | ('java' , 1014 , 'ML', "this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) , 155 | ('MERN' , 1015 , 'ML', "this is ML course" ,'2019-07-07',3540,'sudhanshu',2019) 156 | 157 | select * from bit_courses4 158 | 159 | SELECT MD5('MERN') 160 | 161 | 162 | create table bit_courses6( 163 | course_name varchar(50) , 164 | course_id int(10) , 165 | course_title varchar(60), 166 | course_desc varchar(80), 167 | launch_date date, 168 | course_fee int, 169 | course_mentor varchar(60), 170 | course_launch_year int) 171 | partition by list(course_launch_year)( 172 | partition p0 values in(2019,2020), 173 | partition p1 values in(2022,2021) 174 | ) 175 | 176 | insert into bit_courses6 values('machine_learning' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'sudhanshu',2019) , 177 | 178 | ('aiops' , 102 , 'ML', "this is aiops course" ,'2019-07-07',3540,'sudhanshu',2019) , 179 | ('dlcvnlp' , 103 , 'ML', 
"this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) , 180 | ('aws cloud' , 104 , 'ML', "this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) , 181 | ('blockchain' , 105, 'ML', "this is ML course" ,'2021-07-07',3540,'sudhanshu',2021) , 182 | ('RL' , 106 , 'ML', "this is ML course" ,'2022-07-07',3540,'sudhanshu',2022) , 183 | ('Dl' , 107 , 'ML', "this is ML course" ,'2022-07-07',3540,'sudhanshu',2022) , 184 | ('interview prep' , 108 , 'ML', "this is ML course" ,'2019-07-07',3540,'sudhanshu',2019) , 185 | ('big data' , 109 , 'ML', "this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) , 186 | ('data analytics' , 110 , 'ML', "this is ML course" ,'2021-07-07',3540,'sudhanshu',2021) , 187 | ('fsds' , 1011 , 'ML', "this is ML course" ,'2022-07-07',3540,'sudhanshu',2022) , 188 | ('fsda' , 1012 , 'ML', "this is ML course" ,'2021-07-07',3540,'sudhanshu',2021) , 189 | ('fabe' , 1013 , 'ML', "this is ML course" ,'2022-07-07',3540,'sudhanshu',2022) , 190 | ('java' , 1014 , 'ML', "this is ML course" ,'2020-07-07',3540,'sudhanshu',2020) , 191 | ('MERN' , 1015 , 'ML', "this is ML course" ,'2019-07-07',3540,'sudhanshu',2019) 192 | 193 | select partition_name , table_name , table_rows from information_schema.partitions where table_name = 'bit_courses6' 194 | 195 | 196 | create table bit_courses8( 197 | course_name varchar(50) , 198 | course_id int(10) , 199 | course_title varchar(60), 200 | course_desc varchar(80), 201 | launch_date date, 202 | course_fee int, 203 | course_mentor varchar(60), 204 | course_launch_year int) 205 | partition by range columns(course_name ,course_id,course_launch_year )( 206 | partition p0 values less than ('aiops',105,2019), 207 | partition p1 values less than ('fsds' ,110,2021), 208 | partition p2 values less than ('MERN',116,2023) 209 | ) 210 | select partition_name , table_name , table_rows from information_schema.partitions where table_name = 'bit_courses8' 211 | 212 | select ('aiops',105,2019) < ('fsds' ,110,2021) 213 | select ('a') > ('b') 214 | 215 | select * from bit_courses8 216 | insert ignore into bit_courses8 values('machine_learning' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) , 217 | 218 | ('aiops' , 101 , 'ML', "this is aiops course" ,'2019-07-07',3540,'jalpa',2019) , 219 | ('dlcvnlp' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 220 | ('aws cloud' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 221 | ('blockchain' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 222 | ('RL' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 223 | ('Dl' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 224 | ('interview prep' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) , 225 | ('big data' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 226 | ('data analytics' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 227 | ('fsds' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 228 | ('fsda' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 229 | ('fabe' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 230 | ('java' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 231 | ('MERN' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) 232 | 233 | create table bit_courses99( 234 | course_name varchar(50) , 235 | course_id int(10) , 236 | course_title varchar(60), 237 | course_desc varchar(80), 238 | 
launch_date date, 239 | course_fee int, 240 | course_mentor varchar(60), 241 | course_launch_year int) 242 | partition by list columns(course_name)( 243 | partition p0 values in('aiops','data analytics','Dl','RL'), 244 | partition p1 values in('fsds' ,'big data','blockchain'), 245 | partition p2 values in('MERN','java','interview prep','fsda') 246 | ) 247 | 248 | 249 | insert ignore into bit_courses99 values('machine_learning' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) , 250 | 251 | ('aiops' , 101 , 'ML', "this is aiops course" ,'2019-07-07',3540,'jalpa',2019) , 252 | ('dlcvnlp' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 253 | ('aws cloud' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 254 | ('blockchain' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 255 | ('RL' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 256 | ('Dl' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 257 | ('interview prep' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) , 258 | ('big data' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 259 | ('data analytics' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 260 | ('fsds' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 261 | ('fsda' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 262 | ('fabe' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 263 | ('java' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 264 | ('MERN' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) 265 | 266 | drop table ineuron_courses1 267 | 268 | create table bit_courses10( 269 | course_name varchar(50), 270 | course_id int(10), 271 | course_title varchar(60), 272 | course_desc varchar(80), 273 | launch_date date, 274 | course_fee int, 275 | course_mentor varchar(60), 276 | course_launch_year int) 277 | partition by range(course_launch_year) 278 | subpartition by hash(course_launch_year) 279 | subpartitions 5 ( 280 | partition p0 values less than (2019) , 281 | partition p1 values less than (2020) , 282 | partition p2 values less than (2021) , 283 | partition p3 values less than (2022) 284 | ); 285 | 286 | insert ignore into bit_courses10 values('machine_learning' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) , 287 | 288 | ('aiops' , 101 , 'ML', "this is aiops course" ,'2019-07-07',3540,'jalpa',2019) , 289 | ('dlcvnlp' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 290 | ('aws cloud' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 291 | ('blockchain' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 292 | ('RL' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 293 | ('Dl' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 294 | ('interview prep' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) , 295 | ('big data' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 296 | ('data analytics' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 297 | ('fsds' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 298 | ('fsda' , 101 , 'ML', "this is ML course" ,'2021-07-07',3540,'jalpa',2021) , 299 | ('fabe' , 101 , 'ML', "this is ML course" ,'2022-07-07',3540,'jalpa',2022) , 300 | ('java' , 101 , 'ML', "this is ML course" ,'2020-07-07',3540,'jalpa',2020) , 301 | ('MERN' , 101 , 'ML', "this is ML course" ,'2019-07-07',3540,'jalpa',2019) ;
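-- a quick sanity check on the subpartitioned table (a sketch, assuming MySQL 8.x):
-- the partitions column of EXPLAIN should list only the range partition and its hash
-- subpartitions that can hold course_launch_year = 2020, confirming partition pruning.
explain select * from bit_courses10 where course_launch_year = 2020;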
302 | 303 | select partition_name , table_name , table_rows from information_schema.partitions where table_name = 'bit_courses10' -------------------------------------------------------------------------------- /Pets Orders Operations.txt: -------------------------------------------------------------------------------- 1 | CREATE DATABASE PETS_ORDERS; 2 | USE PETS_ORDERS; 3 | CREATE OR REPLACE TABLE OWNER 4 | ( 5 | OwnerID INTEGER NOT NULL PRIMARY KEY , 6 | Name VARCHAR2(20), 7 | Surname STRING, 8 | StreetAddress VARCHAR2(50), 9 | City STRING, 10 | StatE CHAR(4), 11 | StateFull STRING, 12 | ZipCode INTEGER 13 | ); 14 | 15 | CREATE OR REPLACE TABLE PETS 16 | ( 17 | PetID VARCHAR(10) NOT NULL PRIMARY KEY, 18 | Name VARCHAR(20), 19 | Kind STRING, 20 | Gender CHAR(7), 21 | Age INTEGER, 22 | OwnerID INTEGER NOT NULL REFERENCES OWNER 23 | ); 24 | SELECT * FROM OWNER; 25 | SELECT * FROM PETS; 26 | SELECT COUNT(DISTINCT OwnerID) from OWNER; 27 | SELECT COUNT(DISTINCT PetID) from PETS; 28 | 29 | -- NEED THE NAME OF EACH OWNER & THEIR DOGS' NAMES ALONG WITH THEIR AGES ---- INNER JOIN 30 | SELECT O.Name AS OWNER_NAME,p.NAME AS PET_NAME,p.age AS PET_AGE 31 | FROM OWNER o 32 | INNER JOIN PETS p ON o.OwnerID = p.OwnerID; 33 | 34 | --NEED THE NAME OF ALL THE OWNERS, IRRESPECTIVE OF WHETHER OR NOT THEY HAVE PETS 35 | SELECT O.Name AS OWNER_NAME,p.NAME AS PET_NAME,p.age AS PET_AGE 36 | FROM OWNER o 37 | LEFT OUTER JOIN PETS p ON o.OwnerID = p.OwnerID; 38 | 39 | --- COUNT OF PETS EACH OWNER HAS 40 | SELECT O.Name AS OWNER_NAME,COUNT(DISTINCT p.PETID) 41 | FROM OWNER o 42 | INNER JOIN PETS p ON o.OwnerID = p.OwnerID 43 | GROUP BY 1 44 | ORDER BY 2 DESC; 45 | 46 | ---RIGHT JOIN 47 | SELECT O.Name AS OWNER_NAME,p.NAME AS PET_NAME,p.age AS PET_AGE 48 | FROM OWNER o 49 | RIGHT JOIN PETS p ON o.OwnerID = p.OwnerID; 50 | 51 | --FULL OUTER JOIN 52 | SELECT O.*,P.* 53 | FROM OWNER O 54 | FULL OUTER JOIN PETS p ON o.OwnerID = p.OwnerID; 55 | 56 | -- INFO OF ALL THE PETS HELD BY THEIR OWNER 57 | SELECT DISTINCT KIND FROM PETS; 58 | SELECT KIND,COUNT(*) FROM PETS 59 | GROUP BY 1; 60 | 61 | 62 | --- CROSS JOIN 63 | SELECT O.*,P.* 64 | FROM OWNER O 65 | CROSS JOIN PETS p ; -------------------------------------------------------------------------------- /Primary_Foreign_Key.sql: -------------------------------------------------------------------------------- 1 | /* A relational database is designed to enforce the uniqueness of primary keys by allowing only one row with a given primary key value in a table. 2 | A foreign key is a column or a set of columns in a table whose values correspond to the values of the primary key in another table.
*/ 3 | 4 | use customer 5 | select * from customer_X 6 | 7 | CREATE TABLE if not exists customer_X( 8 | Id int NOT NULL, 9 | Name varchar(20) NOT NULL, 10 | Age int NOT NULL, 11 | Address varchar(25) , 12 | Salary decimal (18, 2), 13 | PRIMARY KEY (id) 14 | ); 15 | 16 | CREATE TABLE if not exists Orders( 17 | OrderID int NOT NULL, 18 | OrderNumber int NOT NULL, 19 | Id int, 20 | PRIMARY KEY(OrderID), 21 | CONSTRAINT FK_customerOrder FOREIGN KEY(Id) 22 | REFERENCES CUSTOMER_X (id) 23 | ); 24 | desc orders 25 | 26 | ALTER TABLE Orders 27 | ADD FOREIGN KEY (ID) REFERENCES customer_X(id); 28 | 29 | ALTER TABLE Orders 30 | DROP FOREIGN KEY FK_customerOrder; 31 | 32 |
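-- a minimal sketch of what the constraint buys us (the sample rows are hypothetical):
-- while FK_customerOrder is still in place, an order pointing at a customer id that
-- does not exist in customer_X should be rejected with a foreign key violation.
insert into customer_X values (1, 'Asha', 30, 'Pune', 55000.00);
insert into Orders values (501, 9001, 1);  -- succeeds: customer 1 exists
insert into Orders values (502, 9002, 99); -- fails: no customer with id 99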
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SQL Introduction 2 | MySQL Workbench is a powerful, integrated development environment (IDE) for MySQL, designed to simplify database management and development tasks. Developed by Oracle, MySQL Workbench provides tools for database design, SQL development, and administration, making it an essential tool for database administrators (DBAs), developers, and data architects. 3 | 4 | With its visual interface, users can model complex databases, create and edit tables, define relationships, and visualize database structures. MySQL Workbench also supports SQL query execution, allowing users to write, optimize, and debug SQL code in a user-friendly editor. Additionally, it offers features for database migration, performance tuning, and server administration, including options for user management, backup, and configuration. 5 | 6 | MySQL Workbench is cross-platform, available on Windows, macOS, and Linux, and offers compatibility with various MySQL server versions. Its intuitive interface and extensive capabilities make it a valuable tool for managing MySQL databases effectively and efficiently. 7 | 8 | To install MySQL Workbench, follow these steps to ensure a successful setup: 9 | 10 | 1. **Download the MySQL Installer**: Visit the official [MySQL website](https://dev.mysql.com/downloads/installer/) and download the MySQL Installer for Windows. This installer simplifies the process and ensures that all necessary components are installed together. 11 | 12 | 2. **Run the Installer**: After downloading, open the installer. When prompted, select the setup type — the "Custom" setup is recommended if you want to install MySQL Server, MySQL Workbench, and additional tools like MySQL Shell. 13 | 14 | 3. **Select Components**: In the installation interface, choose "MySQL Workbench" (and optionally, other tools like MySQL Server if not already installed). Move the desired components to the "Products to be installed" section. 15 | 16 | 4. **Install and Configure**: Click on "Execute" to start the installation. Once the installation finishes, you may proceed with configuration settings, including networking and authentication. Set a root password for added security. 17 | 18 | 5. **Finish Setup**: Finalize the configuration by clicking "Finish." You can now launch MySQL Workbench and connect to your local MySQL server by entering the root password. 19 | 20 | This process should allow MySQL Workbench to function correctly on your system, provided that all prerequisites are met. This approach applies to both Windows 10 and 11 environments and ensures access to the latest version of MySQL Workbench. 21 | -------------------------------------------------------------------------------- /REGEX_1.txt: -------------------------------------------------------------------------------- 1 | CREATE DATABASE DEMO_DATABASE; 2 | USE DATABASE DEMO_DATABASE; 3 | 4 | 5 | create or replace table like_ex(subject varchar(20)); 6 | insert into like_ex values 7 | ('John Dddoe'), 8 | ('Joe Doe'), 9 | ('John_down'), 10 | ('Joe down'), 11 | ('Elaine'), 12 | (''), -- empty string 13 | (null); 14 | 15 | select subject 16 | from like_ex 17 | where subject like '%Jo%oe%' 18 | order by subject; 19 | 20 | --LIKE ANY 21 | -- Allows case-sensitive matching of strings based on comparison with one or more patterns. 22 | --The operation is similar to LIKE. If the input string matches any of the patterns, this returns the input string. 23 | 24 | -- LIKE ANY ( <pattern1> [, <pattern2> ... ] ) [ ESCAPE <escape_char> ] 25 | 26 | create or replace table like_example(subject varchar(20)); 27 | insert into like_example values 28 | ('John Dddoe'), 29 | ('Joe Doe'), 30 | ('John_down'), 31 | ('Joe down'), 32 | ('Tom Doe'), 33 | ('Tim down'), 34 | (null); 35 | 36 | select * from like_example; 37 | 38 | select * from like_example 39 | where subject like any ('%Jo%oe%','T%e') 40 | order by subject; 41 | 42 | select * from like_example 43 | where subject like any ('%J%h%^_do%', 'T%^%e') escape '^' 44 | order by subject; 45 | 46 | USE DATABASE DEMO_DATABASE; 47 | 48 | create or replace table strings (v varchar(50)); 49 | insert into strings (v) values 50 | ('San Francisco'), 51 | ('San Jose'), 52 | ('Santa Clara'), 53 | ('Sacramento'); 54 | 55 | --Use wildcards to search for a pattern: 56 | select v from strings 57 | where v regexp 'San* [Jjo].*' 58 | order by v; 59 | 60 | 61 | 62 | SELECT TRIM(REGEXP_REPLACE(string, '[^[:digit:]]', ' ')) AS Numeric_value 63 | FROM (SELECT ' Area code for employee ID 112244 is 12345.' AS string) a; 64 | 65 | CREATE TABLE demo3 (id INT, string1 VARCHAR); 66 | INSERT INTO demo3 (id, string1) VALUES 67 | (5, 'A MAN A PLAN A CANAL') 68 | ; 69 | SELECT * FROM DEMO3; 70 | select id, 71 | regexp_substr(string1, 'A\\W+(\\w+)', 1, 1, 'e', 1) as "RESULT1", 72 | regexp_substr(string1, 'A\\W+(\\w+)', 1, 2, 'e', 1) as "RESULT2", 73 | regexp_substr(string1, 'A\\W+(\\w+)', 1, 3, 'e', 1) as "RESULT3", 74 | regexp_substr(string1, 'A\\W+(\\w+)', 1, 4, 'e', 1) as "RESULT4" 75 | from demo3; 76 | 77 | 78 | /* Snowflake Regular Expression Functions 79 | The regular expression functions are string functions that match a given regular expression. These functions are commonly called 'regex' functions. 80 | 81 | Below are some of the regular expression functions that the Snowflake cloud data warehouse supports: 82 | 83 | REGEXP_COUNT 84 | REGEXP_INSTR 85 | REGEXP_LIKE 86 | REGEXP_SUBSTR 87 | REGEXP_REPLACE 88 | REGEXP 89 | RLIKE */ 90 | 91 | /* Snowflake REGEXP_COUNT Function 92 | The REGEXP_COUNT function searches a string and returns an integer that indicates the number of 93 | times the pattern occurs in the string. If no match is found, then the function returns 0. 94 | 95 | syntax : REGEXP_COUNT( <subject> , <pattern> [ , <position> , <parameters> ] ) */ 96 | select regexp_count('qqqabcrtrababcbcd', 'abc'); 97 | select regexp_count('qqqabcrtrababcbcd', '[abc]') as abc_character_count; 98 | select REGEXP_COUNT('QQQABCRTRABABCBCD', '[ABC]{3}'); 99 | 100 |
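-- REGEXP_INSTR is listed above but not demonstrated; a small sketch of its basic form:
-- it returns the 1-based position of the first match, or 0 when nothing matches.
select regexp_instr('qqqabcrtrababcbcd', 'abc') as first_match_position; -- 4
select regexp_instr('qqqabcrtrababcbcd', 'xyz') as no_match;             -- 0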
101 | /* 102 | The Snowflake REGEXP_REPLACE function returns the string with the specified pattern replaced. 103 | If no match is found, the original string is returned. 104 | 105 | Following is the syntax of the Regexp_replace function. 106 | 107 | REGEXP_REPLACE( <subject> , <pattern> [ , <replacement> , <position> , <occurrence> , <parameters> ] ) 108 | 109 | 1. Extract date from a text string using Snowflake REGEXP_REPLACE Function 110 | The REGEXP_REPLACE function is one of the easiest functions to get the required value when manipulating string data. 111 | Consider the below example to replace all characters except the date value. */ 112 | 113 | --For example, consider the following query to return only the user name. 114 | select regexp_replace( 'jalpapatel@gmail.com', '@.*\\.(com)'); 115 | 116 | select regexp_replace('Customers - (NY)','\\(|\\)','') as customers; 117 | 118 | SELECT TRIM(REGEXP_REPLACE(string, '[a-z/-/A-Z/.]', '')) 119 | AS date_value 120 | FROM (SELECT 'My DOB is 04-12-1976.' AS string) a; 121 | 122 | /* 2. Extract date using REGEXP_SUBSTR 123 | Alternatively, the REGEXP_SUBSTR function can be used to get the date field from the string data. 124 | 125 | For example, consider the below example to get the date value from a string containing text and the date. */ 126 | SELECT REGEXP_SUBSTR('I am celebrating my birthday on 05-12-2020 this year','[0-9][0-9]-[0-9][0-9]-[0-9][0-9][0-9][0-9]') as dob; 127 | 128 | -- 3. Validate if date is in a valid format using REGEXP_LIKE function 129 | SELECT * FROM (SELECT '04-12-1976' AS string) a where REGEXP_LIKE(string,'\\d{1,2}\\-\\d{1,2}-\\d{4,4}'); 130 | 131 | --4. String pattern matching using REGEXP_LIKE 132 | WITH tbl 133 | AS (select t.column1 mycol 134 | from values('A1 something'),('B1 something'),('Should not be matched'),('C1 should be matched') t ) 135 | 136 | SELECT * FROM tbl WHERE regexp_like (mycol,'[a-zA-z]\\d{1,}[\\s0-9a-zA-Z]*'); 137 | 138 | 139 | /* 140 | -- Snowflake REGEXP Function 141 | The Snowflake REGEXP function is an alias for RLIKE. 142 | 143 | Following is the syntax of the REGEXP function. 144 | 145 | -- 1st syntax 146 | REGEXP( <subject> , <pattern> [ , <parameters> ] ) 147 | 148 | -- 2nd syntax 149 | <subject> REGEXP <pattern> */ 150 | 151 | --For example, consider the following query, which matches each city against the pattern. 152 | SELECT city REGEXP 'B.*' 153 | FROM ( 154 | SELECT 'Bangalore' AS city 155 | UNION ALL 156 | SELECT 'Mangalore' AS city ) AS tmp; 157 | 158 | /* 159 | Snowflake RLIKE Function 160 | The Snowflake RLIKE function is an alias for REGEXP and regexp_like. 161 | 162 | Following is the syntax of the RLIKE function. 163 | 164 | -- 1st syntax 165 | RLIKE( <subject> , <pattern> [ , <parameters> ] ) 166 | 167 | -- 2nd syntax 168 | <subject> RLIKE <pattern> 169 | */ 170 | 171 | --For example, consider the following query, which matches each city against the pattern. 172 | SELECT city RLIKE 'M.*' 173 | FROM ( 174 | SELECT 'Bangalore' AS city 175 | UNION ALL 176 | SELECT 'Mangalore' AS city ) AS tmp; 177 | 178 | 179 | 180 | /* Snowflake Extract Numbers from the string examples 181 | The regular expression functions come in handy when you want to extract numerical values from string data. 182 | Though you can use built-in functions to check whether a string is numeric, 183 | extracting particular numeric values is easier with regular expressions. 184 | 185 | For example, extract the number from the string using the Snowflake regexp_replace regular expression Function. */ 186 | 187 | SELECT TRIM(REGEXP_REPLACE(string, '[^[:digit:]]', ' ')) AS Numeric_value 188 | FROM (SELECT ' Area code for employee ID 112244 is 12345.' AS string) a; 189 | 190 | --For example, consider the below query that uses different regex patterns. 191 | 192 | SELECT TRIM(REGEXP_REPLACE(string, '[a-z/-/A-z/./#/*]', '')) AS Numeric_value 193 | FROM (SELECT ' Area code for employee ID 112244 is 12345.'
AS string) a; 194 | 195 | 196 | /* The most common requirement in the data warehouse environment is to extract certain digits from the string. 197 | For example, extract a 6-digit number from string data. 198 | 199 | There are many methods that you can use, however, the easiest method is to use the 200 | Snowflake REGEXP_SUBSTR regular expression function for this requirement. 201 | 202 | You can modify the regular expression pattern to extract any number of digits based on your requirements. */ 203 | 204 | --Snowflake Extract 6-digit numbers from string value examples 205 | SELECT REGEXP_SUBSTR(string, '(^|[^[:word:]]|[[:space:]])\\d{6}([^[:word:]]|[[:space:]]|$)') AS ID 206 | FROM (SELECT ' Area code for employee ID 112244 is 12345.' AS string) a; 207 | 208 | -- Another common requirement is to extract alphanumeric values from string data. 209 | -- Snowflake Extract Alphanumeric from the string examples 210 | -- For example, consider the below example to extract an ID which is a combination of 'ID' and a numeric value. 211 | 212 | SELECT REGEXP_SUBSTR('abc jjs Updates ID 123 ID_112233','ID_[0-9][0-9][0-9][0-9][0-9][0-9]') as ID; 213 | 214 | --01PI10EC014 1pi10eC014 215 | 216 | 217 | 218 | --How to Remove Spaces in the String in Snowflake? 219 | 220 | /* Nowadays, data is required everywhere. 221 | Many organizations automatically capture the data using tools or machines. 222 | Machines may introduce unwanted data such as white space when they capture the actual data. 223 | This junk data is of no use in reporting, so you need to remove it before loading into the target table. 224 | 225 | In a data warehouse, you will receive data from multiple sources. 226 | You may have to pre-process the data before loading it to the target table. 227 | Pre-processing steps such as removing white spaces from data are common. 228 | In this LECTURE we will check how to remove spaces in a string using Snowflake built-in functions. 229 | 230 | Snowflake provides many built-in functions to remove white space or any unwanted data from a string. 231 | 232 | You can use any of the following string functions as per your requirements. 233 | 234 | Replace String Function 235 | TRIM Function 236 | Translate Function 237 | REGEXP_REPLACE Function */ 238 | 239 | SELECT REPLACE('AB C D ', ' ', '') as space_removed_output; 240 | 241 | SELECT TRANSLATE('AB C D ', ' ', '') as output; 242 | 243 | /* Remove White Spaces using REGEXP_REPLACE Function 244 | 245 | The REGEXP_REPLACE function removes all occurrences of white space in a string. 246 | For example, consider the following regexp_replace example to replace all spaces in the string with nothing. */ 247 | 248 | select REGEXP_REPLACE('AB C D hello how are you hi an a n d ','( ){1,}','') as output; 249 | 250 | select regexp_replace('It was the best of times, it was the worst of times', '( ){1,}','') as "result" from dual; -------------------------------------------------------------------------------- /REGEX_2.txt: -------------------------------------------------------------------------------- 1 | /* 2 | Regular expressions (REGEX) are powerful for data validation, extraction, transformation, and analysis. 3 | 4 | A great use case for regular expressions (RegEx) in data analytics involves cleaning and transforming unstructured or semi-structured data, such as parsing email addresses, extracting specific patterns from strings, or validating the format of data.
5 | 6 | 7 | Use Case: Extracting Domain Names from Email Addresses 8 | 9 | Scenario: Imagine you have a large dataset with customer information that contains inconsistencies in formatting for emails, phone numbers, and addresses. 10 | You want to clean and analyze the data by extracting, matching, and standardizing these fields. 11 | 12 | */ 13 | USE DEMO_DATABASE; 14 | 15 | CREATE OR REPLACE TABLE customers_EMAIL ( 16 | customer_id INT PRIMARY KEY, 17 | name VARCHAR(100), 18 | email VARCHAR(255) 19 | ); 20 | 21 | SELECT * FROM customers_EMAIL LIMIT 1000; 22 | 23 | SELECT TOP 10 * FROM customers_EMAIL; 24 | 25 | CREATE OR REPLACE TABLE customers_with_phone ( 26 | customer_id INT PRIMARY KEY, 27 | name VARCHAR(100), 28 | phone VARCHAR(20) 29 | ); 30 | 31 | SELECT * FROM customers_with_phone LIMIT 1000; 32 | 33 | SELECT TOP 100 * FROM customers_with_phone; 34 | 35 | --SQL Query to Extract Valid Emails Using REGEXP: 36 | SELECT customer_id, name, 37 | REGEXP_SUBSTR(email, '[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}') AS valid_email 38 | FROM customers_EMAIL 39 | WHERE REGEXP_LIKE(email, '[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}'); 40 | 41 | -- REGEXP_SUBSTR extracts the portion of the email that matches the pattern of a valid email address. 42 | -- REGEXP_LIKE is used to filter rows that have valid email addresses based on the same pattern. 43 | 44 | -- Use Case: Extracting Phone Numbers 45 | -- Problem: The phone column has inconsistent phone number formats, such as "(123) 456-7890", "123-456-7890", or even "1234567890". 46 | -- You need to standardize them. 47 | 48 | SELECT customer_id, name, 49 | REGEXP_REPLACE(phone, '[^0-9]', '') AS standardized_phone -- REGEXP_REPLACE removes any non-numeric characters from the phone field, leaving only digits. 50 | FROM customers_with_phone; 51 | 52 | 53 | -- Use Case: Extract the domain part from email addresses (e.g., @gmail.com, @yahoo.com). 54 | SELECT customer_id, email, 55 | REGEXP_SUBSTR(email, '@[a-zA-Z0-9.-]+' ) AS domain -- The REGEXP_SUBSTR extracts the domain part after the "@" symbol. 56 | FROM customers_EMAIL; 57 | 58 | 59 | -- Validating Social Security Numbers (SSNs) 60 | -- Use Case: Validate if the values in a column are formatted like U.S. Social Security Numbers (SSNs) in the format XXX-XX-XXXX. 61 | SELECT customer_id, ssn 62 | FROM customers 63 | WHERE REGEXP_LIKE(ssn, '^\d{3}-\d{2}-\d{4}$'); 64 | --^\d{3}-\d{2}-\d{4}$ validates that the SSN contains exactly 3 digits, followed by a hyphen, 2 digits, a hyphen, and 4 digits. 65 | 66 | 67 | --Extracting Hashtags from Social Media Text 68 | -- Use Case: Extract hashtags (e.g., #DataAnalytics, #SQL) from a social media post column. 69 | 70 | SELECT post_id, post_text, 71 | REGEXP_SUBSTR(post_text, '#[A-Za-z0-9_]+') AS hashtag 72 | FROM social_media_posts 73 | WHERE REGEXP_LIKE(post_text, '#[A-Za-z0-9_]+'); 74 | 75 | -- Identifying Invalid Phone Numbers 76 | -- Use Case: Identify phone numbers that don't match a specific format, such as (XXX) XXX-XXXX or XXX-XXX-XXXX. 77 | SELECT customer_id, phone 78 | FROM customers_with_phone 79 | WHERE NOT REGEXP_LIKE(phone, '^\(?\d{3}\)?[- ]?\d{3}[- ]?\d{4}$'); 80 | --The pattern ^\(?\d{3}\)?[- ]?\d{3}[- ]?\d{4}$ allows for phone numbers with or without parentheses and dashes. 81 |
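-- building on the two phone use cases above, a small sketch (assuming 10-digit US
-- numbers): strip the non-digits first, then rebuild the canonical (XXX) XXX-XXXX
-- form with capture groups and backreferences.
SELECT customer_id, phone,
       REGEXP_REPLACE(REGEXP_REPLACE(phone, '[^0-9]', ''),
                      '^(\\d{3})(\\d{3})(\\d{4})$', '(\\1) \\2-\\3') AS formatted_phone
FROM customers_with_phone;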
84 | -- Validate date formats in columns where the date might be entered as YYYY-MM-DD or MM/DD/YYYY. 85 | SELECT event_id, event_date 86 | FROM events 87 | WHERE REGEXP_LIKE(event_date, '^\d{4}-\d{2}-\d{2}$') -- Format: YYYY-MM-DD 88 | OR REGEXP_LIKE(event_date, '^\d{2}/\d{2}/\d{4}$'); -- Format: MM/DD/YYYY 89 | 90 | -- Finding Records with Alphabetic Characters in a Numeric Field 91 | -- Use Case: Identify records where a numeric field, such as a product code or ID, contains alphabetic characters by mistake. 92 | 93 | SELECT product_id, product_code 94 | FROM products 95 | WHERE REGEXP_LIKE(product_code, '[A-Za-z]'); 96 | -- The REGEXP_LIKE checks if the product_code field contains any alphabetic characters, flagging those entries as errors. 97 | 98 | 99 | -- Extracting First and Last Name from Full Name 100 | -- Use Case: Split a full_name column into first_name and last_name. 101 | 102 | SELECT full_name, 103 | REGEXP_SUBSTR(full_name, '^[A-Za-z]+') AS first_name, 104 | REGEXP_SUBSTR(full_name, '[A-Za-z]+$') AS last_name 105 | FROM customers; 106 | 107 | -- ^[A-Za-z]+ extracts the first name (first word before a space). 108 | -- [A-Za-z]+$ extracts the last name (last word after a space). 109 | 110 | 111 | -- Extracting Numbers from Alphanumeric Strings 112 | -- Use Case: Extract the numeric part from an alphanumeric product code like ABC12345 or Product567. 113 | 114 | SELECT product_id, product_code, 115 | REGEXP_SUBSTR(product_code, '\d+') AS numeric_part ----REGEXP_SUBSTR extracts the numeric portion from the alphanumeric product_code. 116 | FROM products 117 | WHERE REGEXP_LIKE(product_code, '\d+'); 118 | 119 | -- Matching Specific Word Patterns in Sentences 120 | -- Use Case: Find rows in a text column that contain specific words like error, failed, or success. 121 | 122 | 123 | SELECT log_id, log_message 124 | FROM system_logs 125 | WHERE REGEXP_LIKE(log_message, '(error|failed|success)', 'i'); 126 | 127 | -- This pattern searches for any occurrence of the words "error", "failed", or "success" in the log_message column, case-insensitive ('i' flag). 128 | 129 | -- Replacing Substrings with REGEXP_REPLACE 130 | -- Use Case: Remove all non-alphanumeric characters from a column. 131 | 132 | SELECT customer_id, name, 133 | REGEXP_REPLACE(name, '[^A-Za-z0-9 ]', '') AS clean_name 134 | FROM customers; 135 | 136 | --REGEXP_REPLACE removes any character that is not a letter, digit, or space from the name field. 137 | 138 | -- Detecting Leading and Trailing Spaces 139 | -- Use Case: Identify and clean entries that have leading or trailing spaces in a name column. 140 | 141 | SELECT customer_id, name 142 | FROM customers 143 | WHERE REGEXP_LIKE(name, '^\s+|\s+$'); 144 | 145 | -- ^\s+|\s+$ detects any leading (^\s+) or trailing (\s+$) spaces in the name column. 146 | 147 | -- Matching URL Patterns 148 | -- Use Case: Extract valid URLs from a text field or check if a URL format is valid. 149 | 150 | SELECT article_id, url, 151 | REGEXP_SUBSTR(url, 'https?://[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}') AS valid_url 152 | FROM articles 153 | WHERE REGEXP_LIKE(url, 'https?://[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}'); 154 | 155 | -- This regular expression captures URLs that start with http:// or https://, followed by a domain name. -- Extracting IP Addresses 156 | -- Use Case: Extract valid IP addresses from a log table.
157 | 158 | SELECT log_id, log_message, 159 | REGEXP_SUBSTR(log_message, '\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b') AS ip_address 160 | FROM logs 161 | WHERE REGEXP_LIKE(log_message, '\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b'); 162 | 163 | -- This pattern matches IP addresses in the format XXX.XXX.XXX.XXX where each block is between 1 and 3 digits. 164 | 165 | --Detecting Duplicates in a Text Column 166 | -- Use Case: Detect duplicate words in a single column value. 167 | 168 | SELECT comment_id, comment, 169 | REGEXP_LIKE(comment, '\b(\w+)\s+\1\b') AS has_duplicate_words -- This regular expression checks for duplicate consecutive words in a string. 170 | FROM user_comments; -------------------------------------------------------------------------------- /REGX.txt: -------------------------------------------------------------------------------- 1 | USE DATABASE DEMO_DATABASE; 2 | 3 | 4 | create or replace table like_ex(subject varchar(20)); 5 | insert into like_ex values 6 | ('John Dddoe'), 7 | ('Joe Doe'), 8 | ('John_down'), 9 | ('Joe down'), 10 | ('Elaine'), 11 | (''), -- empty string 12 | (null); 13 | 14 | select subject 15 | from like_ex 16 | where subject like '%Jo%oe%' 17 | order by subject; 18 | 19 | --LIKE ANY 20 | -- Allows case-sensitive matching of strings based on comparison with one or more patterns. 21 | --The operation is similar to LIKE. If the input string matches any of the patterns, this returns the input string. 22 | 23 | -- LIKE ANY ( [, ... ] ) [ ESCAPE ] 24 | 25 | create or replace table like_example(subject varchar(20)); 26 | insert into like_example values 27 | ('John Dddoe'), 28 | ('Joe Doe'), 29 | ('John_down'), 30 | ('Joe down'), 31 | ('Tom Doe'), 32 | ('Tim down'), 33 | (null); 34 | 35 | select * from like_example; 36 | 37 | select * from like_example 38 | where subject like any ('%Jo%oe%','T%e') 39 | order by subject; 40 | 41 | select * from like_example 42 | where subject like any ('%J%h%^_do%', 'T%^%e') escape '^' 43 | order by subject; 44 | 45 | USE DATABASE DEMO_DATABASE; 46 | 47 | create or replace table strings (v varchar(50)); 48 | insert into strings (v) values 49 | ('San Francisco'), 50 | ('San Jose'), 51 | ('Santa Clara'), 52 | ('Sacramento'); 53 | 54 | --Use wildcards to search for a pattern: 55 | select v from strings 56 | where v regexp 'San* [fF].*' 57 | order by v; 58 | 59 | 60 | SELECT TRIM(REGEXP_REPLACE(string, '[^[:digit:]]', ' ')) AS Numeric_value 61 | FROM (SELECT ' Area code for employee ID 112244 is 12345.' AS string) a; 62 | 63 | CREATE TABLE demo3 (id INT, string1 VARCHAR); 64 | INSERT INTO demo3 (id, string1) VALUES 65 | (5, 'A MAN A PLAN A CANAL') 66 | ; 67 | SELECT * FROM DEMO3; 68 | select id, 69 | regexp_substr(string1, 'A\\W+(\\w+)', 1, 1, 'e', 1) as "RESULT1", 70 | regexp_substr(string1, 'A\\W+(\\w+)', 1, 2, 'e', 1) as "RESULT2", 71 | regexp_substr(string1, 'A\\W+(\\w+)', 1, 3, 'e', 1) as "RESULT3", 72 | regexp_substr(string1, 'A\\W+(\\w+)', 1, 4, 'e', 1) as "RESULT4" 73 | from demo3; 74 | 75 | 76 | /* Snowflake Regular Expression Functions 77 | The regular expression functions are string functions that match a given regular expression. These functions are commonly called as a ‘regex’ functions. 
78 | 79 | Below are some of the regular expression function that Snowflake cloud data warehouse supports: 80 | 81 | REGEXP_COUNT 82 | REGEXP_INSTR 83 | REGEXP_LIKE 84 | REGEXP_SUBSTR 85 | REGEXP_REPLACE 86 | REGEXP 87 | RLIKE */ 88 | 89 | /* Snowflake REGEXP_COUNT Function 90 | The REGEXP_COUNT function searches a string and returns an integer that indicates the number of 91 | times the pattern occurs in the string. If no match is found, then the function returns 0. 92 | 93 | syntax : REGEXP_COUNT( , [ , , ] ) */ 94 | select regexp_count('qqqabcrtrababcbcd', 'abc'); 95 | select regexp_count('qqqabcrtrababcbcd', '[abc]') as abc_character_count; 96 | select REGEXP_COUNT('QQQABCRTRABABCBCD', '[ABC]{3}'); 97 | 98 | 99 | /* 100 | The Snowflake REGEXP_REPLACE function returns the string by replacing specified pattern. 101 | If no matches found, original string will be returned. 102 | 103 | Following is the syntax of the Regexp_replace function. 104 | 105 | REGEXP_REPLACE( , [ , , , , ] ) 106 | 107 | 1. Extract date from a text string using Snowflake REGEXP_REPLACE Function 108 | The REGEXP_REPLACE function is one of the easiest functions to get the required value when manipulating strings data. 109 | Consider the below example to replace all characters except the date value. */ 110 | 111 | --For example, consider following query to return only user name. 112 | select regexp_replace( 'anandjha2309@gmail.com', '@.*\\.(com)'); 113 | 114 | select regexp_replace('Customers - (NY)','\\(|\\)','') as customers; 115 | 116 | SELECT TRIM(REGEXP_REPLACE(string, '[a-z/-/A-Z/.]', '')) 117 | AS date_value 118 | FROM (SELECT 'My DOB is 04-12-1976.' AS string) a; 119 | 120 | /* 2. Extract date using REGEXP_SUBSTR 121 | Alternatively, REGEXP_SUBSTR function can be used to get date field from the string data. 122 | 123 | For example, consider the below example to get date value from a string containing text and the date. */ 124 | SELECT REGEXP_SUBSTR('I am celebrating my birthday on 05/12/2020 this year','[0-9][0-9]/[0-9][0-9]/[0-9][0-9][0-9][0-9]') as dob; 125 | 126 | -- 3. Validate if date is in a valid format using REGEXP_LIKE function 127 | SELECT * FROM (SELECT '04-12-1976' AS string) a where REGEXP_LIKE(string,'\\d{1,2}\\-\\d{1,2}-\\d{4,4}'); 128 | 129 | --4. String pattern matching using REGEXP_LIKE 130 | WITH tbl 131 | AS (select t.column1 mycol 132 | from values('A1 something'),('B1 something'),('Should not be matched'),('C1 should be matched') t ) 133 | 134 | SELECT * FROM tbl WHERE regexp_like (mycol,'[a-zA-z]\\d{1,}[\\s0-9a-zA-Z]*'); 135 | 136 | 137 | /* 138 | -- Snowflake REGEXP Function 139 | The Snowflake REGEXP function is an alias for RLIKE. 140 | 141 | Following is the syntax of the REGEXP function. 142 | 143 | -- 1st syntax 144 | REGEXP( , [ , ] ) 145 | 146 | -- 2nd syntax 147 | REGEXP */ 148 | 149 | --For example, consider following query to matches string with query. 150 | SELECT city REGEXP 'M.*' 151 | FROM ( 152 | SELECT 'Bangalore' AS city 153 | UNION ALL 154 | SELECT 'Mangalore' AS city ) AS tmp; 155 | 156 | /* 157 | Snowflake RLIKE Function 158 | The Snowflake RLIKE function is an alias for REGEXP and regexp_like. 159 | 160 | Following is the syntax of the RLIKE function. 161 | 162 | -- 1st syntax 163 | RLIKE( , [ , ] ) 164 | 165 | -- 2nd syntax 166 | RLIKE 167 | */ 168 | 169 | --For example, consider following query to matches string with query. 
170 | SELECT city RLIKE 'M.*' 171 | FROM ( 172 | SELECT 'Bangalore' AS city 173 | UNION ALL 174 | SELECT 'Mangalore' AS city ) AS tmp; 175 | 176 | 177 | 178 | /* Snowflake Extract Numbers from the string examples 179 | The regular expression functions come handy when you want to extract numerical values from the string data. 180 | Though you can use built-in functions to check if a string is numeric. 181 | But, getting particular numeric values is done easily using regular expressions. 182 | 183 | For example, extract the number from the string using Snowflake regexp_replace regular expression Function. */ 184 | 185 | SELECT TRIM(REGEXP_REPLACE(string, '[^[:digit:]]', ' ')) AS Numeric_value 186 | FROM (SELECT ' Area code for employee ID 112244 is 12345.' AS string) a; 187 | 188 | --For example, consider below query that uses different regex patterns. 189 | 190 | SELECT TRIM(REGEXP_REPLACE(string, '[a-z/-/A-z/./#/*]', '')) AS Numeric_value 191 | FROM (SELECT ' Area code for employee ID 112244 is 12345.' AS string) a; 192 | 193 | 194 | /* The most common requirement in the data warehouse environment is to extract certain digits from the string. 195 | For example, extract the 6 digit number from string data. 196 | 197 | There are many methods that you can use, however, the easiest method is to use the 198 | Snowflake REGEXP_SUBSTR regular expressions for this requirement. 199 | 200 | You can modify the regular expression pattern to extract any number of digits based on your requirements. */ 201 | 202 | --Snowflake Extract 6 digit’s numbers from string value examples 203 | SELECT REGEXP_SUBSTR(string, '(^|[^[:word:]]|[[:space:]])\\d{6}([^[:word:]]|[[:space:]]|$)') AS ID 204 | FROM (SELECT ' Area code for employee ID 112244 is 12345.' AS string) a; 205 | 206 | -- Another common requirement is to extract alphanumeric values from a string data. 207 | -- Snowflake Extract Alphanumeric from the string examples 208 | -- For example, consider below example to extract ID which is a combination of ‘ID’ and numeric value. 209 | 210 | SELECT REGEXP_SUBSTR('abc jjs Updates ID 123 ID_112233','ID_[0-9][0-9][0-9][0-9][0-9][0-9]') as ID; 211 | 212 | --01PI10EC014 1pi10eC014 213 | 214 | 215 | 216 | --How to Remove Spaces in the String in snowflake? 217 | 218 | /* Nowadays, data is required everywhere. 219 | Many organizations automatically capture the data using tools or machines. 220 | Machines may introduce the unwanted data such as white space when it captures the actual data. 221 | These junk data is of no use in reporting, thus you need to remove them before loading into the target table. 222 | 223 | In a data warehouse, you will receive data from multiple sources. 224 | You may have to pre-process the data before loading it to target table. 225 | The pre-process step such as removing white spaces from data is commonly used. 226 | In this LECTURE we will check how to remove spaces in a string using Snowflake built-in functions. 227 | 228 | Snowflake provides many built-in functions to remove white space or any unwanted data from a string. 229 | 230 | You can use any of the following string functions as per your requirements. 
231 | 232 | Replace String Function 233 | TRIM Function 234 | Translate Function 235 | REGEXP_REPLACE Function */ 236 | 237 | SELECT REPLACE('AB C D ', ' ', '') as space_removed_output; 238 | 239 | SELECT TRANSLATE('AB C D ', ' ', '') as output; 240 | 241 | /* Remove White Spaces using REGEXP_REPLACE Function 242 | 243 | The REGEXP_REPLACE function removes all occurrences of white space in a string. 244 | For example, consider the following regexp_replace example to replace all spaces in the string with nothing. */ 245 | 246 | select REGEXP_REPLACE('AB C D hello how are you hi an a n d ','( ){1,}','') as output; 247 | 248 | select regexp_replace('It was the best of times, it was the worst of times', '( ){1,}','') as "result" from dual; 249 | 250 | -------------------------------------------------------------------------------- /SNOWFLAKE TEXT OPERATIONS.txt: -------------------------------------------------------------------------------- 1 | USE DATABASE JD_DEMODATABASE; 2 | 3 | CREATE OR REPLACE TABLE AGENTS 4 | ( 5 | AGENT_CODE CHAR(6) NOT NULL PRIMARY KEY, 6 | AGENT_NAME CHAR(40) , 7 | WORKING_AREA CHAR(35), 8 | COMMISSION NUMBER(10,2) DEFAULT 0.05, 9 | PHONE_NO CHAR(15), 10 | COUNTRY VARCHAR2(25) 11 | ); 12 | 13 | INSERT INTO AGENTS VALUES ('A007', 'Ram', 'Bangalore',0.15,'077-25814763', ''); 14 | 15 | INSERT INTO AGENTS(AGENT_CODE,AGENT_NAME,WORKING_AREA) VALUES ('A110', 'Anand', 'Germany'); 16 | 17 | INSERT INTO AGENTS VALUES ('A003', 'Alex ', 'UK', '0.13', '075-12458969', ''); 18 | INSERT INTO AGENTS VALUES ('A008', 'Alford', 'US', '0.12', '044-25874365', ''); 19 | INSERT INTO AGENTS VALUES ('A011', 'Ravi', 'BangaloRE', '0.15', '077-45625874', ''); 20 | INSERT INTO AGENTS VALUES ('A010', 'Santakumar', 'Chennai', '0.14', '007-22388644', ''); 21 | INSERT INTO AGENTS VALUES ('A012', 'Lucida', 'San Jose', '0.12', '044-52981425', ''); 22 | INSERT INTO AGENTS VALUES ('A005', 'Anderson', 'Brisbane', '0.13', '045-21447739', ''); 23 | INSERT INTO AGENTS VALUES ('A001', 'Subbarao', 'Bangalore', '0.14', '077-12346674', ''); 24 | INSERT INTO AGENTS VALUES ('A002', 'Mukesh', 'Mumbai', '0.11', '029-12358964', ''); 25 | INSERT INTO AGENTS VALUES ('A006', 'McDen', 'London', '0.15', '078-22255588', ''); 26 | INSERT INTO AGENTS VALUES ('A004', 'Ivan', 'Toronto', '0.15', '008-22544166', ''); 27 | INSERT INTO AGENTS VALUES ('A009', 'Benjamin', 'Hampshire', '0.11', '008-22536178', ''); 28 | 29 | SELECT * FROM AGENTS; 30 | 31 | /* The SUBSTRING () function returns the portion of a string or binary value from the complete string, 32 | starting with the character specified by substring_start_index. If any input is null, null is returned */ 33 | 34 | --Example 1: Get the substring from a specific string in Snowflake 35 | select substring('JALPA DESAI', 1, 7); 36 | select substring('RAJ VAKANI', 0, 7); 37 | select substr('RAJ HARSH',0,3); 38 | select substr('RAJ HARSH',3); 39 | 40 | select substring('JALPA XYZW PATEL', -7); 41 | 42 | 43 | 44 | --Example 2: Get the substring from a specific string by using table data 45 | select AGENT_CODE,AGENT_NAME,substring(AGENT_NAME,0,2) AS AGENT_INITIALS from agents; 46 | 47 | /* To get a specific substring from an expression or string. 48 | You can also use the substring function if you want to get the substrings in reverse order from the strings. */ 49 | 50 | -- If you use the substrings in reverse order, use the starting index as a negative value. 51 | select AGENT_CODE,AGENT_NAME,substring(AGENT_NAME,-3,3) AS NAME_BACKWARDS from agents;
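-- a related sketch on the AGENTS table above: POSITION finds the hyphen in PHONE_NO
-- and SUBSTR keeps everything before it as the dialing prefix (rows with no phone,
-- such as 'A110', simply return NULL).
select AGENT_CODE, PHONE_NO,
       substr(PHONE_NO, 1, position('-', PHONE_NO) - 1) as dialing_prefix
from agents;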
52 | 53 | /* 54 | Snowflake CAST is a data-type conversion command. Snowflake CAST works similar to the TO_ datatype conversion functions. 55 | If a particular data type conversion is not possible, 56 | it raises an error. Let's understand the Snowflake CAST in detail via the syntax and a few examples. 57 | */ 58 | 59 | select cast('1.6845' as decimal(6,2)); 60 | select '1.6845'::decimal(6,1); 61 | 62 | select cast('10-Sep-2021' as timestamp); 63 | 64 | -- When the provided precision is insufficient to hold the input value, the Snowflake CAST command raises an error as follows: 65 | select cast('123.12' as number(4,2)); 66 | --Here, precision is set as 4 but the input value has a total of 5 digits, thereby raising the error. 67 | 68 | --TRY_CAST( <source_string_expr> AS <target_data_type> ) 69 | select try_cast('05-Mar-2016' as timestamp); 70 | 71 | --The Snowflake TRY_CAST command returns NULL as the input value 72 | --has more characters than the provided precision in the target data type. 73 | select try_cast('ANAND' as char(4)); 74 | 75 | --trim function 76 | select trim('❄-❄ABC-❄-', '❄-') as trimmed_string; 77 | select trim('❄-❄ABC-❄-', '') as trimmed_string; 78 | SELECT TRIM('********T E S T I N G 1 2 3 4********','*') AS TRIMMED_SPACE; 79 | 80 | --ltrim 81 | select ltrim('#000000123', '0#'); 82 | select ltrim('#0000AISHWARYA', '0#'); 83 | select ltrim(' JALPA DESAI', ''); 84 | 85 | --RTRIM 86 | select rtrim('$125.00', '0.'); 87 | select rtrim('JALPA DESAI*****', '*'); 88 | 89 | --To remove the white spaces or the blank spaces from the string TRIM function can be used. 90 | --It can remove the whitespaces from the start and end both. 91 | select TRIM(' Snowflake Space Remove ', ' '); 92 | 93 | --To remove the first character from the string you can pass the string in the LTRIM function. 94 | select LTRIM('Snowflake Remove ', 'S'); 95 | --To remove the last character from the string you can pass the string in the RTRIM function. 96 | select RTRIM('Snowflake Remove', 'e'); 97 | 98 | select TRIM(' Snowflake Space Remove ', ' '); 99 | 100 | --LENGTH FUNCTION 101 | SELECT LEN(trim(' Snowflake Space Remove ')) as length_string; 102 | SELECT LENGTH(trim(' Snowflake Space Remove ')) as length_string; 103 | 104 | --concat 105 | select * from agents; 106 | 107 | SELECT CONCAT('KA', ', ', 'India') as state_country; 108 | 109 | SELECT *,concat(AGENT_CODE, '-', AGENT_NAME) AS agent_details from agents; 110 | 111 | --Snowflake CONCAT_WS Function 112 | /* The concat_ws function concatenates two or more strings, or concatenates two or more binary values 113 | and adds separator between those strings. 114 | The CONCAT_WS operator requires at least two arguments, and uses the first argument to separate all following arguments 115 | 116 | Following is the concat_ws function syntax 117 | CONCAT_WS( <separator> , <expr1> [ , <exprN> ... ] ) */ 118 | 119 | SELECT CONCAT_WS('-', 'KA','India') as state_country; 120 | 121 | /* 122 | Snowflake Concat Operator (||) 123 | The concatenation operator concatenates two strings on either side of the || symbol and returns the concatenated string. 124 | The || operator provides alternative syntax for CONCAT and requires at least two arguments. 125 | 126 | For example, 127 | */ 128 | select 'Nested' || ' CONCAT' || ' example!'
as Concat_operator; 129 | 130 | 131 | --Handling NULL Values in CONCAT function and the Concatenation operator 132 | --For both the CONCAT function and the concatenation operator, 133 | --if one or both strings are null, the result of the concatenation is null. 134 | --For example, 135 | 136 | select concat('Bangalore, ', NULL) as null_example; 137 | select 'Bangalore, '|| NULL as null_example; 138 | 139 | --how to handle it? 140 | select concat('Bangalore ', NVL(NULL,'')) as null_example; 141 | select 'Bangalore'|| NVL(NULL, '') as null_example; 142 | 143 | -- REVERSE IN STRING 144 | select reverse('JALPA, DESAI!'); 145 | 146 | -- SPLIT 147 | select split('127.0.0.1', '.'); 148 | SELECT SPLIT('DAMINI-KUMARI-HIRE','-'); 149 | 150 | select 0, split_part('11.22.33', '.', 0); 151 | 152 | select split_part('aaa--bbb-BBB--ccc', '--',1); 153 | select split_part('aaa--bbb-BBB--ccc', '--',2); 154 | select split_part('aaa--bbb-BBB--ccc', '--',3); 155 | select split_part('aaa--bbb-BBB--ccc', '--',4); 156 | 157 | SELECT split(AGENT_DETAILS, '-') 158 | FROM ( 159 | SELECT *,concat(AGENT_CODE, '-', AGENT_NAME) AS agent_details 160 | from agents ); 161 | 162 | 163 | SELECT lower('India Is My Country') as lwr_strng; 164 | SELECT UPPER('India Is My Country') as upr_strng; 165 | 166 | --REPLACE COMMAND 167 | -- REPLACE( , [ , ] ) 168 | 169 | select REPLACE( ' JALPA DESAI ' ,' ','$'); 170 | select REPLACE( ' HARSHJIT KUMAR DESAI ' ,' '); -- 171 | 172 | SELECT REPLACE(' T E S T I N G 1 2 3 4 ',' ') -------------------------------------------------------------------------------- /SNOWFLAKE TIME TRAVEL.txt: -------------------------------------------------------------------------------- 1 | USE DATABASE JD_DEMODATABASE; 2 | 3 | create or replace table JD_time_travel_table 4 | ( 5 | orderkey number(38,0), 6 | custkey number(38,0), 7 | orderstatus varchar(1), 8 | totalprice number(12,2), 9 | orderdate date, 10 | orderpriority varchar(15), 11 | clerk varchar(15), 12 | shippriority number(38,0), 13 | comment varchar(79) 14 | ) 15 | 16 | data_retention_time_in_days = 1; 17 | 18 | show tables like 'jd_time_travel_table'; 19 | 20 | describe table jd_time_travel_table; 21 | 22 | --command to set data_retention_time_in_days to given value 23 | alter table jd_time_travel_table 24 | set data_retention_time_in_days=55; 25 | 26 | 27 | CREATE or replace DATABASE TIMETRAVEL; 28 | 29 | use database timetravel; 30 | 31 | CREATE or replace table jd_CONSUMER_COMPLAINTS 32 | 33 | ( DATE_RECEIVED STRING, 34 | PRODUCT_NAME VARCHAR2(50), 35 | SUB_PRODUCT VARCHAR2(100), 36 | ISSUE VARCHAR2(100), 37 | SUB_ISSUE VARCHAR2(100), 38 | CONSUMER_COMPLAINT_NARRATIVE string, 39 | Company_Public_Response STRING, 40 | Company VARCHAR(100), 41 | State_Name CHAR(4), 42 | Zip_Code string, 43 | Tags VARCHAR(40), 44 | Consumer_Consent_Provided CHAR(25), 45 | Submitted_via STRING, 46 | Date_Sent_to_Company STRING, 47 | Company_Response_to_Consumer VARCHAR(40), 48 | Timely_Response CHAR(4), 49 | CONSUMER_DISPUTED CHAR(4), 50 | COMPLAINT_ID NUMBER(12,0) NOT NULL PRIMARY KEY 51 | ); 52 | 53 | DESCRIBE TABLE JD_CONSUMER_COMPLAINTS; 54 | 55 | select * from JD_CONSUMER_COMPLAINTS; 56 | 57 | -- get the current timestammp 58 | SELECT CURRENT_TIMESTAMP; -- 2022-11-28 18:05:27.882 +0000 59 | 60 | -- set timezone to UTC 61 | ALTER SESSION SET TIMEZONE = 'UTC'; 62 | 63 | SELECT DISTINCT SUB_ISSUE FROM JD_CONSUMER_COMPLAINTS; 64 | 65 | -- update all age as zero 66 | update JD_CONSUMER_COMPLAINTS set sub_issue = NULL; 67 | 68 | SELECT * FROM 
JD_CONSUMER_COMPLAINTS; 69 | 70 | -- time travel to a time based on the timestamp 71 | SELECT DISTINCT SUB_ISSUE FROM JD_CONSUMER_COMPLAINTS 72 | before(timestamp => '2022-11-28 18:05:27.882 +0000' ::timestamp); 73 | 74 | select * from JD_CONSUMER_COMPLAINTS before(timestamp => '2022-11-28 18:05:27.882 +0000' ::timestamp); 75 | 76 | -- time travel to 5 minutes ago 77 | select * from JD_CONSUMER_COMPLAINTS AT(offset => -60*5); 78 | 79 | -- note down the query id of this query as we will use it in the time travel query as well 80 | update JD_CONSUMER_COMPLAINTS set TAGS = NULL; 81 | --01a89dc3-3200-9a0b-0002-0dfe00088222 82 | 83 | -- time travel to the time before the query id specified 84 | select * from JD_CONSUMER_COMPLAINTS before(statement => '01a89dc3-3200-9a0b-0002-0dfe00088222'); 85 | 86 | 87 | DECLARE 88 | query_id_1 VARCHAR; 89 | query_id_2 VARCHAR; 90 | BEGIN 91 | SELECT 1; 92 | query_id_1 := SQLID; 93 | SELECT 2; 94 | query_id_2 := SQLID; 95 | RETURN [query_id_1, query_id_2]; 96 | END; -------------------------------------------------------------------------------- /SNOWFLAKE_ANALYSIS_JOINS.txt: -------------------------------------------------------------------------------- 1 | SELECT * FROM "SNOWFLAKE_SAMPLE_DATA"."TPCH_SF10"."LINEITEM"; 2 | 3 | SELECT COUNT (*) FROM "SNOWFLAKE_SAMPLE_DATA"."TPCH_SF10"."LINEITEM"; 4 | 5 | SELECT * FROM "SNOWFLAKE_SAMPLE_DATA"."TPCDS_SF100TCL"."CATALOG_SALES"; 6 | 7 | SELECT * FROM "SNOWFLAKE_SAMPLE_DATA"."TPCDS_SF100TCL"."CATALOG_SALES" LIMIT 100000; 8 | 9 | CREATE DATABASE JD_demodatabase 10 | 11 | use JD_demodatabase 12 | 13 | CREATE OR REPLACE TABLE OWNER 14 | ( 15 | OwnerID INTEGER NOT NULL PRIMARY KEY , 16 | Name VARCHAR2(20), 17 | Surname STRING, 18 | StreetAddress VARCHAR2(50), 19 | City STRING, 20 | StatE CHAR(4), 21 | StateFull STRING, 22 | ZipCode INTEGER 23 | ); 24 | 25 | CREATE OR REPLACE TABLE PETS 26 | ( 27 | PetID VARCHAR(10) NOT NULL PRIMARY KEY, 28 | Name VARCHAR(20), 29 | Kind STRING, 30 | Gender CHAR(7), 31 | Age INTEGER, 32 | OwnerID INTEGER NOT NULL REFERENCES OWNER 33 | ); 34 | SELECT * FROM OWNER 35 | SELECT * FROM PETS 36 | SELECT COUNT(DISTINCT OwnerID) from OWNER; 37 | SELECT COUNT(DISTINCT PetID) from PETS; 38 | 39 | -- NEED THE NAME OF OWNER & THEIR DOGS NAME ALONG WITH THEIR AGE ---- INNER JOIN 40 | SELECT O.Name AS OWNER_NAME,p.NAME AS PET_NAME,p.age AS PET_AGE 41 | FROM OWNER o 42 | INNER JOIN PETS p ON o.OwnerID = p.OwnerID; 43 | 44 | --NEED THE NAME OF ALL THE OWNERS IRRESPECTIVE WETHER OR NOT THEY ARE HAVING PETS 45 | SELECT O.Name AS OWNER_NAME,p.NAME AS PET_NAME,p.age AS PET_AGE 46 | FROM OWNER o 47 | LEFT OUTER JOIN PETS p ON o.OwnerID = p.OwnerID; 48 | 49 | --- COUNT OF PETS EACH OWNER HAS 50 | SELECT O.Name AS OWNER_NAME,COUNT(DISTINCT p.PETID) 51 | FROM OWNER o 52 | INNER JOIN PETS p ON o.OwnerID = p.OwnerID 53 | GROUP BY 1 54 | ORDER BY 2 DESC; 55 | 56 | ---RIGHT JOIN 57 | SELECT O.Name AS OWNER_NAME,p.NAME AS PET_NAME,p.age AS PET_AGE 58 | FROM OWNER o 59 | RIGHT JOIN PETS p ON o.OwnerID = p.OwnerID; 60 | 61 | --FULL OUTER JOIN 62 | SELECT O.*,P.* 63 | FROM OWNER O 64 | FULL OUTER JOIN PETS p ON o.OwnerID = p.OwnerID; 65 | 66 | -- INFO OF ALL THE PETS HOLD BY THEIR OWNER 67 | SELECT DISTINCT KIND FROM PETS; 68 | SELECT KIND,COUNT(*) FROM PETS 69 | GROUP BY 1; 70 | 71 | 72 | --- CROSS JOIN 73 | SELECT O.*,P.* 74 | FROM OWNER O 75 | CROSS JOIN PETS p ; 76 | 77 | 78 | CREATE OR REPLACE TABLE EMPLOYEE 79 | ( 80 | EMPID INTEGER NOT NULL PRIMARY KEY, 81 | EMP_NAME VARCHAR2(20), 82 | JOB_ROLE STRING, 83 | 
SALARY NUMBER(10,2) 84 | ); 85 | 86 | INSERT INTO EMPLOYEE 87 | VALUES('101','JALPA','DATA SCIENTIST',50000); 88 | 89 | INSERT INTO EMPLOYEE 90 | VALUES(102,'BHARGAV', 'Data Engineer',60000); 91 | 92 | INSERT INTO EMPLOYEE 93 | VALUES(103,'RUHANI', 'Data Scientist',48000); 94 | 95 | INSERT INTO EMPLOYEE 96 | VALUES(104,'PIYA', 'Analyst',98000); 97 | 98 | INSERT INTO EMPLOYEE 99 | VALUES(105,'PULKIT', 'Data Scientist',72000); 100 | 101 | INSERT INTO EMPLOYEE 102 | VALUES(106,'RIYA','Analyst',100000); 103 | 104 | INSERT INTO EMPLOYEE 105 | VALUES(107,'RISHABH','Data Engineer',67000); 106 | 107 | INSERT INTO EMPLOYEE 108 | VALUES(108,'SHUBHAM','Manager',148000); 109 | 110 | INSERT INTO EMPLOYEE 111 | VALUES(109,'RAVI','Manager',213000); 112 | 113 | INSERT INTO EMPLOYEE 114 | VALUES(110,'DHRUV','Data Scientist',89000); 115 | 116 | DELETE FROM EMPLOYEE WHERE EMPID = 110; 117 | 118 | SELECT * FROM EMPLOYEE; 119 | 120 | 121 | INSERT INTO EMPLOYEE 122 | VALUES('101','ANAND JHA','Analyst',50000); 123 | 124 | INSERT INTO EMPLOYEE 125 | VALUES(102,'Alex', 'Data Engineer',60000); 126 | 127 | INSERT INTO EMPLOYEE 128 | VALUES(103,'Ravi', 'Data Scientist',48000); 129 | 130 | INSERT INTO EMPLOYEE 131 | VALUES(104,'Peter', 'Analyst',98000); 132 | 133 | INSERT INTO EMPLOYEE 134 | VALUES(105,'Pulkit', 'Data Scientist',72000); 135 | 136 | INSERT INTO EMPLOYEE 137 | VALUES(106,'Robert','Analyst',100000); 138 | 139 | INSERT INTO EMPLOYEE 140 | VALUES(107,'Rishabh','Data Engineer',67000); 141 | 142 | INSERT INTO EMPLOYEE 143 | VALUES(108,'Subhash','Manager',148000); 144 | 145 | INSERT INTO EMPLOYEE 146 | VALUES(109,'Michael','Manager',213000); 147 | 148 | INSERT INTO EMPLOYEE 149 | VALUES(110,'Dhruv','Data Scientist',89000); 150 | 151 | DELETE FROM EMPLOYEE WHERE EMPID = 110; 152 | 153 | SELECT * FROM EMPLOYEE; 154 | 155 | 156 | -- HOW TO DOWNLOAD DATA FROM SNOWFLAKE 157 | SELECT * FROM "SNOWFLAKE_SAMPLE_DATA"."TPCH_SF10"."LINEITEM"; 158 | 159 | SELECT * FROM 160 | (SELECT *,ROW_NUMBER() OVER(ORDER BY L_COMMITDATE) AS ROW_NUM 161 | FROM "SNOWFLAKE_SAMPLE_DATA"."TPCH_SF10"."LINEITEM") 162 | WHERE ROW_NUM <= 3000; -------------------------------------------------------------------------------- /SNOWFLAKE_DATE_TIME_FUNCTIONS.txt: -------------------------------------------------------------------------------- 1 | -- sql date functions 2 | USE BIT 3 | 4 | -- GET CURRENT DATE 5 | SELECT CURRENT_DATE; 6 | 7 | -- GET CURRENT TIMESTAMP 8 | SELECT CURRENT_TIMESTAMP; 9 | 10 | -- GET CURRENT TIME 11 | SELECT CURRENT_TIME; 12 | 13 | 14 | -- CONVERT TIMEZONE 15 | SELECT CONVERT_TIMEZONE('UTC',CURRENT_TIMESTAMP) AS UTC_TIMEZONE; 16 | SELECT CONVERT_TIMEZONE('Asia/Kolkata',CURRENT_TIMESTAMP) AS IST_TIMEZONE; 17 | 18 | -- CONVERT DATE TO SUBSEQUENT 4 MONTHS AHEAD 19 | SELECT ADD_MONTHS(CURRENT_DATE,4) as DATE_AFTER_4_MONTHS; 20 | 21 | -- 3 MONTHS BACK DATE 22 | SELECT TO_CHAR(ADD_MONTHS(CURRENT_DATE,-3),'DD-MM-YYYY') as DATE_BEFORE_3_MONTHS; 23 | 24 | -- GET YR FROM DATE 25 | SELECT DATE_TRUNC('YEAR',CURRENT_DATE) AS YR_FROM_DATE; 26 | 27 | -- GET MTH FROM DATE 28 | SELECT DATE_TRUNC('MONTH',CURRENT_DATE) AS MTH_FROM_DATE; 29 | 30 | -- GET DAY FROM DATE 31 | SELECT DATE_TRUNC('DAY',CURRENT_DATE) AS DAY_FROM_DATE; 32 | 33 | -- GET LAST DAY OF current MONTH 34 | SELECT LAST_DAY(CURRENT_DATE) as LAST_DAY_CURRENT_MONTH; 35 | 36 | -- GET LAST DAY OF PREVIOUS MONTH 37 | SELECT LAST_DAY(CURRENT_DATE - INTERVAL '1 MONTH') AS LAST_DAY_PREV_MNTH; 38 | 39 | SELECT LAST_DAY(CURRENT_DATE - INTERVAL '2 MONTH') + INTERVAL '1 DAY' AS FIRST_DAY;
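-- a quick sketch of the same trick in the other direction: the first day of the
-- NEXT month is just the last day of the current month plus one day.
SELECT LAST_DAY(CURRENT_DATE) + INTERVAL '1 DAY' AS FIRST_DAY_NEXT_MONTH;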
40 | 41 | SELECT QUARTER(CURRENT_DATE) AS QTR; 42 | 43 | SELECT EXTRACT(YEAR FROM CURRENT_DATE) AS YR; 44 | SELECT EXTRACT(MONTH FROM CURRENT_DATE) AS MTH; 45 | SELECT EXTRACT(DAY FROM CURRENT_DATE) AS DAY; 46 | 47 | SELECT QUARTER(to_date('2022-08-24')); 48 | 49 | SELECT TO_DATE('08-23-2022','mm-dd-yyyy'); 50 | 51 | SELECT TO_DATE('1993-08-17') AS DATE; 52 | 53 | SELECT TO_CHAR(TO_DATE('1993-08-17'),'DD-MM-YYYY') AS DATE_DD_MM_YYYY; 54 | 55 | SELECT TO_CHAR(TO_DATE('1993-08-17'),'MM-YYYY') AS DATE_MM_YYYY; 56 | 57 | SELECT TO_CHAR(TO_DATE('1993-08-17'),'MON-YYYY') AS DATE_MON_YYYY; 58 | 59 | SELECT TO_CHAR(TO_DATE('1993-08-17'),'MON-YY') AS DATE_MON_YY; 60 | 61 | SELECT TO_CHAR(TO_DATE('1993-08-17'),'DY') AS DATE_DAY; 62 | 63 | SELECT DAYNAME ('2016-11-12'); 64 | 65 | SELECT TO_CHAR(TO_DATE('1993-08-17'),'YYYY-DD') AS DATE; 66 | 67 | SELECT TO_CHAR(TO_DATE('1993-08-17'),'DD-MM') AS DATE; 68 | 69 | SELECT MONTH(CURRENT_DATE); 70 | SELECT EXTRACT(MONTH FROM CURRENT_DATE) AS MTH; 71 | 72 | SELECT ADD_MONTHS(CURRENT_DATE,-3) AS DATE_3_MNTHS_BACK; 73 | SELECT ADD_MONTHS(CURRENT_DATE,5) AS DATE_5_MNTHS_AHEAD; 74 | 75 | SELECT DATEDIFF('day', '2022-06-01',CURRENT_DATE); 76 | SELECT DATEDIFF('day', '2022-07-23','2023-07-19'); 77 | 78 | SELECT DATEDIFF('MONTH', '2021-06-01',CURRENT_DATE); 79 | SELECT DATEDIFF('YEAR', '2014-06-01',CURRENT_DATE); 80 | 81 | SELECT DATEADD('day',-23,'2022-06-01'); 82 | SELECT DATEADD('month',-2,'2022-06-01'); 83 | SELECT DATEADD('year',-5,'2022-06-01'); 84 | 85 | SELECT WEEK(CURRENT_DATE); -- HOW MANY WEEKS OF THE CURRENT YEAR HAVE PASSED SO FAR 86 | SELECT MONTH(CURRENT_DATE); -- HOW MANY MONTHS OF THE CURRENT YEAR HAVE PASSED SO FAR 87 | SELECT YEAR(CURRENT_DATE); 88 | 89 | 90 | 91 | SELECT DATEDIFF('MONTH', '2022-06-01',CURRENT_DATE); 92 | SELECT DATEDIFF('YEAR', '2014-06-01',CURRENT_DATE); 93 | 94 | SELECT DATE_TRUNC('DAY',CURRENT_DATE) AS DAY_FROM_DATE; -------------------------------------------------------------------------------- /SNOWFLAKE_PIVOT_UNPIVOT.txt: -------------------------------------------------------------------------------- 1 | CREATE DATABASE PIVOT_UNPIVOT; 2 | 3 | -- Pivot Table 4 | 5 | CREATE OR REPLACE TABLE monthly_sales1(empid INT, amount INT, month TEXT); 6 | INSERT INTO monthly_sales1 VALUES 7 | (1, 10000, 'JAN'), 8 | (1, 400, 'JAN'), 9 | (2, 4500, 'JAN'), 10 | (2, 35000, 'JAN'), 11 | (1, 5000, 'FEB'), 12 | (1, 3000, 'FEB'), 13 | (2, 200, 'FEB'), 14 | (2, 90500, 'FEB'), 15 | (1, 6000, 'MAR'), 16 | (1, 5000, 'MAR'), 17 | (2, 2500, 'MAR'), 18 | (2, 9500, 'MAR'), 19 | (1, 8000, 'APR'), 20 | (1, 10000, 'APR'), 21 | (2, 800, 'APR'), 22 | (2, 4500, 'APR'); 23 | 24 | select * from monthly_sales1; 25 | 26 | SELECT * 27 | FROM monthly_sales1 28 | PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) 29 | AS p 30 | ORDER BY EMPID; 31 | 32 | SELECT * 33 | FROM monthly_sales1 34 | PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) 35 | AS p (EMP_ID_renamed, JAN, FEB, MAR, APR) 36 | ORDER BY EMP_ID_renamed; 37 | 38 | SELECT EMPID AS EMP_ID, "'JAN'" AS JANUARY, "'FEB'" AS FEBRUARY, "'MAR'" AS MARCH, 39 | "'APR'" AS APRIL 40 | FROM monthly_sales1 41 | PIVOT(sum(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) 42 | AS p 43 | ORDER BY EMPID; 44 | 45 | -- Unpivot Table 46 | 47 | CREATE OR REPLACE TABLE monthly_sales(empid INT, dept TEXT, jan INT, feb INT, mar INT, april INT); 48 | 49 | INSERT INTO monthly_sales VALUES 50 | (1, 'electronics', 100, 200, 300, 100), 51 | (2, 'clothes', 100, 300, 150, 200), 52 | (3, 'cars', 200, 400, 100, 50); 53 | 54 | SELECT * FROM monthly_sales 55 | UNPIVOT(sales FOR month IN (jan, feb, mar, april)) 56 | ORDER BY empid; 57 |
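-- a sketch of what PIVOT is doing under the hood: the same monthly totals can be
-- produced by hand with conditional aggregation (IFF), one column per month.
SELECT empid,
       SUM(IFF(month = 'JAN', amount, 0)) AS JAN,
       SUM(IFF(month = 'FEB', amount, 0)) AS FEB,
       SUM(IFF(month = 'MAR', amount, 0)) AS MAR,
       SUM(IFF(month = 'APR', amount, 0)) AS APR
FROM monthly_sales1
GROUP BY empid
ORDER BY empid;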
200, 400, 100, 50); 53 | 54 | SELECT * FROM monthly_sales 55 | UNPIVOT(sales FOR month IN (jan, feb, mar, april)) 56 | ORDER BY empid; 57 | -------------------------------------------------------------------------------- /SNOWFLAKE_PIVOT_UNPIVOT_OPERATIONS.txt: -------------------------------------------------------------------------------- 1 | CREATE DATABASE PIVOT_UNPIVOT; 2 | 3 | -- Pivot Table 4 | USE PIVOT_UNPIVOT 5 | 6 | CREATE OR REPLACE TABLE monthly_sales1(empid INT, amount INT, month TEXT); 7 | INSERT INTO monthly_sales1 VALUES 8 | (1, 10000, 'JAN'), 9 | (1, 400, 'JAN'), 10 | (2, 4500, 'JAN'), 11 | (2, 35000, 'JAN'), 12 | (1, 5000, 'FEB'), 13 | (1, 3000, 'FEB'), 14 | (2, 200, 'FEB'), 15 | (2, 90500, 'FEB'), 16 | (1, 6000, 'MAR'), 17 | (1, 5000, 'MAR'), 18 | (2, 2500, 'MAR'), 19 | (2, 9500, 'MAR'), 20 | (1, 8000, 'APR'), 21 | (1, 10000, 'APR'), 22 | (2, 800, 'APR'), 23 | (2, 4500, 'APR'); 24 | 25 | select * from monthly_sales1; 26 | 27 | SELECT * 28 | FROM monthly_sales1 29 | PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) 30 | AS p 31 | ORDER BY EMPID; 32 | 33 | SELECT * 34 | FROM monthly_sales1 35 | PIVOT(SUM(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) 36 | AS p (EMP_ID_renamed, JAN, FEB, MAR, APR) 37 | ORDER BY EMP_ID_renamed; 38 | 39 | SELECT EMPID AS EMP_ID, "'JAN'" AS JANUARY, "'FEB'" AS FEBRUARY, "'MAR'" AS MARCH, 40 | "'APR'" AS APRIL 41 | FROM monthly_sales1 42 | PIVOT(sum(amount) FOR MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) 43 | AS p 44 | ORDER BY EMPID; 45 | 46 | -- Unpivot Table 47 | 48 | CREATE OR REPLACE TABLE monthly_sales(empid INT, dept TEXT, jan INT, feb INT, mar INT, april INT); 49 | 50 | INSERT INTO monthly_sales VALUES 51 | (1, 'electronics', 100, 200, 300, 100), 52 | (2, 'clothes', 100, 300, 150, 200), 53 | (3, 'cars', 200, 400, 100, 50); 54 | 55 | SELECT * FROM monthly_sales 56 | UNPIVOT(sales FOR month IN (jan, feb, mar, april)) 57 | ORDER BY empid; -------------------------------------------------------------------------------- /SNOWFLAKE_ROLLUP_CUBE.txt: -------------------------------------------------------------------------------- 1 | use bit; 2 | create table emp(id int, 3 | `name` varchar(30) not null, 4 | gender varchar(30) not null, 5 | salary int not null, 6 | department varchar(30) not null); 7 | 8 | insert into emp values 9 | (1,'krupa','female',32000,'Sales'), 10 | (2,'karan','male',42000,'HR'), 11 | (3,'charchil','male',12000,'IT'), 12 | (4,'archil','male',32000,'maketing'), 13 | (5,'binita','female',22000,'finces'), 14 | (6,'chelsa','female',31000,'Sales'), 15 | (7,'tyu','male',25000,'HR'), 16 | (8,'lkj','female',22000,'IT'), 17 | (9,'lkjh','female',42000,'maketing'), 18 | (10,'fghjkd','male',15000,'finces'), 19 | (11,'hefyg','female',33000,'Sales'), 20 | (12,'podnrb','female',32000,'HR'), 21 | (13,'tehk','male',18000,'IT'), 22 | (14,'thdgt','male',20000,'maketing'), 23 | (15,'ahfhn','female',32000,'finces'); 24 | 25 | select * from emp; 26 | 27 | select department,sum(salary) from emp group by department; 28 | 29 | #ROLLUP modifier used to produce the summary output, including extra rows that represent super-aggregate (higher-level) summary operations. 
30 | #multipule levels of analyze 31 | select department,sum(salary) from emp group by rollup (department) ; 32 | 33 | 34 | select coalesce(department,'all department') as department 35 | ,sum(salary) from emp group by rollup(department); 36 | 37 | select coalesce(department,'all department') as department, 38 | coalesce(gender,'all gender') as gender 39 | ,sum(salary) from emp group by rollup (department, gender); 40 | 41 | 42 | SELECT department ,sum(salary) 43 | FROM emp 44 | GROUP BY 45 | CUBE(department); -------------------------------------------------------------------------------- /SNOWFLAKE_SQL_TEST_OPERATIONS.txt: -------------------------------------------------------------------------------- 1 | USE DATABASE JD_DEMODATABASE; 2 | 3 | CREATE OR REPLACE TABLE AGENTS 4 | ( 5 | AGENT_CODE CHAR(6) NOT NULL PRIMARY KEY, 6 | AGENT_NAME CHAR(40) , 7 | WORKING_AREA CHAR(35), 8 | COMMISSION NUMBER(10,2) DEFAULT 0.05, 9 | PHONE_NO CHAR(15), 10 | COUNTRY VARCHAR2(25) 11 | ); 12 | 13 | INSERT INTO AGENTS VALUES ('A007', 'Ram', 'Bangalore',0.15,'077-25814763', ''); 14 | INSERT INTO AGENTS(AGENT_CODE,AGENT_NAME,WORKING_AREA) 15 | VALUES ('A110', 'Anand', 'Germany'); 16 | 17 | 18 | INSERT INTO AGENTS VALUES ('A003', 'Alex ', 'UK', '0.13', '075-12458969', ''); 19 | INSERT INTO AGENTS VALUES ('A008', 'Alford', 'US', '0.12', '044-25874365', ''); 20 | INSERT INTO AGENTS VALUES ('A011', 'Ravi', 'BangaloRE', '0.15', '077-45625874', ''); 21 | INSERT INTO AGENTS VALUES ('A010', 'Santakumar', 'Chennai', '0.14', '007-22388644', ''); 22 | INSERT INTO AGENTS VALUES ('A012', 'Lucida', 'San Jose', '0.12', '044-52981425', ''); 23 | INSERT INTO AGENTS VALUES ('A005', 'Anderson', 'Brisban', '0.13', '045-21447739', ''); 24 | INSERT INTO AGENTS VALUES ('A001', 'Subbarao', 'Bangalore', '0.14', '077-12346674', ''); 25 | INSERT INTO AGENTS VALUES ('A002', 'Mukesh', 'Mumbai', '0.11', '029-12358964', ''); 26 | INSERT INTO AGENTS VALUES ('A006', 'McDen', 'London', '0.15', '078-22255588', ''); 27 | INSERT INTO AGENTS VALUES ('A004', 'Ivan', 'Torento', '0.15', '008-22544166', ''); 28 | INSERT INTO AGENTS VALUES ('A009', 'Benjamin', 'Hampshair', '0.11', '008-22536178', ''); 29 | 30 | SELECT * FROM AGENTS; 31 | 32 | /* The SUBSTRING () function returns the position of a string or binary value from the complete string, 33 | starting with the character specified by substring_start_index. If any input is null, null is returned */ 34 | 35 | --Example 1: Get the substring from a specific string in Snowflake 36 | select substring('JALPA DESAI', 1, 7); 37 | select substring('RAJ VAKANI', 0, 7); 38 | select substr('RAJ HARSH',0,3); 39 | select substr('RAJ HARSH',3); 40 | 41 | select substring('JALPA XYZW PATEL', -7); 42 | 43 | ---sajkdsdjfsfbffbkjtqjUK 44 | 45 | --Example 2: Get the substring from a specific string by using table data 46 | select AGENT_CODE,AGENT_NAME,substring(AGENT_NAME,0,2) AS AGENT_INITIALS from agents; 47 | 48 | /* To get a specific substring from an expression or string. 49 | You can also use the substring function if you want to get the substrings in reverse order from the strings. */ 50 | 51 | -- If you use the substrings in reverse order, use the starting index as a negative value. 52 | select AGENT_CODE,AGENT_NAME,substring(AGENT_NAME,-3,3) AS NAME_BACKWARDS from agents; 53 | 54 | /* 55 | Snowflake CAST is a data-type conversion command. Snowflake CAST works similar to the TO_ datatype conversion functions. 56 | If a particular data type conversion is not possible, 57 | it raises an error. 
Let’s understand the Snowflake CAST in detail via the syntax and a few examples. 58 | */ 59 | 60 | select cast('1.6845' as decimal(6,2)); 61 | select '1.6845'::decimal(6,1); 62 | 63 | select cast('10-Sep-2021' as timestamp); 64 | 65 | -- When the provided precision is insufficient to hold the input value, the Snowflake CAST command raises an error as follows: 66 | select cast('123.12' as number(4,2)); 67 | --Here, precision is set as 4 but the input value has a total of 5 digits, thereby raising the error. 68 | 69 | --TRY_CAST( AS ) 70 | select try_cast('05-Mar-2016' as timestamp); 71 | 72 | --The Snowflake TRY_CAST command returns NULL as the input value 73 | --has more characters than the provided precision in the target data type. 74 | select try_cast('ANAND' as char(4)); 75 | 76 | --trim function 77 | select trim('❄-❄ABC-❄-', '❄-') as trimmed_string; 78 | select trim('❄-❄ABC-❄-', '') as trimmed_string; 79 | SELECT TRIM('********T E S T I N G 1 2 3 4********','*') AS TRIMMED_SPACE; 80 | 81 | --ltrim 82 | select ltrim('#000000123', '0#'); 83 | select ltrim('#0000AISHWARYA', '0#'); 84 | select ltrim(' JALPA DESAI', ''); 85 | 86 | --RTRIM 87 | select rtrim('$125.00', '0.'); 88 | select rtrim('JALPA DESAI*****', '*'); 89 | 90 | --To remove the white spaces or the blank spaces from the string TRIM function can be used. 91 | --It can remove the whitespaces from the start and end both. 92 | select TRIM(' Snwoflake Space Remove ', ' '); 93 | 94 | --To remove the first character from the string you can pass the string in the RTRIM function. 95 | select LTRIM('Snowflake Remove ', 'S'); 96 | --To remove the last character from the string you can pass the string in the RTRIM function. 97 | select RTRIM('Snwoflake Remove ', 'e'); 98 | 99 | select BTRIM(' Snwoflake Space Remove ', ' '); 100 | 101 | --LENGTH FUNCTION 102 | SELECT LEN(trim(' Snowflake Space Remove ')) as length_string; 103 | SELECT LENGTH(trim(' Snowflake Space Remove ')) as length_string; 104 | 105 | --concat 106 | select * from agents; 107 | 108 | SELECT CONCAT('KA', ', ', 'India') as state_country; 109 | 110 | SELECT *,concat(AGENT_CODE, '-', AGENT_NAME) AS agent_details from agents; 111 | 112 | --Snowflake CONCAT_WS Function 113 | /* The concat_ws function concatenates two or more strings, or concatenates two or more binary values 114 | and adds separator between those strings. 115 | The CONCAT_WS operator requires at least two arguments, and uses the first argument to separate all following arguments 116 | 117 | Following is the concat_ws function syntax 118 | CONCAT_WS( , [ , ... ] ) */ 119 | 120 | SELECT CONCAT_WS('-', 'KA','India') as state_country; 121 | 122 | /* 123 | Snowflake Concat Operator (||) 124 | The concatenation operator concatenates two strings on either side of the || symbol and returns the concatenated string. 125 | The || operator provides alternative syntax for CONCAT and requires at least two arguments. 126 | 127 | For example, 128 | */ 129 | select 'Nested' || ' CONCAT' || ' example!' as Concat_operator; 130 | 131 | 132 | --Handling NULL Values in CONCAT function and the Concatenation operator 133 | --For both the CONCAT function and the concatenation operator, 134 | --if one or both strings are null, the result of the concatenation is null. 135 | --For example, 136 | 137 | select concat('Bangalore, ', NULL) as null_example; 138 | select 'Bangalore, '|| NULL as null_example; 139 | 140 | --how to handle it? 
141 | select concat('Bangalore ', NVL(NULL,'')) as null_example; 142 | select 'Bangalore'|| NVL(NULL, '') as null_example; 143 | 144 | -- REVERSE IN STRING 145 | select reverse('Hello, world!'); 146 | 147 | -- SPLIT 148 | select split('127.0.0.1', '.'); 149 | SELECT SPLIT('ANAND-KUMAR-JHA','-'); 150 | 151 | select 0, split_part('11.22.33', '.', 0); 152 | 153 | select split_part('aaa--bbb-BBB--ccc', '--',1); 154 | select split_part('aaa--bbb-BBB--ccc', '--',2); 155 | select split_part('aaa--bbb-BBB--ccc', '--',3); 156 | select split_part('aaa--bbb-BBB--ccc', '--',4); 157 | 158 | SELECT split(AGENT_DETAILS, '-') 159 | FROM ( 160 | SELECT *,concat(AGENT_CODE, '-', AGENT_NAME) AS agent_details 161 | from agents ); 162 | 163 | 164 | SELECT lower('India Is My Country') as lwr_strng; 165 | SELECT UPPER('India Is My Country') as upr_strng; 166 | 167 | --REPLACE COMMAND 168 | -- REPLACE( , [ , ] ) 169 | 170 | select REPLACE( ' JALPA DESAI ' ,' ','$'); 171 | select REPLACE( ' HARSHJIT KUMAR DESAI ' ,' '); -- 172 | 173 | SELECT REPLACE(' T E S T I N G 1 2 3 4 ',' ') 174 | -------------------------------------------------------------------------------- /SQL-LECTURE-1.sql: -------------------------------------------------------------------------------- 1 | show databases; 2 | use sakila; 3 | SELECT * FROM CUSTOMER; 4 | SELECT FIRST_NAME, LAST_NAME, ADDRESS_ID FROM CUSTOMER; 5 | SELECT DISTINCT ACTIVE FROM customer; 6 | SELECT first_name FROM customer WHERE last_name = 'DAVIS'; 7 | SELECT first_name , last_name FROM customer WHERE active = 0; 8 | SELECT first_name , last_name FROM customer WHERE active = 1; 9 | SELECT * FROM CUSTOMER WHERE ADDRESS_ID = 10; 10 | SELECT first_name , last_name, EMAIL FROM customer WHERE 11 | ADDRESS_ID < 20; 12 | SELECT * FROM customer WHERE ADDRESS_ID < 20; 13 | SELECT * FROM customer WHERE ADDRESS_ID = 20; 14 | SELECT first_name , last_name FROM customer WHERE 15 | ADDRESS_ID BETWEEN 20 AND 25; 16 | SELECT first_name, email, address_id FROM customer 17 | WHERE fiRSt_name = 'IAN' AND last_name = 'STILL'; 18 | UPDATE customer SET first_name = 'KAUSHIK' WHERE last_name ='SMITH'; 19 | UPDATE customer SET first_name = 'AISHWARYA' WHERE last_name ='WILLIAMS'; 20 | UPDATE customer SET first_name = 'jingle' WHERE last_name ='GREY'; 21 | SELECT * FROM CUSTOMER WHERE FIRST_NAME = 'KAUSHIK'; 22 | 23 | Select store_id, first_name,last_name, email, address_id 24 | FROM customer WHERE NOT store_id = 2; 25 | 26 | SELECT first_name, last_name,email FROM customer ORDER BY 27 | first_name DESC; 28 | 29 | SELECT first_name, last_name,email FROM customer ORDER BY first_name; 30 | 31 | SELECT first_name, last_name,email FROM customer ORDER BY first_name 32 | ASC LIMIT 10; 33 | 34 | SELECT first_name, last_name,email, ADDRESS_ID FROM customer 35 | ORDER BY ADDRESS_ID; 36 | 37 | SELECT * FROM CUSTOMER LIMIT 10; 38 | 39 | SELECT first_name, last_name,email,ACTIVE FROM 40 | customer WHERE ACTIVE =1 LIMIT 10; 41 | 42 | SELECT MIN(address_id) FROM customer; 43 | SELECT MAX(address_id) FROM customer; 44 | SELECT SUM(address_id) FROM customer; 45 | SELECT AVG(address_id) FROM customer; 46 | SELECT COUNT(email) FROM customer; 47 | 48 | SELECT FIRST_NAME, LAST_NAME, EMAIL FROM CUSTOMER 49 | WHERE ADDRESS_ID = (SELECT MIN(ADDRESS_ID) FROM CUSTOMER); 50 | 51 | SELECT FIRST_NAME, LAST_NAME, EMAIL FROM CUSTOMER WHERE ADDRESS_ID = 5 52 | 53 | SELECT COUNT(email) FROM customer; 54 | SELECT AVG(active) FROM customer; 55 | 56 | SELECT SUM(active) FROM customer; 57 | SELECT * FROM customer WHERE first_name 
LIKE 'R%'; 58 | SELECT * FROM customer WHERE first_name LIKE '%A'; 59 | SELECT * FROM customer WHERE first_name LIKE '%or%'; 60 | SELECT * FROM customer WHERE Last_name LIKE '%PR%'; 61 | SELECT * FROM customer WHERE first_name LIKE 'a_______%'; 62 | SELECT * FROM customer WHERE first_name LIKE 'a%o'; 63 | SELECT * FROM customer WHERE customer_id IN (1,2,3); 64 | SELECT * FROM customer WHERE customer_id NOT IN (1,2,3); 65 | SELECT * FROM customer WHERE customer_id BETWEEN 1 AND 20; 66 | SELECT * FROM customer WHERE customer_id 67 | NOT BETWEEN 1 AND 570; 68 | SELECT * FROM CUSTOMER; 69 | SELECT first_name AS first, last_name AS last 70 | FROM customer; 71 | 72 | SELECT COUNT(customer_id) FROM customer GROUP BY active; 73 | SELECT COUNT(customer_id),ACTIVE FROM customer 74 | GROUP BY active; 75 | SHOW DATABASES; 76 | USE WORLD 77 | 78 | SELECT * FROM COUNTRY; 79 | SELECT sum(POPULATION),CONTINENT FROM COUNTRY GROUP BY CONTINENT; 80 | 81 | SELECT NAME, REGION,CONTINENT FROM COUNTRY WHERE 82 | SURFACEAREA = (SELECT MAX(SURFACEAREA) FROM COUNTRY); 83 | 84 | SELECT COUNT(*) FROM COUNTRY 85 | SELECT MAX(SURFACEAREA) FROM COUNTRY 86 | 87 | SELECT * FROM COUNTRY WHERE CONTINENT = 'EUROPE' 88 | 89 | SELECT DISTINCT CONTINENT FROM COUNTRY; 90 | 91 | SELECT NAME, CONTINENT FROM COUNTRY WHERE 92 | SURFACEAREA = (SELECT MIN(SURFACEAREA) FROM COUNTRY); 93 | 94 | SELECT NAME, CONTINENT FROM COUNTRY WHERE 95 | SURFACEAREA = (SELECT MAX(SURFACEAREA) FROM COUNTRY); 96 | 97 | SELECT MAX(SURFACEAREA) FROM COUNTRY; 98 | 99 | SELECT * from country group by(continent) having count(region) >6; 100 | 101 | SELECT NAME,REGION, SURFACEAREA FROM COUNTRY GROUP BY CONTINENT; 102 | 103 | SELECT COUNT(NAME),CONTINENT FROM COUNTRY GROUP BY CONTINENT; 104 | SELECT CONTINENT, SUM(POPULATION) FROM COUNTRY GROUP BY CONTINENT; 105 | 106 | SELECT NAME, CONTINENT FROM COUNTRY WHERE 107 | SURFACEAREA = (SELECT MAX(SURFACEAREA) FROM COUNTRY) OR 108 | POPULATION = (SELECT MAX(POPULATION) FROM COUNTRY) ; 109 | 110 | -------------------------------------------------------------------------------- /SQL/glass.data: -------------------------------------------------------------------------------- 1 | 1,1.52101,13.64,4.49,1.10,71.78,0.06,8.75,0.00,0.00,1 2 | 2,1.51761,13.89,3.60,1.36,72.73,0.48,7.83,0.00,0.00,1 3 | 3,1.51618,13.53,3.55,1.54,72.99,0.39,7.78,0.00,0.00,1 4 | 4,1.51766,13.21,3.69,1.29,72.61,0.57,8.22,0.00,0.00,1 5 | 5,1.51742,13.27,3.62,1.24,73.08,0.55,8.07,0.00,0.00,1 6 | 6,1.51596,12.79,3.61,1.62,72.97,0.64,8.07,0.00,0.26,1 7 | 7,1.51743,13.30,3.60,1.14,73.09,0.58,8.17,0.00,0.00,1 8 | 8,1.51756,13.15,3.61,1.05,73.24,0.57,8.24,0.00,0.00,1 9 | 9,1.51918,14.04,3.58,1.37,72.08,0.56,8.30,0.00,0.00,1 10 | 10,1.51755,13.00,3.60,1.36,72.99,0.57,8.40,0.00,0.11,1 11 | 11,1.51571,12.72,3.46,1.56,73.20,0.67,8.09,0.00,0.24,1 12 | 12,1.51763,12.80,3.66,1.27,73.01,0.60,8.56,0.00,0.00,1 13 | 13,1.51589,12.88,3.43,1.40,73.28,0.69,8.05,0.00,0.24,1 14 | 14,1.51748,12.86,3.56,1.27,73.21,0.54,8.38,0.00,0.17,1 15 | 15,1.51763,12.61,3.59,1.31,73.29,0.58,8.50,0.00,0.00,1 16 | 16,1.51761,12.81,3.54,1.23,73.24,0.58,8.39,0.00,0.00,1 17 | 17,1.51784,12.68,3.67,1.16,73.11,0.61,8.70,0.00,0.00,1 18 | 18,1.52196,14.36,3.85,0.89,71.36,0.15,9.15,0.00,0.00,1 19 | 19,1.51911,13.90,3.73,1.18,72.12,0.06,8.89,0.00,0.00,1 20 | 20,1.51735,13.02,3.54,1.69,72.73,0.54,8.44,0.00,0.07,1 21 | 21,1.51750,12.82,3.55,1.49,72.75,0.54,8.52,0.00,0.19,1 22 | 22,1.51966,14.77,3.75,0.29,72.02,0.03,9.00,0.00,0.00,1 23 | 23,1.51736,12.78,3.62,1.29,72.79,0.59,8.70,0.00,0.00,1 24 | 
24,1.51751,12.81,3.57,1.35,73.02,0.62,8.59,0.00,0.00,1 25 | 25,1.51720,13.38,3.50,1.15,72.85,0.50,8.43,0.00,0.00,1 26 | 26,1.51764,12.98,3.54,1.21,73.00,0.65,8.53,0.00,0.00,1 27 | 27,1.51793,13.21,3.48,1.41,72.64,0.59,8.43,0.00,0.00,1 28 | 28,1.51721,12.87,3.48,1.33,73.04,0.56,8.43,0.00,0.00,1 29 | 29,1.51768,12.56,3.52,1.43,73.15,0.57,8.54,0.00,0.00,1 30 | 30,1.51784,13.08,3.49,1.28,72.86,0.60,8.49,0.00,0.00,1 31 | 31,1.51768,12.65,3.56,1.30,73.08,0.61,8.69,0.00,0.14,1 32 | 32,1.51747,12.84,3.50,1.14,73.27,0.56,8.55,0.00,0.00,1 33 | 33,1.51775,12.85,3.48,1.23,72.97,0.61,8.56,0.09,0.22,1 34 | 34,1.51753,12.57,3.47,1.38,73.39,0.60,8.55,0.00,0.06,1 35 | 35,1.51783,12.69,3.54,1.34,72.95,0.57,8.75,0.00,0.00,1 36 | 36,1.51567,13.29,3.45,1.21,72.74,0.56,8.57,0.00,0.00,1 37 | 37,1.51909,13.89,3.53,1.32,71.81,0.51,8.78,0.11,0.00,1 38 | 38,1.51797,12.74,3.48,1.35,72.96,0.64,8.68,0.00,0.00,1 39 | 39,1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0.00,0.00,1 40 | 40,1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0.00,0.00,1 41 | 41,1.51793,12.79,3.50,1.12,73.03,0.64,8.77,0.00,0.00,1 42 | 42,1.51755,12.71,3.42,1.20,73.20,0.59,8.64,0.00,0.00,1 43 | 43,1.51779,13.21,3.39,1.33,72.76,0.59,8.59,0.00,0.00,1 44 | 44,1.52210,13.73,3.84,0.72,71.76,0.17,9.74,0.00,0.00,1 45 | 45,1.51786,12.73,3.43,1.19,72.95,0.62,8.76,0.00,0.30,1 46 | 46,1.51900,13.49,3.48,1.35,71.95,0.55,9.00,0.00,0.00,1 47 | 47,1.51869,13.19,3.37,1.18,72.72,0.57,8.83,0.00,0.16,1 48 | 48,1.52667,13.99,3.70,0.71,71.57,0.02,9.82,0.00,0.10,1 49 | 49,1.52223,13.21,3.77,0.79,71.99,0.13,10.02,0.00,0.00,1 50 | 50,1.51898,13.58,3.35,1.23,72.08,0.59,8.91,0.00,0.00,1 51 | 51,1.52320,13.72,3.72,0.51,71.75,0.09,10.06,0.00,0.16,1 52 | 52,1.51926,13.20,3.33,1.28,72.36,0.60,9.14,0.00,0.11,1 53 | 53,1.51808,13.43,2.87,1.19,72.84,0.55,9.03,0.00,0.00,1 54 | 54,1.51837,13.14,2.84,1.28,72.85,0.55,9.07,0.00,0.00,1 55 | 55,1.51778,13.21,2.81,1.29,72.98,0.51,9.02,0.00,0.09,1 56 | 56,1.51769,12.45,2.71,1.29,73.70,0.56,9.06,0.00,0.24,1 57 | 57,1.51215,12.99,3.47,1.12,72.98,0.62,8.35,0.00,0.31,1 58 | 58,1.51824,12.87,3.48,1.29,72.95,0.60,8.43,0.00,0.00,1 59 | 59,1.51754,13.48,3.74,1.17,72.99,0.59,8.03,0.00,0.00,1 60 | 60,1.51754,13.39,3.66,1.19,72.79,0.57,8.27,0.00,0.11,1 61 | 61,1.51905,13.60,3.62,1.11,72.64,0.14,8.76,0.00,0.00,1 62 | 62,1.51977,13.81,3.58,1.32,71.72,0.12,8.67,0.69,0.00,1 63 | 63,1.52172,13.51,3.86,0.88,71.79,0.23,9.54,0.00,0.11,1 64 | 64,1.52227,14.17,3.81,0.78,71.35,0.00,9.69,0.00,0.00,1 65 | 65,1.52172,13.48,3.74,0.90,72.01,0.18,9.61,0.00,0.07,1 66 | 66,1.52099,13.69,3.59,1.12,71.96,0.09,9.40,0.00,0.00,1 67 | 67,1.52152,13.05,3.65,0.87,72.22,0.19,9.85,0.00,0.17,1 68 | 68,1.52152,13.05,3.65,0.87,72.32,0.19,9.85,0.00,0.17,1 69 | 69,1.52152,13.12,3.58,0.90,72.20,0.23,9.82,0.00,0.16,1 70 | 70,1.52300,13.31,3.58,0.82,71.99,0.12,10.17,0.00,0.03,1 71 | 71,1.51574,14.86,3.67,1.74,71.87,0.16,7.36,0.00,0.12,2 72 | 72,1.51848,13.64,3.87,1.27,71.96,0.54,8.32,0.00,0.32,2 73 | 73,1.51593,13.09,3.59,1.52,73.10,0.67,7.83,0.00,0.00,2 74 | 74,1.51631,13.34,3.57,1.57,72.87,0.61,7.89,0.00,0.00,2 75 | 75,1.51596,13.02,3.56,1.54,73.11,0.72,7.90,0.00,0.00,2 76 | 76,1.51590,13.02,3.58,1.51,73.12,0.69,7.96,0.00,0.00,2 77 | 77,1.51645,13.44,3.61,1.54,72.39,0.66,8.03,0.00,0.00,2 78 | 78,1.51627,13.00,3.58,1.54,72.83,0.61,8.04,0.00,0.00,2 79 | 79,1.51613,13.92,3.52,1.25,72.88,0.37,7.94,0.00,0.14,2 80 | 80,1.51590,12.82,3.52,1.90,72.86,0.69,7.97,0.00,0.00,2 81 | 81,1.51592,12.86,3.52,2.12,72.66,0.69,7.97,0.00,0.00,2 82 | 82,1.51593,13.25,3.45,1.43,73.17,0.61,7.86,0.00,0.00,2 83 | 
83,1.51646,13.41,3.55,1.25,72.81,0.68,8.10,0.00,0.00,2 84 | 84,1.51594,13.09,3.52,1.55,72.87,0.68,8.05,0.00,0.09,2 85 | 85,1.51409,14.25,3.09,2.08,72.28,1.10,7.08,0.00,0.00,2 86 | 86,1.51625,13.36,3.58,1.49,72.72,0.45,8.21,0.00,0.00,2 87 | 87,1.51569,13.24,3.49,1.47,73.25,0.38,8.03,0.00,0.00,2 88 | 88,1.51645,13.40,3.49,1.52,72.65,0.67,8.08,0.00,0.10,2 89 | 89,1.51618,13.01,3.50,1.48,72.89,0.60,8.12,0.00,0.00,2 90 | 90,1.51640,12.55,3.48,1.87,73.23,0.63,8.08,0.00,0.09,2 91 | 91,1.51841,12.93,3.74,1.11,72.28,0.64,8.96,0.00,0.22,2 92 | 92,1.51605,12.90,3.44,1.45,73.06,0.44,8.27,0.00,0.00,2 93 | 93,1.51588,13.12,3.41,1.58,73.26,0.07,8.39,0.00,0.19,2 94 | 94,1.51590,13.24,3.34,1.47,73.10,0.39,8.22,0.00,0.00,2 95 | 95,1.51629,12.71,3.33,1.49,73.28,0.67,8.24,0.00,0.00,2 96 | 96,1.51860,13.36,3.43,1.43,72.26,0.51,8.60,0.00,0.00,2 97 | 97,1.51841,13.02,3.62,1.06,72.34,0.64,9.13,0.00,0.15,2 98 | 98,1.51743,12.20,3.25,1.16,73.55,0.62,8.90,0.00,0.24,2 99 | 99,1.51689,12.67,2.88,1.71,73.21,0.73,8.54,0.00,0.00,2 100 | 100,1.51811,12.96,2.96,1.43,72.92,0.60,8.79,0.14,0.00,2 101 | 101,1.51655,12.75,2.85,1.44,73.27,0.57,8.79,0.11,0.22,2 102 | 102,1.51730,12.35,2.72,1.63,72.87,0.70,9.23,0.00,0.00,2 103 | 103,1.51820,12.62,2.76,0.83,73.81,0.35,9.42,0.00,0.20,2 104 | 104,1.52725,13.80,3.15,0.66,70.57,0.08,11.64,0.00,0.00,2 105 | 105,1.52410,13.83,2.90,1.17,71.15,0.08,10.79,0.00,0.00,2 106 | 106,1.52475,11.45,0.00,1.88,72.19,0.81,13.24,0.00,0.34,2 107 | 107,1.53125,10.73,0.00,2.10,69.81,0.58,13.30,3.15,0.28,2 108 | 108,1.53393,12.30,0.00,1.00,70.16,0.12,16.19,0.00,0.24,2 109 | 109,1.52222,14.43,0.00,1.00,72.67,0.10,11.52,0.00,0.08,2 110 | 110,1.51818,13.72,0.00,0.56,74.45,0.00,10.99,0.00,0.00,2 111 | 111,1.52664,11.23,0.00,0.77,73.21,0.00,14.68,0.00,0.00,2 112 | 112,1.52739,11.02,0.00,0.75,73.08,0.00,14.96,0.00,0.00,2 113 | 113,1.52777,12.64,0.00,0.67,72.02,0.06,14.40,0.00,0.00,2 114 | 114,1.51892,13.46,3.83,1.26,72.55,0.57,8.21,0.00,0.14,2 115 | 115,1.51847,13.10,3.97,1.19,72.44,0.60,8.43,0.00,0.00,2 116 | 116,1.51846,13.41,3.89,1.33,72.38,0.51,8.28,0.00,0.00,2 117 | 117,1.51829,13.24,3.90,1.41,72.33,0.55,8.31,0.00,0.10,2 118 | 118,1.51708,13.72,3.68,1.81,72.06,0.64,7.88,0.00,0.00,2 119 | 119,1.51673,13.30,3.64,1.53,72.53,0.65,8.03,0.00,0.29,2 120 | 120,1.51652,13.56,3.57,1.47,72.45,0.64,7.96,0.00,0.00,2 121 | 121,1.51844,13.25,3.76,1.32,72.40,0.58,8.42,0.00,0.00,2 122 | 122,1.51663,12.93,3.54,1.62,72.96,0.64,8.03,0.00,0.21,2 123 | 123,1.51687,13.23,3.54,1.48,72.84,0.56,8.10,0.00,0.00,2 124 | 124,1.51707,13.48,3.48,1.71,72.52,0.62,7.99,0.00,0.00,2 125 | 125,1.52177,13.20,3.68,1.15,72.75,0.54,8.52,0.00,0.00,2 126 | 126,1.51872,12.93,3.66,1.56,72.51,0.58,8.55,0.00,0.12,2 127 | 127,1.51667,12.94,3.61,1.26,72.75,0.56,8.60,0.00,0.00,2 128 | 128,1.52081,13.78,2.28,1.43,71.99,0.49,9.85,0.00,0.17,2 129 | 129,1.52068,13.55,2.09,1.67,72.18,0.53,9.57,0.27,0.17,2 130 | 130,1.52020,13.98,1.35,1.63,71.76,0.39,10.56,0.00,0.18,2 131 | 131,1.52177,13.75,1.01,1.36,72.19,0.33,11.14,0.00,0.00,2 132 | 132,1.52614,13.70,0.00,1.36,71.24,0.19,13.44,0.00,0.10,2 133 | 133,1.51813,13.43,3.98,1.18,72.49,0.58,8.15,0.00,0.00,2 134 | 134,1.51800,13.71,3.93,1.54,71.81,0.54,8.21,0.00,0.15,2 135 | 135,1.51811,13.33,3.85,1.25,72.78,0.52,8.12,0.00,0.00,2 136 | 136,1.51789,13.19,3.90,1.30,72.33,0.55,8.44,0.00,0.28,2 137 | 137,1.51806,13.00,3.80,1.08,73.07,0.56,8.38,0.00,0.12,2 138 | 138,1.51711,12.89,3.62,1.57,72.96,0.61,8.11,0.00,0.00,2 139 | 139,1.51674,12.79,3.52,1.54,73.36,0.66,7.90,0.00,0.00,2 140 | 
140,1.51674,12.87,3.56,1.64,73.14,0.65,7.99,0.00,0.00,2 141 | 141,1.51690,13.33,3.54,1.61,72.54,0.68,8.11,0.00,0.00,2 142 | 142,1.51851,13.20,3.63,1.07,72.83,0.57,8.41,0.09,0.17,2 143 | 143,1.51662,12.85,3.51,1.44,73.01,0.68,8.23,0.06,0.25,2 144 | 144,1.51709,13.00,3.47,1.79,72.72,0.66,8.18,0.00,0.00,2 145 | 145,1.51660,12.99,3.18,1.23,72.97,0.58,8.81,0.00,0.24,2 146 | 146,1.51839,12.85,3.67,1.24,72.57,0.62,8.68,0.00,0.35,2 147 | 147,1.51769,13.65,3.66,1.11,72.77,0.11,8.60,0.00,0.00,3 148 | 148,1.51610,13.33,3.53,1.34,72.67,0.56,8.33,0.00,0.00,3 149 | 149,1.51670,13.24,3.57,1.38,72.70,0.56,8.44,0.00,0.10,3 150 | 150,1.51643,12.16,3.52,1.35,72.89,0.57,8.53,0.00,0.00,3 151 | 151,1.51665,13.14,3.45,1.76,72.48,0.60,8.38,0.00,0.17,3 152 | 152,1.52127,14.32,3.90,0.83,71.50,0.00,9.49,0.00,0.00,3 153 | 153,1.51779,13.64,3.65,0.65,73.00,0.06,8.93,0.00,0.00,3 154 | 154,1.51610,13.42,3.40,1.22,72.69,0.59,8.32,0.00,0.00,3 155 | 155,1.51694,12.86,3.58,1.31,72.61,0.61,8.79,0.00,0.00,3 156 | 156,1.51646,13.04,3.40,1.26,73.01,0.52,8.58,0.00,0.00,3 157 | 157,1.51655,13.41,3.39,1.28,72.64,0.52,8.65,0.00,0.00,3 158 | 158,1.52121,14.03,3.76,0.58,71.79,0.11,9.65,0.00,0.00,3 159 | 159,1.51776,13.53,3.41,1.52,72.04,0.58,8.79,0.00,0.00,3 160 | 160,1.51796,13.50,3.36,1.63,71.94,0.57,8.81,0.00,0.09,3 161 | 161,1.51832,13.33,3.34,1.54,72.14,0.56,8.99,0.00,0.00,3 162 | 162,1.51934,13.64,3.54,0.75,72.65,0.16,8.89,0.15,0.24,3 163 | 163,1.52211,14.19,3.78,0.91,71.36,0.23,9.14,0.00,0.37,3 164 | 164,1.51514,14.01,2.68,3.50,69.89,1.68,5.87,2.20,0.00,5 165 | 165,1.51915,12.73,1.85,1.86,72.69,0.60,10.09,0.00,0.00,5 166 | 166,1.52171,11.56,1.88,1.56,72.86,0.47,11.41,0.00,0.00,5 167 | 167,1.52151,11.03,1.71,1.56,73.44,0.58,11.62,0.00,0.00,5 168 | 168,1.51969,12.64,0.00,1.65,73.75,0.38,11.53,0.00,0.00,5 169 | 169,1.51666,12.86,0.00,1.83,73.88,0.97,10.17,0.00,0.00,5 170 | 170,1.51994,13.27,0.00,1.76,73.03,0.47,11.32,0.00,0.00,5 171 | 171,1.52369,13.44,0.00,1.58,72.22,0.32,12.24,0.00,0.00,5 172 | 172,1.51316,13.02,0.00,3.04,70.48,6.21,6.96,0.00,0.00,5 173 | 173,1.51321,13.00,0.00,3.02,70.70,6.21,6.93,0.00,0.00,5 174 | 174,1.52043,13.38,0.00,1.40,72.25,0.33,12.50,0.00,0.00,5 175 | 175,1.52058,12.85,1.61,2.17,72.18,0.76,9.70,0.24,0.51,5 176 | 176,1.52119,12.97,0.33,1.51,73.39,0.13,11.27,0.00,0.28,5 177 | 177,1.51905,14.00,2.39,1.56,72.37,0.00,9.57,0.00,0.00,6 178 | 178,1.51937,13.79,2.41,1.19,72.76,0.00,9.77,0.00,0.00,6 179 | 179,1.51829,14.46,2.24,1.62,72.38,0.00,9.26,0.00,0.00,6 180 | 180,1.51852,14.09,2.19,1.66,72.67,0.00,9.32,0.00,0.00,6 181 | 181,1.51299,14.40,1.74,1.54,74.55,0.00,7.59,0.00,0.00,6 182 | 182,1.51888,14.99,0.78,1.74,72.50,0.00,9.95,0.00,0.00,6 183 | 183,1.51916,14.15,0.00,2.09,72.74,0.00,10.88,0.00,0.00,6 184 | 184,1.51969,14.56,0.00,0.56,73.48,0.00,11.22,0.00,0.00,6 185 | 185,1.51115,17.38,0.00,0.34,75.41,0.00,6.65,0.00,0.00,6 186 | 186,1.51131,13.69,3.20,1.81,72.81,1.76,5.43,1.19,0.00,7 187 | 187,1.51838,14.32,3.26,2.22,71.25,1.46,5.79,1.63,0.00,7 188 | 188,1.52315,13.44,3.34,1.23,72.38,0.60,8.83,0.00,0.00,7 189 | 189,1.52247,14.86,2.20,2.06,70.26,0.76,9.76,0.00,0.00,7 190 | 190,1.52365,15.79,1.83,1.31,70.43,0.31,8.61,1.68,0.00,7 191 | 191,1.51613,13.88,1.78,1.79,73.10,0.00,8.67,0.76,0.00,7 192 | 192,1.51602,14.85,0.00,2.38,73.28,0.00,8.76,0.64,0.09,7 193 | 193,1.51623,14.20,0.00,2.79,73.46,0.04,9.04,0.40,0.09,7 194 | 194,1.51719,14.75,0.00,2.00,73.02,0.00,8.53,1.59,0.08,7 195 | 195,1.51683,14.56,0.00,1.98,73.29,0.00,8.52,1.57,0.07,7 196 | 196,1.51545,14.14,0.00,2.68,73.39,0.08,9.07,0.61,0.05,7 197 | 
197,1.51556,13.87,0.00,2.54,73.23,0.14,9.41,0.81,0.01,7 198 | 198,1.51727,14.70,0.00,2.34,73.28,0.00,8.95,0.66,0.00,7 199 | 199,1.51531,14.38,0.00,2.66,73.10,0.04,9.08,0.64,0.00,7 200 | 200,1.51609,15.01,0.00,2.51,73.05,0.05,8.83,0.53,0.00,7 201 | 201,1.51508,15.15,0.00,2.25,73.50,0.00,8.34,0.63,0.00,7 202 | 202,1.51653,11.95,0.00,1.19,75.18,2.70,8.93,0.00,0.00,7 203 | 203,1.51514,14.85,0.00,2.42,73.72,0.00,8.39,0.56,0.00,7 204 | 204,1.51658,14.80,0.00,1.99,73.11,0.00,8.28,1.71,0.00,7 205 | 205,1.51617,14.95,0.00,2.27,73.30,0.00,8.71,0.67,0.00,7 206 | 206,1.51732,14.95,0.00,1.80,72.99,0.00,8.61,1.55,0.00,7 207 | 207,1.51645,14.94,0.00,1.87,73.11,0.00,8.67,1.38,0.00,7 208 | 208,1.51831,14.39,0.00,1.82,72.86,1.41,6.47,2.88,0.00,7 209 | 209,1.51640,14.37,0.00,2.74,72.85,0.00,9.45,0.54,0.00,7 210 | 210,1.51623,14.14,0.00,2.88,72.61,0.08,9.18,1.06,0.00,7 211 | 211,1.51685,14.92,0.00,1.99,73.06,0.00,8.40,1.59,0.00,7 212 | 212,1.52065,14.36,0.00,2.02,73.42,0.00,8.44,1.64,0.00,7 213 | 213,1.51651,14.38,0.00,1.94,73.61,0.00,8.48,1.57,0.00,7 214 | 214,1.51711,14.23,0.00,2.08,73.36,0.00,8.62,1.67,0.00,7 215 | -------------------------------------------------------------------------------- /SQL_BASICS_SAKILA_WORLD.txt: -------------------------------------------------------------------------------- 1 | show databases; 2 | use sakila; 3 | SELECT * FROM CUSTOMER 4 | SELECT FIRST_NAME, LAST_NAME, ADDRESS_ID FROM CUSTOMER; 5 | SELECT DISTINCT first_name FROM customer; 6 | SELECT DISTINCT ACTIVE FROM customer; 7 | SELECT first_name FROM customer WHERE last_name = 'DAVIS'; 8 | SELECT first_name , last_name FROM customer WHERE active = 0; 9 | SELECT first_name , last_name FROM customer WHERE active = 1; 10 | SELECT first_name , last_name, EMAIL FROM customer WHERE ADDRESS_ID < 20; 11 | SELECT * FROM customer WHERE ADDRESS_ID < 20; 12 | SELECT * FROM customer WHERE ADDRESS_ID = 20; 13 | SELECT first_name , last_name FROM customer WHERE ADDRESS_ID BETWEEN 20 AND 25; 14 | SELECT first_name, email, address_id FROM customer WHERE fiRSt_name = 'IAN' AND last_name = 'STILL'; 15 | UPDATE customer SET first_name = 'jingle' WHERE last_name ='SMITH'; 16 | UPDATE customer SET first_name = 'AISHWARYA' WHERE last_name ='WILLIAMS'; 17 | UPDATE customer SET first_name = 'jingle' WHERE last_name ='GREY'; 18 | SELECT * FROM CUSTOMER WHERE FIRST_NAME = 'AISHWARYA'; 19 | Select store_id, first_name,last_name, email, address_id FROM customer WHERE NOT store_id = 2; 20 | SELECT first_name, last_name,email FROM customer ORDER BY first_name DESC; 21 | SELECT first_name, last_name,email FROM customer ORDER BY first_name; 22 | SELECT first_name, last_name,email FROM customer ORDER BY first_name ASC; 23 | SELECT first_name, last_name,email FROM customer WHERE first_name = 'AUSTIN' LIMIT 20; 24 | SELECT first_name, last_name,email FROM customer WHERE ACTIVE =1 LIMIT 10; 25 | SELECT MIN(address_id) FROM customer; 26 | SELECT MAX(address_id) FROM customer; 27 | SELECT SUM(address_id) FROM customer; 28 | SELECT AVG(address_id) FROM customer; 29 | SELECT COUNT(email) FROM customer; 30 | SELECT FIRST_NAME, LAST_NAME, EMAIL FROM CUSTOMER WHERE ADDRESS_ID = (SELECT MIN(ADDRESS_ID) FROM CUSTOMER); 31 | SELECT COUNT(email) FROM customer; 32 | SELECT AVG(active) FROM customer; 33 | SELECT SUM(active) FROM customer; 34 | SELECT * FROM customer WHERE first_name LIKE 'A%'; 35 | SELECT * FROM customer WHERE first_name LIKE '%A'; 36 | SELECT * FROM customer WHERE first_name LIKE '%or%'; 37 | SELECT * FROM customer WHERE Last_name LIKE '%PR%'; 38 | 
SELECT * FROM customer WHERE first_name LIKE 'a______%'; 39 | SELECT * FROM customer WHERE first_name LIKE 'a%o'; 40 | SELECT * FROM customer WHERE customer_id IN (1,2,3); 41 | SELECT * FROM customer WHERE customer_id NOT IN (1,2,3); 42 | SELECT * FROM customer WHERE customer_id BETWEEN 1 AND 20; 43 | SELECT * FROM customer WHERE customer_id NOT BETWEEN 1 AND 570; 44 | SELECT first_name AS first, last_name AS last FROM customer; 45 | SELECT COUNT(customer_id) FROM customer GROUP BY active; 46 | SELECT COUNT(customer_id),ACTIVE FROM customer GROUP BY active; 47 | 48 | 49 | USE WORLD; 50 | SELECT * FROM COUNTRY; 51 | 52 | SELECT * FROM COUNTRY WHERE CONTINENT = 'EUROPE' 53 | 54 | SELECT DISTINCT CONTINENT FROM COUNTRY; 55 | 56 | SELECT NAME, CONTINENT FROM COUNTRY WHERE 57 | SURFACEAREA = (SELECT MIN(SURFACEAREA) FROM COUNTRY); 58 | 59 | SELECT NAME, CONTINENT FROM COUNTRY WHERE 60 | SURFACEAREA = (SELECT MAX(SURFACEAREA) FROM COUNTRY); 61 | 62 | SELECT MAX(SURFACEAREA) FROM COUNTRY; 63 | 64 | SELECT * from country group by(continent) having count(region) >6; 65 | 66 | SELECT NAME,REGION, SURFACEAREA FROM COUNTRY GROUP BY CONTINENT; 67 | 68 | SELECT COUNT(NAME),CONTINENT FROM COUNTRY GROUP BY CONTINENT; 69 | 70 | SELECT NAME, CONTINENT FROM COUNTRY WHERE 71 | SURFACEAREA = (SELECT MAX(SURFACEAREA) FROM COUNTRY) OR 72 | POPULATION = (SELECT MAX(POPULATION) FROM COUNTRY) ; 73 | 74 | -------------------------------------------------------------------------------- /SQL_Basics.txt: -------------------------------------------------------------------------------- 1 | -- SHOW DATABASES; 2 | 3 | -- TODO: create customer table 4 | 5 | -- CREATE TABLE customers( 6 | -- id INT NOT NULL AUTO_INCREMENT, 7 | -- name VARCHAR(30) NOT NULL, 8 | -- email VARCHAR(40) NOT NULL DEFAULT 'No email provided', 9 | -- amount INT, 10 | -- PRIMARY KEY (id) 11 | -- ); 12 | 13 | -- TODO: add some values in customers 14 | 15 | -- INSERT INTO customers(name, amount) 16 | -- VALUES ( 17 | -- 'Rob', 18 | -- 30 19 | -- ); 20 | 21 | -- TODO: add this data for practice 22 | -- INSERT INTO customers(name, email, amount) 23 | -- VALUES ('hitesh', 'hitesh@lco.dev', 35), 24 | -- ('George', 'geo@lco.dev', 45), 25 | -- ('hitesh', 'hitesh@gmail.com', 88), 26 | -- ('lina', 'lina@gmail.com', 78), 27 | -- ('Jimmy', 'jimmy@yahoo.co.in', 54), 28 | -- ('lina', 'lina@yahoo.co.in', 35), 29 | -- ('jalpa', 'jalpa@gmail.com', 56); 30 | 31 | -- TODO: answer some questions: 32 | -- SELECT name from customers; 33 | 34 | -- SELECT email from customers; 35 | 36 | -- SELECT amount from customers; 37 | -- SELECT amount AS Purchases from customers; 38 | 39 | -- TODO: Update tasks 40 | 41 | -- SELECT * from customers WHERE name="jimmy"; 42 | -- UPDATE customers SET email='jimmy@yahoo.com' WHERE name="jimmy"; 43 | 44 | -- SELECT * from customers WHERE name="lina"; 45 | -- SELECT * from customers WHERE id=6; 46 | 47 | -- UPDATE customers SET amount=38 WHERE id=6; 48 | 49 | -- SELECT * from customers WHERE name="lina"; 50 | 51 | -- UPDATE customers SET email='lina@gmail.com' WHERE name="lina"; 52 | 53 | -- TODO: Delete some data 54 | 55 | -- SELECT * from customers WHERE name = 'george'; 56 | 57 | -- DELETE from customers WHERE name = 'george'; 58 | 59 | -- SELECT * from customers WHERE name = 'jalpa'; 60 | 61 | DELETE FROM customers WHERE name='jalpa'; -------------------------------------------------------------------------------- /SQL_Operation_JOINS.txt: -------------------------------------------------------------------------------- 1 | SHOW databases; 
2 | use sakila; 3 | select * from customer; 4 | select store_id,first_name,last_name,email,address_id from customer where not store_id=2; 5 | select store_id,first_name,last_name,email,address_id from customer where store_id=1; 6 | select first_name,last_name,email from customer where first_name='dan' limit 20; 7 | 8 | select count(email) from customer; 9 | select count(*) from customer; 10 | select avg(active) from customer; 11 | select sum(active) from customer; 12 | select * from customer where first_name like 'a%';#start whith a end with any chereacher 13 | select * from customer where first_name like '%a';#start whith any character end with a 14 | select * from customer where first_name like 'm%a';#start with mand end with a 15 | select * from customer where first_name like '%e%';#inbetwwen any character is e 16 | 17 | select * from customer where first_name like 'a______%';#start with a and atlist 6 character 18 | select * from customer where customer_id in(1,2,3); 19 | select * from customer where customer_id not in(1,2,3); 20 | 21 | select customer_id from customer where customer_id between 1 and 20; 22 | #alias 23 | select first_name as first, last_name as last from customer;#coloum alias 24 | select c.first_name,c.last_name from customer as c;#table alias 25 | 26 | #group by 27 | select count(customer_id) from customer group by active; 28 | select * from customer group by active; 29 | use world; 30 | select * from country; 31 | select * from country group by (Continent) having count(population)>10000; 32 | SELECT DISTINCT Region; 33 | 34 | #Union 35 | #both tables all the rows 36 | 37 | use sakila; 38 | select * from city; 39 | select * from country; 40 | 41 | select city.city_id,city.country_id from city 42 | INNER JOIN country 43 | on city.country_id =country.country_id; 44 | 45 | #INNER JOIN 46 | select city.city_id,country.country_id,city.last_update,country.last_update from city 47 | INNER JOIN country 48 | on city.country_id =country.country_id; 49 | 50 | #LEFT JOIN 51 | select city.city_id,country.country_id,city.last_update,country.last_update from city 52 | LEFT JOIN country 53 | on city.country_id =country.country_id; 54 | 55 | #RIGHT JOIN 56 | select city.city_id,country.country_id,city.last_update,country.last_update from city 57 | RIGHT JOIN country 58 | on city.country_id =country.country_id; 59 | 60 | # FULL OUTER JOIN 61 | SELECT * from city 62 | LEFT JOIN country 63 | ON city.country_id =country.country_id 64 | 65 | # UNION 66 | SELECT * from city 67 | RIGHT JOIN country 68 | ON city.country_id =country.country_id; 69 | 70 | #NULL VALUES 71 | USE customer; 72 | create table customer(id int,first_name varchar(30),last_name varchar(30),salary int); 73 | create table if not exists customer1(id int auto_increment,first_name varchar(30),last_name varchar(30),salary int,primary key (id)); 74 | select * from customer; 75 | insert into customer1(first_name,last_name,salary) 76 | values 77 | ("krupa","patel",32000), 78 | ("karan","patel",52000), 79 | ("shivam","shah",22000), 80 | ("devangi","patel",12000), 81 | ("dhyani","patel",42000); 82 | select * from customer1; 83 | insert into customer1(first_name,last_name,salary) 84 | values("cr","patel",NULL); 85 | 86 | SELECT * FROM customer1 where salary is null; 87 | 88 | SELECT * FROM customer1 where salary is not null; 89 | 90 | update customer1 set last_name='patel' where id =3; 91 | select * from customer1 ; 92 | delete from customer1 where id=2; 93 | 94 | #alter table 95 | alter table customer1 add emilid varchar(30); 96 
| alter table customer1 add column dob date; 97 | alter table customer1 drop column emil; 98 | 99 | use customer; 100 | create table test(test_id int auto_increment, 101 | test_name varchar(30), 102 | test_email varchar(30), 103 | test_address varchar(30), 104 | primary key (test_id)); 105 | 106 | show tables; 107 | select * from test; 108 | create table if not exists test3(test_id int , 109 | test_name varchar(30), 110 | test_email varchar(30), 111 | test_address varchar(30), 112 | test_salary int check (test_salary > 10000) 113 | ); 114 | 115 | insert into test3 values 116 | (1,"krupa","krupa@gmail.com","vadodara",32000), 117 | (2,"karan","patel988@yahoo.com","ahemdabad",52000), 118 | (3,"shivam","shivam8990@gmail.com","anand",22000), 119 | (4,"devangi","patel1276@gmail.com","vadodara",12000), 120 | (5,"dhyani","patelew@gmail.com","vadodara",42000); 121 | 122 | select * from test3; -------------------------------------------------------------------------------- /SQL_Python Connectivity/glass.data: -------------------------------------------------------------------------------- 1 | 1,1.52101,13.64,4.49,1.10,71.78,0.06,8.75,0.00,0.00,1 2 | 2,1.51761,13.89,3.60,1.36,72.73,0.48,7.83,0.00,0.00,1 3 | 3,1.51618,13.53,3.55,1.54,72.99,0.39,7.78,0.00,0.00,1 4 | 4,1.51766,13.21,3.69,1.29,72.61,0.57,8.22,0.00,0.00,1 5 | 5,1.51742,13.27,3.62,1.24,73.08,0.55,8.07,0.00,0.00,1 6 | 6,1.51596,12.79,3.61,1.62,72.97,0.64,8.07,0.00,0.26,1 7 | 7,1.51743,13.30,3.60,1.14,73.09,0.58,8.17,0.00,0.00,1 8 | 8,1.51756,13.15,3.61,1.05,73.24,0.57,8.24,0.00,0.00,1 9 | 9,1.51918,14.04,3.58,1.37,72.08,0.56,8.30,0.00,0.00,1 10 | 10,1.51755,13.00,3.60,1.36,72.99,0.57,8.40,0.00,0.11,1 11 | 11,1.51571,12.72,3.46,1.56,73.20,0.67,8.09,0.00,0.24,1 12 | 12,1.51763,12.80,3.66,1.27,73.01,0.60,8.56,0.00,0.00,1 13 | 13,1.51589,12.88,3.43,1.40,73.28,0.69,8.05,0.00,0.24,1 14 | 14,1.51748,12.86,3.56,1.27,73.21,0.54,8.38,0.00,0.17,1 15 | 15,1.51763,12.61,3.59,1.31,73.29,0.58,8.50,0.00,0.00,1 16 | 16,1.51761,12.81,3.54,1.23,73.24,0.58,8.39,0.00,0.00,1 17 | 17,1.51784,12.68,3.67,1.16,73.11,0.61,8.70,0.00,0.00,1 18 | 18,1.52196,14.36,3.85,0.89,71.36,0.15,9.15,0.00,0.00,1 19 | 19,1.51911,13.90,3.73,1.18,72.12,0.06,8.89,0.00,0.00,1 20 | 20,1.51735,13.02,3.54,1.69,72.73,0.54,8.44,0.00,0.07,1 21 | 21,1.51750,12.82,3.55,1.49,72.75,0.54,8.52,0.00,0.19,1 22 | 22,1.51966,14.77,3.75,0.29,72.02,0.03,9.00,0.00,0.00,1 23 | 23,1.51736,12.78,3.62,1.29,72.79,0.59,8.70,0.00,0.00,1 24 | 24,1.51751,12.81,3.57,1.35,73.02,0.62,8.59,0.00,0.00,1 25 | 25,1.51720,13.38,3.50,1.15,72.85,0.50,8.43,0.00,0.00,1 26 | 26,1.51764,12.98,3.54,1.21,73.00,0.65,8.53,0.00,0.00,1 27 | 27,1.51793,13.21,3.48,1.41,72.64,0.59,8.43,0.00,0.00,1 28 | 28,1.51721,12.87,3.48,1.33,73.04,0.56,8.43,0.00,0.00,1 29 | 29,1.51768,12.56,3.52,1.43,73.15,0.57,8.54,0.00,0.00,1 30 | 30,1.51784,13.08,3.49,1.28,72.86,0.60,8.49,0.00,0.00,1 31 | 31,1.51768,12.65,3.56,1.30,73.08,0.61,8.69,0.00,0.14,1 32 | 32,1.51747,12.84,3.50,1.14,73.27,0.56,8.55,0.00,0.00,1 33 | 33,1.51775,12.85,3.48,1.23,72.97,0.61,8.56,0.09,0.22,1 34 | 34,1.51753,12.57,3.47,1.38,73.39,0.60,8.55,0.00,0.06,1 35 | 35,1.51783,12.69,3.54,1.34,72.95,0.57,8.75,0.00,0.00,1 36 | 36,1.51567,13.29,3.45,1.21,72.74,0.56,8.57,0.00,0.00,1 37 | 37,1.51909,13.89,3.53,1.32,71.81,0.51,8.78,0.11,0.00,1 38 | 38,1.51797,12.74,3.48,1.35,72.96,0.64,8.68,0.00,0.00,1 39 | 39,1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0.00,0.00,1 40 | 40,1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0.00,0.00,1 41 | 41,1.51793,12.79,3.50,1.12,73.03,0.64,8.77,0.00,0.00,1 42 | 
42,1.51755,12.71,3.42,1.20,73.20,0.59,8.64,0.00,0.00,1 43 | 43,1.51779,13.21,3.39,1.33,72.76,0.59,8.59,0.00,0.00,1 44 | 44,1.52210,13.73,3.84,0.72,71.76,0.17,9.74,0.00,0.00,1 45 | 45,1.51786,12.73,3.43,1.19,72.95,0.62,8.76,0.00,0.30,1 46 | 46,1.51900,13.49,3.48,1.35,71.95,0.55,9.00,0.00,0.00,1 47 | 47,1.51869,13.19,3.37,1.18,72.72,0.57,8.83,0.00,0.16,1 48 | 48,1.52667,13.99,3.70,0.71,71.57,0.02,9.82,0.00,0.10,1 49 | 49,1.52223,13.21,3.77,0.79,71.99,0.13,10.02,0.00,0.00,1 50 | 50,1.51898,13.58,3.35,1.23,72.08,0.59,8.91,0.00,0.00,1 51 | 51,1.52320,13.72,3.72,0.51,71.75,0.09,10.06,0.00,0.16,1 52 | 52,1.51926,13.20,3.33,1.28,72.36,0.60,9.14,0.00,0.11,1 53 | 53,1.51808,13.43,2.87,1.19,72.84,0.55,9.03,0.00,0.00,1 54 | 54,1.51837,13.14,2.84,1.28,72.85,0.55,9.07,0.00,0.00,1 55 | 55,1.51778,13.21,2.81,1.29,72.98,0.51,9.02,0.00,0.09,1 56 | 56,1.51769,12.45,2.71,1.29,73.70,0.56,9.06,0.00,0.24,1 57 | 57,1.51215,12.99,3.47,1.12,72.98,0.62,8.35,0.00,0.31,1 58 | 58,1.51824,12.87,3.48,1.29,72.95,0.60,8.43,0.00,0.00,1 59 | 59,1.51754,13.48,3.74,1.17,72.99,0.59,8.03,0.00,0.00,1 60 | 60,1.51754,13.39,3.66,1.19,72.79,0.57,8.27,0.00,0.11,1 61 | 61,1.51905,13.60,3.62,1.11,72.64,0.14,8.76,0.00,0.00,1 62 | 62,1.51977,13.81,3.58,1.32,71.72,0.12,8.67,0.69,0.00,1 63 | 63,1.52172,13.51,3.86,0.88,71.79,0.23,9.54,0.00,0.11,1 64 | 64,1.52227,14.17,3.81,0.78,71.35,0.00,9.69,0.00,0.00,1 65 | 65,1.52172,13.48,3.74,0.90,72.01,0.18,9.61,0.00,0.07,1 66 | 66,1.52099,13.69,3.59,1.12,71.96,0.09,9.40,0.00,0.00,1 67 | 67,1.52152,13.05,3.65,0.87,72.22,0.19,9.85,0.00,0.17,1 68 | 68,1.52152,13.05,3.65,0.87,72.32,0.19,9.85,0.00,0.17,1 69 | 69,1.52152,13.12,3.58,0.90,72.20,0.23,9.82,0.00,0.16,1 70 | 70,1.52300,13.31,3.58,0.82,71.99,0.12,10.17,0.00,0.03,1 71 | 71,1.51574,14.86,3.67,1.74,71.87,0.16,7.36,0.00,0.12,2 72 | 72,1.51848,13.64,3.87,1.27,71.96,0.54,8.32,0.00,0.32,2 73 | 73,1.51593,13.09,3.59,1.52,73.10,0.67,7.83,0.00,0.00,2 74 | 74,1.51631,13.34,3.57,1.57,72.87,0.61,7.89,0.00,0.00,2 75 | 75,1.51596,13.02,3.56,1.54,73.11,0.72,7.90,0.00,0.00,2 76 | 76,1.51590,13.02,3.58,1.51,73.12,0.69,7.96,0.00,0.00,2 77 | 77,1.51645,13.44,3.61,1.54,72.39,0.66,8.03,0.00,0.00,2 78 | 78,1.51627,13.00,3.58,1.54,72.83,0.61,8.04,0.00,0.00,2 79 | 79,1.51613,13.92,3.52,1.25,72.88,0.37,7.94,0.00,0.14,2 80 | 80,1.51590,12.82,3.52,1.90,72.86,0.69,7.97,0.00,0.00,2 81 | 81,1.51592,12.86,3.52,2.12,72.66,0.69,7.97,0.00,0.00,2 82 | 82,1.51593,13.25,3.45,1.43,73.17,0.61,7.86,0.00,0.00,2 83 | 83,1.51646,13.41,3.55,1.25,72.81,0.68,8.10,0.00,0.00,2 84 | 84,1.51594,13.09,3.52,1.55,72.87,0.68,8.05,0.00,0.09,2 85 | 85,1.51409,14.25,3.09,2.08,72.28,1.10,7.08,0.00,0.00,2 86 | 86,1.51625,13.36,3.58,1.49,72.72,0.45,8.21,0.00,0.00,2 87 | 87,1.51569,13.24,3.49,1.47,73.25,0.38,8.03,0.00,0.00,2 88 | 88,1.51645,13.40,3.49,1.52,72.65,0.67,8.08,0.00,0.10,2 89 | 89,1.51618,13.01,3.50,1.48,72.89,0.60,8.12,0.00,0.00,2 90 | 90,1.51640,12.55,3.48,1.87,73.23,0.63,8.08,0.00,0.09,2 91 | 91,1.51841,12.93,3.74,1.11,72.28,0.64,8.96,0.00,0.22,2 92 | 92,1.51605,12.90,3.44,1.45,73.06,0.44,8.27,0.00,0.00,2 93 | 93,1.51588,13.12,3.41,1.58,73.26,0.07,8.39,0.00,0.19,2 94 | 94,1.51590,13.24,3.34,1.47,73.10,0.39,8.22,0.00,0.00,2 95 | 95,1.51629,12.71,3.33,1.49,73.28,0.67,8.24,0.00,0.00,2 96 | 96,1.51860,13.36,3.43,1.43,72.26,0.51,8.60,0.00,0.00,2 97 | 97,1.51841,13.02,3.62,1.06,72.34,0.64,9.13,0.00,0.15,2 98 | 98,1.51743,12.20,3.25,1.16,73.55,0.62,8.90,0.00,0.24,2 99 | 99,1.51689,12.67,2.88,1.71,73.21,0.73,8.54,0.00,0.00,2 100 | 100,1.51811,12.96,2.96,1.43,72.92,0.60,8.79,0.14,0.00,2 101 | 
101,1.51655,12.75,2.85,1.44,73.27,0.57,8.79,0.11,0.22,2 102 | 102,1.51730,12.35,2.72,1.63,72.87,0.70,9.23,0.00,0.00,2 103 | 103,1.51820,12.62,2.76,0.83,73.81,0.35,9.42,0.00,0.20,2 104 | 104,1.52725,13.80,3.15,0.66,70.57,0.08,11.64,0.00,0.00,2 105 | 105,1.52410,13.83,2.90,1.17,71.15,0.08,10.79,0.00,0.00,2 106 | 106,1.52475,11.45,0.00,1.88,72.19,0.81,13.24,0.00,0.34,2 107 | 107,1.53125,10.73,0.00,2.10,69.81,0.58,13.30,3.15,0.28,2 108 | 108,1.53393,12.30,0.00,1.00,70.16,0.12,16.19,0.00,0.24,2 109 | 109,1.52222,14.43,0.00,1.00,72.67,0.10,11.52,0.00,0.08,2 110 | 110,1.51818,13.72,0.00,0.56,74.45,0.00,10.99,0.00,0.00,2 111 | 111,1.52664,11.23,0.00,0.77,73.21,0.00,14.68,0.00,0.00,2 112 | 112,1.52739,11.02,0.00,0.75,73.08,0.00,14.96,0.00,0.00,2 113 | 113,1.52777,12.64,0.00,0.67,72.02,0.06,14.40,0.00,0.00,2 114 | 114,1.51892,13.46,3.83,1.26,72.55,0.57,8.21,0.00,0.14,2 115 | 115,1.51847,13.10,3.97,1.19,72.44,0.60,8.43,0.00,0.00,2 116 | 116,1.51846,13.41,3.89,1.33,72.38,0.51,8.28,0.00,0.00,2 117 | 117,1.51829,13.24,3.90,1.41,72.33,0.55,8.31,0.00,0.10,2 118 | 118,1.51708,13.72,3.68,1.81,72.06,0.64,7.88,0.00,0.00,2 119 | 119,1.51673,13.30,3.64,1.53,72.53,0.65,8.03,0.00,0.29,2 120 | 120,1.51652,13.56,3.57,1.47,72.45,0.64,7.96,0.00,0.00,2 121 | 121,1.51844,13.25,3.76,1.32,72.40,0.58,8.42,0.00,0.00,2 122 | 122,1.51663,12.93,3.54,1.62,72.96,0.64,8.03,0.00,0.21,2 123 | 123,1.51687,13.23,3.54,1.48,72.84,0.56,8.10,0.00,0.00,2 124 | 124,1.51707,13.48,3.48,1.71,72.52,0.62,7.99,0.00,0.00,2 125 | 125,1.52177,13.20,3.68,1.15,72.75,0.54,8.52,0.00,0.00,2 126 | 126,1.51872,12.93,3.66,1.56,72.51,0.58,8.55,0.00,0.12,2 127 | 127,1.51667,12.94,3.61,1.26,72.75,0.56,8.60,0.00,0.00,2 128 | 128,1.52081,13.78,2.28,1.43,71.99,0.49,9.85,0.00,0.17,2 129 | 129,1.52068,13.55,2.09,1.67,72.18,0.53,9.57,0.27,0.17,2 130 | 130,1.52020,13.98,1.35,1.63,71.76,0.39,10.56,0.00,0.18,2 131 | 131,1.52177,13.75,1.01,1.36,72.19,0.33,11.14,0.00,0.00,2 132 | 132,1.52614,13.70,0.00,1.36,71.24,0.19,13.44,0.00,0.10,2 133 | 133,1.51813,13.43,3.98,1.18,72.49,0.58,8.15,0.00,0.00,2 134 | 134,1.51800,13.71,3.93,1.54,71.81,0.54,8.21,0.00,0.15,2 135 | 135,1.51811,13.33,3.85,1.25,72.78,0.52,8.12,0.00,0.00,2 136 | 136,1.51789,13.19,3.90,1.30,72.33,0.55,8.44,0.00,0.28,2 137 | 137,1.51806,13.00,3.80,1.08,73.07,0.56,8.38,0.00,0.12,2 138 | 138,1.51711,12.89,3.62,1.57,72.96,0.61,8.11,0.00,0.00,2 139 | 139,1.51674,12.79,3.52,1.54,73.36,0.66,7.90,0.00,0.00,2 140 | 140,1.51674,12.87,3.56,1.64,73.14,0.65,7.99,0.00,0.00,2 141 | 141,1.51690,13.33,3.54,1.61,72.54,0.68,8.11,0.00,0.00,2 142 | 142,1.51851,13.20,3.63,1.07,72.83,0.57,8.41,0.09,0.17,2 143 | 143,1.51662,12.85,3.51,1.44,73.01,0.68,8.23,0.06,0.25,2 144 | 144,1.51709,13.00,3.47,1.79,72.72,0.66,8.18,0.00,0.00,2 145 | 145,1.51660,12.99,3.18,1.23,72.97,0.58,8.81,0.00,0.24,2 146 | 146,1.51839,12.85,3.67,1.24,72.57,0.62,8.68,0.00,0.35,2 147 | 147,1.51769,13.65,3.66,1.11,72.77,0.11,8.60,0.00,0.00,3 148 | 148,1.51610,13.33,3.53,1.34,72.67,0.56,8.33,0.00,0.00,3 149 | 149,1.51670,13.24,3.57,1.38,72.70,0.56,8.44,0.00,0.10,3 150 | 150,1.51643,12.16,3.52,1.35,72.89,0.57,8.53,0.00,0.00,3 151 | 151,1.51665,13.14,3.45,1.76,72.48,0.60,8.38,0.00,0.17,3 152 | 152,1.52127,14.32,3.90,0.83,71.50,0.00,9.49,0.00,0.00,3 153 | 153,1.51779,13.64,3.65,0.65,73.00,0.06,8.93,0.00,0.00,3 154 | 154,1.51610,13.42,3.40,1.22,72.69,0.59,8.32,0.00,0.00,3 155 | 155,1.51694,12.86,3.58,1.31,72.61,0.61,8.79,0.00,0.00,3 156 | 156,1.51646,13.04,3.40,1.26,73.01,0.52,8.58,0.00,0.00,3 157 | 157,1.51655,13.41,3.39,1.28,72.64,0.52,8.65,0.00,0.00,3 158 | 
158,1.52121,14.03,3.76,0.58,71.79,0.11,9.65,0.00,0.00,3 159 | 159,1.51776,13.53,3.41,1.52,72.04,0.58,8.79,0.00,0.00,3 160 | 160,1.51796,13.50,3.36,1.63,71.94,0.57,8.81,0.00,0.09,3 161 | 161,1.51832,13.33,3.34,1.54,72.14,0.56,8.99,0.00,0.00,3 162 | 162,1.51934,13.64,3.54,0.75,72.65,0.16,8.89,0.15,0.24,3 163 | 163,1.52211,14.19,3.78,0.91,71.36,0.23,9.14,0.00,0.37,3 164 | 164,1.51514,14.01,2.68,3.50,69.89,1.68,5.87,2.20,0.00,5 165 | 165,1.51915,12.73,1.85,1.86,72.69,0.60,10.09,0.00,0.00,5 166 | 166,1.52171,11.56,1.88,1.56,72.86,0.47,11.41,0.00,0.00,5 167 | 167,1.52151,11.03,1.71,1.56,73.44,0.58,11.62,0.00,0.00,5 168 | 168,1.51969,12.64,0.00,1.65,73.75,0.38,11.53,0.00,0.00,5 169 | 169,1.51666,12.86,0.00,1.83,73.88,0.97,10.17,0.00,0.00,5 170 | 170,1.51994,13.27,0.00,1.76,73.03,0.47,11.32,0.00,0.00,5 171 | 171,1.52369,13.44,0.00,1.58,72.22,0.32,12.24,0.00,0.00,5 172 | 172,1.51316,13.02,0.00,3.04,70.48,6.21,6.96,0.00,0.00,5 173 | 173,1.51321,13.00,0.00,3.02,70.70,6.21,6.93,0.00,0.00,5 174 | 174,1.52043,13.38,0.00,1.40,72.25,0.33,12.50,0.00,0.00,5 175 | 175,1.52058,12.85,1.61,2.17,72.18,0.76,9.70,0.24,0.51,5 176 | 176,1.52119,12.97,0.33,1.51,73.39,0.13,11.27,0.00,0.28,5 177 | 177,1.51905,14.00,2.39,1.56,72.37,0.00,9.57,0.00,0.00,6 178 | 178,1.51937,13.79,2.41,1.19,72.76,0.00,9.77,0.00,0.00,6 179 | 179,1.51829,14.46,2.24,1.62,72.38,0.00,9.26,0.00,0.00,6 180 | 180,1.51852,14.09,2.19,1.66,72.67,0.00,9.32,0.00,0.00,6 181 | 181,1.51299,14.40,1.74,1.54,74.55,0.00,7.59,0.00,0.00,6 182 | 182,1.51888,14.99,0.78,1.74,72.50,0.00,9.95,0.00,0.00,6 183 | 183,1.51916,14.15,0.00,2.09,72.74,0.00,10.88,0.00,0.00,6 184 | 184,1.51969,14.56,0.00,0.56,73.48,0.00,11.22,0.00,0.00,6 185 | 185,1.51115,17.38,0.00,0.34,75.41,0.00,6.65,0.00,0.00,6 186 | 186,1.51131,13.69,3.20,1.81,72.81,1.76,5.43,1.19,0.00,7 187 | 187,1.51838,14.32,3.26,2.22,71.25,1.46,5.79,1.63,0.00,7 188 | 188,1.52315,13.44,3.34,1.23,72.38,0.60,8.83,0.00,0.00,7 189 | 189,1.52247,14.86,2.20,2.06,70.26,0.76,9.76,0.00,0.00,7 190 | 190,1.52365,15.79,1.83,1.31,70.43,0.31,8.61,1.68,0.00,7 191 | 191,1.51613,13.88,1.78,1.79,73.10,0.00,8.67,0.76,0.00,7 192 | 192,1.51602,14.85,0.00,2.38,73.28,0.00,8.76,0.64,0.09,7 193 | 193,1.51623,14.20,0.00,2.79,73.46,0.04,9.04,0.40,0.09,7 194 | 194,1.51719,14.75,0.00,2.00,73.02,0.00,8.53,1.59,0.08,7 195 | 195,1.51683,14.56,0.00,1.98,73.29,0.00,8.52,1.57,0.07,7 196 | 196,1.51545,14.14,0.00,2.68,73.39,0.08,9.07,0.61,0.05,7 197 | 197,1.51556,13.87,0.00,2.54,73.23,0.14,9.41,0.81,0.01,7 198 | 198,1.51727,14.70,0.00,2.34,73.28,0.00,8.95,0.66,0.00,7 199 | 199,1.51531,14.38,0.00,2.66,73.10,0.04,9.08,0.64,0.00,7 200 | 200,1.51609,15.01,0.00,2.51,73.05,0.05,8.83,0.53,0.00,7 201 | 201,1.51508,15.15,0.00,2.25,73.50,0.00,8.34,0.63,0.00,7 202 | 202,1.51653,11.95,0.00,1.19,75.18,2.70,8.93,0.00,0.00,7 203 | 203,1.51514,14.85,0.00,2.42,73.72,0.00,8.39,0.56,0.00,7 204 | 204,1.51658,14.80,0.00,1.99,73.11,0.00,8.28,1.71,0.00,7 205 | 205,1.51617,14.95,0.00,2.27,73.30,0.00,8.71,0.67,0.00,7 206 | 206,1.51732,14.95,0.00,1.80,72.99,0.00,8.61,1.55,0.00,7 207 | 207,1.51645,14.94,0.00,1.87,73.11,0.00,8.67,1.38,0.00,7 208 | 208,1.51831,14.39,0.00,1.82,72.86,1.41,6.47,2.88,0.00,7 209 | 209,1.51640,14.37,0.00,2.74,72.85,0.00,9.45,0.54,0.00,7 210 | 210,1.51623,14.14,0.00,2.88,72.61,0.08,9.18,1.06,0.00,7 211 | 211,1.51685,14.92,0.00,1.99,73.06,0.00,8.40,1.59,0.00,7 212 | 212,1.52065,14.36,0.00,2.02,73.42,0.00,8.44,1.64,0.00,7 213 | 213,1.51651,14.38,0.00,1.94,73.61,0.00,8.48,1.57,0.00,7 214 | 214,1.51711,14.23,0.00,2.08,73.36,0.00,8.62,1.67,0.00,7 215 | 
-------------------------------------------------------------------------------- /Sales_Data_Year_Operation.txt: -------------------------------------------------------------------------------- 1 | 2 | create database sales 3 | use sales 4 | CREATE TABLE sales1 ( 5 | order_id VARCHAR(15) NOT NULL, 6 | order_date VARCHAR(15) NOT NULL, 7 | ship_date VARCHAR(15) NOT NULL, 8 | ship_mode VARCHAR(14) NOT NULL, 9 | customer_name VARCHAR(22) NOT NULL, 10 | segment VARCHAR(11) NOT NULL, 11 | state VARCHAR(36) NOT NULL, 12 | country VARCHAR(32) NOT NULL, 13 | market VARCHAR(6) NOT NULL, 14 | region VARCHAR(14) NOT NULL, 15 | product_id VARCHAR(16) NOT NULL, 16 | category VARCHAR(15) NOT NULL, 17 | sub_category VARCHAR(11) NOT NULL, 18 | product_name VARCHAR(127) NOT NULL, 19 | sales DECIMAL(38, 0) NOT NULL, 20 | quantity DECIMAL(38, 0) NOT NULL, 21 | discount DECIMAL(38, 3) NOT NULL, 22 | profit DECIMAL(38, 8) NOT NULL, 23 | shipping_cost DECIMAL(38, 2) NOT NULL, 24 | order_priority VARCHAR(8) NOT NULL, 25 | `year` DECIMAL(38, 0) NOT NULL 26 | ); 27 | SET SESSION sql_mode = '' 28 | 29 | load data infile 30 | 'D:/sales_data_final.csv' 31 | into table sales1 32 | fields terminated by ',' 33 | enclosed by '"' 34 | lines terminated by '\n' 35 | ignore 1 rows 36 | 37 | select * from sales1 38 | 39 | select str_to_date(order_date,'%m/%d/%y') from sales1 40 | 41 | alter table sales1 42 | add column order_date_new date after order_date 43 | 44 | update sales1 45 | set order_date_new = str_to_date(order_date,'%m/%d/%Y') 46 | 47 | alter table sales1 48 | add column ship_date_new date after ship_date 49 | 50 | update sales1 51 | set ship_date_new = str_to_date(ship_date, '%m/%d/%Y') 52 | 53 | select * from sales1 54 | 55 | SET SQL_SAFE_UPDATES = 0; 56 | 57 | select * from sales1 where ship_date_new = '2011-01-05' 58 | select * from sales1 where ship_date_new > '2011-01-05' 59 | select * from sales1 where ship_date_new < '2011-01-05' 60 | select * from sales1 where ship_date_new between '2011-01-05' and '2011-08-30' 61 | select now() 62 | select curdate() 63 | select curtime() 64 | 65 | select * from sales1 where ship_date_new < date_sub(now() , interval 1 week) 66 | 67 | select date_sub(now() , interval 1 week) 68 | select date_sub(now() , interval 30 day) 69 | select date_sub(now() , interval 30 year) 70 | select year(now()) 71 | select dayname('2022-09-20 21:10:30') 72 | 73 | alter table sales1 74 | add column flag date after order_id 75 | 76 | update sales1 77 | set flag = now() 78 | 79 | select * from sales1 80 | 81 | ALTER TABLE sales1 82 | modify column year datetime; 83 | 84 | alter table sales1 85 | modify column Year_New int; 86 | 87 | alter table sales1 88 | modify column Month_New int; 89 | 90 | alter table sales1 91 | modify column Day_New int; 92 | 93 | 94 | update sales1 set Month_new= month(order_date_new) 95 | update sales1 set day_new= day(order_date_new); 96 | update sales1 set year_new= year(order_date_new); 97 | 98 | 99 | select * from sales1 limit 5 100 | 101 | select month(order_date_new) from sales1 102 | 103 | select year_new , avg(sales) from sales1 group by year_new 104 | 105 | select year_new , sum(sales) from sales1 group by year_new 106 | 107 | select year_new , min(sales) from sales1 group by year_new 108 | select year_new , max(sales) from sales1 group by year_new 109 | 110 | select year_new , sum(quantity) from sales1 group by year_new 111 | 112 | 113 | select (sales*discount+shipping_cost) as CTC from sales1; 114 | select order_id ,discount , if(discount > 0 ,'yes' , 'no') as 
discount_flag from sales1 115 | 116 | alter table sales1 117 | modify column discount_flag varchar(20) after discount 118 | 119 | select * from sales1 ; 120 | 121 | select discount_flag , count(*) from sales1 group by discount_flag 122 | 123 | select count(*) from sales1 where discount > 0 124 | 125 | update sales1 126 | set discount_flag = if(discount > 0, 'yes', 'no'); 127 | 128 | -------------------------------------------------------------------------------- /Store Producer With Employee Table.sql: -------------------------------------------------------------------------------- 1 | CREATE DATABASE N1; 2 | USE N1; 3 | 4 | CREATE TABLE employees ( 5 | employee_id INT AUTO_INCREMENT PRIMARY KEY, -- Unique identifier for each employee 6 | employee_name VARCHAR(100) NOT NULL, -- Name of the employee 7 | department_id INT NOT NULL, -- ID of the department the employee belongs to 8 | salary DECIMAL(10, 2) NOT NULL -- Salary of the employee 9 | ); 10 | 11 | 12 | INSERT INTO employees (employee_name, department_id, salary) VALUES 13 | ('Alice Johnson', 101, 60000.00), 14 | ('Bob Smith', 102, 55000.00), 15 | ('Charlie Brown', 103, 45000.00), 16 | ('Diana Prince', 101, 70000.00), 17 | ('Edward Norton', 102, 50000.00), 18 | ('Fiona Apple', 103, 52000.00), 19 | ('George Martin', 101, 62000.00), 20 | ('Hannah Lee', 104, 58000.00), 21 | ('Ian Wright', 104, 47000.00), 22 | ('Julia Roberts', 102, 65000.00), 23 | ('Kevin Hart', 103, 54000.00), 24 | ('Liam Neeson', 101, 63000.00), 25 | ('Monica Bell', 104, 51000.00), 26 | ('Nathan Drake', 101, 72000.00), 27 | ('Olivia Wilde', 102, 69000.00), 28 | ('Paul Walker', 103, 48000.00), 29 | ('Quincy Adams', 104, 53000.00), 30 | ('Rachel Green', 101, 75000.00), 31 | ('Steve Rogers', 102, 46000.00), 32 | ('Tina Fey', 103, 61000.00); 33 | 34 | 35 | /* Insert Data into a Table */ 36 | 37 | DELIMITER $$ 38 | 39 | CREATE PROCEDURE AddNewEmployee( 40 | IN emp_name VARCHAR(100), 41 | IN dept_id INT, 42 | IN salary DECIMAL(10, 2) 43 | ) 44 | BEGIN 45 | INSERT INTO employees (employee_name, department_id, salary) 46 | VALUES (emp_name, dept_id, salary); 47 | END $$ 48 | 49 | DELIMITER ; 50 | 51 | CALL AddNewEmployee('John Doe', 101, 75000.00); 52 | 53 | /* Update Employee Salary */ 54 | 55 | DELIMITER $$ 56 | 57 | CREATE PROCEDURE UpdateEmployeeSalary( 58 | IN emp_id INT, 59 | IN new_salary DECIMAL(10, 2) 60 | ) 61 | BEGIN 62 | UPDATE employees 63 | SET salary = new_salary 64 | WHERE employee_id = emp_id; 65 | END $$ 66 | 67 | DELIMITER ; 68 | 69 | CALL UpdateEmployeeSalary(1, 80000.00); 70 | 71 | /* Retrieve Total Employees in a Department */ 72 | 73 | DELIMITER $$ 74 | 75 | CREATE PROCEDURE CountEmployeesInDepartment( 76 | IN dept_id INT, 77 | OUT total_employees INT 78 | ) 79 | BEGIN 80 | SELECT COUNT(*) INTO total_employees 81 | FROM employees 82 | WHERE department_id = dept_id; 83 | END $$ 84 | 85 | DELIMITER ; 86 | 87 | CALL CountEmployeesInDepartment(101, @total); 88 | SELECT @total; 89 | 90 | 91 | /* Delete an Employee by ID */ 92 | 93 | DELIMITER $$ 94 | 95 | CREATE PROCEDURE DeleteEmployeeById( 96 | IN emp_id INT 97 | ) 98 | BEGIN 99 | DELETE FROM employees 100 | WHERE employee_id = emp_id; 101 | END $$ 102 | 103 | DELIMITER ; 104 | 105 | CALL DeleteEmployeeById(1); 106 | 107 | /*Fetch Employees with Salary Greater Than a Given Amount */ 108 | DELIMITER $$ 109 | 110 | CREATE PROCEDURE GetHighSalaryEmployees( 111 | IN min_salary DECIMAL(10, 2) 112 | ) 113 | BEGIN 114 | SELECT employee_id, employee_name, salary 115 | FROM employees 116 | WHERE salary > min_salary; 
--------------------------------------------------------------------------------
/Store Producer With Employee Table.sql:
--------------------------------------------------------------------------------
CREATE DATABASE N1;
USE N1;

CREATE TABLE employees (
    employee_id INT AUTO_INCREMENT PRIMARY KEY,  -- Unique identifier for each employee
    employee_name VARCHAR(100) NOT NULL,         -- Name of the employee
    department_id INT NOT NULL,                  -- ID of the department the employee belongs to
    salary DECIMAL(10, 2) NOT NULL               -- Salary of the employee
);

INSERT INTO employees (employee_name, department_id, salary) VALUES
('Alice Johnson', 101, 60000.00),
('Bob Smith', 102, 55000.00),
('Charlie Brown', 103, 45000.00),
('Diana Prince', 101, 70000.00),
('Edward Norton', 102, 50000.00),
('Fiona Apple', 103, 52000.00),
('George Martin', 101, 62000.00),
('Hannah Lee', 104, 58000.00),
('Ian Wright', 104, 47000.00),
('Julia Roberts', 102, 65000.00),
('Kevin Hart', 103, 54000.00),
('Liam Neeson', 101, 63000.00),
('Monica Bell', 104, 51000.00),
('Nathan Drake', 101, 72000.00),
('Olivia Wilde', 102, 69000.00),
('Paul Walker', 103, 48000.00),
('Quincy Adams', 104, 53000.00),
('Rachel Green', 101, 75000.00),
('Steve Rogers', 102, 46000.00),
('Tina Fey', 103, 61000.00);

/* Insert Data into a Table */

DELIMITER $$

CREATE PROCEDURE AddNewEmployee(
    IN emp_name VARCHAR(100),
    IN dept_id INT,
    IN salary DECIMAL(10, 2)
)
BEGIN
    INSERT INTO employees (employee_name, department_id, salary)
    VALUES (emp_name, dept_id, salary);
END $$

DELIMITER ;

CALL AddNewEmployee('John Doe', 101, 75000.00);

/* Update Employee Salary */

DELIMITER $$

CREATE PROCEDURE UpdateEmployeeSalary(
    IN emp_id INT,
    IN new_salary DECIMAL(10, 2)
)
BEGIN
    UPDATE employees
    SET salary = new_salary
    WHERE employee_id = emp_id;
END $$

DELIMITER ;

CALL UpdateEmployeeSalary(1, 80000.00);

/* Retrieve Total Employees in a Department */

DELIMITER $$

CREATE PROCEDURE CountEmployeesInDepartment(
    IN dept_id INT,
    OUT total_employees INT
)
BEGIN
    SELECT COUNT(*) INTO total_employees
    FROM employees
    WHERE department_id = dept_id;
END $$

DELIMITER ;

CALL CountEmployeesInDepartment(101, @total);
SELECT @total;

/* Delete an Employee by ID */

DELIMITER $$

CREATE PROCEDURE DeleteEmployeeById(
    IN emp_id INT
)
BEGIN
    DELETE FROM employees
    WHERE employee_id = emp_id;
END $$

DELIMITER ;

CALL DeleteEmployeeById(1);

/* Fetch Employees with Salary Greater Than a Given Amount */

DELIMITER $$

CREATE PROCEDURE GetHighSalaryEmployees(
    IN min_salary DECIMAL(10, 2)
)
BEGIN
    SELECT employee_id, employee_name, salary
    FROM employees
    WHERE salary > min_salary;
END $$

DELIMITER ;

CALL GetHighSalaryEmployees(50000.00);

/* Retrieve Employees by Salary Range */

DELIMITER $$

CREATE PROCEDURE GetEmployeesBySalaryRange(
    IN min_salary DECIMAL(10, 2),
    IN max_salary DECIMAL(10, 2)
)
BEGIN
    SELECT employee_id, employee_name, department_id, salary
    FROM employees
    WHERE salary BETWEEN min_salary AND max_salary;
END $$

DELIMITER ;

CALL GetEmployeesBySalaryRange(50000.00, 70000.00);

/* Count Employees in Each Department */

DELIMITER $$

CREATE PROCEDURE CountEmployeesByDepartment()
BEGIN
    SELECT department_id, COUNT(*) AS employee_count
    FROM employees
    GROUP BY department_id;
END $$

DELIMITER ;

CALL CountEmployeesByDepartment();

/* Increase Salary by Percentage */

DELIMITER $$

CREATE PROCEDURE IncreaseSalaryByPercentage(
    IN dept_id INT,
    IN percentage DECIMAL(5, 2)
)
BEGIN
    UPDATE employees
    SET salary = salary + (salary * (percentage / 100))
    WHERE department_id = dept_id;
END $$

DELIMITER ;

CALL IncreaseSalaryByPercentage(101, 10);

/* Delete Employees by Department */

DELIMITER $$

CREATE PROCEDURE DeleteEmployeesByDepartment(
    IN dept_id INT
)
BEGIN
    DELETE FROM employees
    WHERE department_id = dept_id;
END $$

DELIMITER ;

CALL DeleteEmployeesByDepartment(104);

/* Get Employees Above Average Salary */

DELIMITER $$

CREATE PROCEDURE GetEmployeesAboveAverageSalary()
BEGIN
    SELECT employee_id, employee_name, department_id, salary
    FROM employees
    WHERE salary > (SELECT AVG(salary) FROM employees);
END $$

DELIMITER ;

CALL GetEmployeesAboveAverageSalary();

--------------------------------------------------------------------------------
/Store_Procedures_Case.tex:
--------------------------------------------------------------------------------
/* Procedure to find out Square Root of any number */
create database if not exists N1;
use N1;

/* Square_Root of any Number */

delimiter $$
create procedure Sqrt_Root(a int, out b float)
begin
set b = sqrt(a);
end $$
delimiter ;

call Sqrt_Root(64, @b);
select @b;

/* INOUT Procedure */

Delimiter $$
create procedure SetCounter(INOUT counter INT, In inc INT)
begin
set counter = counter + inc;
end $$
Delimiter ;

set @counter = 1;
call SetCounter(@counter, 1);
call SetCounter(@counter, 3);
select @counter;
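## An INOUT parameter is both read and written through the caller's variable:
## @counter starts at 1, becomes 2 after the first call, then 5 after the second.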
/* User Variables */

Delimiter //
create procedure User_Var()
begin
set @x = 15;
set @y = 10;
select @x,@y, @x-@y;
end//
Delimiter ;

call User_Var();

use N1;
/* CASE Statement */
Delimiter $$
create procedure Student_Case(In S_Sub varchar(30), OUT S_Course varchar(30))
Begin
declare sub varchar(20);
select subject into sub from student where S_Sub = subject;

case sub
when 'Computer' then
set S_Course = 'B.Tech';
when 'History' then
set S_Course = 'BA';
else
set S_Course = ' Subject is not available ';
End case;
End $$
Delimiter ;

call Student_Case ('Computer', @S_Course);
select @S_Course;

call Student_Case ('History', @S_Course);
select @S_Course;

call Student_Case ('Economics', @S_Course);
select @S_Course;

--------------------------------------------------------------------------------
/Update_Operation.txt:
--------------------------------------------------------------------------------
create database customer;
show databases;
use customer;
create table customer(
id integer auto_increment,
first_name varchar(25),
last_name varchar(25),
salary integer,
primary key(id)
);
select * from customer;
## insert records in table
insert into customer(first_name,last_name,salary)
values
('jalpa','patel',5000),
('xyz','xx1',6000),
('xyz1','xx2',7000),
('xyz2','xx3',8000),
('xyz3','xx4',9000),
('xyz4','xx4',null);

select * from customer;
select * from customer where salary is null;
select * from customer where salary is not null;

## sql update statement to replace null values
update customer set salary=5000 where id = 6;
select * from customer;

## sql delete statement

delete from customer where id=5;

## sql alter table
## add columns in existing table

alter table customer add email varchar(25);

## add a dob column, then change its type
alter table customer add dob date;
alter table customer modify dob year;
## alter table to drop column
alter table customer drop column email;

--------------------------------------------------------------------------------
/User Define Functions.sql:
--------------------------------------------------------------------------------
CREATE DATABASE UDF;
USE UDF;

CREATE TABLE employees (
    id INT AUTO_INCREMENT PRIMARY KEY,
    first_name VARCHAR(50),
    last_name VARCHAR(50),
    age INT,
    department VARCHAR(50),
    salary DECIMAL(10,2)
);

INSERT INTO employees (first_name, last_name, age, department, salary)
VALUES
('John', 'Doe', 30, 'HR', 50000.00),
('Jane', 'Smith', 27, 'IT', 60000.00),
('Mike', 'Brown', 35, 'Finance', 75000.00),
('Sara', 'Johnson', 29, 'Marketing', 55000.00),
('David', 'White', 40, 'Sales', 80000.00),
('Emily', 'Taylor', 32, 'IT', 62000.00),
('Brian', 'Adams', 45, 'Finance', 90000.00),
('Sophia', 'Clark', 26, 'HR', 48000.00),
('James', 'Lewis', 38, 'Marketing', 72000.00),
('Olivia', 'Walker', 29, 'IT', 64000.00),
('Ethan', 'Harris', 31, 'Finance', 78000.00),
('Ava', 'Martin', 27, 'Sales', 55000.00),
('William', 'Thompson', 35, 'IT', 70000.00),
('Mia', 'Anderson', 28, 'HR', 52000.00),
('Benjamin', 'Thomas', 37, 'Marketing', 75000.00),
('Charlotte', 'Robinson', 30, 'Finance', 81000.00);

/* Get Full Name of an Employee */
DELIMITER $$
CREATE FUNCTION get_full_name(emp_id INT) RETURNS VARCHAR(100)
DETERMINISTIC
BEGIN
    DECLARE full_name VARCHAR(100);
    SELECT CONCAT(first_name, ' ', last_name) INTO full_name FROM employees WHERE id = emp_id;
    RETURN full_name;
END$$
DELIMITER ;

/* Calculate Annual Salary */
DELIMITER $$
CREATE FUNCTION get_annual_salary(emp_id INT) RETURNS DECIMAL(10,2)
DETERMINISTIC
BEGIN
    DECLARE annual_salary DECIMAL(10,2);
    SELECT salary * 12 INTO annual_salary FROM employees WHERE id = emp_id;
    RETURN annual_salary;
END$$
DELIMITER ;
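/* Usage sketch (not part of the original script): scalar UDFs are called per row
   just like built-in functions. */
SELECT id, get_full_name(id) AS full_name, get_annual_salary(id) AS annual_salary
FROM employees;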
/* Get Employee Age Category */
DELIMITER $$
CREATE FUNCTION age_category(emp_id INT) RETURNS VARCHAR(20)
DETERMINISTIC
BEGIN
    DECLARE age INT;
    DECLARE category VARCHAR(20);

    SELECT employees.age INTO age FROM employees WHERE id = emp_id;

    IF age < 25 THEN
        SET category = 'Young';
    ELSEIF age BETWEEN 25 AND 40 THEN
        SET category = 'Middle Age';
    ELSE
        SET category = 'Senior';
    END IF;

    RETURN category;
END$$
DELIMITER ;

/* Get Department Name of an Employee */
DELIMITER $$
CREATE FUNCTION get_department(emp_id INT) RETURNS VARCHAR(50)
DETERMINISTIC
BEGIN
    DECLARE dept VARCHAR(50);
    SELECT department INTO dept FROM employees WHERE id = emp_id;
    RETURN dept;
END$$
DELIMITER ;

/* Calculate Bonus (10% of Salary) */
DELIMITER $$
CREATE FUNCTION calculate_bonus(emp_id INT) RETURNS DECIMAL(10,2)
DETERMINISTIC
BEGIN
    DECLARE bonus DECIMAL(10,2);
    SELECT salary * 0.10 INTO bonus FROM employees WHERE id = emp_id;
    RETURN bonus;
END$$
DELIMITER ;

/* Check if Employee is Eligible for Promotion (Salary > 70k) */
DELIMITER $$
CREATE FUNCTION is_eligible_for_promotion(emp_id INT) RETURNS VARCHAR(10)
DETERMINISTIC
BEGIN
    DECLARE salary DECIMAL(10,2);
    DECLARE eligible VARCHAR(10);

    SELECT employees.salary INTO salary FROM employees WHERE id = emp_id;

    IF salary > 70000 THEN
        SET eligible = 'Yes';
    ELSE
        SET eligible = 'No';
    END IF;

    RETURN eligible;
END$$
DELIMITER ;

/* Calculate Tax (20% of Salary) */
DELIMITER $$
CREATE FUNCTION calculate_tax(emp_id INT) RETURNS DECIMAL(10,2)
DETERMINISTIC
BEGIN
    DECLARE tax DECIMAL(10,2);
    SELECT salary * 0.20 INTO tax FROM employees WHERE id = emp_id;
    RETURN tax;
END$$
DELIMITER ;

/* Get Employee Count in a Department */
DELIMITER $$
CREATE FUNCTION employee_count(dept_name VARCHAR(50)) RETURNS INT
DETERMINISTIC
BEGIN
    DECLARE count_emp INT;
    SELECT COUNT(*) INTO count_emp FROM employees WHERE department = dept_name;
    RETURN count_emp;
END$$
DELIMITER ;

/* Get Average Salary in a Department */
DELIMITER $$
CREATE FUNCTION average_salary(dept_name VARCHAR(50)) RETURNS DECIMAL(10,2)
DETERMINISTIC
BEGIN
    DECLARE avg_salary DECIMAL(10,2);
    SELECT AVG(salary) INTO avg_salary FROM employees WHERE department = dept_name;
    RETURN avg_salary;
END$$
DELIMITER ;

/* Get Highest Salary in a Department */
DELIMITER $$
CREATE FUNCTION highest_salary(dept_name VARCHAR(50)) RETURNS DECIMAL(10,2)
DETERMINISTIC
BEGIN
    DECLARE max_salary DECIMAL(10,2);
    SELECT MAX(salary) INTO max_salary FROM employees WHERE department = dept_name;
    RETURN max_salary;
END$$
DELIMITER ;
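/* Usage sketch (not part of the original script): one row per distinct department,
   feeding the department name back into the department-level UDFs above. */
SELECT DISTINCT department,
       employee_count(department) AS headcount,
       average_salary(department) AS avg_salary,
       highest_salary(department) AS max_salary
FROM employees;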
/* Get Experience Level Based on Age */
DELIMITER $$

CREATE FUNCTION experience_level(emp_id INT) RETURNS VARCHAR(20)
DETERMINISTIC
BEGIN
    DECLARE age INT;
    DECLARE level VARCHAR(20);

    -- Get the employee's age
    SELECT employees.age INTO age FROM employees WHERE id = emp_id;

    -- Define experience level based on age
    IF age < 30 THEN
        SET level = 'Junior';
    ELSEIF age BETWEEN 30 AND 40 THEN
        SET level = 'Mid-Level';
    ELSE
        SET level = 'Senior';
    END IF;

    RETURN level;
END$$

DELIMITER ;

/* Get Salary Grade */
DELIMITER $$

CREATE FUNCTION salary_grade(emp_id INT) RETURNS CHAR(1)
DETERMINISTIC
BEGIN
    DECLARE salary DECIMAL(10,2);
    DECLARE grade CHAR(1);

    -- Get the employee's salary
    SELECT employees.salary INTO salary FROM employees WHERE id = emp_id;

    -- Define salary grade
    IF salary >= 90000 THEN
        SET grade = 'A';
    ELSEIF salary BETWEEN 75000 AND 89999 THEN
        SET grade = 'B';
    ELSEIF salary BETWEEN 60000 AND 74999 THEN
        SET grade = 'C';
    ELSEIF salary BETWEEN 45000 AND 59999 THEN
        SET grade = 'D';
    ELSE
        SET grade = 'E';
    END IF;

    RETURN grade;
END$$

DELIMITER ;

--------------------------------------------------------------------------------
/User_Define_Functions_Loop_IfElse.txt:
--------------------------------------------------------------------------------
use sales;

select * from sales1;

DELIMITER $$
create function add_to_column(a INT)
returns INT
DETERMINISTIC
BEGIN
DECLARE b int ;
set b = a + 20 ;
return b ;
end $$
DELIMITER ;

DELIMITER $$
create function final_profits(profit int , discount int )
returns int
Deterministic
Begin
Declare final_profit int ;
set final_profit = profit - discount ;
return final_profit;
end $$
DELIMITER ;

select profit, discount , final_profits(profit, discount) from sales1 ;

DELIMITER $$
create function final_profits_real(profit decimal(20,4) , discount decimal(10,4) , sales decimal(10,4) )
returns int
Deterministic
Begin
Declare final_profit int ;
set final_profit = profit - sales * discount ;
return final_profit;
end $$
DELIMITER ;

select profit, discount ,sales , final_profits_real(profit, discount,sales) from sales1 ;

select add_to_column(15);

select * from sales1;

## To Convert integer to string
select quantity , add_to_column(quantity) from sales1;

DELIMITER $$
create function int_to_str(a INT)
returns varchar(30)
DETERMINISTIC
BEGIN
DECLARE b varchar(30) ;
set b = a ;
return b ;
end $$
DELIMITER ;

select int_to_str(45);

select * from sales1;

select quantity, int_to_str(quantity) from sales1 ;

select max(sales) , min(sales) from sales1;

/* 1 - 100 - super affordable product
100 - 300 - affordable
300 - 600 - moderate price
600 + - expensive */

## IF-ELSE Condition

DELIMITER &&
create function mark_sales2(sales int )
returns varchar(30)
DETERMINISTIC
begin
declare flag_sales varchar(30);
if sales <= 100 then
set flag_sales = "super affordable product" ;
elseif sales > 100 and sales <= 300 then
set flag_sales = "affordable" ;
elseif sales > 300 and sales <= 600 then
set flag_sales = "moderate price" ;
else
set flag_sales = "expensive" ;
end if ;
return flag_sales;
end &&
DELIMITER ;

select mark_sales2(100);

select sales , mark_sales2(sales) from sales1 ;
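## The same banding can also be written inline with a searched CASE expression
## (an alternative sketch, not in the original file):
select sales ,
       case
           when sales <= 100 then 'super affordable product'
           when sales <= 300 then 'affordable'
           when sales <= 600 then 'moderate price'
           else 'expensive'
       end as flag_sales
from sales1 ;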
create table loop_table(val int);

## Create Loop

Delimiter $$
create procedure insert_data()
Begin
set @var = 10 ;
generate_data : loop
insert into loop_table values (@var);
set @var = @var + 1 ;
if @var = 100 then
leave generate_data;
end if ;
end loop generate_data;
End $$
Delimiter ;

call insert_data();

select * from loop_table;

--------------------------------------------------------------------------------
/Window_Function_RowNumber_Rank_DenseRank.txt:
--------------------------------------------------------------------------------
/* Window functions apply aggregate and ranking functions over a particular window (a set of rows).
The OVER clause is used with window functions to define that window.
The OVER clause does two things :
Partitions rows to form sets of rows. (PARTITION BY clause is used)
Orders rows within those partitions into a particular order. (ORDER BY clause is used) */

create database if not exists win1_fun;
use win1_fun;
create table if not exists bit_students(
student_id int ,
student_batch varchar(40),
student_name varchar(40),
student_stream varchar(30),
students_marks int ,
student_mail_id varchar(50));

insert into bit_students values(119 ,'fsds' , 'jalpa','EC',60,'jalpa@gmail.com');

select * from bit_students;
insert into bit_students values(100 ,'fsda' , 'saurabh','cs',80,'saurabh@gmail.com'),
(102 ,'fsda' , 'sanket','cs',81,'sanket@gmail.com'),
(103 ,'fsda' , 'shyam','cs',80,'shyam@gmail.com'),
(104 ,'fsda' , 'sanket','cs',82,'sanket@gmail.com'),
(105 ,'fsda' , 'shyam','ME',67,'shyam@gmail.com'),
(106 ,'fsds' , 'ajay','ME',45,'ajay@gmail.com'),
(106 ,'fsds' , 'ajay','ME',78,'ajay12@gmail.com'),
(108 ,'fsds' , 'snehal','CI',89,'snehal@gmail.com'),
(109 ,'fsds' , 'manisha','CI',34,'manisha@gmail.com'),
(110 ,'fsds' , 'rakesh','CI',45,'rakesh@gmail.com'),
(111 ,'fsde' , 'anuj','CI',43,'anuj@gmail.com'),
(112 ,'fsde' , 'mohit','EE',67,'mohit@gmail.com'),
(113 ,'fsde' , 'vivek','EE',23,'vivek@gmail.com'),
(114 ,'fsde' , 'gaurav','EE',45,'gaurav@gmail.com'),
(115 ,'fsde' , 'prateek','EE',89,'prateek@gmail.com'),
(116 ,'fsde' , 'mithun','ECE',23,'mithun@gmail.com'),
(117 ,'fsbc' , 'chaitra','ECE',23,'chaitra@gmail.com'),
(118 ,'fsbc' , 'pranay','ECE',45,'pranay@gmail.com'),
(119 ,'fsbc' , 'sandeep','ECE',65,'sandeep@gmail.com');

select * from bit_students;

select student_batch ,sum(students_marks) from bit_students group by student_batch;
select student_batch ,min(students_marks) from bit_students group by student_batch;
select student_batch ,max(students_marks) from bit_students group by student_batch;
select student_batch ,avg(students_marks) from bit_students group by student_batch;
select count(student_batch) from bit_students;
select count(distinct student_batch) from bit_students;
select student_batch , count(*) from bit_students group by student_batch;

select * from bit_students;
select max(students_marks) from bit_students where student_batch='fsda';
select student_name, student_batch, max(students_marks) from bit_students group by student_batch;
select student_name,max(students_marks) from bit_students where student_batch='fsda';

## the two queries above mix a non-aggregated column with max(); MySQL rejects this
## under ONLY_FULL_GROUP_BY, and the result is unreliable otherwise. The subquery
## below is the correct pattern.
select student_name from bit_students where students_marks in
(select max(students_marks) from bit_students where student_batch="fsda");

select student_name from bit_students where student_batch='fsda' order by students_marks DESC limit 2;
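## LIMIT offset, row_count skips `offset` rows and then returns `row_count` rows,
## so on a DESC ordering LIMIT 1,1 returns the 2nd-highest row, LIMIT 2,1 the 3rd, and so on.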
select * from bit_students;
select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 1,1;

select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 2,2;

select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 2 , 1;
select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 3 , 1;
select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 4 , 1;
select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 5 , 1;
select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 6 , 1;

select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 3, 3;

select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 3;
select * from bit_students where student_batch = 'fsda' order by students_marks desc;

select * from bit_students where student_batch = 'fsda' order by students_marks desc limit 2, 3;

## third-highest mark: take the top 3 marks, then the minimum of those
select * from bit_students where students_marks =(
select min(students_marks) from
(select students_marks from bit_students
where student_batch ="fsda"
order by students_marks desc
limit 3 ) as top);

select students_marks from bit_students
where student_batch ="fsda" order by students_marks desc limit 3;

select * from bit_students;

/* Ranking Window Functions :
The ranking functions are RANK(), DENSE_RANK() and ROW_NUMBER().

RANK() –
As the name suggests, the rank function assigns a rank to every row within each partition. Rank 1 is given to the first row, and rows with the same value receive the same rank. After a tie, the next rank skips as many values as there were tied rows.

DENSE_RANK() –
It also assigns a rank to each row within a partition. Just like RANK(), the first row is assigned rank 1 and rows with the same value share the same rank. The difference between RANK() and DENSE_RANK() is that DENSE_RANK() uses the next consecutive integer after a tie; no rank is skipped.

ROW_NUMBER() –
It assigns consecutive integers to all the rows within a partition.
Within a partition, no two rows can have the same row number.
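For example, with marks 90, 85, 85, 80 in one partition:
ROW_NUMBER() -> 1, 2, 3, 4
RANK()       -> 1, 2, 2, 4  (rank 3 is skipped after the tie)
DENSE_RANK() -> 1, 2, 2, 3  (no gap)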
*/

select student_id , student_batch , student_stream , students_marks ,
row_number() over(order by students_marks) as 'row_number' from bit_students;

select * from (select student_id , student_batch , student_stream , students_marks ,
row_number() over(partition by student_batch order by students_marks desc) as 'row_num'
from bit_students ) as test where row_num = 1;

select student_id , student_batch , student_stream , students_marks ,
row_number() over(partition by student_batch order by students_marks desc ) as 'row_num'
from bit_students;

select student_id , student_batch , student_stream , students_marks ,
row_number() over(order by students_marks desc) as 'row_number',
rank() over(order by students_marks desc ) as 'row_rank'
from bit_students;

select * from (select student_id , student_batch , student_stream , students_marks ,
row_number() over(partition by student_batch order by students_marks desc) as 'row_number',
rank() over(partition by student_batch order by students_marks desc ) as 'row_rank'
from bit_students ) as test where row_rank = 1;

select * from (select student_id , student_batch , student_stream , students_marks ,
row_number() over(partition by student_batch order by students_marks desc) as 'row_number',
rank() over(partition by student_batch order by students_marks desc ) as 'row_rank',
dense_rank() over( partition by student_batch order by students_marks desc) as 'dense_rank'
from bit_students ) as test where `dense_rank` = 3;

--------------------------------------------------------------------------------
/trigger.txt:
--------------------------------------------------------------------------------
# A trigger is a stored program in the database that is invoked automatically whenever a specified event occurs on a table.
create database if not exists bit;
use bit;

create table course2(
course_id int,
course_desc varchar(30),
course_mentor varchar(60),
course_price int,
course_discount int,
create_date date,
user_info varchar(30));

create table course_update(
course_mentor_update varchar(50),
course_price_update int,
course_discount_update int);

# BEFORE triggers run the trigger action before the triggering statement is run.
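# Inside a row-level trigger, NEW.column refers to the incoming row (writable in
# BEFORE INSERT/UPDATE), while OLD.column holds the existing values (available in
# UPDATE/DELETE triggers). The trigger below fills the audit columns through NEW.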
delimiter //
create trigger course_before_insert1123
before insert
on course2 for each row
begin
declare user_val varchar(50);
set new.create_date = sysdate();
select user() into user_val;
set new.user_info = user_val;
insert into ref_course values(sysdate() , user_val);
end; //
delimiter ;

create table ref_course(
recode_insert_date date,
recode_user_insert varchar(50)
);

select * from ref_course;
select * from course2;

insert into course2(course_id,course_desc ,course_mentor,course_price,course_discount)
values(101,"fsds","jalpa",1000,10);

create table test1(
c1 varchar(30),
c2 date,
c3 int);

create table test2(
c1 varchar(30),
c2 date,
c3 int);

create table test3(
c1 varchar(30),
c2 date,
c3 int);

delimiter //
create trigger to_update_other
before insert
on test1 for each row
begin
insert into test2 values('xyz',sysdate() ,23543);
insert into test3 values('xyz',sysdate() ,23543);
end; //
delimiter ;

insert into test1 values(
'xyz',sysdate() ,3456
);

select * from test1;
select * from test2;
select * from test3;

# AFTER triggers run the trigger action after the triggering statement is run.
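# A BEFORE trigger can still adjust NEW values before the row is stored; an AFTER
# trigger sees the row as finally written, which suits audit-style side effects
# like the update/delete mirroring below.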
delimiter //
create trigger to_update_other_table
after insert
on test1 for each row
begin
update test2 set c1 ='abc' where c1='xyz';
delete from test3 where c1 ='xyz';
end; //
delimiter ;

insert into test1 values(
'harsh',sysdate() ,3456);

delimiter //
create trigger to_delete_other_table
after delete
on test1 for each row
begin
insert into test3 values('after delete',sysdate(),43256);
end; //
delimiter ;

/* Deleting a row in a view could either mean deleting it from the base table or updating some values
so that it is no longer selected by the view */

select * from test1;
select * from test3;

delete from test1 where c1='harsh';

delimiter //
create trigger to_delete_other_before
before delete
on test1 for each row
begin
insert into test2(c1,c2,c3) values(old.c1,old.c2,old.c3);
end; //
delimiter ;

delete from test1 where c1='abc';

create table test11(
c1 varchar(30),
c2 date,
c3 int);

create table test12(
c1 varchar(30),
c2 date,
c3 int);

create table test13(
c1 varchar(30),
c2 date,
c3 int);

insert into test11 values('krupa1',sysdate(),2345),
('krupa13',sysdate(),2345),
('krupa12',sysdate(),2345),
('krupa14',sysdate(),2345),
('krupa15',sysdate(),2345);

insert into test12 values('krupa1',sysdate(),2345),
('krupa13',sysdate(),2345),
('krupa12',sysdate(),2345),
('krupa14',sysdate(),2345),
('krupa15',sysdate(),2345);

insert into test13 values('krupa1',sysdate(),2345),
('krupa13',sysdate(),2345),
('krupa12',sysdate(),2345),
('krupa14',sysdate(),2345),
('krupa15',sysdate(),2345);

select * from test11;
select * from test12;
select * from test13;

delimiter //
create trigger to_update_other_before
before update
on test11 for each row
begin
insert into test12(c1,c2,c3) values(new.c1,new.c2,new.c3);
end; //
delimiter ;

/* Updating a column in a view that involves joins might change the semantics of other columns that are not projected by the view. */

drop trigger to_update_other_before;

update test11 set c1='insert new' where c1='krupa1';

--------------------------------------------------------------------------------