├── .flake8 ├── .githooks ├── pre-commit └── pre-push ├── .github └── workflows │ └── ci-test.yml ├── .gitignore ├── .mypy.ini ├── .vscode └── settings.json ├── 00-TableOfContents.ipynb ├── CRUD.ipynb ├── DB-Introduction.ipynb ├── DB-Table.ipynb ├── DELETE.ipynb ├── Dockerfile ├── INSERT.ipynb ├── Index.ipynb ├── JSON.ipynb ├── Joins.ipynb ├── LICENSE ├── Makefile ├── MongoDB-CRUD.ipynb ├── MongoDB-Data-API.ipynb ├── MongoDB.ipynb ├── README.md ├── Relational-Databases.ipynb ├── SELECT.ipynb ├── SQL-Introduction.ipynb ├── SQLite-Commands.ipynb ├── SQLite-Database.ipynb ├── SQLite-Python.ipynb ├── SupplyChainDB ├── Categories.tsv ├── Customers.tsv ├── Employees.tsv ├── OrderDetailsN.tsv ├── OrdersN.tsv ├── ProductsN.tsv ├── Shippers.tsv └── Suppliers.tsv ├── Tables.ipynb ├── Transactions.ipynb ├── Trigger.ipynb ├── UPDATE.ipynb ├── ci-cd-requirements.txt ├── command.sql ├── data ├── bank.db └── chinook.sqlite ├── kattis-cli ├── .gitignore ├── LICENSE ├── kattis ├── kattis.bat └── submit.py ├── python ├── __init__.py └── db.py ├── requirements.txt ├── run-jupyter.sh ├── run.sh ├── script.sh ├── setup.sh ├── test.db └── tests ├── __init__.py └── test_db.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | extend-ignore = E203 3 | exclude = .git,__pycache__,docs/source/conf.py,old,build,dist,kattis-cli 4 | max-complexity = 10 -------------------------------------------------------------------------------- /.githooks/pre-commit: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | LC_ALL=C 3 | 4 | #current_branch=$(git symbolic-ref --short head) 5 | current_branch=$(git rev-parse --abbrev-ref HEAD) 6 | echo "Running pre-commit hook on branch: $current_branch" 7 | if [[ "$current_branch" == "main" ]]; then 8 | echo "Commit on main is not allowed. Please use a feature branch." 9 | exit 1 10 | fi 11 | 12 | valid_branch_regex="^(lab|project|assignment|homework|issue|dev|feature|bugfix|improvement|library|prerelease|release|hotfix)\/[a-z0-9._-]+$" 13 | 14 | message="ERROR on Commit. Branch name must adhere to this contract: $valid_branch_regex. Rename your branch to a valid name and try again." 15 | 16 | if [[ ! $current_branch =~ $valid_branch_regex ]] 17 | then 18 | echo "$message" 19 | echo "git branch -m " 20 | exit 1 21 | fi 22 | 23 | exit 0 24 | -------------------------------------------------------------------------------- /.githooks/pre-push: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | #current_branch=$(git symbolic-ref --short head) 4 | current_branch=$(git rev-parse --abbrev-ref HEAD) 5 | if [[ $current_branch != "main" ]]; then 6 | exit 0 7 | fi 8 | echo "Running pre-push hook on $current_branch branch" 9 | make all 10 | # $? stores exit value of the last command 11 | if [[ $? != 0 ]]; then 12 | echo "Tests must pass before push to main!"
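  # a non-zero exit here aborts the push; run 'make all' locally (style check, type check, tests) and fix any failures first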
13 | exit 1 14 | fi 15 | 16 | exit 0 17 | -------------------------------------------------------------------------------- /.github/workflows/ci-test.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: GitHub Actions CI/CD 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: actions/checkout@v3 21 | - name: Set up Python 3.10 22 | uses: actions/setup-python@v3 23 | with: 24 | python-version: "3.10" 25 | - name: Install dependencies 26 | run: | 27 | python -m pip install --upgrade pip 28 | pip install flake8 29 | if [ -f ci-cd-requirements.txt ]; then pip install -r ci-cd-requirements.txt; fi 30 | - name: Lint with flake8 31 | run: | 32 | # stop the build if there are Python syntax errors or undefined names 33 | flake8 --count --show-source --statistics . 34 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 35 | # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 36 | - name: Check types with mypy 37 | run: | 38 | mypy --strict . 39 | - name: Test with pytest 40 | run: | 41 | pytest --verbose . 42 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | *.pem 131 | .DS_Store 132 | *.key 133 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | exclude = (?x)( 3 | kattis-cli # file/folder that starts with kattis-cli 4 | | \.github # file/folder that starts with .github 5 | | \.githooks # file/folder that starts with .githooks 6 | ) 7 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "[python]": { 3 | "editor.defaultFormatter": "ms-python.autopep8" 4 | }, 5 | "python.formatting.provider": "none" 6 | } -------------------------------------------------------------------------------- /00-TableOfContents.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "99a5bf37", 6 | "metadata": {}, 7 | "source": [ 8 | "# Table of Contents\n", 9 | "\n", 10 | "## [1. Introduction](DB-Introduction.ipynb)\n", 11 | "## [2. Relational Databases](Relational-Databases.ipynb)\n", 12 | "## [3. SQLite Database](SQLite-Database.ipynb)\n", 13 | "## [4. SQLite Commands](SQLite-Commands.ipynb)\n", 14 | "## [5. SQL Introduction](SQL-Introduction.ipynb)\n", 15 | "## [6. SQLite with Python](SQLite-Python.ipynb)\n", 16 | "## [7. SQL CRUD Statements](CRUD.ipynb)\n", 17 | "## [8. INSERT Statement](INSERT.ipynb)\n", 18 | "## [9. SELECT Statement](SELECT.ipynb)\n", 19 | "## [10. UPDATE Statement](UPDATE.ipynb)\n", 20 | "## [11. DELETE Statement](DELETE.ipynb)\n", 21 | "## [12. Database Tables](Tables.ipynb)\n", 22 | "## [13. Index](Index.ipynb)\n", 23 | "## [14. Trigger](Trigger.ipynb)\n", 24 | "## [15. Joins](Joins.ipynb)\n", 25 | "## [16. Transactions](Transactions.ipynb)\n", 26 | "## [17. JSON](JSON.ipynb)\n", 27 | "## [18. MongoDB and NoSQL](MongoDB.ipynb)\n", 28 | "## [19. MongoDB CRUD Statements](MongoDB-CRUD.ipynb)\n", 29 | "## [20. 
MongoDB Data API](MongoDB-Data-API.ipynb)" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "id": "eb797d20", 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [] 39 | } 40 | ], 41 | "metadata": { 42 | "kernelspec": { 43 | "display_name": "Python 3 (ipykernel)", 44 | "language": "python", 45 | "name": "python3" 46 | }, 47 | "language_info": { 48 | "codemirror_mode": { 49 | "name": "ipython", 50 | "version": 3 51 | }, 52 | "file_extension": ".py", 53 | "mimetype": "text/x-python", 54 | "name": "python", 55 | "nbconvert_exporter": "python", 56 | "pygments_lexer": "ipython3", 57 | "version": "3.10.8" 58 | } 59 | }, 60 | "nbformat": 4, 61 | "nbformat_minor": 5 62 | } 63 | -------------------------------------------------------------------------------- /CRUD.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "2ca6e74b", 6 | "metadata": {}, 7 | "source": [ 8 | "# CRUD\n", 9 | "\n", 10 | "- an acronym that stands for **C**reate, **R**ead, **U**pdate, and **D**elete\n", 11 | "- represents the four fundamental operations that can be performed on data in a database or a persistent storage system\n", 12 | "- CRUD serves as a simple way to categorize and describe the basic actions that applications can perform on data. These operations are commonly associated with databases but are also applicable to various types of data storage systems, including APIs, file systems, and more.\n", 13 | "\n", 14 | "## Create (C)\n", 15 | "\n", 16 | "- this operation involves adding new data or records to a database, etc.\n", 17 | "- it typically corresponds to the INSERT statement in SQL\n", 18 | "- creating new records is essential for adding new information to a DB system\n", 19 | "\n", 20 | "## Read (R)\n", 21 | "\n", 22 | "- the read operation is about retrieving or fetching existing data from a database\n", 23 | "- this corresponds to the SELECT statement in SQL\n", 24 | "- reading allows you to access the stored data for viewing or processing\n", 25 | "\n", 26 | "## Update (U)\n", 27 | "\n", 28 | "- updating involves modifying existing data in a database\n", 29 | "- this corresponds to the UPDATE statement in SQL\n", 30 | "- updating is used to change the values of specific attributes in a record\n", 31 | "\n", 32 | "## Delete (D)\n", 33 | "\n", 34 | "- delete operation is about removing data from a database\n", 35 | "- this corresponds to the DELETE statement in SQL\n", 36 | "- Deleting records is used to eliminate information that is no longer needed or relevant\n", 37 | "\n", 38 | "CRUD operations are the building blocks for most applications that involve data management and interaction. Nearly all software systems require these basic operations to allow users or processes to interact with stored data. 
Whether it's a web application, a mobile app, or a desktop program, CRUD operations enable users to manage and manipulate data in a structured and organized manner.\n", 39 | "\n", 40 | "\n", 41 | "\n" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "id": "3f85064f", 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [] 51 | } 52 | ], 53 | "metadata": { 54 | "kernelspec": { 55 | "display_name": "Python 3 (ipykernel)", 56 | "language": "python", 57 | "name": "python3" 58 | }, 59 | "language_info": { 60 | "codemirror_mode": { 61 | "name": "ipython", 62 | "version": 3 63 | }, 64 | "file_extension": ".py", 65 | "mimetype": "text/x-python", 66 | "name": "python", 67 | "nbconvert_exporter": "python", 68 | "pygments_lexer": "ipython3", 69 | "version": "3.10.8" 70 | } 71 | }, 72 | "nbformat": 4, 73 | "nbformat_minor": 5 74 | } 75 | -------------------------------------------------------------------------------- /DB-Introduction.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "8e0173c3", 6 | "metadata": {}, 7 | "source": [ 8 | "# DB Introduction\n", 9 | "\n", 10 | "## Topics\n", 11 | "\n", 12 | "- What is a DB?\n", 13 | "- Different Dypes of DB\n", 14 | "- Relational Databases\n", 15 | "- NoSQL Databases\n" 16 | ] 17 | }, 18 | { 19 | "cell_type": "markdown", 20 | "id": "212626da", 21 | "metadata": {}, 22 | "source": [ 23 | "## What is a Database (DB)?\n", 24 | "- database is a structured collection of data \n", 25 | "- data is organized and stored in a way that allows for efficient storage, retrieval, and management of information\n", 26 | "- DB serves as a central repository for storing and managing various types of data, such as text, numbers, images, and more\n", 27 | "- DBs are commonly used in computer systems and software applications to handle large amounts of data and enable users or applications to access and manipulate that data as needed\n", 28 | "- crucial components of modern software applications, websites, and information systems, enabling efficient data management, storage, and retrieval for a wide range of purposes\n", 29 | "\n", 30 | "## Key Features\n", 31 | "\n", 32 | "### Data Structure\n", 33 | "- provide a structured way to store data, often using tables (in relational databases) or other data structures like documents (in NoSQL databases)\n", 34 | "\n", 35 | "### Data Integrity\n", 36 | "- offer mechanisms to ensure data accuracy and consistency \n", 37 | "- includes enforcing rules, constraints, and validations to prevent incorrect or conflicting data from being stored.\n", 38 | "\n", 39 | "### Data Retrieval\n", 40 | "- enable efficient searching and retrieval of specific pieces of data using queries\n", 41 | "- allows users or applications to quickly access the information they need\n", 42 | "\n", 43 | "### Data Manipulation\n", 44 | "- support various operations to modify, update, or delete data\n", 45 | "- users can add new records, modify existing ones, or remove unnecessary data\n", 46 | "\n", 47 | "### Data Security\n", 48 | "- provide access controls to restrict who can view, modify, or delete data\n", 49 | "- helps protect sensitive information and maintain data privacy\n", 50 | "\n", 51 | "### Concurrency Control\n", 52 | "- handle multiple users or applications accessing and modifying data simultaneously\n", 53 | "- ensure that data remains consistent even in a multi-user environment\n", 54 | "\n", 55 | "### Backup and 
Recovery\n", 56 | "- often offer features for creating backups of data and restoring it in case of data loss or system failures\n", 57 | "\n", 58 | "## Types of Databases\n", 59 | "\n", 60 | "### Relational Databases\n", 61 | "- use a structured format based on tables, rows, and columns\n", 62 | "- they use a query language (such as SQL) to manage and retrieve data \n", 63 | "- Examples include Sqlite, MySQL, PostgreSQL, and Microsoft SQL Server\n", 64 | "\n", 65 | "### NoSQL Databases\n", 66 | "- suitable for handling unstructured or semi-structured data\n", 67 | "- document-based databases (like MongoDB), key-value stores (like Redis), column-family stores, and graph databases\n" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "id": "3d49e4cd", 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [] 77 | } 78 | ], 79 | "metadata": { 80 | "kernelspec": { 81 | "display_name": "Python 3 (ipykernel)", 82 | "language": "python", 83 | "name": "python3" 84 | }, 85 | "language_info": { 86 | "codemirror_mode": { 87 | "name": "ipython", 88 | "version": 3 89 | }, 90 | "file_extension": ".py", 91 | "mimetype": "text/x-python", 92 | "name": "python", 93 | "nbconvert_exporter": "python", 94 | "pygments_lexer": "ipython3", 95 | "version": "3.10.8" 96 | } 97 | }, 98 | "nbformat": 4, 99 | "nbformat_minor": 5 100 | } 101 | -------------------------------------------------------------------------------- /DB-Table.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "2becf8fd", 6 | "metadata": {}, 7 | "source": [ 8 | "# Tables\n", 9 | "\n", 10 | "## Table Definition\n", 11 | "\n", 12 | "- tables are the primary data storage units\n", 13 | "- each table is characterized by its name, columns, data types, and constraints.\n", 14 | "- columns (also called attributes or fields) are data/values stored in each record\n", 15 | "- tables need to be created before data can be stored into it\n", 16 | "\n", 17 | "## Column Definition\n", 18 | "- \n", 19 | "\n", 20 | "## Primary keys\n", 21 | "- a primary key is a unique identifier for a record within a relational database table\n", 22 | "- is used to uniquely identify each row or record in a table\n", 23 | "- the primary key ensures that there are no duplicate values within the column(s) designated as the primary key\n", 24 | "- primary keys help to maintain data integrity and provide a reliable and efficient way to access and manage data\n", 25 | "- primary key is used to establish relationship among the tables\n", 26 | "- CustomerID column in the table above is the primary key\n", 27 | "\n", 28 | "### Key characteristics of Primary Key\n", 29 | "\n", 30 | "#### Uniquness\n", 31 | " - a primary key must contain unique values for each record in the table\n", 32 | "- No two records can have the same primary key value\n", 33 | "\n", 34 | "#### Non-Null\n", 35 | "- values in a primary key column cannot be null (empty) because null values are not unique\n", 36 | "\n", 37 | "#### Single or Composite\n", 38 | "- can consist of a single column or multiple columns, depending on the requirements of the database design\n", 39 | "\n", 40 | "#### Data Integrity\n", 41 | "- by enforcing uniqueness and non-null constraints, the primary key ensures that each record is uniquely identifiable, preventing data duplication and inconsistency\n", 42 | "\n", 43 | "#### Access and Referencing\n", 44 | "- primary keys are used to uniquely identify records 
 within the table\n", 45 | "- they also serve as a basis for establishing relationships between tables using foreign keys.\n", 46 | "\n", 47 | "#### Automatically Generated Primary Keys\n", 48 | "- in some cases, primary key values are automatically generated by the DBMS when new records are added\n", 49 | "- this is often seen with auto-incrementing integer values\n", 50 | "\n", 51 | "\n" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "id": "18eb5e1d", 58 | "metadata": {}, 59 | "outputs": [], 60 | "source": [] 61 | } 62 | ], 63 | "metadata": { 64 | "kernelspec": { 65 | "display_name": "Python 3 (ipykernel)", 66 | "language": "python", 67 | "name": "python3" 68 | }, 69 | "language_info": { 70 | "codemirror_mode": { 71 | "name": "ipython", 72 | "version": 3 73 | }, 74 | "file_extension": ".py", 75 | "mimetype": "text/x-python", 76 | "name": "python", 77 | "nbconvert_exporter": "python", 78 | "pygments_lexer": "ipython3", 79 | "version": "3.10.8" 80 | } 81 | }, 82 | "nbformat": 4, 83 | "nbformat_minor": 5 84 | } 85 | -------------------------------------------------------------------------------- /DELETE.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "c079ee41", 6 | "metadata": {}, 7 | "source": [ 8 | "# DELETE\n", 9 | "\n", 10 | "- the \"D\" in \"CRUD\" operations refers to the action of deleting existing records in a DBMS\n", 11 | "- allows you to delete one or more rows based on the specified conditions\n", 12 | "- helps you keep your database up to date by allowing you to delete records that are no longer needed\n", 13 | "- https://sqlite.org/lang_delete.html\n", 14 | "- syntax:\n", 15 | "\n", 16 | "```\n", 17 | "DELETE FROM table_name\n", 18 | "WHERE condition;\n", 19 | "```\n", 20 | "\n", 21 | "- optional ORDER BY and LIMIT clauses can be used, but rarely\n", 22 | "- e.g. query:\n", 23 | "\n", 24 | "```sql\n", 25 | "DELETE FROM employees;\n", 26 | "```\n", 27 | "\n", 28 | "- what do you think will be the result of the above query?\n", 29 | "- NOTE: the DELETE statement is a powerful tool for deleting records in a table, so be cautious when using it, especially with the WHERE clause, to ensure that you delete only the intended records\n", 30 | "- once DELETE is executed, you can't undo the operation\n", 31 | "\n", 32 | "## WHERE\n", 33 | "\n", 34 | "- optional clause but used almost all the time with DELETE\n", 35 | "- used to provide the filter/search condition for rows to be deleted by the query\n", 36 | " - WHERE clause should uniquely identify the records to be deleted \n", 37 | " - Primary Key is primarily used in the WHERE clause\n", 38 | "- e.g.:\n", 39 | "\n", 40 | "```sql\n", 41 | "DELETE FROM employees\n", 42 | "WHERE EmployeeId = 8;\n", 43 | "```\n", 44 | "\n", 45 | "- run the above query on chinook sqlite db with DBeaver" 46 | ] 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "id": "ea4214b0", 51 | "metadata": {}, 52 | "source": [ 53 | "## DELETE with Python\n", 54 | "\n", 55 | "- DELETE doesn't return records but deletes rows from the table\n", 56 | "- use cursor's execute() method to delete one or more records based on the WHERE clause\n", 57 | "- must use a parameterized query using the ?
placeholder to use untrusted data as part of the query" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 1, 63 | "id": "6d310046", 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "from python import db" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 2, 73 | "id": "d0f33bf4", 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "db_file = 'data/chinook.sqlite'" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 4, 83 | "id": "5569d681", 84 | "metadata": {}, 85 | "outputs": [ 86 | { 87 | "name": "stdout", 88 | "output_type": "stream", 89 | "text": [ 90 | "Enter employee id: 9\n" 91 | ] 92 | } 93 | ], 94 | "source": [ 95 | "emp_id = int(input('Enter employee id: '))" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 5, 101 | "id": "2250eec7", 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [ 105 | "query = \"\"\"\n", 106 | " DELETE FROM employees\n", 107 | " WHERE EmployeeId = ?;\n", 108 | " \"\"\"" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 8, 114 | "id": "1819551a", 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "row_count = db.delete(db_file, query, (emp_id,))" 119 | ] 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": 9, 124 | "id": "b2bb3865", 125 | "metadata": {}, 126 | "outputs": [ 127 | { 128 | "name": "stdout", 129 | "output_type": "stream", 130 | "text": [ 131 | "0\n" 132 | ] 133 | } 134 | ], 135 | "source": [ 136 | "print(row_count)" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "id": "aad9567d", 142 | "metadata": {}, 143 | "source": [] 144 | } 145 | ], 146 | "metadata": { 147 | "kernelspec": { 148 | "display_name": "Python 3 (ipykernel)", 149 | "language": "python", 150 | "name": "python3" 151 | }, 152 | "language_info": { 153 | "codemirror_mode": { 154 | "name": "ipython", 155 | "version": 3 156 | }, 157 | "file_extension": ".py", 158 | "mimetype": "text/x-python", 159 | "name": "python", 160 | "nbconvert_exporter": "python", 161 | "pygments_lexer": "ipython3", 162 | "version": "3.9.13" 163 | } 164 | }, 165 | "nbformat": 4, 166 | "nbformat_minor": 5 167 | } 168 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3 2 | 3 | RUN apt update \ 4 | && apt install -y \ 5 | sqlite3 build-essential time curl cmake git nano dos2unix \ 6 | net-tools iputils-ping iproute2 sudo gdb less 7 | 8 | ARG USER=user 9 | ARG UID=1000 10 | ARG GID=1000 11 | 12 | # Set environment variables 13 | ENV USER ${USER} 14 | ENV HOME /home/${USER} 15 | 16 | # Create user and setup permissions on /etc/sudoers 17 | RUN useradd -m -s /bin/bash -N -u $UID $USER && \ 18 | echo "${USER} ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers && \ 19 | chmod 0440 /etc/sudoers && \ 20 | chmod g+w /etc/passwd 21 | 22 | WORKDIR ${HOME} 23 | 24 | RUN pip install --upgrade pip 25 | 26 | COPY requirements.txt ./ 27 | RUN pip install --no-cache-dir -r requirements.txt 28 | 29 | 30 | #WORKDIR /app 31 | 32 | # Uses "Bira" theme with some customization. 
 Uses some bundled plugins and installs some more from github 33 | RUN sh -c "$(wget -O- https://github.com/deluan/zsh-in-docker/releases/download/v1.1.5/zsh-in-docker.sh)" -- \ 34 | -t bira \ 35 | -p git \ 36 | -p ssh-agent \ 37 | -p https://github.com/zsh-users/zsh-autosuggestions \ 38 | -p https://github.com/zsh-users/zsh-completions 39 | 40 | ENV PATH="${HOME}:${HOME}/.local/bin:${HOME}/kattis-cli:${PATH}" 41 | #RUN echo export PATH="${HOME}:${HOME}/.local/bin:${PATH}" >> ${HOME}/.zshrc 42 | ENV KATTIS_CLI="${HOME}/kattis-cli" 43 | 44 | USER user 45 | 46 | CMD zsh 47 | -------------------------------------------------------------------------------- /INSERT.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "a99f30c0", 6 | "metadata": {}, 7 | "source": [ 8 | "# CREATE\n", 9 | "\n", 10 | "- Create in CRUD is synonymous with the SQL INSERT statement\n", 11 | "- SQL INSERT statement is used to add new records or rows of data into a database table\n", 12 | "- allows you to specify the values to be inserted into each column of the table for the new record\n", 13 | "- INSERT statement is a key component of the \"Create\" (C) operation in the CRUD (Create, Read, Update, Delete) paradigm\n", 14 | "- https://sqlite.org/lang_insert.html\n", 15 | "- basic syntax of the SQL INSERT statement is as follows:\n", 16 | "\n", 17 | "```sql\n", 18 | "INSERT INTO table_name (column1, column2, column3, ...)\n", 19 | "VALUES (value1, value2, value3, ...);\n", 20 | "```\n", 21 | "\n", 22 | "- Example:\n", 23 | "\n", 24 | "```sql\n", 25 | "INSERT INTO Employees (EmployeeID, FirstName, LastName, Department)\n", 26 | "VALUES (101, 'John', 'Doe', 'Marketing');\n", 27 | "```\n", 28 | "\n", 29 | "- this SQL statement would insert a new record into the \"Employees\" table with the specified values for each column\n", 30 | "- if the table has columns with default values, you might not need to provide a value for those columns in the INSERT statement\n", 31 | "- if the table has an auto-incrementing primary key, you generally don't need to provide a value for that column, as the database system will automatically generate it\n", 32 | "\n", 33 | "## Use DBeaver GUI to run insert statements\n", 34 | "\n", 35 | "- connect to chinook.db and run the following SQL statement\n", 36 | "\n", 37 | "```sql\n", 38 | "INSERT INTO artists (name)\n", 39 | "VALUES('Bud Powell');\n", 40 | "```\n", 41 | "- because the ArtistId column is an auto-increment column, you can ignore it in the statement.\n", 42 | "\n", 43 | "\n", 44 | "### Insert multiple rows into a table\n", 45 | "\n", 46 | "```sql\n", 47 | "INSERT INTO table_name (column1, column2 ,..)\n", 48 | "VALUES \n", 49 | " (value1,value2 ,...),\n", 50 | " (value1,value2 ,...),\n", 51 | " ...\n", 52 | " (value1,value2 ,...);\n", 53 | "```\n", 54 | "- each row of values represents one record or entity\n", 55 | "\n", 56 | "```sql\n", 57 | "INSERT INTO artists (name)\n", 58 | "VALUES\n", 59 | "\t(\"Buddy Rich\"),\n", 60 | "\t(\"Candido\"),\n", 61 | "\t(\"Charlie Byrd\");\n", 62 | "```\n", 63 | "\n", 64 | "### Insert default values\n", 65 | "\n", 66 | "```sql\n", 67 | "INSERT INTO artists DEFAULT VALUES;\n", 68 | "```\n" 69 | ] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "id": "004475e8", 74 | "metadata": {}, 75 | "source": [ 76 | "## Use Python to insert records\n", 77 | "- use a parameterized query, replacing each value with a ? placeholder\n", 78 | "- provide data as a tuple\n", 79 | "- Tuple represents one row"
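The `db` helper used in the cells below wraps Python's built-in `sqlite3` module. As a rough sketch only (an assumption about what `python/db.py` does internally, not a copy of it), a parameterized single-row insert boils down to the following; `insert_one_row_sketch` is a hypothetical stand-in for `db.insert_one_row`:

```python
import sqlite3
from typing import Any


def insert_one_row_sketch(db_file: str, insert_row_sql: str, row: tuple[Any, ...]) -> int:
    """Insert one row using a parameterized query and return the new row's id."""
    conn = sqlite3.connect(db_file)
    try:
        cur = conn.cursor()
        cur.execute(insert_row_sql, row)  # values in `row` are bound to the ? placeholders
        conn.commit()                     # persist the change to the database file
        return cur.lastrowid or 0         # id of the row just inserted
    finally:
        conn.close()


# usage (note the trailing comma that makes this a one-element tuple):
# new_id = insert_one_row_sketch('data/chinook.sqlite',
#                                'INSERT INTO artists (name) VALUES (?);',
#                                ('Jake Powell',))
```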
80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 1, 85 | "id": "e826a5eb", 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "from python import db" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 2, 95 | "id": "176fcbab", 96 | "metadata": {}, 97 | "outputs": [ 98 | { 99 | "name": "stdout", 100 | "output_type": "stream", 101 | "text": [ 102 | "Help on module python.db in python:\n", 103 | "\n", 104 | "NAME\n", 105 | " python.db - Sqlite Database wrapper.\n", 106 | "\n", 107 | "DESCRIPTION\n", 108 | " This module provides a wrapper for sqlite3 database operations.\n", 109 | " \n", 110 | " Example:\n", 111 | " import db\n", 112 | " \n", 113 | " # create a database connection\n", 114 | " conn = db.create_connection(\"sqlite.db\")\n", 115 | " db.close_connection(conn)\n", 116 | "\n", 117 | "FUNCTIONS\n", 118 | " close_connection(conn: sqlite3.Connection) -> None\n", 119 | " Close a database connection to a SQLite database.\n", 120 | " Args:\n", 121 | " conn (Connection): Connection object\n", 122 | " \n", 123 | " create_connection(db_file: str) -> sqlite3.Connection\n", 124 | " Create sqlite3 connection and return it.\n", 125 | " \n", 126 | " Args:\n", 127 | " db_file (str): sqlite filename to open or create.\n", 128 | " \n", 129 | " Raises:\n", 130 | " err: sqlite3.Error as an exception.\n", 131 | " \n", 132 | " Returns:\n", 133 | " sqlite3.Connection: sqlite3 connection object.\n", 134 | " \n", 135 | " create_table(db_file: str, create_table_sql: str) -> None\n", 136 | " Create a table from the create_table_sql statement\n", 137 | " Args:\n", 138 | " db_file (str): database file path\n", 139 | " create_table_sql (str): a CREATE TABLE statement\n", 140 | " \n", 141 | " Raises:\n", 142 | " err: sqlite3.Error as an exception.\n", 143 | " \n", 144 | " Return:\n", 145 | " None\n", 146 | " \n", 147 | " delete(db_file: str, delete_sql: str, where: tuple[typing.Any]) -> int\n", 148 | " Delete a table from the delete_sql statement\n", 149 | " Args:\n", 150 | " db_file (str): database file path\n", 151 | " delete_sql (str): a DELETE statement\n", 152 | " where (tuple): where clause as tuple for ? 
placeholder\n", 153 | " \n", 154 | " Raises:\n", 155 | " err: sqlite3.Error as an exception.\n", 156 | " \n", 157 | " Return:\n", 158 | " rows_affected (int): number of rows affected\n", 159 | " \n", 160 | " insert_many_rows(db_file: str, insert_rows_sql: str, rows: list[tuple[str]]) -> int\n", 161 | " Insert data into a table from the insert_data_sql statement\n", 162 | " Args:\n", 163 | " db_file (str): database file path\n", 164 | " insert_data_sql (str): an INSERT INTO statement\n", 165 | " rows (list[tuple]): list of tuples as rows to be inserted for parameterized query\n", 166 | " \n", 167 | " Raises:\n", 168 | " err: sqlite3.Error as an exception.\n", 169 | " \n", 170 | " Return:\n", 171 | " row_id (int): row id of the last inserted row\n", 172 | " \n", 173 | " insert_one_row(db_file: str, insert_row_sql: str, row: tuple[typing.Any]) -> int\n", 174 | " Insert data into a table from the insert_data_sql statement\n", 175 | " Args:\n", 176 | " db_file (str): database file path\n", 177 | " insert_data_sql (str): an INSERT INTO statement\n", 178 | " row (tuple): row as tuple to be inserted\n", 179 | " \n", 180 | " Raises:\n", 181 | " err: sqlite3.Error as an exception.\n", 182 | " \n", 183 | " Return:\n", 184 | " row_id (int): row id of the last inserted row\n", 185 | " \n", 186 | " select_many_rows(db_file: str, select_rows_sql: str, where: tuple[typing.Any]) -> Any\n", 187 | " Select all rows from a table from the select_data_sql statement\n", 188 | " Args:\n", 189 | " db_file (str): database file path\n", 190 | " select_data_sql (str): an SELECT statement\n", 191 | " where (tuple): where clause as tuple for ? placeholder\n", 192 | " \n", 193 | " Raises:\n", 194 | " err: sqlite3.Error as an exception.\n", 195 | " \n", 196 | " Return:\n", 197 | " rows (Any): list of tuples as rows or None\n", 198 | " \n", 199 | " select_one_row(db_file: str, select_row_sql: str, where: tuple[str]) -> Any\n", 200 | " API to select one row from a table from the select_data_sql statement.\n", 201 | " \n", 202 | " Args:\n", 203 | " db_file (str): database file path\n", 204 | " select_row_sql (str): a SELECT statement\n", 205 | " where (tuple[str]): where clause as tuple for ? placeholder\n", 206 | " \n", 207 | " Raises:\n", 208 | " err: sqlite3.Error as an exception.\n", 209 | " \n", 210 | " Returns:\n", 211 | " tuple[str]: row as tuple or None\n", 212 | " \n", 213 | " update(db_file: str, update_sql: str, where: tuple[typing.Any]) -> int\n", 214 | " Update a table from the update_sql statement\n", 215 | " Args:\n", 216 | " db_file (str): database file path\n", 217 | " update_sql (str): an UPDATE statement\n", 218 | " where (tuple): where clause as tuple for ? placeholder\n", 219 | " \n", 220 | " Raises:\n", 221 | " err: sqlite3.Error as an exception.\n", 222 | " \n", 223 | " Return:\n", 224 | " rows_affected (int): number of rows affected\n", 225 | "\n", 226 | "DATA\n", 227 | " Any = typing.Any\n", 228 | " Special type indicating an unconstrained type.\n", 229 | " \n", 230 | " - Any is compatible with every type.\n", 231 | " - Any assumed to have all methods.\n", 232 | " - All values assumed to be instances of Any.\n", 233 | " \n", 234 | " Note that all the above statements are true from the point of view of\n", 235 | " static type checkers. 
At runtime, Any should not be used with instance\n", 236 | " or class checks.\n", 237 | "\n", 238 | "FILE\n", 239 | " /Users/rbasnet/projects/Intro-Database/python/db.py\n", 240 | "\n", 241 | "\n" 242 | ] 243 | } 244 | ], 245 | "source": [ 246 | "help(db)" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": 3, 252 | "id": "8e7a114e", 253 | "metadata": {}, 254 | "outputs": [], 255 | "source": [ 256 | "db_file = 'data/chinook.sqlite'" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": 4, 262 | "id": "9d77fcdd", 263 | "metadata": {}, 264 | "outputs": [], 265 | "source": [ 266 | "sql = \"INSERT INTO artists (name) VALUES (?);\"\n", 267 | "# must provide data as a Python tuple and there must be an extra comma if there's only one value\n", 268 | "row = ('Jake Powell',)" 269 | ] 270 | }, 271 | { 272 | "cell_type": "code", 273 | "execution_count": 5, 274 | "id": "29fa445f", 275 | "metadata": {}, 276 | "outputs": [ 277 | { 278 | "data": { 279 | "text/plain": [ 280 | "1" 281 | ] 282 | }, 283 | "execution_count": 5, 284 | "metadata": {}, 285 | "output_type": "execute_result" 286 | } 287 | ], 288 | "source": [ 289 | "len(row)" 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": 6, 295 | "id": "1342f041", 296 | "metadata": {}, 297 | "outputs": [ 298 | { 299 | "data": { 300 | "text/plain": [ 301 | "332" 302 | ] 303 | }, 304 | "execution_count": 6, 305 | "metadata": {}, 306 | "output_type": "execute_result" 307 | } 308 | ], 309 | "source": [ 310 | "db.insert_one_row(db_file, sql, row)" 311 | ] 312 | }, 313 | { 314 | "cell_type": "code", 315 | "execution_count": 7, 316 | "id": "e27e0234", 317 | "metadata": {}, 318 | "outputs": [ 319 | { 320 | "data": { 321 | "text/plain": [ 322 | "333" 323 | ] 324 | }, 325 | "execution_count": 7, 326 | "metadata": {}, 327 | "output_type": "execute_result" 328 | } 329 | ], 330 | "source": [ 331 | "sql = 'INSERT INTO artists DEFAULT VALUES;'\n", 332 | "db.insert_one_row(db_file, sql, ())" 333 | ] 334 | }, 335 | { 336 | "cell_type": "code", 337 | "execution_count": 8, 338 | "id": "580378bc", 339 | "metadata": {}, 340 | "outputs": [], 341 | "source": [ 342 | "sql = \"\"\"INSERT INTO artists (name)\n", 343 | " VALUES\n", 344 | " (\"Buddy Rich\"),\n", 345 | " (\"Candido\"),\n", 346 | " (\"Charlie Byrd\");\n", 347 | " \"\"\"" 348 | ] 349 | }, 350 | { 351 | "cell_type": "code", 352 | "execution_count": 9, 353 | "id": "3a6e1e40", 354 | "metadata": {}, 355 | "outputs": [ 356 | { 357 | "data": { 358 | "text/plain": [ 359 | "'INSERT INTO artists (name)\\n VALUES\\n (\"Buddy Rich\"),\\n (\"Candido\"),\\n (\"Charlie Byrd\");\\n '" 360 | ] 361 | }, 362 | "execution_count": 9, 363 | "metadata": {}, 364 | "output_type": "execute_result" 365 | } 366 | ], 367 | "source": [ 368 | "sql" 369 | ] 370 | }, 371 | { 372 | "cell_type": "code", 373 | "execution_count": 10, 374 | "id": "41465136", 375 | "metadata": {}, 376 | "outputs": [ 377 | { 378 | "data": { 379 | "text/plain": [ 380 | "336" 381 | ] 382 | }, 383 | "execution_count": 10, 384 | "metadata": {}, 385 | "output_type": "execute_result" 386 | } 387 | ], 388 | "source": [ 389 | "# SQL will remove extra white spaces\n", 390 | "# we need to call insert_one_row in this case because it's a single SQL statement\n", 391 | "db.insert_one_row(db_file, sql, ())" 392 | ] 393 | }, 394 | { 395 | "cell_type": "code", 396 | "execution_count": 11, 397 | "id": "1ceeb23e", 398 | "metadata": {}, 399 | "outputs": [], 400 | "source": [ 401 | "# if we use parameterized query, we need 
to use insert_many_rows\n", 402 | "sql = \"\"\"INSERT INTO artists (name) VALUES (?)\"\"\"\n", 403 | "data = [('Night Byrd',), ('Shin Lim',), ('Maya',)]" 404 | ] 405 | }, 406 | { 407 | "cell_type": "code", 408 | "execution_count": 12, 409 | "id": "2944eba1", 410 | "metadata": {}, 411 | "outputs": [], 412 | "source": [ 413 | "db.insert_many_rows(db_file, sql, data)\n", 414 | "# check the artists table's data contents" 415 | ] 416 | }, 417 | { 418 | "cell_type": "markdown", 419 | "id": "317dbeea", 420 | "metadata": {}, 421 | "source": [ 422 | "## INSERT datetime\n", 423 | "\n", 424 | "- employees table stores BirthDate and HireDate\n", 425 | "- let's insert an new employee in the table" 426 | ] 427 | }, 428 | { 429 | "cell_type": "code", 430 | "execution_count": 13, 431 | "id": "26219212", 432 | "metadata": {}, 433 | "outputs": [], 434 | "source": [ 435 | "from datetime import datetime" 436 | ] 437 | }, 438 | { 439 | "cell_type": "code", 440 | "execution_count": 14, 441 | "id": "bb29c3aa", 442 | "metadata": {}, 443 | "outputs": [], 444 | "source": [ 445 | "sql = \"\"\"\n", 446 | "INSERT INTO Employees \n", 447 | " (LastName, FirstName, Title, ReportsTo, BirthDate, HireDate, Address, City, State, Country, \n", 448 | " PostalCode, Phone, Fax, Email)\n", 449 | "VALUES (?, ?, ?, ?, ?, ?, ?,?,?,?,?,?,?,?)\n", 450 | "\"\"\"" 451 | ] 452 | }, 453 | { 454 | "cell_type": "code", 455 | "execution_count": 15, 456 | "id": "009f80c8", 457 | "metadata": {}, 458 | "outputs": [ 459 | { 460 | "data": { 461 | "text/plain": [ 462 | "datetime.datetime(2023, 9, 28, 20, 43, 52, 881564)" 463 | ] 464 | }, 465 | "execution_count": 15, 466 | "metadata": {}, 467 | "output_type": "execute_result" 468 | } 469 | ], 470 | "source": [ 471 | "datetime.now()" 472 | ] 473 | }, 474 | { 475 | "cell_type": "code", 476 | "execution_count": 16, 477 | "id": "eb6dd574", 478 | "metadata": {}, 479 | "outputs": [], 480 | "source": [ 481 | "hire_date = datetime.now()" 482 | ] 483 | }, 484 | { 485 | "cell_type": "code", 486 | "execution_count": 17, 487 | "id": "32698d44", 488 | "metadata": {}, 489 | "outputs": [], 490 | "source": [ 491 | "dob = datetime.strptime('1/1/1990', '%m/%d/%Y')" 492 | ] 493 | }, 494 | { 495 | "cell_type": "code", 496 | "execution_count": 18, 497 | "id": "f1ddac00", 498 | "metadata": {}, 499 | "outputs": [ 500 | { 501 | "data": { 502 | "text/plain": [ 503 | "datetime.datetime(1990, 1, 1, 0, 0)" 504 | ] 505 | }, 506 | "execution_count": 18, 507 | "metadata": {}, 508 | "output_type": "execute_result" 509 | } 510 | ], 511 | "source": [ 512 | "dob" 513 | ] 514 | }, 515 | { 516 | "cell_type": "code", 517 | "execution_count": 19, 518 | "id": "f1c60998", 519 | "metadata": {}, 520 | "outputs": [], 521 | "source": [ 522 | "emp = ('John', 'Doe', 'CFO', 1, dob, hire_date, '123 street', 'GJ', 'CO', 'USA', \\\n", 523 | " '12344-234', '970-111-1111', '970-222-2222', 'john@doe.com')" 524 | ] 525 | }, 526 | { 527 | "cell_type": "code", 528 | "execution_count": 20, 529 | "id": "09608950", 530 | "metadata": {}, 531 | "outputs": [ 532 | { 533 | "data": { 534 | "text/plain": [ 535 | "10" 536 | ] 537 | }, 538 | "execution_count": 20, 539 | "metadata": {}, 540 | "output_type": "execute_result" 541 | } 542 | ], 543 | "source": [ 544 | "db.insert_one_row(db_file, sql, emp)\n", 545 | "# let's check the chinook.sqlite database" 546 | ] 547 | }, 548 | { 549 | "cell_type": "markdown", 550 | "id": "b92115cf", 551 | "metadata": {}, 552 | "source": [ 553 | "## Exercise\n", 554 | "\n", 555 | "1. 
Add several playlists records to chinook db's playlists table\n", 556 | " - Add one playlist at a time\n", 557 | " - Add mulitple playlists at once\n", 558 | " \n", 559 | " \n", 560 | "2. Add several album records to chinook db's albums table\n", 561 | " - Add one album at at a time\n", 562 | " - Add multiple albums at once" 563 | ] 564 | }, 565 | { 566 | "cell_type": "code", 567 | "execution_count": null, 568 | "id": "ba348081", 569 | "metadata": {}, 570 | "outputs": [], 571 | "source": [] 572 | } 573 | ], 574 | "metadata": { 575 | "kernelspec": { 576 | "display_name": "Python 3 (ipykernel)", 577 | "language": "python", 578 | "name": "python3" 579 | }, 580 | "language_info": { 581 | "codemirror_mode": { 582 | "name": "ipython", 583 | "version": 3 584 | }, 585 | "file_extension": ".py", 586 | "mimetype": "text/x-python", 587 | "name": "python", 588 | "nbconvert_exporter": "python", 589 | "pygments_lexer": "ipython3", 590 | "version": "3.10.8" 591 | } 592 | }, 593 | "nbformat": 4, 594 | "nbformat_minor": 5 595 | } 596 | -------------------------------------------------------------------------------- /Index.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "108fb99a", 6 | "metadata": {}, 7 | "source": [ 8 | "# Database Index\n", 9 | "\n", 10 | "- An index in a database is a data structure that enhances the speed of data retrieval operations on a table\n", 11 | "- provides a way to quickly look up records based on the values in one or more columns\n", 12 | "- indexes are used to optimize query performance by reducing the amount of data that needs to be scanned when searching for specific information\n", 13 | "- instead of scanning the entire table, the database engine can use the index to locate the relevant rows efficiently\n", 14 | "- work similar to the way an index works in a book\n", 15 | "- the index in a book lists keywords along with the page numbers where those keywords can be found\n", 16 | "- a database index contains values from one or more columns along with pointers to the corresponding rows in the table\n", 17 | "\n", 18 | "## Key points about database indexes\n", 19 | "\n", 20 | "1. **Faster Data Retrieval**: \n", 21 | "- indexes allow the database to avoid full-table scans and jump directly to the relevant data\n", 22 | "- significantly speeds up query execution for frequently searched columns\n", 23 | "\n", 24 | "2. **B-Tree Structure**:\n", 25 | "- most database systems use a B-Tree (balanced tree) structure to store indexes efficiently\n", 26 | "- this structure ensures that index lookups have a logarithmic time complexity\n", 27 | "\n", 28 | "3. **Column Selection**:\n", 29 | "- you can create indexes on one or multiple columns in a table\n", 30 | "- the choice of which columns to index depends on the queries you frequently run\n", 31 | "\n", 32 | "4. **Trade-Offs**: \n", 33 | "- while indexes improve read performance, they can slow down write operations (such as inserts, updates, and deletes) because the index needs to be updated whenever the data changes\n", 34 | "\n", 35 | "5. **Maintenance**: \n", 36 | "- indexes need to be maintained as data changes\n", 37 | "- this adds overhead during data modification operations\n", 38 | "\n", 39 | "6. **Unique Index**: \n", 40 | "- a unique index enforces the uniqueness of values in a column\n", 41 | "- tt's commonly used for primary key columns\n", 42 | "\n", 43 | "7. 
**Composite Index**:\n", 44 | "- an index that spans multiple columns\n", 45 | "- it's useful when queries involve multiple columns together\n", 46 | "\n", 47 | "8. **Covering Index**:\n", 48 | "- an index that includes all the columns required to satisfy a query, allowing the query to be executed solely using the index without accessing the actual table\n", 49 | "\n", 50 | "9. **Clustered vs. Non-Clustered**:\n", 51 | "- some database systems differentiate between clustered and non-clustered indexes\n", 52 | "- A clustered index determines the physical order of data in the table, while a non-clustered index is a separate structure pointing to the data\n", 53 | "\n" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "id": "31573ab1", 59 | "metadata": {}, 60 | "source": [ 61 | "## CREATE INDEX\n", 62 | "\n", 63 | "- https://sqlite.org/lang_createindex.html\n", 64 | "\n", 65 | "- syntax:\n", 66 | "\n", 67 | "```sql\n", 68 | "CREATE INDEX index_name ON table_name(column1_name, column2_name, ...);\n", 69 | "```\n", 70 | "\n", 71 | "- e.g. let's create an index on employees table of chinook sqlite database\n", 72 | "\n", 73 | "```sql\n", 74 | "CREATE INDEX idx_last_name ON employees(LastName);\n", 75 | "```\n", 76 | "\n", 77 | "### CREATE UNIQUE INDEX\n", 78 | "- uniqe indexes are contraints that prevent duplicate values in the index column(s)\n", 79 | "- similar to UNIQE constraint on column\n", 80 | "\n", 81 | "- e.g.,\n", 82 | "\n", 83 | "```sql\n", 84 | "CREATE UNIQUE INDEX idx_email ON employees(Email);\n", 85 | "```\n", 86 | "\n", 87 | "### CHECK IF INDEX EXISTS\n", 88 | "\n", 89 | "```sql\n", 90 | "SELECT *\n", 91 | "FROM\n", 92 | " sqlite_master\n", 93 | "WHERE\n", 94 | " type= 'index' and tbl_name = '' and name = '';\n", 95 | "```" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 1, 101 | "id": "3f57a821", 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [ 105 | "from python import db" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": 4, 111 | "id": "9ec7094d", 112 | "metadata": {}, 113 | "outputs": [], 114 | "source": [ 115 | "db_file = \"data/chinook.sqlite\"" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": 21, 121 | "id": "72ab288f", 122 | "metadata": {}, 123 | "outputs": [], 124 | "source": [ 125 | "sql_create_index = \"CREATE UNIQUE INDEX idx_email ON employees(Email);\"" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": 22, 131 | "id": "219179e6", 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "db.execute_non_query(db_file, sql_create_index)" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": null, 141 | "id": "32bf5192", 142 | "metadata": {}, 143 | "outputs": [], 144 | "source": [ 145 | "# Manually check if the index is created\n", 146 | "# automatically check if index exists" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 23, 152 | "id": "4eb84819", 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "sql_select_index = \"SELECT * FROM sqlite_master WHERE type='index' and name='idx_email';\"" 157 | ] 158 | }, 159 | { 160 | "cell_type": "code", 161 | "execution_count": 24, 162 | "id": "f6f58801", 163 | "metadata": {}, 164 | "outputs": [], 165 | "source": [ 166 | "row = db.select_one_row(db_file, sql_select_index, ())" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 25, 172 | "id": "bd9fe1a1", 173 | "metadata": {}, 174 | "outputs": [ 175 | { 
176 | "name": "stdout", 177 | "output_type": "stream", 178 | "text": [ 179 | "('index', 'idx_email', 'employees', 874, 'CREATE UNIQUE INDEX idx_email ON employees(Email)')\n" 180 | ] 181 | } 182 | ], 183 | "source": [ 184 | "print(row)\n", 185 | "# should return one row with 5 columns: type, tbl_name, rootpage, sql" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": 26, 191 | "id": "0ccbe56f", 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "# assertion test\n", 196 | "assert(row[1] == 'idx_email')" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": 27, 202 | "id": "1b34e620", 203 | "metadata": {}, 204 | "outputs": [], 205 | "source": [ 206 | "assert(len(row) == 5)" 207 | ] 208 | }, 209 | { 210 | "cell_type": "markdown", 211 | "id": "dfa9a5a0", 212 | "metadata": {}, 213 | "source": [ 214 | "### DROP INDEX\n", 215 | "\n", 216 | "- e.g.,\n", 217 | "\n", 218 | "```sql\n", 219 | "DROP INDEX IF EXISTS idx_email;\n", 220 | "```" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": 30, 226 | "id": "9178c649", 227 | "metadata": {}, 228 | "outputs": [], 229 | "source": [ 230 | "sql_drop_index = 'DROP INDEX IF EXISTS idx_email'" 231 | ] 232 | }, 233 | { 234 | "cell_type": "code", 235 | "execution_count": 31, 236 | "id": "cb7c89f9", 237 | "metadata": {}, 238 | "outputs": [], 239 | "source": [ 240 | "db.execute_non_query(db_file, sql_drop_index)" 241 | ] 242 | }, 243 | { 244 | "cell_type": "code", 245 | "execution_count": 34, 246 | "id": "fcc4ac1a", 247 | "metadata": {}, 248 | "outputs": [], 249 | "source": [ 250 | "row = db.select_one_row(db_file, sql_select_index, ())\n", 251 | "# should return None" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": 36, 257 | "id": "e81e3efd", 258 | "metadata": {}, 259 | "outputs": [ 260 | { 261 | "name": "stdout", 262 | "output_type": "stream", 263 | "text": [ 264 | "None\n" 265 | ] 266 | } 267 | ], 268 | "source": [ 269 | "print(row)" 270 | ] 271 | }, 272 | { 273 | "cell_type": "code", 274 | "execution_count": null, 275 | "id": "e446c28b", 276 | "metadata": {}, 277 | "outputs": [], 278 | "source": [] 279 | } 280 | ], 281 | "metadata": { 282 | "kernelspec": { 283 | "display_name": "Python 3 (ipykernel)", 284 | "language": "python", 285 | "name": "python3" 286 | }, 287 | "language_info": { 288 | "codemirror_mode": { 289 | "name": "ipython", 290 | "version": 3 291 | }, 292 | "file_extension": ".py", 293 | "mimetype": "text/x-python", 294 | "name": "python", 295 | "nbconvert_exporter": "python", 296 | "pygments_lexer": "ipython3", 297 | "version": "3.10.8" 298 | } 299 | }, 300 | "nbformat": 4, 301 | "nbformat_minor": 5 302 | } 303 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Ram Basnet 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | TEST = pytest 2 | TEST_ARGS = --verbose --color=yes 3 | TYPE_CHECK = mypy --strict 4 | STYLE_CHECK = flake8 5 | STYLE_FIX = autopep8 --in-place --recursive --aggressive --aggressive 6 | 7 | .PHONY: all 8 | all: style-check type-check run-test clean 9 | 10 | .PHONY: type-check 11 | type-check: 12 | $(TYPE_CHECK) . 13 | 14 | .PHONY: style-check 15 | style-check: 16 | $(STYLE_CHECK) . 17 | 18 | # discover and run all tests 19 | .PHONY: run-test 20 | run-test: 21 | $(TEST) $(TEST_ARGS) . 22 | 23 | .PHONY: clean 24 | clean: 25 | rm -rf __pycache__ 26 | rm -rf .pytest_cache 27 | rm -rf .mypy_cache 28 | rm -rf .hypothesis 29 | 30 | 31 | .PHONY: push 32 | push: run-test clean 33 | 34 | 35 | .PHONY: fix-style 36 | fix-style: 37 | $(STYLE_FIX) . 38 | -------------------------------------------------------------------------------- /MongoDB.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "7d04d94b", 6 | "metadata": {}, 7 | "source": [ 8 | "# MongoDB\n", 9 | "\n", 10 | "- https://www.mongodb.com/\n", 11 | "\n", 12 | "- popular open-source NoSQL (non-relational) database management system designed for handling large volumes of unstructured or semi-structured data\n", 13 | "- often used in modern web and mobile applications, as well as in various other data-intensive use cases\n", 14 | " - including content management systems, e-commerce platforms, real-time analytics, and IoT (Internet of Things) applications. 
Its flexibility, scalability, and ease of use make it a popular choice for developers working with diverse and evolving data requirements\n", 15 | "\n", 16 | "## Here are some key characteristics and features of MongoDB:\n", 17 | "\n", 18 | "### Document-Oriented\n", 19 | "\n", 20 | "- MongoDB stores data in a format called BSON (Binary JSON), which is a binary-encoded serialization of JSON-like documents\n", 21 | "- these documents can have flexible and varying structures, making it suitable for handling data with changing schemas\n", 22 | "\n", 23 | "### Schema-less\n", 24 | "\n", 25 | "- MongoDB is schema-less, which means that you can insert data without defining a rigid schema beforehand\n", 26 | "- this flexibility makes it well-suited for projects where data structures evolve over time\n", 27 | "\n", 28 | "## Scalability\n", 29 | "\n", 30 | "- MongoDB is designed for horizontal scalability, making it capable of handling large amounts of data and high levels of traffic\n", 31 | "- It can be used in clusters to distribute data across multiple servers for load balancing and fault tolerance\n", 32 | "\n", 33 | "### Rich Query Language\n", 34 | "- MongoDB provides a powerful query language for querying and retrieving data\n", 35 | "- You can perform complex queries, including filtering, sorting, and aggregation, using the MongoDB Query Language (MQL)\n", 36 | "\n", 37 | "### Indexing\n", 38 | "- MongoDB supports the creation of indexes on fields in your documents, which can significantly improve query performance\n", 39 | "\n", 40 | "### Geospatial Capabilities\n", 41 | "- MongoDB includes geospatial features, allowing you to perform geospatial queries and store and analyze location-based data\n", 42 | "\n", 43 | "### Replication and High Availability\n", 44 | "- MongoDB supports replica sets, which are groups of MongoDB servers that maintain identical copies of data \n", 45 | "- this provides data redundancy and high availability in case of server failures\n", 46 | "\n", 47 | "### Sharding\n", 48 | "- MongoDB allows data to be distributed across multiple servers or shards, which can improve performance and scalability even further\n", 49 | "\n", 50 | "\n", 51 | "## MongoDB Document\n", 52 | "\n", 53 | "- records in MongoDB are called documents\n", 54 | "- has structure similar to JSON called BSON (Binary JSON)\n", 55 | "- e.g.,\n", 56 | "\n", 57 | "```json\n", 58 | "{\n", 59 | "\ttitle: \"Post Title 1\",\n", 60 | "\tbody: \"Body of post.\",\n", 61 | "\tcategory: \"News\",\n", 62 | "\tlikes: 1,\n", 63 | "\ttags: [\"news\", \"events\"],\n", 64 | "\tdate: Date()\n", 65 | "}\n", 66 | "```\n", 67 | "- keys are strings without quotes, and the field values may include numbers, strings, booleans, arrays, or even nested documents\n", 68 | "\n", 69 | "## MongoDB Server\n", 70 | "\n", 71 | "- can download and install free community edition or use cloub-based Atlas\n", 72 | "- we'll utilize cloud-based free edition of MongoDB server\n", 73 | "- use Python PyMongo database driver - [https://www.mongodb.com/docs/drivers/pymongo/](https://www.mongodb.com/docs/drivers/pymongo/)\n", 74 | "- install pymongo using pip\n", 75 | "\n", 76 | "```bash\n", 77 | "$ pip install pymongo\n", 78 | "$ python -m pip install pymongo\n", 79 | "```\n", 80 | "\n", 81 | "## Create your free account or signup using GitHub or Google\n", 82 | "- [https://account.mongodb.com/account/login](https://account.mongodb.com/account/login)\n", 83 | "\n", 84 | "### Create Organization\n", 85 | "\n", 86 | "- let's call it intro-db\n", 
87 | "\n", 88 | "### Create New Project\n", 89 | "\n", 90 | "- let's call it demo-db\n", 91 | "\n", 92 | "### Create Deployment\n", 93 | "\n", 94 | "- Pick FREE deployment\n", 95 | "\n", 96 | "### Create a db user\n", 97 | "\n", 98 | "- give a name such as db-user\n", 99 | "- use Certificate for authentication\n", 100 | "- set certificate expiration - 6 months\n", 101 | "- save the certificate in your system\n", 102 | "- add your current IP address in list\n", 103 | "- load sample data\n", 104 | "\n", 105 | "### MongoDB ServerAPI docs\n", 106 | "\n", 107 | "- [https://pymongo.readthedocs.io/en/stable/api/pymongo/server_api.html](https://pymongo.readthedocs.io/en/stable/api/pymongo/server_api.html)" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": 5, 113 | "id": "d671bce5-5d21-4628-99ec-008194dffac5", 114 | "metadata": {}, 115 | "outputs": [], 116 | "source": [ 117 | "from pymongo import MongoClient\n", 118 | "from pymongo.server_api import ServerApi" 119 | ] 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": 3, 124 | "id": "94300039-c045-4fa6-8cb6-16e23b062afc", 125 | "metadata": {}, 126 | "outputs": [], 127 | "source": [ 128 | "path_to_certificate = 'python/x509-cert-MongoDB-Atlas.pem'" 129 | ] 130 | }, 131 | { 132 | "cell_type": "code", 133 | "execution_count": 14, 134 | "id": "af60a179-8290-4377-adf3-7075c30e8a6b", 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "uri = \"mongodb+srv://cluster0.xeszaub.mongodb.net/?authSource=%24external&authMechanism=MONGODB-X509&retryWrites=true&w=majority\"" 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": 15, 144 | "id": "33258db7", 145 | "metadata": {}, 146 | "outputs": [], 147 | "source": [ 148 | "client = MongoClient(uri,\n", 149 | " tls=True,\n", 150 | " tlsCertificateKeyFile=path_to_certificate,\n", 151 | " server_api=ServerApi('1'))" 152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": 16, 157 | "id": "fb43b9fe-60af-4bec-a755-ecfe8e3a79e4", 158 | "metadata": {}, 159 | "outputs": [ 160 | { 161 | "name": "stdout", 162 | "output_type": "stream", 163 | "text": [ 164 | "0\n" 165 | ] 166 | } 167 | ], 168 | "source": [ 169 | "db = client['testDB']\n", 170 | "collection = db['testCol']\n", 171 | "doc_count = collection.count_documents({})\n", 172 | "print(doc_count) # Should print 0 as the testDB doesn't exist " 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": 12, 178 | "id": "52d65a29-6c2f-46a5-8301-980efe52b021", 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "# let's connect to sample_airbnb database\n", 183 | "# this database is generated when you load sample data\n", 184 | "db = client['sample_airbnb']" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": 9, 190 | "id": "af11aa61-66f3-4fca-b1d0-0988791bdbde", 191 | "metadata": {}, 192 | "outputs": [], 193 | "source": [ 194 | "collection = db['listingsAndReviews']" 195 | ] 196 | }, 197 | { 198 | "cell_type": "code", 199 | "execution_count": 10, 200 | "id": "ee0ed326-5601-4c2c-894e-58e4a208ed04", 201 | "metadata": {}, 202 | "outputs": [], 203 | "source": [ 204 | "doc_count = collection.count_documents({})" 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": 11, 210 | "id": "4044b4ed-1cd1-4124-9e72-478a7efdea25", 211 | "metadata": {}, 212 | "outputs": [ 213 | { 214 | "data": { 215 | "text/plain": [ 216 | "5555" 217 | ] 218 | }, 219 | "execution_count": 11, 220 | "metadata": {}, 221 | 
"output_type": "execute_result" 222 | } 223 | ], 224 | "source": [ 225 | "doc_count" 226 | ] 227 | }, 228 | { 229 | "cell_type": "markdown", 230 | "id": "f7d8c636-7130-44c2-99c9-915632c8ad66", 231 | "metadata": {}, 232 | "source": [ 233 | "## SQL to MongoDB Mapping Chart\n", 234 | "\n", 235 | "- [See docs](https://www.mongodb.com/docs/manual/reference/sql-comparison/)" 236 | ] 237 | }, 238 | { 239 | "cell_type": "code", 240 | "execution_count": null, 241 | "id": "2c0aab8f-2c71-4290-b0c5-50c432f9a752", 242 | "metadata": {}, 243 | "outputs": [], 244 | "source": [] 245 | } 246 | ], 247 | "metadata": { 248 | "kernelspec": { 249 | "display_name": "Python 3 (ipykernel)", 250 | "language": "python", 251 | "name": "python3" 252 | }, 253 | "language_info": { 254 | "codemirror_mode": { 255 | "name": "ipython", 256 | "version": 3 257 | }, 258 | "file_extension": ".py", 259 | "mimetype": "text/x-python", 260 | "name": "python", 261 | "nbconvert_exporter": "python", 262 | "pygments_lexer": "ipython3", 263 | "version": "3.12.0" 264 | } 265 | }, 266 | "nbformat": 4, 267 | "nbformat_minor": 5 268 | } 269 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Intro To Database 2 | 3 | Introduction to Database Course Materials 4 | 5 | ## Software Tools 6 | 7 | - Docker, git, python3, sqlite3, Jupyter notebook, DBeaver Community, MongoDB, etc. 8 | 9 | ## Run Jupyter Notebook 10 | 11 | ### Local System 12 | 13 | - install python and jupyter notebook on your system 14 | - clone this repository 15 | - run jupyter notebook from within the repository folder 16 | 17 | ```bash 18 | $ jupyter notebook 19 | ``` 20 | 21 | ### Using Docker Containter 22 | 23 | #### Initial setup 24 | 25 | - Install Docker Desktop: [https://docs.docker.com/desktop/](https://docs.docker.com/desktop/) 26 | 27 | - On Windows System, it takes a bit of extra work: 28 | 29 | 1. Install git along with git-bash: [https://git-scm.com/download/win](https://git-scm.com/download/win) 30 | 2. Follow the instructions here: [https://www.makeuseof.com/how-to-install-docker-windows-10-11/](https://www.makeuseof.com/how-to-install-docker-windows-10-11/) 31 | 32 | - Create a course folder 33 | - e.g., `Users///` 34 | - `Users/rbasnet/fall2023/intro-database` 35 | - NOTE - course folder must be lowercase as it's the name used for docker image 36 | 37 | - Copy all the files (except README.md) from python folder into your course folder 38 | 39 | - Using a Terminal (git-bash on Windows) do the following: 40 | 41 | - change working directory to your course folder 42 | 43 | ```bash 44 | $ cd 45 | $ pwd 46 | $ ls 47 | ``` 48 | 49 | - run the run.sh script using bash program 50 | 51 | ```bash 52 | $ bash run.sh 53 | ``` 54 | - if all goes well, you'll see a Ubuntu Bash Terminal 55 | - Note that the initial setup may take some time to build docker image. 
56 | - type the following for a quick test: 57 | 58 | ```bash 59 | $ uname -a 60 | $ pwd 61 | $ ls 62 | $ sqlite3 --version 63 | $ python --version 64 | $ python hello.py 65 | ``` 66 | 67 | - Clone the course jupyter notebook repository 68 | 69 | #### Run Jupyter Notebook server 70 | 71 | - Open a Terminal (git-bash on Windows) 72 | - Change current working directory to your course folder 73 | 74 | ```bash 75 | $ cd intro-database 76 | ``` 77 | 78 | - Run run-jupyter.sh script 79 | 80 | ```bash 81 | $ bash run-jupyter.sh 82 | ``` 83 | 84 | - Press Ctrl+C to quit the jupyter notebook server 85 | - Copy the URL to the browswer 86 | -------------------------------------------------------------------------------- /Relational-Databases.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "e4872dd2", 6 | "metadata": {}, 7 | "source": [ 8 | "# Relational Database Concepts\n", 9 | "\n", 10 | "- widely used type of database management system (DBMS) that organize data into tables with rows and columns\n", 11 | "- relational databases use the following concepts\n", 12 | "- deep understanding of these concepts help you mastering relational databases\n", 13 | "\n", 14 | "## Database Schema\n", 15 | "\n", 16 | "- a schema refers to the logical and structural organization of a database\n", 17 | "- defines the layout, design, and relationships of the database objects, such as tables, views, indexes, and constraints\n", 18 | "- it provides a blueprint for how data is stored, how it is organized, and how different parts of the database are interconnected\n", 19 | "\n", 20 | "### Databae Schema: E-Commerce System\n", 21 | "\n", 22 | "- let's consider a simple example of a database schema for an e-commerce platform that tracks products, customers, and orders\n", 23 | "- this schema will define the structure and relationships between tables\n", 24 | "\n", 25 | "#### Table: Customers\n", 26 | "\n", 27 | "| Column | Data Type | Constraints |\n", 28 | "|:-------------|:------------|:--------------------|\n", 29 | "| CustomerID | INT | Primary Key |\n", 30 | "| FirstName | VARCHAR(50) | Not Null |\n", 31 | "| LastName | VARCHAR(50) | Not Null |\n", 32 | "| Email | VARCHAR(100)| Not Null, Unique |\n", 33 | "| Phone | VARCHAR(20) | |\n", 34 | "\n", 35 | "#### Table: Products\n", 36 | "\n", 37 | "| Column | Data Type | Constraints |\n", 38 | "|:-------------|:------------|:--------------------|\n", 39 | "| ProductID | INT | Primary Key |\n", 40 | "| ProductName | VARCHAR(100)| Not Null |\n", 41 | "| Price | DECIMAL(10,2)| Not Null |\n", 42 | "| Category | VARCHAR(50) | |\n", 43 | "\n", 44 | "#### Table: Orders\n", 45 | "\n", 46 | "| Column | Data Type | Constraints |\n", 47 | "|:-------------|:------------|:--------------------|\n", 48 | "| OrderID | INT | Primary Key |\n", 49 | "| CustomerID | INT | Foreign Key (Customers) |\n", 50 | "| OrderDate | DATE | Not Null |\n", 51 | "| TotalAmount | DECIMAL(10,2)| Not Null |\n", 52 | "\n", 53 | "#### Table: OrderDetails\n", 54 | "\n", 55 | "| Column | Data Type | Constraints |\n", 56 | "|--------------|-------------|---------------------|\n", 57 | "| OrderID | INT | Foreign Key (Orders)|\n", 58 | "| ProductID | INT | Foreign Key (Products)|\n", 59 | "| Quantity | INT | Not Null |\n", 60 | "\n", 61 | "\n", 62 | "## Tables and Records\n", 63 | "\n", 64 | "- explanation of tables as the primary data storage units\n", 65 | "- understanding rows (also called records or tuples) as 
individual data entries in a table\n", 66 | "- columns (also called attributes or fields) as data characteristics stored in each record\n", 67 | "\n", 68 | "### Example: Customer Table with Records\n", 69 | "\n", 70 | "| CustomerID | FirstName | LastName | Email |\n", 71 | "|------------|-----------|----------|---------------------|\n", 72 | "| 101 | John | Doe | john@example.com |\n", 73 | "| 102 | Jane | Smith | jane@example.com |\n", 74 | "| 103 | Michael | Johnson | michael@example.com|\n", 75 | "\n", 76 | "\n", 77 | "## Primary keys\n", 78 | "\n", 79 | "- definition of primary keys as unique identifiers for records within a table\n", 80 | "- importance of primary keys in maintaining data integrity and ensuring record uniqueness\n", 81 | "- illustration of primary key constraints\n", 82 | "\n", 83 | "## Foreign Keys and Relationships\n", 84 | "\n", 85 | "- introduction to foreign keys as references to primary keys in other tables.\n", 86 | "- explanation of how foreign keys establish relationships between tables.\n", 87 | "- different types of relationships: one-to-one, one-to-many, and many-to-many.\n", 88 | "\n", 89 | "## Constraints and Data Integrity\n", 90 | "\n", 91 | "- discussion of various constraints, including unique constraints and not-null constraints\n", 92 | "- Explanation of how constraints ensure data validity and integrity\n", 93 | "- Role of constraints in preventing incorrect or incomplete data\n", 94 | "\n", 95 | "## Database Normalization\n", 96 | "\n", 97 | "- introduction to normalization as a process for organizing data to minimize redundancy and data anomalies\n", 98 | "- explanation of normalization forms (1NF, 2NF, 3NF, etc.)\n", 99 | "- illustration of normalization through examples and practical scenarios\n", 100 | "\n", 101 | "## Denormalization\n", 102 | "\n", 103 | "- understanding denormalization as the deliberate introduction of redundancy for performance optimization\n", 104 | "- when and why denormalization might be used in certain scenarios\n", 105 | "\n", 106 | "## Relational Algebra\n", 107 | "\n", 108 | "- basic introduction to relational algebra, a set of mathematical operations used to manipulate relational data\n", 109 | "- operations like SELECT, PROJECT, JOIN, and UNION, and their practical implications\n", 110 | "\n", 111 | "## Entity-Relationship (ER) Modeling\n", 112 | "\n", 113 | "- overview of ER modeling as a visual representation of data relationships\n", 114 | "- use of symbols like entities, attributes, and relationships in creating ER diagrams\n", 115 | "\n", 116 | "\n", 117 | "## Practical Examples and Queries\n", 118 | "- Applying concepts to real-world scenarios\n", 119 | "- Writing SQL queries to perform CRUD (Create, Read, Update, Delete) operations" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": null, 125 | "id": "05149b74", 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [] 129 | } 130 | ], 131 | "metadata": { 132 | "kernelspec": { 133 | "display_name": "Python 3 (ipykernel)", 134 | "language": "python", 135 | "name": "python3" 136 | }, 137 | "language_info": { 138 | "codemirror_mode": { 139 | "name": "ipython", 140 | "version": 3 141 | }, 142 | "file_extension": ".py", 143 | "mimetype": "text/x-python", 144 | "name": "python", 145 | "nbconvert_exporter": "python", 146 | "pygments_lexer": "ipython3", 147 | "version": "3.10.8" 148 | } 149 | }, 150 | "nbformat": 4, 151 | "nbformat_minor": 5 152 | } 153 | -------------------------------------------------------------------------------- 
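To make the schema above concrete, here is a minimal sketch (hypothetical, not the course's official DDL) that builds two of the tables described in Relational-Databases.ipynb in an in-memory SQLite database and runs one small join, reusing the column names and the John Doe sample row shown above. SQLite and its Python driver are the tools used elsewhere in this repository.

```python
# A minimal sketch: two tables from the e-commerce schema above,
# created in an in-memory SQLite database.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("PRAGMA foreign_keys = ON")  # SQLite leaves FK enforcement off by default

conn.executescript("""
CREATE TABLE Customers (
    CustomerID INTEGER      PRIMARY KEY,
    FirstName  VARCHAR(50)  NOT NULL,
    LastName   VARCHAR(50)  NOT NULL,
    Email      VARCHAR(100) NOT NULL UNIQUE,
    Phone      VARCHAR(20)
);

CREATE TABLE Orders (
    OrderID     INTEGER       PRIMARY KEY,
    CustomerID  INTEGER       NOT NULL REFERENCES Customers(CustomerID),
    OrderDate   DATE          NOT NULL,
    TotalAmount DECIMAL(10,2) NOT NULL
);
""")

# CRUD in miniature: Create two rows, then Read them back with a join
# that follows the one-to-many Customers -> Orders relationship.
conn.execute("INSERT INTO Customers VALUES (101, 'John', 'Doe', 'john@example.com', NULL)")
conn.execute("INSERT INTO Orders VALUES (1, 101, '2023-09-01', 42.50)")

for row in conn.execute("""
    SELECT c.FirstName, c.LastName, o.OrderDate, o.TotalAmount
    FROM Orders AS o
    JOIN Customers AS c ON c.CustomerID = o.CustomerID
"""):
    print(row)  # ('John', 'Doe', '2023-09-01', 42.5)

conn.close()
```

The `PRAGMA foreign_keys = ON` line matters when experimenting with the relationships described above: without it, SQLite accepts an `Orders` row whose `CustomerID` has no matching customer.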
/SQL-Introduction.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "026ec886", 6 | "metadata": {}, 7 | "source": [ 8 | "# SQL\n", 9 | "\n", 10 | "## Topics\n", 11 | "- What is SQL?\n", 12 | "- What are some key aspects of SQL?\n", 13 | "\n", 14 | "## What is SQL?\n", 15 | "\n", 16 | "- SQL stands for Structured Query Language\n", 17 | "- a domain-specific language used for managing and manipulating relational databases\n", 18 | "- SQL provides a standardized way to interact with databases, allowing users and applications to perform tasks such as querying data, inserting, updating, and deleting records, defining the structure of the database, and more\n", 19 | "- SQL is case insensitive bu it is the best practice to write most SQL keywords in CAPS\n", 20 | "\n", 21 | "## Some key aspects of SQL\n", 22 | "\n", 23 | "### Database Operations\n", 24 | " - SQL supports a wide range of operations to work with databases, including:\n", 25 | "\n", 26 | "#### Querying\n", 27 | "- retrieving specific data from one or more tables using SELECT statements\n", 28 | "\n", 29 | "#### Inserting\n", 30 | "- adding new records into tables using INSERT statements\n", 31 | "\n", 32 | "#### Updating\n", 33 | "- modifying existing records using UPDATE statements\n", 34 | "\n", 35 | "#### Deleting\n", 36 | "- removing records from tables using DELETE statements\n", 37 | "\n", 38 | "#### Creating and Modifying Schema\n", 39 | "- defining the structure of the database using CREATE, ALTER, and DROP statements for tables, indexes, views, and other database objects\n", 40 | "\n", 41 | "#### Data Manipulation Language (DML)\n", 42 | "- includes statements like SELECT, INSERT, UPDATE, and DELETE for working with data\n", 43 | "\n", 44 | "#### Data Definition Language (DDL)\n", 45 | "- includes statements like CREATE, ALTER, and DROP for defining and managing the structure of the database\n", 46 | "\n", 47 | "#### Data Control Language (DCL)\n", 48 | "- includes statements like GRANT and REVOKE for managing permissions and access control.\n", 49 | "\n", 50 | "### Relational Databases\n", 51 | "- SQL is specifically designed for relational databases, which store data in tables with rows and columns\n", 52 | "- each row represents a record, and each column represents a data attribute\n", 53 | "\n", 54 | "### Standardization\n", 55 | "- SQL is an industry-standard language with standardized syntax and commands\n", 56 | "- however, there are variations and extensions in implementations by different database vendors\n", 57 | "\n", 58 | "### Data Manipulation\n", 59 | "- SQL allows you to retrieve specific data using complex queries that involve filtering, sorting, grouping, and joining data from multiple tables\n", 60 | "\n", 61 | "### Data Integrity\n", 62 | "- SQL enforces data integrity rules through constraints like primary keys, foreign keys, unique constraints, and check constraints\n", 63 | "\n", 64 | "### Transactions\n", 65 | "- SQL supports transactions, which allow multiple related database operations to be grouped together and executed as a single unit\n", 66 | "- Transactions ensure data consistency and reliability\n", 67 | "\n", 68 | "### Query Optimization\n", 69 | "- Database systems often have query optimizers that analyze SQL queries and determine the most efficient way to execute them, considering factors like indexes and table relationships\n", 70 | "\n", 71 | "SQL is used by developers, database 
administrators, data analysts, and anyone working with databases to manage and retrieve data. While the core SQL language is consistent across different database systems, there might be some differences in syntax and additional features supported by different database vendors (such as MySQL, PostgreSQL, Oracle, Microsoft SQL Server, and SQLite).\n", 72 | "\n", 73 | "\n" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "id": "e6b32772", 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [] 83 | } 84 | ], 85 | "metadata": { 86 | "kernelspec": { 87 | "display_name": "Python 3 (ipykernel)", 88 | "language": "python", 89 | "name": "python3" 90 | }, 91 | "language_info": { 92 | "codemirror_mode": { 93 | "name": "ipython", 94 | "version": 3 95 | }, 96 | "file_extension": ".py", 97 | "mimetype": "text/x-python", 98 | "name": "python", 99 | "nbconvert_exporter": "python", 100 | "pygments_lexer": "ipython3", 101 | "version": "3.10.8" 102 | } 103 | }, 104 | "nbformat": 4, 105 | "nbformat_minor": 5 106 | } 107 | -------------------------------------------------------------------------------- /SQLite-Commands.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "9a061340", 6 | "metadata": {}, 7 | "source": [ 8 | "# SQLite DB Installation and Command Line Tools" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "540ce134", 14 | "metadata": {}, 15 | "source": [ 16 | "## Download and Install Tools" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "id": "2691e37d", 22 | "metadata": {}, 23 | "source": [ 24 | "- go to the official download page: https://sqlite.org/download.html\n", 25 | "- download the appropriate version based-on your Operating System and platform\n", 26 | "- E.g., on Windows, download: [sqlite-tools-win32-x86-3420000.zip](https://sqlite.org/2023/sqlite-tools-win32-x86-3420000.zip)\n", 27 | "- unzip/extract binary files to some folder of your choice\n", 28 | " - Users//sqlite-tools is a good place\n", 29 | "- add the path to the extracted binary files to your system environemnt\n", 30 | " - Windows users follow these instructions: https://helpdeskgeek.com/windows-10/add-windows-path-environment-variable/\n", 31 | " - Mac/Linux users follow these instructions: https://www.cyberciti.biz/faq/appleosx-bash-unix-change-set-path-environment-variable/\n", 32 | " \n", 33 | "## GUI DB Tools\n", 34 | "\n", 35 | "- download and install DBeaver Community - https://dbeaver.io/download/\n", 36 | "- install the driver and sample SQLite database file\n", 37 | " \n", 38 | "## Chinook Database\n", 39 | "- https://github.com/lerocha/chinook-database\n", 40 | "- it represents a digital media store, including tables for artists, albums, media tracks, invoices, and customers\n", 41 | "- can be used to test and demo various databases\n", 42 | "- chinook.sqlite database file is provided in data folder of Intro-Database repository" 43 | ] 44 | }, 45 | { 46 | "cell_type": "markdown", 47 | "id": "cb9b09cd", 48 | "metadata": {}, 49 | "source": [ 50 | "## Run SQLite Command line tool\n", 51 | "\n", 52 | "- open a Terminal on Mac/Linux or Command Prompt on Windows\n", 53 | "- type sqlite3 command\n", 54 | " - if you get sqlite3 not found error, add path to the file to your system environment path following the instructions above\n", 55 | " - here's how it looks on my MacBook Pro:\n", 56 | "\n", 57 | "```bash\n", 58 | "(base) ╭─rbasnet@M-rbasnetMBP ~ \n", 59 | 
"╰─$ sqlite3 127 ↵\n", 60 | "SQLite version 3.39.3 2022-09-05 11:02:23\n", 61 | "Enter \".help\" for usage hints.\n", 62 | "Connected to a transient in-memory database.\n", 63 | "Use \".open FILENAME\" to reopen on a persistent database.\n", 64 | "sqlite> \n", 65 | "```\n", 66 | "\n", 67 | "### useful commands:\n", 68 | " - .help\n", 69 | " - .quit\n", 70 | " - .open \n", 71 | " - .database\n" 72 | ] 73 | }, 74 | { 75 | "cell_type": "markdown", 76 | "id": "8deed799", 77 | "metadata": {}, 78 | "source": [ 79 | "### Connect to an SQLite db file\n", 80 | "\n", 81 | "```bash\n", 82 | "$ sqlite3 \n", 83 | "```\n", 84 | "\n", 85 | "- download SQLite Sample Database - https://www.sqlitetutorial.net/wp-content/uploads/2018/03/chinook.zip\n", 86 | "- unzip the db file and open it using sqlite3 commands as shown below\n", 87 | "\n", 88 | "```bash\n", 89 | "(base) ╭─rbasnet@M-rbasnetMBP ~ \n", 90 | "╰─$ sqlite3 ~/Downloads/chinook.db \n", 91 | "SQLite version 3.39.3 2022-09-05 11:02:23\n", 92 | "Enter \".help\" for usage hints.\n", 93 | "sqlite> .tables\n", 94 | "albums employees invoices playlists \n", 95 | "artists genres media_types tracks \n", 96 | "customers invoice_items playlist_track\n", 97 | "sqlite> .quit\n", 98 | "```\n", 99 | "\n", 100 | "- open db file using .open command\n", 101 | "- must provide absolute path to the file\n", 102 | "\n", 103 | "```bash\n", 104 | "$ sqlite3\n", 105 | "sqlite> .open DB_FILEPATH/NAME\n", 106 | "```\n", 107 | "\n", 108 | "### Show tables in DB file\n", 109 | "\n", 110 | "- .tables\n", 111 | "\n", 112 | "```bash\n", 113 | "(base) ╭─rbasnet@M-rbasnetMBP ~ \n", 114 | "╰─$ sqlite3 ~/Downloads/chinook.db\n", 115 | "SQLite version 3.39.3 2022-09-05 11:02:23\n", 116 | "Enter \".help\" for usage hints.\n", 117 | "sqlite> .tables\n", 118 | "albums employees invoices playlists \n", 119 | "artists genres media_types tracks \n", 120 | "customers invoice_items playlist_track\n", 121 | "sqlite> \n", 122 | "\n", 123 | "```\n", 124 | "\n", 125 | "### Show the structure of a table\n", 126 | "\n", 127 | "- .schema \n", 128 | " \n", 129 | "```bash\n", 130 | "sqlite> .schema employees\n", 131 | "CREATE TABLE IF NOT EXISTS \"employees\"\n", 132 | "(\n", 133 | " [EmployeeId] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,\n", 134 | " [LastName] NVARCHAR(20) NOT NULL,\n", 135 | " [FirstName] NVARCHAR(20) NOT NULL,\n", 136 | " [Title] NVARCHAR(30),\n", 137 | " [ReportsTo] INTEGER,\n", 138 | " [BirthDate] DATETIME,\n", 139 | " [HireDate] DATETIME,\n", 140 | " [Address] NVARCHAR(70),\n", 141 | " [City] NVARCHAR(40),\n", 142 | " [State] NVARCHAR(40),\n", 143 | " [Country] NVARCHAR(40),\n", 144 | " [PostalCode] NVARCHAR(10),\n", 145 | " [Phone] NVARCHAR(24),\n", 146 | " [Fax] NVARCHAR(24),\n", 147 | " [Email] NVARCHAR(60),\n", 148 | " FOREIGN KEY ([ReportsTo]) REFERENCES \"employees\" ([EmployeeId]) \n", 149 | "\t\tON DELETE NO ACTION ON UPDATE NO ACTION\n", 150 | ");\n", 151 | "CREATE INDEX [IFK_EmployeeReportsTo] ON \"employees\" ([ReportsTo]);\n", 152 | "sqlite> \n", 153 | "```\n", 154 | "\n", 155 | "### use pattern to list tables\n", 156 | "- show all the tables ending with **es**\n", 157 | "- .table '%es'\n", 158 | " \n", 159 | "### Show indexes\n", 160 | " \n", 161 | " - .indexes\n", 162 | "- show the indexes of a specific table\n", 163 | " - .indexes albums\n", 164 | "- show indexes whose names end with 'es'\n", 165 | " - .indexes %es\n", 166 | " - quotes are optional\n", 167 | " \n", 168 | "### Save the result of a query to a file\n", 169 | "- use `.output [FILENAME]` command\n", 
170 | " - all the results of the subsequent queries will be saved to the file specified\n", 171 | "- the following commands select title from the albums table and write the result to the albums.txt files\n", 172 | " \n", 173 | "```bash\n", 174 | "sqlite> .output albums.txt\n", 175 | "sqlite> SELECT title from albums;\n", 176 | "\n", 177 | "```\n", 178 | " \n", 179 | "- let's check the contents of the file albums.txt file\n", 180 | "- .output without file name will write the output to the console" 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": 1, 186 | "id": "7168aa6b", 187 | "metadata": {}, 188 | "outputs": [ 189 | { 190 | "name": "stdout", 191 | "output_type": "stream", 192 | "text": [ 193 | "For Those About To Rock We Salute You\r\n", 194 | "Balls to the Wall\r\n", 195 | "Restless and Wild\r\n", 196 | "Let There Be Rock\r\n", 197 | "Big Ones\r\n", 198 | "Jagged Little Pill\r\n", 199 | "Facelift\r\n", 200 | "Warner 25 Anos\r\n", 201 | "Plays Metallica By Four Cellos\r\n", 202 | "Audioslave\r\n", 203 | "Out Of Exile\r\n", 204 | "BackBeat Soundtrack\r\n", 205 | "The Best Of Billy Cobham\r\n", 206 | "Alcohol Fueled Brewtality Live! [Disc 1]\r\n", 207 | "Alcohol Fueled Brewtality Live! [Disc 2]\r\n", 208 | "Black Sabbath\r\n", 209 | "Black Sabbath Vol. 4 (Remaster)\r\n", 210 | "Body Count\r\n", 211 | "Chemical Wedding\r\n", 212 | "The Best Of Buddy Guy - The Millenium Collection\r\n", 213 | "Prenda Minha\r\n", 214 | "Sozinho Remix Ao Vivo\r\n", 215 | "Minha Historia\r\n", 216 | "Afrociberdelia\r\n", 217 | "Da Lama Ao Caos\r\n", 218 | "Acústico MTV [Live]\r\n", 219 | "Cidade Negra - Hits\r\n", 220 | "Na Pista\r\n", 221 | "Axé Bahia 2001\r\n", 222 | "BBC Sessions [Disc 1] [Live]\r\n", 223 | "Bongo Fury\r\n", 224 | "Carnaval 2001\r\n", 225 | "Chill: Brazil (Disc 1)\r\n", 226 | "Chill: Brazil (Disc 2)\r\n", 227 | "Garage Inc. (Disc 1)\r\n", 228 | "Greatest Hits II\r\n", 229 | "Greatest Kiss\r\n", 230 | "Heart of the Night\r\n", 231 | "International Superhits\r\n", 232 | "Into The Light\r\n", 233 | "Meus Momentos\r\n", 234 | "Minha História\r\n", 235 | "MK III The Final Concerts [Disc 1]\r\n", 236 | "Physical Graffiti [Disc 1]\r\n", 237 | "Sambas De Enredo 2001\r\n", 238 | "Supernatural\r\n", 239 | "The Best of Ed Motta\r\n", 240 | "The Essential Miles Davis [Disc 1]\r\n", 241 | "The Essential Miles Davis [Disc 2]\r\n", 242 | "The Final Concerts (Disc 2)\r\n", 243 | "Up An' Atom\r\n", 244 | "Vinícius De Moraes - Sem Limite\r\n", 245 | "Vozes do MPB\r\n", 246 | "Chronicle, Vol. 1\r\n", 247 | "Chronicle, Vol. 2\r\n", 248 | "Cássia Eller - Coleção Sem Limite [Disc 2]\r\n", 249 | "Cássia Eller - Sem Limite [Disc 1]\r\n", 250 | "Come Taste The Band\r\n", 251 | "Deep Purple In Rock\r\n", 252 | "Fireball\r\n", 253 | "Knocking at Your Back Door: The Best Of Deep Purple in the 80's\r\n", 254 | "Machine Head\r\n", 255 | "Purpendicular\r\n", 256 | "Slaves And Masters\r\n", 257 | "Stormbringer\r\n", 258 | "The Battle Rages On\r\n", 259 | "Vault: Def Leppard's Greatest Hits\r\n", 260 | "Outbreak\r\n", 261 | "Djavan Ao Vivo - Vol. 02\r\n", 262 | "Djavan Ao Vivo - Vol. 
1\r\n", 263 | "Elis Regina-Minha História\r\n", 264 | "The Cream Of Clapton\r\n", 265 | "Unplugged\r\n", 266 | "Album Of The Year\r\n", 267 | "Angel Dust\r\n", 268 | "King For A Day Fool For A Lifetime\r\n", 269 | "The Real Thing\r\n", 270 | "Deixa Entrar\r\n", 271 | "In Your Honor [Disc 1]\r\n", 272 | "In Your Honor [Disc 2]\r\n", 273 | "One By One\r\n", 274 | "The Colour And The Shape\r\n", 275 | "My Way: The Best Of Frank Sinatra [Disc 1]\r\n", 276 | "Roda De Funk\r\n", 277 | "As Canções de Eu Tu Eles\r\n", 278 | "Quanta Gente Veio Ver (Live)\r\n", 279 | "Quanta Gente Veio ver--Bônus De Carnaval\r\n", 280 | "Faceless\r\n", 281 | "American Idiot\r\n", 282 | "Appetite for Destruction\r\n", 283 | "Use Your Illusion I\r\n", 284 | "Use Your Illusion II\r\n", 285 | "Blue Moods\r\n", 286 | "A Matter of Life and Death\r\n", 287 | "A Real Dead One\r\n", 288 | "A Real Live One\r\n", 289 | "Brave New World\r\n", 290 | "Dance Of Death\r\n", 291 | "Fear Of The Dark\r\n", 292 | "Iron Maiden\r\n", 293 | "Killers\r\n", 294 | "Live After Death\r\n", 295 | "Live At Donington 1992 (Disc 1)\r\n", 296 | "Live At Donington 1992 (Disc 2)\r\n", 297 | "No Prayer For The Dying\r\n", 298 | "Piece Of Mind\r\n", 299 | "Powerslave\r\n", 300 | "Rock In Rio [CD1]\r\n", 301 | "Rock In Rio [CD2]\r\n", 302 | "Seventh Son of a Seventh Son\r\n", 303 | "Somewhere in Time\r\n", 304 | "The Number of The Beast\r\n", 305 | "The X Factor\r\n", 306 | "Virtual XI\r\n", 307 | "Sex Machine\r\n", 308 | "Emergency On Planet Earth\r\n", 309 | "Synkronized\r\n", 310 | "The Return Of The Space Cowboy\r\n", 311 | "Get Born\r\n", 312 | "Are You Experienced?\r\n", 313 | "Surfing with the Alien (Remastered)\r\n", 314 | "Jorge Ben Jor 25 Anos\r\n", 315 | "Jota Quest-1995\r\n", 316 | "Cafezinho\r\n", 317 | "Living After Midnight\r\n", 318 | "Unplugged [Live]\r\n", 319 | "BBC Sessions [Disc 2] [Live]\r\n", 320 | "Coda\r\n", 321 | "Houses Of The Holy\r\n", 322 | "In Through The Out Door\r\n", 323 | "IV\r\n", 324 | "Led Zeppelin I\r\n", 325 | "Led Zeppelin II\r\n", 326 | "Led Zeppelin III\r\n", 327 | "Physical Graffiti [Disc 2]\r\n", 328 | "Presence\r\n", 329 | "The Song Remains The Same (Disc 1)\r\n", 330 | "The Song Remains The Same (Disc 2)\r\n", 331 | "A TempestadeTempestade Ou O Livro Dos Dias\r\n", 332 | "Mais Do Mesmo\r\n", 333 | "Greatest Hits\r\n", 334 | "Lulu Santos - RCA 100 Anos De Música - Álbum 01\r\n", 335 | "Lulu Santos - RCA 100 Anos De Música - Álbum 02\r\n", 336 | "Misplaced Childhood\r\n", 337 | "Barulhinho Bom\r\n", 338 | "Seek And Shall Find: More Of The Best (1963-1981)\r\n", 339 | "The Best Of Men At Work\r\n", 340 | "Black Album\r\n", 341 | "Garage Inc. (Disc 2)\r\n", 342 | "Kill 'Em All\r\n", 343 | "Load\r\n", 344 | "Master Of Puppets\r\n", 345 | "ReLoad\r\n", 346 | "Ride The Lightning\r\n", 347 | "St. 
Anger\r\n", 348 | "...And Justice For All\r\n", 349 | "Miles Ahead\r\n", 350 | "Milton Nascimento Ao Vivo\r\n", 351 | "Minas\r\n", 352 | "Ace Of Spades\r\n", 353 | "Demorou...\r\n", 354 | "Motley Crue Greatest Hits\r\n", 355 | "From The Muddy Banks Of The Wishkah [Live]\r\n", 356 | "Nevermind\r\n", 357 | "Compositores\r\n", 358 | "Olodum\r\n", 359 | "Acústico MTV\r\n", 360 | "Arquivo II\r\n", 361 | "Arquivo Os Paralamas Do Sucesso\r\n", 362 | "Bark at the Moon (Remastered)\r\n", 363 | "Blizzard of Ozz\r\n", 364 | "Diary of a Madman (Remastered)\r\n", 365 | "No More Tears (Remastered)\r\n", 366 | "Tribute\r\n", 367 | "Walking Into Clarksdale\r\n", 368 | "Original Soundtracks 1\r\n", 369 | "The Beast Live\r\n", 370 | "Live On Two Legs [Live]\r\n", 371 | "Pearl Jam\r\n", 372 | "Riot Act\r\n", 373 | "Ten\r\n", 374 | "Vs.\r\n", 375 | "Dark Side Of The Moon\r\n", 376 | "Os Cães Ladram Mas A Caravana Não Pára\r\n", 377 | "Greatest Hits I\r\n", 378 | "News Of The World\r\n", 379 | "Out Of Time\r\n", 380 | "Green\r\n", 381 | "New Adventures In Hi-Fi\r\n", 382 | "The Best Of R.E.M.: The IRS Years\r\n", 383 | "Cesta Básica\r\n", 384 | "Raul Seixas\r\n", 385 | "Blood Sugar Sex Magik\r\n", 386 | "By The Way\r\n", 387 | "Californication\r\n", 388 | "Retrospective I (1974-1980)\r\n", 389 | "Santana - As Years Go By\r\n", 390 | "Santana Live\r\n", 391 | "Maquinarama\r\n", 392 | "O Samba Poconé\r\n", 393 | "Judas 0: B-Sides and Rarities\r\n", 394 | "Rotten Apples: Greatest Hits\r\n", 395 | "A-Sides\r\n", 396 | "Morning Dance\r\n", 397 | "In Step\r\n", 398 | "Core\r\n", 399 | "Mezmerize\r\n", 400 | "[1997] Black Light Syndrome\r\n", 401 | "Live [Disc 1]\r\n", 402 | "Live [Disc 2]\r\n", 403 | "The Singles\r\n", 404 | "Beyond Good And Evil\r\n", 405 | "Pure Cult: The Best Of The Cult (For Rockers, Ravers, Lovers & Sinners) [UK]\r\n", 406 | "The Doors\r\n", 407 | "The Police Greatest Hits\r\n", 408 | "Hot Rocks, 1964-1971 (Disc 1)\r\n", 409 | "No Security\r\n", 410 | "Voodoo Lounge\r\n", 411 | "Tangents\r\n", 412 | "Transmission\r\n", 413 | "My Generation - The Very Best Of The Who\r\n", 414 | "Serie Sem Limite (Disc 1)\r\n", 415 | "Serie Sem Limite (Disc 2)\r\n", 416 | "Acústico\r\n", 417 | "Volume Dois\r\n", 418 | "Battlestar Galactica: The Story So Far\r\n", 419 | "Battlestar Galactica, Season 3\r\n", 420 | "Heroes, Season 1\r\n", 421 | "Lost, Season 3\r\n", 422 | "Lost, Season 1\r\n", 423 | "Lost, Season 2\r\n", 424 | "Achtung Baby\r\n", 425 | "All That You Can't Leave Behind\r\n", 426 | "B-Sides 1980-1990\r\n", 427 | "How To Dismantle An Atomic Bomb\r\n", 428 | "Pop\r\n", 429 | "Rattle And Hum\r\n", 430 | "The Best Of 1980-1990\r\n", 431 | "War\r\n", 432 | "Zooropa\r\n", 433 | "UB40 The Best Of - Volume Two [UK]\r\n", 434 | "Diver Down\r\n", 435 | "The Best Of Van Halen, Vol. 
I\r\n", 436 | "Van Halen\r\n", 437 | "Van Halen III\r\n", 438 | "Contraband\r\n", 439 | "Vinicius De Moraes\r\n", 440 | "Ao Vivo [IMPORT]\r\n", 441 | "The Office, Season 1\r\n", 442 | "The Office, Season 2\r\n", 443 | "The Office, Season 3\r\n", 444 | "Un-Led-Ed\r\n", 445 | "Battlestar Galactica (Classic), Season 1\r\n", 446 | "Aquaman\r\n", 447 | "Instant Karma: The Amnesty International Campaign to Save Darfur\r\n", 448 | "Speak of the Devil\r\n", 449 | "20th Century Masters - The Millennium Collection: The Best of Scorpions\r\n", 450 | "House of Pain\r\n", 451 | "Radio Brasil (O Som da Jovem Vanguarda) - Seleccao de Henrique Amaro\r\n", 452 | "Cake: B-Sides and Rarities\r\n", 453 | "LOST, Season 4\r\n", 454 | "Quiet Songs\r\n", 455 | "Muso Ko\r\n", 456 | "Realize\r\n", 457 | "Every Kind of Light\r\n", 458 | "Duos II\r\n", 459 | "Worlds\r\n", 460 | "The Best of Beethoven\r\n", 461 | "Temple of the Dog\r\n", 462 | "Carry On\r\n", 463 | "Revelations\r\n", 464 | "Adorate Deum: Gregorian Chant from the Proper of the Mass\r\n", 465 | "Allegri: Miserere\r\n", 466 | "Pachelbel: Canon & Gigue\r\n", 467 | "Vivaldi: The Four Seasons\r\n", 468 | "Bach: Violin Concertos\r\n", 469 | "Bach: Goldberg Variations\r\n", 470 | "Bach: The Cello Suites\r\n", 471 | "Handel: The Messiah (Highlights)\r\n", 472 | "The World of Classical Favourites\r\n", 473 | "Sir Neville Marriner: A Celebration\r\n", 474 | "Mozart: Wind Concertos\r\n", 475 | "Haydn: Symphonies 99 - 104\r\n", 476 | "Beethoven: Symhonies Nos. 5 & 6\r\n", 477 | "A Soprano Inspired\r\n", 478 | "Great Opera Choruses\r\n", 479 | "Wagner: Favourite Overtures\r\n", 480 | "Fauré: Requiem, Ravel: Pavane & Others\r\n", 481 | "Tchaikovsky: The Nutcracker\r\n", 482 | "The Last Night of the Proms\r\n", 483 | "Puccini: Madama Butterfly - Highlights\r\n", 484 | "Holst: The Planets, Op. 32 & Vaughan Williams: Fantasies\r\n", 485 | "Pavarotti's Opera Made Easy\r\n", 486 | "Great Performances - Barber's Adagio and Other Romantic Favorites for Strings\r\n", 487 | "Carmina Burana\r\n", 488 | "A Copland Celebration, Vol. I\r\n", 489 | "Bach: Toccata & Fugue in D Minor\r\n", 490 | "Prokofiev: Symphony No.1\r\n", 491 | "Scheherazade\r\n", 492 | "Bach: The Brandenburg Concertos\r\n", 493 | "Chopin: Piano Concertos Nos. 1 & 2\r\n", 494 | "Mascagni: Cavalleria Rusticana\r\n", 495 | "Sibelius: Finlandia\r\n", 496 | "Beethoven Piano Sonatas: Moonlight & Pastorale\r\n", 497 | "Great Recordings of the Century - Mahler: Das Lied von der Erde\r\n", 498 | "Elgar: Cello Concerto & Vaughan Williams: Fantasias\r\n", 499 | "Adams, John: The Chairman Dances\r\n", 500 | "Tchaikovsky: 1812 Festival Overture, Op.49, Capriccio Italien & Beethoven: Wellington's Victory\r\n", 501 | "Palestrina: Missa Papae Marcelli & Allegri: Miserere\r\n", 502 | "Prokofiev: Romeo & Juliet\r\n", 503 | "Strauss: Waltzes\r\n", 504 | "Berlioz: Symphonie Fantastique\r\n", 505 | "Bizet: Carmen Highlights\r\n", 506 | "English Renaissance\r\n", 507 | "Handel: Music for the Royal Fireworks (Original Version 1749)\r\n", 508 | "Grieg: Peer Gynt Suites & Sibelius: Pelléas et Mélisande\r\n", 509 | "Mozart Gala: Famous Arias\r\n", 510 | "SCRIABIN: Vers la flamme\r\n", 511 | "Armada: Music from the Courts of England and Spain\r\n", 512 | "Mozart: Symphonies Nos. 40 & 41\r\n", 513 | "Back to Black\r\n", 514 | "Frank\r\n", 515 | "Carried to Dust (Bonus Track Version)\r\n", 516 | "Beethoven: Symphony No. 
6 'Pastoral' Etc.\r\n", 517 | "Bartok: Violin & Viola Concertos\r\n", 518 | "Mendelssohn: A Midsummer Night's Dream\r\n", 519 | "Bach: Orchestral Suites Nos. 1 - 4\r\n", 520 | "Charpentier: Divertissements, Airs & Concerts\r\n", 521 | "South American Getaway\r\n", 522 | "Górecki: Symphony No. 3\r\n", 523 | "Purcell: The Fairy Queen\r\n", 524 | "The Ultimate Relexation Album\r\n", 525 | "Purcell: Music for the Queen Mary\r\n", 526 | "Weill: The Seven Deadly Sins\r\n", 527 | "J.S. Bach: Chaconne, Suite in E Minor, Partita in E Major & Prelude, Fugue and Allegro\r\n", 528 | "Prokofiev: Symphony No.5 & Stravinksy: Le Sacre Du Printemps\r\n", 529 | "Szymanowski: Piano Works, Vol. 1\r\n", 530 | "Nielsen: The Six Symphonies\r\n", 531 | "Great Recordings of the Century: Paganini's 24 Caprices\r\n", 532 | "Liszt - 12 Études D'Execution Transcendante\r\n", 533 | "Great Recordings of the Century - Shubert: Schwanengesang, 4 Lieder\r\n", 534 | "Locatelli: Concertos for Violin, Strings and Continuo, Vol. 3\r\n", 535 | "Respighi:Pines of Rome\r\n", 536 | "Schubert: The Late String Quartets & String Quintet (3 CD's)\r\n", 537 | "Monteverdi: L'Orfeo\r\n", 538 | "Mozart: Chamber Music\r\n", 539 | "Koyaanisqatsi (Soundtrack from the Motion Picture)\r\n" 540 | ] 541 | } 542 | ], 543 | "source": [ 544 | "! cat ~/albums.txt" 545 | ] 546 | }, 547 | { 548 | "cell_type": "markdown", 549 | "id": "dfdc8b27", 550 | "metadata": {}, 551 | "source": [ 552 | "### Execute SQL statements from a file\n", 553 | "- create a file commands.sql\n", 554 | "- open it using .read [COMMAND_FILE]\n", 555 | "\n", 556 | "- type the following content:\n", 557 | "\n", 558 | "```txt\n", 559 | "SELECT albumid, title\n", 560 | "FROM albums\n", 561 | "ORDER BY title\n", 562 | "LIMIT 10;\n", 563 | "```\n", 564 | "- run the following\n", 565 | "\n", 566 | "```bash\n", 567 | "sqlite> .mode column\n", 568 | "sqlite> .header on\n", 569 | "sqlite> .read /Users/rbasnet/projects/Intro-Database/command.sql\n", 570 | "\n", 571 | "AlbumId Title \n", 572 | "------- ------------------------------------------------------------\n", 573 | "156 ...And Justice For All \n", 574 | "257 20th Century Masters - The Millennium Collection: The Best o\n", 575 | " f Scorpions \n", 576 | "296 A Copland Celebration, Vol. 
I \n", 577 | "94 A Matter of Life and Death \n", 578 | "95 A Real Dead One \n", 579 | "96 A Real Live One \n", 580 | "285 A Soprano Inspired \n", 581 | "139 A TempestadeTempestade Ou O Livro Dos Dias \n", 582 | "203 A-Sides \n", 583 | "160 Ace Of Spades \n", 584 | "sqlite> \n", 585 | "```\n", 586 | "\n" 587 | ] 588 | }, 589 | { 590 | "cell_type": "code", 591 | "execution_count": null, 592 | "id": "0d1d8a83", 593 | "metadata": {}, 594 | "outputs": [], 595 | "source": [] 596 | } 597 | ], 598 | "metadata": { 599 | "kernelspec": { 600 | "display_name": "Python 3 (ipykernel)", 601 | "language": "python", 602 | "name": "python3" 603 | }, 604 | "language_info": { 605 | "codemirror_mode": { 606 | "name": "ipython", 607 | "version": 3 608 | }, 609 | "file_extension": ".py", 610 | "mimetype": "text/x-python", 611 | "name": "python", 612 | "nbconvert_exporter": "python", 613 | "pygments_lexer": "ipython3", 614 | "version": "3.11.5" 615 | } 616 | }, 617 | "nbformat": 4, 618 | "nbformat_minor": 5 619 | } 620 | -------------------------------------------------------------------------------- /SQLite-Database.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "9685dde0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Sqlite Database\n", 9 | "\n", 10 | "## Topics\n", 11 | "- What is Sqlite DB?\n", 12 | "- Key Features of Sqlite DB" 13 | ] 14 | }, 15 | { 16 | "cell_type": "markdown", 17 | "id": "08e6a028", 18 | "metadata": {}, 19 | "source": [ 20 | "## What is SQLite DB?\n", 21 | "\n", 22 | "- https://sqlite.org/index.html\n", 23 | "- SQLite is a software library that provides a self-contained, serverless, zero-configuration, and transactional SQL database engine\n", 24 | "- simpler terms: it's a lightweight, file-based relational database management system that allows you to create, manage, and interact with databases using the SQL language\n", 25 | "- commonly used in scenarios where a full-fledged database server might be overkill or unnecessary, such as small to medium-sized applications, mobile apps, embedded systems, and prototyping\n", 26 | "- particularly well-suited for projects that need a simple and self-contained database solution without the need for a dedicated database server" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "id": "47df0691", 32 | "metadata": {}, 33 | "source": [ 34 | "## Key Features of Sqlite DB\n", 35 | "\n", 36 | "### Self-Contained\n", 37 | "\n", 38 | "- SQLite operates as a library linked directly into the application\n", 39 | "- database engine and the application using it are tightly integrated into a single program\n", 40 | "\n", 41 | "### Serverless\n", 42 | "\n", 43 | "- no separate server process, no need to set up and maintain a database server\n", 44 | "- suitable for embedded systems, mobile applications, and desktop applications\n", 45 | "\n", 46 | "### Zero-Configuration\n", 47 | "- doesn't require complex configuration or administration\n", 48 | "- simply include the SQLite library in your application and start using it\n", 49 | "\n", 50 | "### Transactional\n", 51 | "- supports ACID (Atomicity, Consistency, Isolation, Durability) properties, which ensure data integrity and reliability even in the face of system failures or crashes.\n", 52 | "\n", 53 | "### File-Based\n", 54 | "- database is stored in a single file on the disk\n", 55 | "- makes it easy to manage, backup, and transfer databases\n", 56 | "\n", 57 | "### Cross-Platform\n", 58 | "- SQLite 
is designed to work on various operating systems, including Windows, macOS, Linux, iOS, and Android.\n", 59 | "\n", 60 | "### Supports Standard SQL\n", 61 | "- SQLite uses SQL (Structured Query Language) for querying and managing data, making it familiar to those who are already accustomed to relational databases\n", 62 | "\n", 63 | "### Limited Concurrent Access\n", 64 | "- supports concurrent read access by multiple processes or threads\n", 65 | "- doesn't support concurrent write \n", 66 | "- not recommended for situations with heavy write loads and high-concurrency requirements\n", 67 | "\n", 68 | "\n", 69 | "\n", 70 | "\n", 71 | "## References\n", 72 | "\n", 73 | "- https://www.sqlite.org – SQLite homepage\n", 74 | "- https://www.sqlite.org/features.html – SQLite features\n", 75 | "- https://www.sqlite.org/copyright.html – SQLite license\n", 76 | "- https://www.sqlite.org/docs.html – SQLite documentation" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": null, 82 | "id": "654c55d7", 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [] 86 | } 87 | ], 88 | "metadata": { 89 | "kernelspec": { 90 | "display_name": "Python 3 (ipykernel)", 91 | "language": "python", 92 | "name": "python3" 93 | }, 94 | "language_info": { 95 | "codemirror_mode": { 96 | "name": "ipython", 97 | "version": 3 98 | }, 99 | "file_extension": ".py", 100 | "mimetype": "text/x-python", 101 | "name": "python", 102 | "nbconvert_exporter": "python", 103 | "pygments_lexer": "ipython3", 104 | "version": "3.10.8" 105 | } 106 | }, 107 | "nbformat": 4, 108 | "nbformat_minor": 5 109 | } 110 | -------------------------------------------------------------------------------- /SupplyChainDB/Categories.tsv: -------------------------------------------------------------------------------- 1 | CategoryID CategoryName Description 2 | 1 Beverages Soft drinks, coffees, teas, beers, and ales 3 | 2 Condiments Sweet and savory sauces, relishes, spreads, and seasonings 4 | 3 Confections Desserts, candies, and sweet breads 5 | 4 Dairy Products Cheeses 6 | 5 Grains/Cereals Breads, crackers, pasta, and cereal 7 | 6 Meat/Poultry Prepared meats 8 | 7 Produce Dried fruit and bean curd 9 | 8 Seafood Seaweed and fish -------------------------------------------------------------------------------- /SupplyChainDB/Customers.tsv: -------------------------------------------------------------------------------- 1 | CustomerID CustomerName ContactName Address City PostalCode Country 2 | 1 Alfreds Futterkiste Maria Anders Obere Str. 57 Berlin 12209 Germany 3 | 2 Ana Trujillo Emparedados y helados Ana Trujillo Avda. de la Constitución 2222 México D.F. 05021 Mexico 4 | 3 Antonio Moreno Taquería Antonio Moreno Mataderos 2312 México D.F. 05023 Mexico 5 | 4 Around the Horn Thomas Hardy 120 Hanover Sq. London WA1 1DP UK 6 | 5 Berglunds snabbköp Christina Berglund Berguvsvägen 8 Luleå S-958 22 Sweden 7 | 6 Blauer See Delikatessen Hanna Moos Forsterstr. 57 Mannheim 68306 Germany 8 | 7 Blondel père et fils Frédérique Citeaux 24, place Kléber Strasbourg 67000 France 9 | 8 Bólido Comidas preparadas Martín Sommer C/ Araquil, 67 Madrid 28023 Spain 10 | 9 Bon app' Laurence Lebihans 12, rue des Bouchers Marseille 13008 France 11 | 10 Bottom-Dollar Marketse Elizabeth Lincoln 23 Tsawassen Blvd. 
Tsawassen T2F 8M4 Canada 12 | 11 B's Beverages Victoria Ashworth Fauntleroy Circus London EC2 5NT UK 13 | 12 Cactus Comidas para llevar Patricio Simpson Cerrito 333 Buenos Aires 1010 Argentina 14 | 13 Centro comercial Moctezuma Francisco Chang Sierras de Granada 9993 México D.F. 05022 Mexico 15 | 14 Chop-suey Chinese Yang Wang Hauptstr. 29 Bern 3012 Switzerland 16 | 15 Comércio Mineiro Pedro Afonso Av. dos Lusíadas, 23 São Paulo 05432-043 Brazil 17 | 16 Consolidated Holdings Elizabeth Brown Berkeley Gardens 12 Brewery London WX1 6LT UK 18 | 17 Drachenblut Delikatessend Sven Ottlieb Walserweg 21 Aachen 52066 Germany 19 | 18 Du monde entier Janine Labrune 67, rue des Cinquante Otages Nantes 44000 France 20 | 19 Eastern Connection Ann Devon 35 King George London WX3 6FW UK 21 | 20 Ernst Handel Roland Mendel Kirchgasse 6 Graz 8010 Austria 22 | 21 Familia Arquibaldo Aria Cruz Rua Orós, 92 São Paulo 05442-030 Brazil 23 | 22 FISSA Fabrica Inter. Salchichas S.A. Diego Roel C/ Moralzarzal, 86 Madrid 28034 Spain 24 | 23 Folies gourmandes Martine Rancé 184, chaussée de Tournai Lille 59000 France 25 | 24 Folk och fä HB Maria Larsson Åkergatan 24 Bräcke S-844 67 Sweden 26 | 25 Frankenversand Peter Franken Berliner Platz 43 München 80805 Germany 27 | 26 France restauration Carine Schmitt 54, rue Royale Nantes 44000 France 28 | 27 Franchi S.p.A. Paolo Accorti Via Monte Bianco 34 Torino 10100 Italy 29 | 28 Furia Bacalhau e Frutos do Mar Lino Rodriguez Jardim das rosas n. 32 Lisboa 1675 Portugal 30 | 29 Galería del gastrónomo Eduardo Saavedra Rambla de Cataluña, 23 Barcelona 08022 Spain 31 | 30 Godos Cocina Típica José Pedro Freyre C/ Romero, 33 Sevilla 41101 Spain 32 | 31 Gourmet Lanchonetes André Fonseca Av. Brasil, 442 Campinas 04876-786 Brazil 33 | 32 Great Lakes Food Market Howard Snyder 2732 Baker Blvd. Eugene 97403 USA 34 | 33 GROSELLA-Restaurante Manuel Pereira 5ª Ave. Los Palos Grandes Caracas 1081 Venezuela 35 | 34 Hanari Carnes Mario Pontes Rua do Paço, 67 Rio de Janeiro 05454-876 Brazil 36 | 35 HILARIÓN-Abastos Carlos Hernández Carrera 22 con Ave. Carlos Soublette #8-35 San Cristóbal 5022 Venezuela 37 | 36 Hungry Coyote Import Store Yoshi Latimer City Center Plaza 516 Main St. Elgin 97827 USA 38 | 37 Hungry Owl All-Night Grocers Patricia McKenna 8 Johnstown Road Cork Ireland 39 | 38 Island Trading Helen Bennett Garden House Crowther Way Cowes PO31 7PJ UK 40 | 39 Königlich Essen Philip Cramer Maubelstr. 90 Brandenburg 14776 Germany 41 | 40 La corne d'abondance Daniel Tonini 67, avenue de l'Europe Versailles 78000 France 42 | 41 La maison d'Asie Annette Roulet 1 rue Alsace-Lorraine Toulouse 31000 France 43 | 42 Laughing Bacchus Wine Cellars Yoshi Tannamuri 1900 Oak St. Vancouver V3F 2K1 Canada 44 | 43 Lazy K Kountry Store John Steel 12 Orchestra Terrace Walla Walla 99362 USA 45 | 44 Lehmanns Marktstand Renate Messner Magazinweg 7 Frankfurt a.M. 60528 Germany 46 | 45 Let's Stop N Shop Jaime Yorres 87 Polk St. Suite 5 San Francisco 94117 USA 47 | 46 LILA-Supermercado Carlos González Carrera 52 con Ave. Bolívar #65-98 Llano Largo Barquisimeto 3508 Venezuela 48 | 47 LINO-Delicateses Felipe Izquierdo Ave. 5 de Mayo Porlamar I. de Margarita 4980 Venezuela 49 | 48 Lonesome Pine Restaurant Fran Wilson 89 Chiaroscuro Rd. Portland 97219 USA 50 | 49 Magazzini Alimentari Riuniti Giovanni Rovelli Via Ludovico il Moro 22 Bergamo 24100 Italy 51 | 50 Maison Dewey Catherine Dewey Rue Joseph-Bens 532 Bruxelles B-1180 Belgium 52 | 51 Mère Paillarde Jean Fresnière 43 rue St. 
Laurent Montréal H1J 1C3 Canada 53 | 52 Morgenstern Gesundkost Alexander Feuer Heerstr. 22 Leipzig 04179 Germany 54 | 53 North/South Simon Crowther South House 300 Queensbridge London SW7 1RZ UK 55 | 54 Océano Atlántico Ltda. Yvonne Moncada Ing. Gustavo Moncada 8585 Piso 20-A Buenos Aires 1010 Argentina 56 | 55 Old World Delicatessen Rene Phillips 2743 Bering St. Anchorage 99508 USA 57 | 56 Ottilies Käseladen Henriette Pfalzheim Mehrheimerstr. 369 Köln 50739 Germany 58 | 57 Paris spécialités Marie Bertrand 265, boulevard Charonne Paris 75012 France 59 | 58 Pericles Comidas clásicas Guillermo Fernández Calle Dr. Jorge Cash 321 México D.F. 05033 Mexico 60 | 59 Piccolo und mehr Georg Pipps Geislweg 14 Salzburg 5020 Austria 61 | 60 Princesa Isabel Vinhoss Isabel de Castro Estrada da saúde n. 58 Lisboa 1756 Portugal 62 | 61 Que Delícia Bernardo Batista Rua da Panificadora, 12 Rio de Janeiro 02389-673 Brazil 63 | 62 Queen Cozinha Lúcia Carvalho Alameda dos Canàrios, 891 São Paulo 05487-020 Brazil 64 | 63 QUICK-Stop Horst Kloss Taucherstraße 10 Cunewalde 01307 Germany 65 | 64 Rancho grande Sergio Gutiérrez Av. del Libertador 900 Buenos Aires 1010 Argentina 66 | 65 Rattlesnake Canyon Grocery Paula Wilson 2817 Milton Dr. Albuquerque 87110 USA 67 | 66 Reggiani Caseifici Maurizio Moroni Strada Provinciale 124 Reggio Emilia 42100 Italy 68 | 67 Ricardo Adocicados Janete Limeira Av. Copacabana, 267 Rio de Janeiro 02389-890 Brazil 69 | 68 Richter Supermarkt Michael Holz Grenzacherweg 237 Genève 1203 Switzerland 70 | 69 Romero y tomillo Alejandra Camino Gran Vía, 1 Madrid 28001 Spain 71 | 70 Santé Gourmet Jonas Bergulfsen Erling Skakkes gate 78 Stavern 4110 Norway 72 | 71 Save-a-lot Markets Jose Pavarotti 187 Suffolk Ln. Boise 83720 USA 73 | 72 Seven Seas Imports Hari Kumar 90 Wadhurst Rd. London OX15 4NB UK 74 | 73 Simons bistro Jytte Petersen Vinbæltet 34 København 1734 Denmark 75 | 74 Spécialités du monde Dominique Perrier 25, rue Lauriston Paris 75016 France 76 | 75 Split Rail Beer & Ale Art Braunschweiger P.O. Box 555 Lander 82520 USA 77 | 76 Suprêmes délices Pascale Cartrain Boulevard Tirou, 255 Charleroi B-6000 Belgium 78 | 77 The Big Cheese Liz Nixon 89 Jefferson Way Suite 2 Portland 97201 USA 79 | 78 The Cracker Box Liu Wong 55 Grizzly Peak Rd. Butte 59801 USA 80 | 79 Toms Spezialitäten Karin Josephs Luisenstr. 48 Münster 44087 Germany 81 | 80 Tortuga Restaurante Miguel Angel Paolino Avda. Azteca 123 México D.F. 05033 Mexico 82 | 81 Tradição Hipermercados Anabela Domingues Av. Inês de Castro, 414 São Paulo 05634-030 Brazil 83 | 82 Trail's Head Gourmet Provisioners Helvetius Nagy 722 DaVinci Blvd. Kirkland 98034 USA 84 | 83 Vaffeljernet Palle Ibsen Smagsløget 45 Århus 8200 Denmark 85 | 84 Victuailles en stock Mary Saveley 2, rue du Commerce Lyon 69004 France 86 | 85 Vins et alcools Chevalier Paul Henriot 59 rue de l'Abbaye Reims 51100 France 87 | 86 Die Wandernde Kuh Rita Müller Adenauerallee 900 Stuttgart 70563 Germany 88 | 87 Wartian Herkku Pirkko Koskitalo Torikatu 38 Oulu 90110 Finland 89 | 88 Wellington Importadora Paula Parente Rua do Mercado, 12 Resende 08737-363 Brazil 90 | 89 White Clover Markets Karl Jablonski 305 - 14th Ave. S. Suite 3B Seattle 98128 USA 91 | 90 Wilman Kala Matti Karttunen Keskuskatu 45 Helsinki 21240 Finland 92 | 91 Wolski Zbyszek ul. 
Filtrowa 68 Walla 01-012 Poland -------------------------------------------------------------------------------- /SupplyChainDB/Employees.tsv: -------------------------------------------------------------------------------- 1 | EmployeeID LastName FirstName BirthDate Photo Notes 2 | 1 Davolio Nancy 1968-12-08 EmpID1.pic Education includes a BA in psychology from Colorado State University. She also completed (The Art of the Cold Call). Nancy is a member of 'Toastmasters International'. 3 | 2 Fuller Andrew 1952-02-19 EmpID2.pic Andrew received his BTS commercial and a Ph.D. in international marketing from the University of Dallas. He is fluent in French and Italian and reads German. He joined the company as a sales representative, was promoted to sales manager and was then named vice president of sales. Andrew is a member of the Sales Management Roundtable, the Seattle Chamber of Commerce, and the Pacific Rim Importers Association. 4 | 3 Leverling Janet 1963-08-30 EmpID3.pic Janet has a BS degree in chemistry from Boston College). She has also completed a certificate program in food retailing management. Janet was hired as a sales associate and was promoted to sales representative. 5 | 4 Peacock Margaret 1958-09-19 EmpID4.pic Margaret holds a BA in English literature from Concordia College and an MA from the American Institute of Culinary Arts. She was temporarily assigned to the London office before returning to her permanent post in Seattle. 6 | 5 Buchanan Steven 1955-03-04 EmpID5.pic Steven Buchanan graduated from St. Andrews University, Scotland, with a BSC degree. Upon joining the company as a sales representative, he spent 6 months in an orientation program at the Seattle office and then returned to his permanent post in London, where he was promoted to sales manager. Mr. Buchanan has completed the courses 'Successful Telemarketing' and 'International Sales Management'. He is fluent in French. 7 | 6 Suyama Michael 1963-07-02 EmpID6.pic Michael is a graduate of Sussex University (MA, economics) and the University of California at Los Angeles (MBA, marketing). He has also taken the courses 'Multi-Cultural Selling' and 'Time Management for the Sales Professional'. He is fluent in Japanese and can read and write French, Portuguese, and Spanish. 8 | 7 King Robert 1960-05-29 EmpID7.pic Robert King served in the Peace Corps and traveled extensively before completing his degree in English at the University of Michigan and then joining the company. After completing a course entitled 'Selling in Europe', he was transferred to the London office. 9 | 8 Callahan Laura 1958-01-09 EmpID8.pic Laura received a BA in psychology from the University of Washington. She has also completed a course in business French. She reads and writes French. 10 | 9 Dodsworth Anne 1969-07-02 EmpID9.pic Anne has a BA degree in English from St. Lawrence College. She is fluent in French and German. 11 | 10 West Adam 1928-09-19 EmpID10.pic An old chum. 
-------------------------------------------------------------------------------- /SupplyChainDB/OrderDetailsN.tsv: -------------------------------------------------------------------------------- 1 | OrderDetailID OrderID ProductID Quantity 2 | 1 10248 11 12 3 | 2 10248 42 10 4 | 3 10248 72 5 5 | 4 10249 14 9 6 | 5 10249 51 40 7 | 6 10250 41 10 8 | 7 10250 51 35 9 | 8 10250 65 15 10 | 9 10251 22 6 11 | 10 10251 57 15 12 | 11 10251 65 20 13 | 12 10252 20 40 14 | 13 10252 33 25 15 | 14 10252 60 40 16 | 15 10253 31 20 17 | 16 10253 39 42 18 | 17 10253 49 40 19 | 18 10254 24 15 20 | 19 10254 55 21 21 | 20 10254 74 21 22 | 21 10255 2 20 23 | 22 10255 16 35 24 | 23 10255 36 25 25 | 24 10255 59 30 26 | 25 10256 53 15 27 | 26 10256 77 12 28 | 27 10257 27 25 29 | 28 10257 39 6 30 | 29 10257 77 15 31 | 30 10258 2 50 32 | 31 10258 5 65 33 | 32 10258 32 6 34 | 33 10259 21 10 35 | 34 10259 37 1 36 | 35 10260 41 16 37 | 36 10260 57 50 38 | 37 10260 62 15 39 | 38 10260 70 21 40 | 39 10261 21 20 41 | 40 10261 35 20 42 | 41 10262 5 12 43 | 42 10262 7 15 44 | 43 10262 56 2 45 | 44 10263 16 60 46 | 45 10263 24 28 47 | 46 10263 30 60 48 | 47 10263 74 36 49 | 48 10264 2 35 50 | 49 10264 41 25 51 | 50 10265 17 30 52 | 51 10265 70 20 53 | 52 10266 12 12 54 | 53 10267 40 50 55 | 54 10267 59 70 56 | 55 10267 76 15 57 | 56 10268 29 10 58 | 57 10268 72 4 59 | 58 10269 33 60 60 | 59 10269 72 20 61 | 60 10270 36 30 62 | 61 10270 43 25 63 | 62 10271 33 24 64 | 63 10272 20 6 65 | 64 10272 31 40 66 | 65 10272 72 24 67 | 66 10273 10 24 68 | 67 10273 31 15 69 | 68 10273 33 20 70 | 69 10273 40 60 71 | 70 10273 76 33 72 | 71 10274 71 20 73 | 72 10274 72 7 74 | 73 10275 24 12 75 | 74 10275 59 6 76 | 75 10276 10 15 77 | 76 10276 13 10 78 | 77 10277 28 20 79 | 78 10277 62 12 80 | 79 10278 44 16 81 | 80 10278 59 15 82 | 81 10278 63 8 83 | 82 10278 73 25 84 | 83 10279 17 15 85 | 84 10280 24 12 86 | 85 10280 55 20 87 | 86 10280 75 30 88 | 87 10281 19 1 89 | 88 10281 24 6 90 | 89 10281 35 4 91 | 90 10282 30 6 92 | 91 10282 57 2 93 | 92 10283 15 20 94 | 93 10283 19 18 95 | 94 10283 60 35 96 | 95 10283 72 3 97 | 96 10284 27 15 98 | 97 10284 44 21 99 | 98 10284 60 20 100 | 99 10284 67 5 101 | 100 10285 1 45 102 | 101 10285 40 40 103 | 102 10285 53 36 104 | 103 10286 35 100 105 | 104 10286 62 40 106 | 105 10287 16 40 107 | 106 10287 34 20 108 | 107 10287 46 15 109 | 108 10288 54 10 110 | 109 10288 68 3 111 | 110 10289 3 30 112 | 111 10289 64 9 113 | 112 10290 5 20 114 | 113 10290 29 15 115 | 114 10290 49 15 116 | 115 10290 77 10 117 | 116 10291 13 20 118 | 117 10291 44 24 119 | 118 10291 51 2 120 | 119 10292 20 20 121 | 120 10293 18 12 122 | 121 10293 24 10 123 | 122 10293 63 5 124 | 123 10293 75 6 125 | 124 10294 1 18 126 | 125 10294 17 15 127 | 126 10294 43 15 128 | 127 10294 60 21 129 | 128 10294 75 6 130 | 129 10295 56 4 131 | 130 10296 11 12 132 | 131 10296 16 30 133 | 132 10296 69 15 134 | 133 10297 39 60 135 | 134 10297 72 20 136 | 135 10298 2 40 137 | 136 10298 36 40 138 | 137 10298 59 30 139 | 138 10298 62 15 140 | 139 10299 19 15 141 | 140 10299 70 20 142 | 141 10300 66 30 143 | 142 10300 68 20 144 | 143 10301 40 10 145 | 144 10301 56 20 146 | 145 10302 17 40 147 | 146 10302 28 28 148 | 147 10302 43 12 149 | 148 10303 40 40 150 | 149 10303 65 30 151 | 150 10303 68 15 152 | 151 10304 49 30 153 | 152 10304 59 10 154 | 153 10304 71 2 155 | 154 10305 18 25 156 | 155 10305 29 25 157 | 156 10305 39 30 158 | 157 10306 30 10 159 | 158 10306 53 10 160 | 159 10306 54 5 161 | 160 10307 62 10 162 | 161 10307 68 3 163 | 162 10308 69 1 164 
| 163 10308 70 5 165 | 164 10309 4 20 166 | 165 10309 6 30 167 | 166 10309 42 2 168 | 167 10309 43 20 169 | 168 10309 71 3 170 | 169 10310 16 10 171 | 170 10310 62 5 172 | 171 10311 42 6 173 | 172 10311 69 7 174 | 173 10312 28 4 175 | 174 10312 43 24 176 | 175 10312 53 20 177 | 176 10312 75 10 178 | 177 10313 36 12 179 | 178 10314 32 40 180 | 179 10314 58 30 181 | 180 10314 62 25 182 | 181 10315 34 14 183 | 182 10315 70 30 184 | 183 10316 41 10 185 | 184 10316 62 70 186 | 185 10317 1 20 187 | 186 10318 41 20 188 | 187 10318 76 6 189 | 188 10319 17 8 190 | 189 10319 28 14 191 | 190 10319 76 30 192 | 191 10320 71 30 193 | 192 10321 35 10 194 | 193 10322 52 20 195 | 194 10323 15 5 196 | 195 10323 25 4 197 | 196 10323 39 4 198 | 197 10324 16 21 199 | 198 10324 35 70 200 | 199 10324 46 30 201 | 200 10324 59 40 202 | 201 10324 63 80 203 | 202 10325 6 6 204 | 203 10325 13 12 205 | 204 10325 14 9 206 | 205 10325 31 4 207 | 206 10325 72 40 208 | 207 10326 4 24 209 | 208 10326 57 16 210 | 209 10326 75 50 211 | 210 10327 2 25 212 | 211 10327 11 50 213 | 212 10327 30 35 214 | 213 10327 58 30 215 | 214 10328 59 9 216 | 215 10328 65 40 217 | 216 10328 68 10 218 | 217 10329 19 10 219 | 218 10329 30 8 220 | 219 10329 38 20 221 | 220 10329 56 12 222 | 221 10330 26 50 223 | 222 10330 72 25 224 | 223 10331 54 15 225 | 224 10332 18 40 226 | 225 10332 42 10 227 | 226 10332 47 16 228 | 227 10333 14 10 229 | 228 10333 21 10 230 | 229 10333 71 40 231 | 230 10334 52 8 232 | 231 10334 68 10 233 | 232 10335 2 7 234 | 233 10335 31 25 235 | 234 10335 32 6 236 | 235 10335 51 48 237 | 236 10336 4 18 238 | 237 10337 23 40 239 | 238 10337 26 24 240 | 239 10337 36 20 241 | 240 10337 37 28 242 | 241 10337 72 25 243 | 242 10338 17 20 244 | 243 10338 30 15 245 | 244 10339 4 10 246 | 245 10339 17 70 247 | 246 10339 62 28 248 | 247 10340 18 20 249 | 248 10340 41 12 250 | 249 10340 43 40 251 | 250 10341 33 8 252 | 251 10341 59 9 253 | 252 10342 2 24 254 | 253 10342 31 56 255 | 254 10342 36 40 256 | 255 10342 55 40 257 | 256 10343 64 50 258 | 257 10343 68 4 259 | 258 10343 76 15 260 | 259 10344 4 35 261 | 260 10344 8 70 262 | 261 10345 8 70 263 | 262 10345 19 80 264 | 263 10345 42 9 265 | 264 10346 17 36 266 | 265 10346 56 20 267 | 266 10347 25 10 268 | 267 10347 39 50 269 | 268 10347 40 4 270 | 269 10347 75 6 271 | 270 10348 1 15 272 | 271 10348 23 25 273 | 272 10349 54 24 274 | 273 10350 50 15 275 | 274 10350 69 18 276 | 275 10351 38 20 277 | 276 10351 41 13 278 | 277 10351 44 77 279 | 278 10351 65 10 280 | 279 10352 24 10 281 | 280 10352 54 20 282 | 281 10353 11 12 283 | 282 10353 38 50 284 | 283 10354 1 12 285 | 284 10354 29 4 286 | 285 10355 24 25 287 | 286 10355 57 25 288 | 287 10356 31 30 289 | 288 10356 55 12 290 | 289 10356 69 20 291 | 290 10357 10 30 292 | 291 10357 26 16 293 | 292 10357 60 8 294 | 293 10358 24 10 295 | 294 10358 34 10 296 | 295 10358 36 20 297 | 296 10359 16 56 298 | 297 10359 31 70 299 | 298 10359 60 80 300 | 299 10360 28 30 301 | 300 10360 29 35 302 | 301 10360 38 10 303 | 302 10360 49 35 304 | 303 10360 54 28 305 | 304 10361 39 54 306 | 305 10361 60 55 307 | 306 10362 25 50 308 | 307 10362 51 20 309 | 308 10362 54 24 310 | 309 10363 31 20 311 | 310 10363 75 12 312 | 311 10363 76 12 313 | 312 10364 69 30 314 | 313 10364 71 5 315 | 314 10365 11 24 316 | 315 10366 65 5 317 | 316 10366 77 5 318 | 317 10367 34 36 319 | 318 10367 54 18 320 | 319 10367 65 15 321 | 320 10367 77 7 322 | 321 10368 21 5 323 | 322 10368 28 13 324 | 323 10368 57 25 325 | 324 10368 64 35 326 | 325 10369 29 20 327 | 326 10369 56 
18 328 | 327 10370 1 15 329 | 328 10370 64 30 330 | 329 10370 74 20 331 | 330 10371 36 6 332 | 331 10372 20 12 333 | 332 10372 38 40 334 | 333 10372 60 70 335 | 334 10372 72 42 336 | 335 10373 58 80 337 | 336 10373 71 50 338 | 337 10374 31 30 339 | 338 10374 58 15 340 | 339 10375 14 15 341 | 340 10375 54 10 342 | 341 10376 31 42 343 | 342 10377 28 20 344 | 343 10377 39 20 345 | 344 10378 71 6 346 | 345 10379 41 8 347 | 346 10379 63 16 348 | 347 10379 65 20 349 | 348 10380 30 18 350 | 349 10380 53 20 351 | 350 10380 60 6 352 | 351 10380 70 30 353 | 352 10381 74 14 354 | 353 10382 5 32 355 | 354 10382 18 9 356 | 355 10382 29 14 357 | 356 10382 33 60 358 | 357 10382 74 50 359 | 358 10383 13 20 360 | 359 10383 50 15 361 | 360 10383 56 20 362 | 361 10384 20 28 363 | 362 10384 60 15 364 | 363 10385 7 10 365 | 364 10385 60 20 366 | 365 10385 68 8 367 | 366 10386 24 15 368 | 367 10386 34 10 369 | 368 10387 24 15 370 | 369 10387 28 6 371 | 370 10387 59 12 372 | 371 10387 71 15 373 | 372 10388 45 15 374 | 373 10388 52 20 375 | 374 10388 53 40 376 | 375 10389 10 16 377 | 376 10389 55 15 378 | 377 10389 62 20 379 | 378 10389 70 30 380 | 379 10390 31 60 381 | 380 10390 35 40 382 | 381 10390 46 45 383 | 382 10390 72 24 384 | 383 10391 13 18 385 | 384 10392 69 50 386 | 385 10393 2 25 387 | 386 10393 14 42 388 | 387 10393 25 7 389 | 388 10393 26 70 390 | 389 10393 31 32 391 | 390 10394 13 10 392 | 391 10394 62 10 393 | 392 10395 46 28 394 | 393 10395 53 70 395 | 394 10395 69 8 396 | 395 10396 23 40 397 | 396 10396 71 60 398 | 397 10396 72 21 399 | 398 10397 21 10 400 | 399 10397 51 18 401 | 400 10398 35 30 402 | 401 10398 55 120 403 | 402 10399 68 60 404 | 403 10399 71 30 405 | 404 10399 76 35 406 | 405 10399 77 14 407 | 406 10400 29 21 408 | 407 10400 35 35 409 | 408 10400 49 30 410 | 409 10401 30 18 411 | 410 10401 56 70 412 | 411 10401 65 20 413 | 412 10401 71 60 414 | 413 10402 23 60 415 | 414 10402 63 65 416 | 415 10403 16 21 417 | 416 10403 48 70 418 | 417 10404 26 30 419 | 418 10404 42 40 420 | 419 10404 49 30 421 | 420 10405 3 50 422 | 421 10406 1 10 423 | 422 10406 21 30 424 | 423 10406 28 42 425 | 424 10406 36 5 426 | 425 10406 40 2 427 | 426 10407 11 30 428 | 427 10407 69 15 429 | 428 10407 71 15 430 | 429 10408 37 10 431 | 430 10408 54 6 432 | 431 10408 62 35 433 | 432 10409 14 12 434 | 433 10409 21 12 435 | 434 10410 33 49 436 | 435 10410 59 16 437 | 436 10411 41 25 438 | 437 10411 44 40 439 | 438 10411 59 9 440 | 439 10412 14 20 441 | 440 10413 1 24 442 | 441 10413 62 40 443 | 442 10413 76 14 444 | 443 10414 19 18 445 | 444 10414 33 50 446 | 445 10415 17 2 447 | 446 10415 33 20 448 | 447 10416 19 20 449 | 448 10416 53 10 450 | 449 10416 57 20 451 | 450 10417 38 50 452 | 451 10417 46 2 453 | 452 10417 68 36 454 | 453 10417 77 35 455 | 454 10418 2 60 456 | 455 10418 47 55 457 | 456 10418 61 16 458 | 457 10418 74 15 459 | 458 10419 60 60 460 | 459 10419 69 20 461 | 460 10420 9 20 462 | 461 10420 13 2 463 | 462 10420 70 8 464 | 463 10420 73 20 465 | 464 10421 19 4 466 | 465 10421 26 30 467 | 466 10421 53 15 468 | 467 10421 77 10 469 | 468 10422 26 2 470 | 469 10423 31 14 471 | 470 10423 59 20 472 | 471 10424 35 60 473 | 472 10424 38 49 474 | 473 10424 68 30 475 | 474 10425 55 10 476 | 475 10425 76 20 477 | 476 10426 56 5 478 | 477 10426 64 7 479 | 478 10427 14 35 480 | 479 10428 46 20 481 | 480 10429 50 40 482 | 481 10429 63 35 483 | 482 10430 17 45 484 | 483 10430 21 50 485 | 484 10430 56 30 486 | 485 10430 59 70 487 | 486 10431 17 50 488 | 487 10431 40 50 489 | 488 10431 47 30 490 | 489 10432 
26 10 491 | 490 10432 54 40 492 | 491 10433 56 28 493 | 492 10434 11 6 494 | 493 10434 76 18 495 | 494 10435 2 10 496 | 495 10435 22 12 497 | 496 10435 72 10 498 | 497 10436 46 5 499 | 498 10436 56 40 500 | 499 10436 64 30 501 | 500 10436 75 24 502 | 501 10437 53 15 503 | 502 10438 19 15 504 | 503 10438 34 20 505 | 504 10438 57 15 506 | 505 10439 12 15 507 | 506 10439 16 16 508 | 507 10439 64 6 509 | 508 10439 74 30 510 | 509 10440 2 45 511 | 510 10440 16 49 512 | 511 10440 29 24 513 | 512 10440 61 90 514 | 513 10441 27 50 515 | 514 10442 11 30 516 | 515 10442 54 80 517 | 516 10442 66 60 518 | 517 10443 11 6 519 | 518 10443 28 12 -------------------------------------------------------------------------------- /SupplyChainDB/OrdersN.tsv: -------------------------------------------------------------------------------- 1 | OrderID CustomerID EmployeeID OrderDate ShipperID 2 | 10248 90 5 1996-07-04 3 3 | 10249 81 6 1996-07-05 1 4 | 10250 34 4 1996-07-08 2 5 | 10251 84 3 1996-07-08 1 6 | 10252 76 4 1996-07-09 2 7 | 10253 34 3 1996-07-10 2 8 | 10254 14 5 1996-07-11 2 9 | 10255 68 9 1996-07-12 3 10 | 10256 88 3 1996-07-15 2 11 | 10257 35 4 1996-07-16 3 12 | 10258 20 1 1996-07-17 1 13 | 10259 13 4 1996-07-18 3 14 | 10260 55 4 1996-07-19 1 15 | 10261 61 4 1996-07-19 2 16 | 10262 65 8 1996-07-22 3 17 | 10263 20 9 1996-07-23 3 18 | 10264 24 6 1996-07-24 3 19 | 10265 7 2 1996-07-25 1 20 | 10266 87 3 1996-07-26 3 21 | 10267 25 4 1996-07-29 1 22 | 10268 33 8 1996-07-30 3 23 | 10269 89 5 1996-07-31 1 24 | 10270 87 1 1996-08-01 1 25 | 10271 75 6 1996-08-01 2 26 | 10272 65 6 1996-08-02 2 27 | 10273 63 3 1996-08-05 3 28 | 10274 85 6 1996-08-06 1 29 | 10275 49 1 1996-08-07 1 30 | 10276 80 8 1996-08-08 3 31 | 10277 52 2 1996-08-09 3 32 | 10278 5 8 1996-08-12 2 33 | 10279 44 8 1996-08-13 2 34 | 10280 5 2 1996-08-14 1 35 | 10281 69 4 1996-08-14 1 36 | 10282 69 4 1996-08-15 1 37 | 10283 46 3 1996-08-16 3 38 | 10284 44 4 1996-08-19 1 39 | 10285 63 1 1996-08-20 2 40 | 10286 63 8 1996-08-21 3 41 | 10287 67 8 1996-08-22 3 42 | 10288 66 4 1996-08-23 1 43 | 10289 11 7 1996-08-26 3 44 | 10290 15 8 1996-08-27 1 45 | 10291 61 6 1996-08-27 2 46 | 10292 81 1 1996-08-28 2 47 | 10293 80 1 1996-08-29 3 48 | 10294 65 4 1996-08-30 2 49 | 10295 85 2 1996-09-02 2 50 | 10296 46 6 1996-09-03 1 51 | 10297 7 5 1996-09-04 2 52 | 10298 37 6 1996-09-05 2 53 | 10299 67 4 1996-09-06 2 54 | 10300 49 2 1996-09-09 2 55 | 10301 86 8 1996-09-09 2 56 | 10302 76 4 1996-09-10 2 57 | 10303 30 7 1996-09-11 2 58 | 10304 80 1 1996-09-12 2 59 | 10305 55 8 1996-09-13 3 60 | 10306 69 1 1996-09-16 3 61 | 10307 48 2 1996-09-17 2 62 | 10308 2 7 1996-09-18 3 63 | 10309 37 3 1996-09-19 1 64 | 10310 77 8 1996-09-20 2 65 | 10311 18 1 1996-09-20 3 66 | 10312 86 2 1996-09-23 2 67 | 10313 63 2 1996-09-24 2 68 | 10314 65 1 1996-09-25 2 69 | 10315 38 4 1996-09-26 2 70 | 10316 65 1 1996-09-27 3 71 | 10317 48 6 1996-09-30 1 72 | 10318 38 8 1996-10-01 2 73 | 10319 80 7 1996-10-02 3 74 | 10320 87 5 1996-10-03 3 75 | 10321 38 3 1996-10-03 2 76 | 10322 58 7 1996-10-04 3 77 | 10323 39 4 1996-10-07 1 78 | 10324 71 9 1996-10-08 1 79 | 10325 39 1 1996-10-09 3 80 | 10326 8 4 1996-10-10 2 81 | 10327 24 2 1996-10-11 1 82 | 10328 28 4 1996-10-14 3 83 | 10329 75 4 1996-10-15 2 84 | 10330 46 3 1996-10-16 1 85 | 10331 9 9 1996-10-16 1 86 | 10332 51 3 1996-10-17 2 87 | 10333 87 5 1996-10-18 3 88 | 10334 84 8 1996-10-21 2 89 | 10335 37 7 1996-10-22 2 90 | 10336 60 7 1996-10-23 2 91 | 10337 25 4 1996-10-24 3 92 | 10338 55 4 1996-10-25 3 93 | 10339 51 2 1996-10-28 2 94 | 10340 9 1 
1996-10-29 3 95 | 10341 73 7 1996-10-29 3 96 | 10342 25 4 1996-10-30 2 97 | 10343 44 4 1996-10-31 1 98 | 10344 89 4 1996-11-01 2 99 | 10345 63 2 1996-11-04 2 100 | 10346 65 3 1996-11-05 3 101 | 10347 21 4 1996-11-06 3 102 | 10348 86 4 1996-11-07 2 103 | 10349 75 7 1996-11-08 1 104 | 10350 41 6 1996-11-11 2 105 | 10351 20 1 1996-11-11 1 106 | 10352 28 3 1996-11-12 3 107 | 10353 59 7 1996-11-13 3 108 | 10354 58 8 1996-11-14 3 109 | 10355 4 6 1996-11-15 1 110 | 10356 86 6 1996-11-18 2 111 | 10357 46 1 1996-11-19 3 112 | 10358 41 5 1996-11-20 1 113 | 10359 72 5 1996-11-21 3 114 | 10360 7 4 1996-11-22 3 115 | 10361 63 1 1996-11-22 2 116 | 10362 9 3 1996-11-25 1 117 | 10363 17 4 1996-11-26 3 118 | 10364 19 1 1996-11-26 1 119 | 10365 3 3 1996-11-27 2 120 | 10366 29 8 1996-11-28 2 121 | 10367 83 7 1996-11-28 3 122 | 10368 20 2 1996-11-29 2 123 | 10369 75 8 1996-12-02 2 124 | 10370 14 6 1996-12-03 2 125 | 10371 41 1 1996-12-03 1 126 | 10372 62 5 1996-12-04 2 127 | 10373 37 4 1996-12-05 3 128 | 10374 91 1 1996-12-05 3 129 | 10375 36 3 1996-12-06 2 130 | 10376 51 1 1996-12-09 2 131 | 10377 72 1 1996-12-09 3 132 | 10378 24 5 1996-12-10 3 133 | 10379 61 2 1996-12-11 1 134 | 10380 37 8 1996-12-12 3 135 | 10381 46 3 1996-12-12 3 136 | 10382 20 4 1996-12-13 1 137 | 10383 4 8 1996-12-16 3 138 | 10384 5 3 1996-12-16 3 139 | 10385 75 1 1996-12-17 2 140 | 10386 21 9 1996-12-18 3 141 | 10387 70 1 1996-12-18 2 142 | 10388 72 2 1996-12-19 1 143 | 10389 10 4 1996-12-20 2 144 | 10390 20 6 1996-12-23 1 145 | 10391 17 3 1996-12-23 3 146 | 10392 59 2 1996-12-24 3 147 | 10393 71 1 1996-12-25 3 148 | 10394 36 1 1996-12-25 3 149 | 10395 35 6 1996-12-26 1 150 | 10396 25 1 1996-12-27 3 151 | 10397 60 5 1996-12-27 1 152 | 10398 71 2 1996-12-30 3 153 | 10399 83 8 1996-12-31 3 154 | 10400 19 1 1997-01-01 3 155 | 10401 65 1 1997-01-01 1 156 | 10402 20 8 1997-01-02 2 157 | 10403 20 4 1997-01-03 3 158 | 10404 49 2 1997-01-03 1 159 | 10405 47 1 1997-01-06 1 160 | 10406 62 7 1997-01-07 1 161 | 10407 56 2 1997-01-07 2 162 | 10408 23 8 1997-01-08 1 163 | 10409 54 3 1997-01-09 1 164 | 10410 10 3 1997-01-10 3 165 | 10411 10 9 1997-01-10 3 166 | 10412 87 8 1997-01-13 2 167 | 10413 41 3 1997-01-14 2 168 | 10414 21 2 1997-01-14 3 169 | 10415 36 3 1997-01-15 1 170 | 10416 87 8 1997-01-16 3 171 | 10417 73 4 1997-01-16 3 172 | 10418 63 4 1997-01-17 1 173 | 10419 68 4 1997-01-20 2 174 | 10420 88 3 1997-01-21 1 175 | 10421 61 8 1997-01-21 1 176 | 10422 27 2 1997-01-22 1 177 | 10423 31 6 1997-01-23 3 178 | 10424 51 7 1997-01-23 2 179 | 10425 41 6 1997-01-24 2 180 | 10426 29 4 1997-01-27 1 181 | 10427 59 4 1997-01-27 2 182 | 10428 66 7 1997-01-28 1 183 | 10429 37 3 1997-01-29 2 184 | 10430 20 4 1997-01-30 1 185 | 10431 10 4 1997-01-30 2 186 | 10432 75 3 1997-01-31 2 187 | 10433 60 3 1997-02-03 3 188 | 10434 24 3 1997-02-03 2 189 | 10435 16 8 1997-02-04 2 190 | 10436 7 3 1997-02-05 2 191 | 10437 87 8 1997-02-05 1 192 | 10438 79 3 1997-02-06 2 193 | 10439 51 6 1997-02-07 3 194 | 10440 71 4 1997-02-10 2 195 | 10441 55 3 1997-02-10 2 196 | 10442 20 3 1997-02-11 2 197 | 10443 66 8 1997-02-12 1 -------------------------------------------------------------------------------- /SupplyChainDB/ProductsN.tsv: -------------------------------------------------------------------------------- 1 | ProductID ProductName SupplierID CategoryID Unit Price 2 | 1 Chais 1 1 10 boxes x 20 bags 18 3 | 2 Chang 1 1 24 - 12 oz bottles 19 4 | 3 Aniseed Syrup 1 2 12 - 550 ml bottles 10 5 | 4 Chef Anton's Cajun Seasoning 2 2 48 - 6 oz jars 22 6 | 5 Chef Anton's Gumbo Mix 2 2 
36 boxes 21.35 7 | 6 Grandma's Boysenberry Spread 3 2 12 - 8 oz jars 25 8 | 7 Uncle Bob's Organic Dried Pears 3 7 12 - 1 lb pkgs. 30 9 | 8 Northwoods Cranberry Sauce 3 2 12 - 12 oz jars 40 10 | 9 Mishi Kobe Niku 4 6 18 - 500 g pkgs. 97 11 | 10 Ikura 4 8 12 - 200 ml jars 31 12 | 11 Queso Cabrales 5 4 1 kg pkg. 21 13 | 12 Queso Manchego La Pastora 5 4 10 - 500 g pkgs. 38 14 | 13 Konbu 6 8 2 kg box 6 15 | 14 Tofu 6 7 40 - 100 g pkgs. 23.25 16 | 15 Genen Shouyu 6 2 24 - 250 ml bottles 15.5 17 | 16 Pavlova 7 3 32 - 500 g boxes 17.45 18 | 17 Alice Mutton 7 6 20 - 1 kg tins 39 19 | 18 Carnarvon Tigers 7 8 16 kg pkg. 62.5 20 | 19 Teatime Chocolate Biscuits 8 3 10 boxes x 12 pieces 9.2 21 | 20 Sir Rodney's Marmalade 8 3 30 gift boxes 81 22 | 21 Sir Rodney's Scones 8 3 24 pkgs. x 4 pieces 10 23 | 22 Gustaf's Knäckebröd 9 5 24 - 500 g pkgs. 21 24 | 23 Tunnbröd 9 5 12 - 250 g pkgs. 9 25 | 24 Guaraná Fantástica 10 1 12 - 355 ml cans 4.5 26 | 25 NuNuCa Nuß-Nougat-Creme 11 3 20 - 450 g glasses 14 27 | 26 Gumbär Gummibärchen 11 3 100 - 250 g bags 31.23 28 | 27 Schoggi Schokolade 11 3 100 - 100 g pieces 43.9 29 | 28 Rössle Sauerkraut 12 7 25 - 825 g cans 45.6 30 | 29 Thüringer Rostbratwurst 12 6 50 bags x 30 sausgs. 123.79 31 | 30 Nord-Ost Matjeshering 13 8 10 - 200 g glasses 25.89 32 | 31 Gorgonzola Telino 14 4 12 - 100 g pkgs 12.5 33 | 32 Mascarpone Fabioli 14 4 24 - 200 g pkgs. 32 34 | 33 Geitost 15 4 500 g 2.5 35 | 34 Sasquatch Ale 16 1 24 - 12 oz bottles 14 36 | 35 Steeleye Stout 16 1 24 - 12 oz bottles 18 37 | 36 Inlagd Sill 17 8 24 - 250 g jars 19 38 | 37 Gravad lax 17 8 12 - 500 g pkgs. 26 39 | 38 Côte de Blaye 18 1 12 - 75 cl bottles 263.5 40 | 39 Chartreuse verte 18 1 750 cc per bottle 18 41 | 40 Boston Crab Meat 19 8 24 - 4 oz tins 18.4 42 | 41 Jack's New England Clam Chowder 19 8 12 - 12 oz cans 9.65 43 | 42 Singaporean Hokkien Fried Mee 20 5 32 - 1 kg pkgs. 14 44 | 43 Ipoh Coffee 20 1 16 - 500 g tins 46 45 | 44 Gula Malacca 20 2 20 - 2 kg bags 19.45 46 | 45 Røgede sild 21 8 1k pkg. 9.5 47 | 46 Spegesild 21 8 4 - 450 g glasses 12 48 | 47 Zaanse koeken 22 3 10 - 4 oz boxes 9.5 49 | 48 Chocolade 22 3 10 pkgs. 12.75 50 | 49 Maxilaku 23 3 24 - 50 g pkgs. 20 51 | 50 Valkoinen suklaa 23 3 12 - 100 g bars 16.25 52 | 51 Manjimup Dried Apples 24 7 50 - 300 g pkgs. 53 53 | 52 Filo Mix 24 5 16 - 2 kg boxes 7 54 | 53 Perth Pasties 24 6 48 pieces 32.8 55 | 54 Tourtière 25 6 16 pies 7.45 56 | 55 Pâté chinois 25 6 24 boxes x 2 pies 24 57 | 56 Gnocchi di nonna Alice 26 5 24 - 250 g pkgs. 38 58 | 57 Ravioli Angelo 26 5 24 - 250 g pkgs. 19.5 59 | 58 Escargots de Bourgogne 27 8 24 pieces 13.25 60 | 59 Raclette Courdavault 28 4 5 kg pkg. 55 61 | 60 Camembert Pierrot 28 4 15 - 300 g rounds 34 62 | 61 Sirop d'érable 29 2 24 - 500 ml bottles 28.5 63 | 62 Tarte au sucre 29 3 48 pies 49.3 64 | 63 Vegie-spread 7 2 15 - 625 g jars 43.9 65 | 64 Wimmers gute Semmelknödel 12 5 20 bags x 4 pieces 33.25 66 | 65 Louisiana Fiery Hot Pepper Sauce 2 2 32 - 8 oz bottles 21.05 67 | 66 Louisiana Hot Spiced Okra 2 2 24 - 8 oz jars 17 68 | 67 Laughing Lumberjack Lager 16 1 24 - 12 oz bottles 14 69 | 68 Scottish Longbreads 8 3 10 boxes x 8 pieces 12.5 70 | 69 Gudbrandsdalsost 15 4 10 kg pkg. 36 71 | 70 Outback Lager 7 1 24 - 355 ml bottles 15 72 | 71 Fløtemysost 15 4 10 - 500 g pkgs. 21.5 73 | 72 Mozzarella di Giovanni 14 4 24 - 200 g pkgs. 34.8 74 | 73 Röd Kaviar 17 8 24 - 150 g jars 15 75 | 74 Longlife Tofu 4 7 5 kg pkg. 
10 76 | 75 Rhönbräu Klosterbier 12 1 24 - 0.5 l bottles 7.75 77 | 76 Lakkalikööri 23 1 500 ml 18 78 | 77 Original Frankfurter grüne Soße 12 2 12 boxes 13 -------------------------------------------------------------------------------- /SupplyChainDB/Shippers.tsv: -------------------------------------------------------------------------------- 1 | ShipperID ShipperName Phone 2 | 1 Speedy Express (503) 555-9831 3 | 2 United Package (503) 555-3199 4 | 3 Federal Shipping (503) 555-9931 -------------------------------------------------------------------------------- /SupplyChainDB/Suppliers.tsv: -------------------------------------------------------------------------------- 1 | SupplierID SupplierName ContactName Address City PostalCode Country Phone 2 | 1 Exotic Liquid Charlotte Cooper 49 Gilbert St. Londona EC1 4SD UK (171) 555-2222 3 | 2 New Orleans Cajun Delights Shelley Burke P.O. Box 78934 New Orleans 70117 USA (100) 555-4822 4 | 3 Grandma Kelly's Homestead Regina Murphy 707 Oxford Rd. Ann Arbor 48104 USA (313) 555-5735 5 | 4 Tokyo Traders Yoshi Nagase 9-8 Sekimai Musashino-shi Tokyo 100 Japan (03) 3555-5011 6 | 5 Cooperativa de Quesos 'Las Cabras' Antonio del Valle Saavedra Calle del Rosal 4 Oviedo 33007 Spain (98) 598 76 54 7 | 6 Mayumi's Mayumi Ohno 92 Setsuko Chuo-ku Osaka 545 Japan (06) 431-7877 8 | 7 Pavlova, Ltd. Ian Devling 74 Rose St. Moonie Ponds Melbourne 3058 Australia (03) 444-2343 9 | 8 Specialty Biscuits, Ltd. Peter Wilson 29 King's Way Manchester M14 GSD UK (161) 555-4448 10 | 9 PB Knäckebröd AB Lars Peterson Kaloadagatan 13 Göteborg S-345 67 Sweden 031-987 65 43 11 | 10 Refrescos Americanas LTDA Carlos Diaz Av. das Americanas 12.890 São Paulo 5442 Brazil (11) 555 4640 12 | 11 Heli Süßwaren GmbH & Co. KG Petra Winkler Tiergartenstraße 5 Berlin 10785 Germany (010) 9984510 13 | 12 Plutzer Lebensmittelgroßmärkte AG Martin Bein Bogenallee 51 Frankfurt 60439 Germany (069) 992755 14 | 13 Nord-Ost-Fisch Handelsgesellschaft mbH Sven Petersen Frahmredder 112a Cuxhaven 27478 Germany (04721) 8713 15 | 14 Formaggi Fortini s.r.l. Elio Rossi Viale Dante, 75 Ravenna 48100 Italy (0544) 60323 16 | 15 Norske Meierier Beate Vileid Hatlevegen 5 Sandvika 1320 Norway (0)2-953010 17 | 16 Bigfoot Breweries Cheryl Saylor 3400 - 8th Avenue Suite 210 Bend 97101 USA (503) 555-9931 18 | 17 Svensk Sjöföda AB Michael Björn Brovallavägen 231 Stockholm S-123 45 Sweden 08-123 45 67 19 | 18 Aux joyeux ecclésiastiques Guylène Nodier 203, Rue des Francs-Bourgeois Paris 75004 France (1) 03.83.00.68 20 | 19 New England Seafood Cannery Robb Merchant Order Processing Dept. 2100 Paul Revere Blvd. Boston 02134 USA (617) 555-3267 21 | 20 Leka Trading Chandra Leka 471 Serangoon Loop, Suite #402 Singapore 0512 Singapore 555-8787 22 | 21 Lyngbysild Niels Petersen Lyngbysild Fiskebakken 10 Lyngby 2800 Denmark 43844108 23 | 22 Zaanse Snoepfabriek Dirk Luchte Verkoop Rijnweg 22 Zaandam 9999 ZZ Netherlands (12345) 1212 24 | 23 Karkki Oy Anne Heikkonen Valtakatu 12 Lappeenranta 53120 Finland (953) 10956 25 | 24 G'day, Mate Wendy Mackenzie 170 Prince Edward Parade Hunter's Hill Sydney 2042 Australia (02) 555-5914 26 | 25 Ma Maison Jean-Guy Lauzon 2960 Rue St. Laurent Montréal H1J 1C3 Canada (514) 555-9022 27 | 26 Pasta Buttini s.r.l. Giovanni Giudici Via dei Gelsomini, 153 Salerno 84100 Italy (089) 6547665 28 | 27 Escargots Nouveaux Marie Delamare 22, rue H. Voiron Montceau 71300 France 85.57.00.07 29 | 28 Gai pâturage Eliane Noz Bat. 
B 3, rue des Alpes Annecy 74000 France 38.76.98.06 30 | 29 Forêts d'érables Chantal Goulet 148 rue Chasseur Ste-Hyacinthe J2S 7S8 Canada (514) 555-2955 -------------------------------------------------------------------------------- /Tables.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "cfbb1028", 6 | "metadata": {}, 7 | "source": [ 8 | "# Tables\n", 9 | "\n", 10 | "## Create Table\n", 11 | "- https://sqlite.org/lang_createtable.html\n", 12 | "- CREATE TABLE statement is used to define and create a new table within a database\n", 13 | "- a table is a structured collection of data organized into rows and columns, where each column has a specific data type and each row represents a record\n", 14 | "- keywords within `[ ]` are optional\n", 15 | "- syntax\n", 16 | "\n", 17 | "```sql\n", 18 | "CREATE TABLE [IF NOT EXISTS] [schema_name].table_name (\n", 19 | " column_1 data_type PRIMARY KEY,\n", 20 | " column_2 data_type NOT NULL,\n", 21 | " column_3 data_type DEFAULT 0,\n", 22 | " ...\n", 23 | " table_constraints\n", 24 | ") [WITHOUT ROWID];\n", 25 | "```\n", 26 | "- Sqlite automatically adds ROWID AUTOINCREMENT column for every table created\n", 27 | "- you can tell it to not do that by using [WITHOUT ROWID] optional clause\n", 28 | "- e.g.,\n", 29 | "\n", 30 | "```sql\n", 31 | "CREATE TABLE employees (\n", 32 | " EmployeeID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,\n", 33 | " FirstName VARCHAR(50) NOT NULL,\n", 34 | " LastName VARCHAR(50) NOT NULL,\n", 35 | " DateOfBirth DATETIME\n", 36 | ");\n", 37 | "```\n", 38 | "\n", 39 | "- use DBeaver to run the script on any chinook sqlite database\n", 40 | "\n", 41 | "## Table Definition\n", 42 | "\n", 43 | "- tables are the primary data storage units\n", 44 | "- each table is characterized by its name, columns, data types, and constraints\n", 45 | "- columns (also called attributes or fields) are data/values stored in each record\n", 46 | "- tables need to be created before data can be stored into it\n", 47 | "\n", 48 | "## Column Definition\n", 49 | "\n", 50 | "- each table definition specifies the columns (also known as fields or attributes)\n", 51 | "- column definitions include the column name, data type, and any constraints\n", 52 | "- most Relational DB Engines use static/rigid data types for columns\n", 53 | "- SQLite DB uses flexible dynamic typing and it's a feature not bug\n", 54 | "- SQLite provides STRICT Table for static/rigid typing enforcement\n", 55 | "\n", 56 | "### SQLite Storage Classes & Datatypes\n", 57 | "\n", 58 | "- https://sqlite.org/datatype3.html\n", 59 | "- each value stored in an SQLite database (or manipulated by the database engine) has one of the following storage classes:\n", 60 | "\n", 61 | "| Storage | Datatype |\n", 62 | "| ----- | ----------|\n", 63 | "| NULL | NULL value |\n", 64 | "| INTEGER | signed integer in 0...8 bytes depending on the magnitude |\n", 65 | "| REAL | floating point value 8-byte |\n", 66 | "| TEXT | text string (utf-8 and other encodings)\n", 67 | "| BLOB | blob of data, stored exactly as it as input |\n", 68 | "\n", 69 | "### Date and Time Datatype\n", 70 | "\n", 71 | "- SQLite does not have a storage class set aside for storing dates and/or times\n", 72 | "- usually dates and times are stored as TEXT, REAL, or INTEGER\n", 73 | "- use SQLite provided functions or language features to convert data to date and time\n", 74 | "\n", 75 | "## Table constraints\n", 76 | "- PRIMARY KEY, 
FOREIGN KEY, UNIQUE, CHECK\n", 77 | "\n", 78 | "## Primary Keys\n", 79 | "\n", 80 | "- a primary key is a unique identifier for a record within a relational database table\n", 81 | "- is used to uniquely identify each row or record in a table\n", 82 | "- the primary key ensures that there are no duplicate values within the column(s) designated as the primary key\n", 83 | "- primary keys help to maintain data integrity and provide a reliable and efficient way to access and manage data\n", 84 | "- primary key is used to establish relationship among the tables\n", 85 | "- CustomerID column in the table above is the primary key\n", 86 | "- can use separate line with PRIMARY KEY constraint to identify PK column(s), especially if there are more than 1 primary key column\n", 87 | "\n", 88 | "- e.g.: \n", 89 | "\n", 90 | "```sql\n", 91 | "CREATE TABLE employee (\n", 92 | " EmployeeID INTEGER NOT NULL,\n", 93 | " FirstName VARCHAR(50) NOT NULL,\n", 94 | " LastName VARCHAR(50) NOT NULL,\n", 95 | " DateOfBirth DATETIME,\n", 96 | " PRIMARY KEY (EmployeeID)\n", 97 | ");\n", 98 | "```\n", 99 | "- can also use a list of multiple columns as PRIMARY KEY(column_1, column_2,...)\n", 100 | "\n", 101 | "### Key characteristics of Primary Key\n", 102 | "\n", 103 | "#### Uniquness\n", 104 | " - a primary key must contain unique values for each record in the table\n", 105 | "- No two records can have the same primary key value\n", 106 | "\n", 107 | "#### Non-Null\n", 108 | "- values in a primary key column cannot be null (empty) because null values are not unique\n", 109 | "\n", 110 | "#### Single or Composite\n", 111 | "- can consist of a single column or multiple columns, depending on the requirements of the database design\n", 112 | "\n", 113 | "#### Data Integrity\n", 114 | "- by enforcing uniqueness and non-null constraints, the primary key ensures that each record is uniquely identifiable, preventing data duplication and inconsistency\n", 115 | "\n", 116 | "#### Access and Referencing\n", 117 | "- primary keys are used to uniquely identify records within the table\n", 118 | "- they also serve as a basis for establishing relationships between tables using foreign keys.\n", 119 | "\n", 120 | "#### Automatically Generated Primary Keys\n", 121 | "- in some cases, primary key values are automatically generated by the DBMS when new records are added\n", 122 | "- this is often seen with auto-incrementing integer values\n" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "id": "55036d00", 128 | "metadata": {}, 129 | "source": [ 130 | "## FOREIGN KEY\n", 131 | "\n", 132 | "- FK is a field or a set of fields that refers to the primary key of another table\n", 133 | "- it establishes a relationship between two tables, enabling you to maintain referential integrity and ensure data consistency across related tables\n", 134 | "- in essence, a foreign key creates a link between records in two tables, enforcing rules about the relationships between those records\n", 135 | "- the table containing the foreign key is referred to as the \"child\" table, and the table to which the foreign key refers is referred to as the \"parent\" table\n", 136 | "- the foreign key column(s) in the child table hold values that match the primary key values in the parent table\n", 137 | "- when inserting record into child table, there must be an existing record in the parent table\n", 138 | "\n", 139 | "### Benefits of using foreign keys include\n", 140 | "\n", 141 | "#### Referential Integrity\n", 142 | "- Foreign keys help 
maintain the integrity of the relationships between tables, preventing orphaned or inconsistent data\n", 143 | "\n", 144 | "#### Data Consistency\n", 145 | "- they ensure that only valid values are inserted into the foreign key column, reducing the risk of data anomalies\n", 146 | "\n", 147 | "#### Relationship Definition\n", 148 | "- Foreign keys explicitly define the relationships between tables, making it easier to understand the database's structure\n", 149 | "- help reduce data redundancy making tables smaller storing unique information on some entity\n", 150 | "- run the following statements one at a time as Sqlite doesn't allow multiple statements at once\n", 151 | "\n", 152 | "```sql\n", 153 | "CREATE TABLE customers (\n", 154 | " customer_id INTEGER PRIMARY KEY,\n", 155 | " customer_name VARCHAR(100),\n", 156 | " email VARCHAR(255)\n", 157 | ");\n", 158 | "\n", 159 | "CREATE TABLE orders (\n", 160 | " order_id INTEGER PRIMARY KEY,\n", 161 | " order_date DATETIME,\n", 162 | " customer_id INT,\n", 163 | " FOREIGN KEY (customer_id) REFERENCES customers(customer_id)\n", 164 | " /*FOREIGN KEY ...*/\n", 165 | ");\n", 166 | "```\n", 167 | "\n", 168 | "### FOREIGN KEY Constraints\n", 169 | "\n", 170 | "- you can add constraints to FOREIGN KEY\n", 171 | "- e.g.,\n", 172 | "```sql\n", 173 | "FOREIGN KEY (foreign_key_columns)\n", 174 | " REFERENCES parent_table(parent_key_columns)\n", 175 | " ON UPDATE action \n", 176 | " ON DELETE action\n", 177 | "```\n", 178 | "\n", 179 | "- rules that are defined on a column or a set of columns to enforce referential integrity between related tables when related records in parent table are updated or deleted \n", 180 | "\n", 181 | "Here are some common types of foreign key constraints/actions:\n", 182 | "\n", 183 | "1. **CASCADE**: \n", 184 | "- when a record in the parent table is deleted or updated, the corresponding records in the child table are also deleted or updated automatically\n", 185 | "- this helps maintain data consistency across related tables\n", 186 | "\n", 187 | "```sql\n", 188 | "FOREIGN KEY (foreign_key_columns)\n", 189 | " REFERENCES parent_table(parent_key_columns)\n", 190 | " ON DELETE CASCADE\n", 191 | "```\n", 192 | "\n", 193 | "2. **SET NULL**:\n", 194 | "- when a record in the parent table is deleted or updated, the foreign key values in the child table are set to `NULL`\n", 195 | "- this is useful when you want to allow records to exist in the child table even if the related record in the parent table is deleted\n", 196 | "\n", 197 | "```sql\n", 198 | "FOREIGN KEY (foreign_key_columns)\n", 199 | " REFERENCES parent_table(parent_key_columns)\n", 200 | " ON DELETE SET NULL\n", 201 | "```\n", 202 | "\n", 203 | "3. **SET DEFAULT**: \n", 204 | "- similar to `SET NULL`, but the foreign key values in the child table are set to their default values defined when the table was created\n", 205 | "\n", 206 | "```sql\n", 207 | "FOREIGN KEY (foreign_key_columns)\n", 208 | " REFERENCES parent_table(parent_key_columns)\n", 209 | " ON DELETE SET DEFAULT\n", 210 | "```\n", 211 | "\n", 212 | "4. 
**NO ACTION**:\n", 213 | "- prevents any actions that would violate referential integrity, such as deleting a record from the parent table if related records exist in the child table\n", 214 | "- this constraint essentially stops actions that would create inconsistencies\n", 215 | "\n", 216 | "```sql\n", 217 | "FOREIGN KEY (foreign_key_columns)\n", 218 | " REFERENCES parent_table(parent_key_columns)\n", 219 | " ON DELETE CASCADE\n", 220 | " ON UPDATE NO ACTION\n", 221 | "```\n", 222 | "\n", 223 | "5. **RESTRICT**: \n", 224 | "- similar to `NO ACTION`, it prevents actions that would violate referential integrity\n", 225 | "- it's a more restrictive version of the constraint and is often used interchangeably with `NO ACTION`\n", 226 | "\n", 227 | "6. **CHECK**:\n", 228 | "- CHECK constraints are general column/table constraints (not foreign key actions) that allow you to define additional data integrity checks beyond UNIQUE or NOT NULL to suit your specific application\n", 229 | "- allows you to define a boolean expression to test values whenever they are inserted into or updated within a column\n", 230 | "- if the values do not meet the criteria defined by the expression, SQLite will issue a constraint violation and abort the statement\n", 231 | "- SQLite allows you to define a CHECK constraint at the column level or the table level\n", 232 | "- syntax of CHECK constraint at the column level:\n", 233 | "\n", 234 | "```sql\n", 235 | "CREATE TABLE table_name(\n", 236 | " ...,\n", 237 | " column_name data_type CHECK(expression),\n", 238 | " ...\n", 239 | ");\n", 240 | "```\n", 241 | "- e.g. test the following table in a Sqlite database:\n", 242 | "```sql\n", 243 | "CREATE TABLE customers (\n", 244 | " customer_id INTEGER PRIMARY KEY,\n", 245 | " first_name TEXT,\n", 246 | " middle_initial TEXT NOT NULL \n", 247 | " CHECK (length(middle_initial) == 1)\n", 248 | ");\n", 249 | "```\n", 250 | "\n", 251 | "- CHECK constraint at the table level example:\n", 252 | "\n", 253 | "```sql\n", 254 | "CREATE TABLE products (\n", 255 | " product_id INTEGER PRIMARY KEY,\n", 256 | " product_name TEXT NOT NULL,\n", 257 | " list_price DECIMAL (10, 2) NOT NULL,\n", 258 | " discount DECIMAL (10, 2) NOT NULL DEFAULT 0,\n", 259 | " CHECK (list_price >= discount AND \n", 260 | " discount >= 0 AND \n", 261 | " list_price >= 0) \n", 262 | ");\n", 263 | "```\n", 264 | "\n", 265 | "### Constraints Example\n", 266 | "\n", 267 | "Here's an example of how you might apply these constraints in SQL:\n", 268 | "\n", 269 | "```sql\n", 270 | "CREATE TABLE authors (\n", 271 | " author_id INTEGER PRIMARY KEY\n", 272 | " CHECK(author_id >= 1000),\n", 273 | " author_name TEXT\n", 274 | ");\n", 275 | "\n", 276 | "CREATE TABLE books (\n", 277 | " book_id INTEGER PRIMARY KEY,\n", 278 | " book_title TEXT,\n", 279 | " author_id INTEGER,\n", 280 | " FOREIGN KEY (author_id) REFERENCES authors(author_id)\n", 281 | " ON DELETE CASCADE\n", 282 | ");\n", 283 | "\n", 284 | "CREATE TABLE customers (\n", 285 | " customer_id INTEGER PRIMARY KEY,\n", 286 | " customer_name TEXT\n", 287 | ");\n", 288 | "\n", 289 | "CREATE TABLE orders (\n", 290 | " order_id INTEGER PRIMARY KEY,\n", 291 | " order_date DATETIME,\n", 292 | " customer_id INT,\n", 293 | " FOREIGN KEY (customer_id) REFERENCES customers(customer_id)\n", 294 | " ON DELETE SET NULL\n", 295 | ");\n", 296 | "```\n", 297 | "\n", 298 | "In this example:\n", 299 | "- the `books` table has a foreign key constraint with `ON DELETE CASCADE`, meaning if an author is deleted from the `authors` table, all related books will be deleted as well\n", 300 | "- the `orders` 
table has a foreign key constraint with `ON DELETE SET NULL`, so if a customer is deleted, the customer ID in the orders table will be set to `NULL`.\n", 301 | "\n", 302 | "### Lab\n", 303 | "\n", 304 | "- create the above tables in a Sqlite Database \n", 305 | "- insert some records into all the tables\n", 306 | "- delete some records from the parent tables\n", 307 | "- observe the records in the child tables\n" 308 | ] 309 | }, 310 | { 311 | "cell_type": "markdown", 312 | "id": "a629994d", 313 | "metadata": {}, 314 | "source": [ 315 | "## DROP TABLE\n", 316 | "\n", 317 | "- `DROP TABLE` statement is used to delete a table and remove it from the database schema\n", 318 | "- removes both the table structure and all the data stored within it\n", 319 | "- https://sqlite.org/lang_droptable.html\n", 320 | "- syntax:\n", 321 | "\n", 322 | "```sql\n", 323 | "DROP TABLE [IF EXISTS] [schema_name.]table_name;\n", 324 | "```\n", 325 | "\n", 326 | "- `IF EXISTS`: This optional clause is used to check if the table exists before attempting to drop it. If the table does not exist, no error will be raised.\n", 327 | " - it's good practice to use the `IF EXISTS` clause to avoid errors if you attempt to drop a table that doesn't exist\n", 328 | "\n", 329 | "- e.g.,\n", 330 | "\n", 331 | "```sql\n", 332 | "DROP TABLE IF EXISTS employees;\n", 333 | "```" 334 | ] 335 | }, 336 | { 337 | "cell_type": "markdown", 338 | "id": "14859667", 339 | "metadata": {}, 340 | "source": [ 341 | "## ALTER TABLE\n", 342 | "\n", 343 | "- https://sqlite.org/lang_altertable.html\n", 344 | "- `ALTER TABLE` statement is used to modify an existing table's structure\n", 345 | "- it allows you to add, modify, or drop columns in a table, as well as make other changes to the table's definition\n", 346 | "- SQLite's support for altering tables is more limited compared to some other database systems\n", 347 | "- always back up your data and carefully plan any table alterations to ensure data integrity and minimize disruptions\n", 348 | "- basic syntax:\n", 349 | "\n", 350 | "```sql\n", 351 | "ALTER TABLE table_name\n", 352 | "action;\n", 353 | "```\n", 354 | "\n", 355 | "### Common alter table statements\n", 356 | "\n", 357 | "1. **Adding a Column**:\n", 358 | "```sql\n", 359 | "ALTER TABLE table_name\n", 360 | "ADD COLUMN new_column_name data_type;\n", 361 | "```\n", 362 | "- e.g.,\n", 363 | "\n", 364 | "```sql\n", 365 | "ALTER TABLE customers\n", 366 | "ADD COLUMN email TEXT;\n", 367 | "```\n", 368 | "\n", 369 | "2. **Renaming a Table**:\n", 370 | "- syntax\n", 371 | "\n", 372 | "```sql\n", 373 | "ALTER TABLE existing_table\n", 374 | "RENAME TO new_table;\n", 375 | "```\n", 376 | "- e.g.,\n", 377 | "\n", 378 | "```\n", 379 | "ALTER TABLE customers\n", 380 | "RENAME TO clients;\n", 381 | "```\n", 382 | "\n", 383 | "3. **Renaming a Columns**:\n", 384 | "- syntax\n", 385 | "\n", 386 | "```sql\n", 387 | "ALTER TABLE table_name\n", 388 | "RENAME COLUMN current_name TO new_name;\n", 389 | "```\n", 390 | "- e.g.,\n", 391 | "\n", 392 | "```sql\n", 393 | "ALTER TABLE products\n", 394 | "RENAME discount TO sale;\n", 395 | "```\n", 396 | "\n", 397 | "3. 
**Dropping a Column**:\n", 398 | "- can't drop column that is part of some constraints: PRIMARY KEY, CHECK, FOREIGN KEY, etc.\n", 399 | "\n", 400 | "- syntax:\n", 401 | "```\n", 402 | "ALTER TABLE table_name\n", 403 | "DROP COLUMNN column_name;\n", 404 | "```\n", 405 | "- e.g.,\n", 406 | "\n", 407 | "```sql\n", 408 | "ALTER TABLE products\n", 409 | "DROP COLUMN product_name;\n", 410 | "```\n", 411 | "\n", 412 | "4. **Modifying a Column**:\n", 413 | "- SQLite does not support directly modifying column definitions (such as changing data types or constraints) using the `ALTER TABLE` statement\n", 414 | "- You usually have to create a new table with the desired modifications and then copy the data.\n", 415 | "\n", 416 | "## Checking if a Table Exists in Sqlite\n", 417 | "\n", 418 | "```sql\n", 419 | "SELECT * FROM sqlite_master WHERE type = 'table'\n", 420 | " and name='';\n", 421 | "```" 422 | ] 423 | }, 424 | { 425 | "cell_type": "code", 426 | "execution_count": 2, 427 | "id": "ff02f8f0", 428 | "metadata": {}, 429 | "outputs": [], 430 | "source": [ 431 | "from python import db" 432 | ] 433 | }, 434 | { 435 | "cell_type": "code", 436 | "execution_count": 3, 437 | "id": "b0a1b87f", 438 | "metadata": {}, 439 | "outputs": [], 440 | "source": [ 441 | "db_file = 'data/chinook.sqlite'" 442 | ] 443 | }, 444 | { 445 | "cell_type": "code", 446 | "execution_count": 4, 447 | "id": "250fb672", 448 | "metadata": {}, 449 | "outputs": [], 450 | "source": [ 451 | "sql_table_info = \"SELECT * FROM sqlite_master WHERE type = 'table' AND name='employees'\"" 452 | ] 453 | }, 454 | { 455 | "cell_type": "code", 456 | "execution_count": 7, 457 | "id": "e10cef9b", 458 | "metadata": {}, 459 | "outputs": [], 460 | "source": [ 461 | "row = db.select_one_row(db_file, sql_table_info, ())\n", 462 | "# should return a row with 5 columns: type, name, tbl_name, rootpage, sql" 463 | ] 464 | }, 465 | { 466 | "cell_type": "code", 467 | "execution_count": 8, 468 | "id": "304b36ea", 469 | "metadata": {}, 470 | "outputs": [ 471 | { 472 | "data": { 473 | "text/plain": [ 474 | "('table',\n", 475 | " 'employees',\n", 476 | " 'employees',\n", 477 | " 8,\n", 478 | " 'CREATE TABLE \"employees\"\\r\\n(\\r\\n [EmployeeId] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,\\r\\n [LastName] NVARCHAR(20) NOT NULL,\\r\\n [FirstName] NVARCHAR(20) NOT NULL,\\r\\n [Title] NVARCHAR(30),\\r\\n [ReportsTo] INTEGER,\\r\\n [BirthDate] DATETIME,\\r\\n [HireDate] DATETIME,\\r\\n [Address] NVARCHAR(70),\\r\\n [City] NVARCHAR(40),\\r\\n [State] NVARCHAR(40),\\r\\n [Country] NVARCHAR(40),\\r\\n [PostalCode] NVARCHAR(10),\\r\\n [Phone] NVARCHAR(24),\\r\\n [Fax] NVARCHAR(24),\\r\\n [Email] NVARCHAR(60),\\r\\n FOREIGN KEY ([ReportsTo]) REFERENCES \"employees\" ([EmployeeId]) \\r\\n\\t\\tON DELETE NO ACTION ON UPDATE NO ACTION\\r\\n)')" 479 | ] 480 | }, 481 | "execution_count": 8, 482 | "metadata": {}, 483 | "output_type": "execute_result" 484 | } 485 | ], 486 | "source": [ 487 | "row" 488 | ] 489 | }, 490 | { 491 | "cell_type": "code", 492 | "execution_count": 9, 493 | "id": "39b0ab51", 494 | "metadata": {}, 495 | "outputs": [], 496 | "source": [ 497 | "assert len(row) == 5" 498 | ] 499 | }, 500 | { 501 | "cell_type": "code", 502 | "execution_count": null, 503 | "id": "18a9370d", 504 | "metadata": {}, 505 | "outputs": [], 506 | "source": [] 507 | } 508 | ], 509 | "metadata": { 510 | "kernelspec": { 511 | "display_name": "Python 3 (ipykernel)", 512 | "language": "python", 513 | "name": "python3" 514 | }, 515 | "language_info": { 516 | "codemirror_mode": { 517 | 
"name": "ipython", 518 | "version": 3 519 | }, 520 | "file_extension": ".py", 521 | "mimetype": "text/x-python", 522 | "name": "python", 523 | "nbconvert_exporter": "python", 524 | "pygments_lexer": "ipython3", 525 | "version": "3.10.8" 526 | } 527 | }, 528 | "nbformat": 4, 529 | "nbformat_minor": 5 530 | } 531 | -------------------------------------------------------------------------------- /Transactions.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "e640b097", 6 | "metadata": {}, 7 | "source": [ 8 | "# Transactions\n", 9 | "\n", 10 | "- Like many SQL-based DBMS, SQLite is a transactional database system\n", 11 | "- meaning queries can be grouped to execute as a single transaction\n", 12 | "- transactions have ACID properites\n", 13 | "\n", 14 | "## Atomic\n", 15 | "\n", 16 | "- means that a change cannot be broken down into smaller ones\n", 17 | "- when you commit a transaction, either the entire transaction is applied or nothing is applied\n", 18 | "\n", 19 | "## Consistent\n", 20 | "\n", 21 | "- a transaction must ensure to change the database from one valid state to another\n", 22 | "- when a transaction starts and executes a statement to modify data, the database becomes inconsistent; \n", 23 | " - however, when the transaction is committed or rolled back, it is important that the transaction must keep the database consistent\n", 24 | " \n", 25 | "## Isolation\n", 26 | "\n", 27 | "- isolation ensures that multiple transactions can be executed concurrently without interfering with each other - - each transaction is executed in isolation from other transactions, as if it were the only transaction running on the system\n", 28 | "- this prevents issues such as data corruption or conflicts that could arise from simultaneous access to the same data by multiple transactions\n", 29 | "\n", 30 | "## Durability\n", 31 | "\n", 32 | "- durability guarantees that once a transaction is successfully completed, its changes are permanently saved and will survive any subsequent system failures, crashes, or power outages\n", 33 | "- these changes are stored in a durable medium (such as disk storage) so that they can be recovered even if the system crashes right after the transaction is completed\n", 34 | "\n", 35 | "## Applications\n", 36 | "\n", 37 | "- the ACID properties collectively provide a robust framework for maintaining data integrity and reliability in database management systems, making them suitable for applications where accuracy and consistency of data are paramount, such as financial systems, e-commerce platforms, and various enterprise-level applications\n", 38 | "\n", 39 | "### Drawbacks\n", 40 | "\n", 41 | "- it's important to note that adhering strictly to the ACID properties can sometimes impact system performance, particularly in highly concurrent environments\n", 42 | "- some modern databases, especially those designed for specific use cases, might opt for a more relaxed consistency model to achieve better performance and scalability while still ensuring data reliability\n", 43 | "\n", 44 | "\n", 45 | "## SQLite Transaction\n", 46 | "\n", 47 | "- by default, SQLite operates in auto-commit mode\n", 48 | "- meaning that for each command, SQLite starts, processes, and commits the transaction automatically\n", 49 | "- syntax:\n", 50 | "\n", 51 | "```sql\n", 52 | "BEGIN TRANSACTION;\n", 53 | "\n", 54 | "/* SQL STATEMENTS, INSERT, UPDATE, DELETE, ETC... 
*/\n", 55 | "\n", 56 | "[COMMIT] [ROLLBACK];\n", 57 | "```\n" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "id": "c06b5314", 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "import sqlite3\n", 68 | "from sqlite3 import Error" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "id": "0f99d379", 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [ 78 | "filename = './data/bank.db'" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "id": "cb023eb2", 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "# if the conn object is used as a context manager,\n", 89 | "# all the statements are executed as a single transaction\n", 90 | "with sqlite3.connect(filename) as conn:\n", 91 | " cursor = conn.cursor()\n", 92 | " sqls = [\n", 93 | " 'DROP TABLE IF EXISTS CHECKING;',\n", 94 | " 'CREATE TABLE CHECKING (balance integer);',\n", 95 | " 'INSERT INTO CHECKING(balance) VALUES (100);',]\n", 96 | " for sql in sqls:\n", 97 | " cursor.execute(sql)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 9, 103 | "id": "c5937a54", 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "name": "stdout", 108 | "output_type": "stream", 109 | "text": [ 110 | "All success...\n" 111 | ] 112 | } 113 | ], 114 | "source": [ 115 | "conn = sqlite3.connect(filename)\n", 116 | "\n", 117 | "c = conn.cursor()\n", 118 | "c.execute(\"BEGIN TRANSACTION;\")\n", 119 | "try:\n", 120 | " c.execute(\"UPDATE CHECKING SET balance = balance - 5;\")\n", 121 | " #c.execute(\"UPDATE CHECKING SET saving = balance + 10;\") # <-- no column named saving\n", 122 | " # uncomment the above statement and run it again to see the rollback\n", 123 | " c.execute(\"UPDATE CHECKING SET balance = balance - 5;\")\n", 124 | " c.execute('SELECT balance FROM CHECKING;')\n", 125 | " balance = int(c.fetchone()[0])\n", 126 | " if balance < 0:\n", 127 | " print(f'{balance =}')\n", 128 | " raise Error(f'-ve Balance: {balance}')\n", 129 | " c.execute(\"COMMIT;\")\n", 130 | " print('All success...')\n", 131 | "except Error as ex:\n", 132 | " c.execute(\"ROLLBACK;\")\n", 133 | " print(\"Fail! 
Rolling back...\", ex)\n", 134 | "finally:\n", 135 | " conn.close()" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": 10, 141 | "id": "bfe509c8", 142 | "metadata": {}, 143 | "outputs": [ 144 | { 145 | "name": "stdout", 146 | "output_type": "stream", 147 | "text": [ 148 | "[(90,)]\n" 149 | ] 150 | } 151 | ], 152 | "source": [ 153 | "from python import db\n", 154 | "sql = 'SELECT BALANCE FROM CHECKING;'\n", 155 | "rows = db.select_many_rows(filename, sql, ())\n", 156 | "print(rows)" 157 | ] 158 | }, 159 | { 160 | "cell_type": "code", 161 | "execution_count": null, 162 | "id": "b8cf29ef", 163 | "metadata": {}, 164 | "outputs": [], 165 | "source": [] 166 | } 167 | ], 168 | "metadata": { 169 | "kernelspec": { 170 | "display_name": "Python 3 (ipykernel)", 171 | "language": "python", 172 | "name": "python3" 173 | }, 174 | "language_info": { 175 | "codemirror_mode": { 176 | "name": "ipython", 177 | "version": 3 178 | }, 179 | "file_extension": ".py", 180 | "mimetype": "text/x-python", 181 | "name": "python", 182 | "nbconvert_exporter": "python", 183 | "pygments_lexer": "ipython3", 184 | "version": "3.10.8" 185 | } 186 | }, 187 | "nbformat": 4, 188 | "nbformat_minor": 5 189 | } 190 | -------------------------------------------------------------------------------- /Trigger.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "9e890a5e", 6 | "metadata": {}, 7 | "source": [ 8 | "# Trigger\n", 9 | "\n", 10 | "- a database object that defines a set of actions to be performed automatically in response to a specific database event, such as an INSERT, UPDATE, DELETE, or even certain DDL (Data Definition Language) statements like CREATE or ALTER\n", 11 | "- Triggers are used to enforce business rules, maintain data integrity, and automate certain tasks without requiring manual intervention\n", 12 | "\n", 13 | "- trigger consists of two main components: \n", 14 | " - **an event** that activates the trigger and a set of actions to be executed when the trigger is activated \n", 15 | " - **actions** are written in the form of SQL statements, and they can include data modification statements (INSERT, UPDATE, SELECT, DELETE) or other operations.\n", 16 | "\n", 17 | "## CREATE TRIGGER\n", 18 | "\n", 19 | "- syntax:\n", 20 | "\n", 21 | "```sql\n", 22 | "CREATE TRIGGER [IF NOT EXISTS] trigger_name\n", 23 | " {BEFORE | AFTER} {INSERT | UPDATE | DELETE} \n", 24 | "ON table_name\n", 25 | "FOR EACH ROW\n", 26 | "BEGIN\n", 27 | " -- SQL statements to be executed\n", 28 | "END;\n", 29 | "```\n", 30 | "\n", 31 | "- `CREATE TRIGGER`: Indicates that you want to create a trigger.\n", 32 | "- `trigger_name`: The name you want to give to the trigger.\n", 33 | "- `BEFORE` or `AFTER`: Specifies whether the trigger should be fired before or after the specified event.\n", 34 | "- `INSERT`, `UPDATE`, or `DELETE`: The event that triggers the trigger.\n", 35 | "- `ON table_name`: The table on which the trigger should be applied.\n", 36 | "- `FOR EACH ROW`: Specifies that the trigger should be fired for each row affected by the event.\n", 37 | "- `BEGIN` and `END`: The block where you place the SQL statements that define the actions to be taken when the trigger is activated\n", 38 | "\n", 39 | "- e.g., let's say you have a table named `orders` and you want to automatically update an `order_date` column whenever a new order is inserted\n", 40 | " - you can create a trigger like this:\n", 41 | "\n", 42 | 
"```sql\n", 43 | "CREATE TRIGGER update_order_date\n", 44 | "AFTER INSERT ON orders\n", 45 | "FOR EACH ROW\n", 46 | "BEGIN\n", 47 | " UPDATE orders\n", 48 | " SET order_date = CURRENT_TIMESTAMP\n", 49 | " WHERE order_id = NEW.order_id;\n", 50 | "END;\n", 51 | "```\n", 52 | "\n", 53 | "In this example:\n", 54 | "- `update_order_date` is the name of the trigger.\n", 55 | "- `AFTER INSERT` specifies that the trigger should be activated after an INSERT operation.\n", 56 | "- `NEW` is a special keyword that refers to the new row being inserted.\n", 57 | "- The `UPDATE` statement modifies the `order_date` column for the newly inserted order using the current timestamp.\n", 58 | "\n", 59 | "- Triggers can be powerful tools for maintaining data consistency and automating tasks, but they should be used judiciously to avoid complexity and unintended consequences.\n", 60 | "\n", 61 | "- to trigger the update_order_date, insert a new order into orders tables\n", 62 | "- check the oder_date value by refreshing the \n", 63 | "\n", 64 | "```sql\n", 65 | "INSERT INTO orders (order_id, customer_id) values (100, 1);\n", 66 | "```\n", 67 | "\n", 68 | "\n", 69 | "### AFTER UPDATE TRIGGER example\n", 70 | "\n", 71 | "- email can be important and you want to keep track of old emails when customers change their emails\n", 72 | "- we can create a log table to keep track of every email changes using a Trigger\n", 73 | "- create a customer_email_changes table\n", 74 | "\n", 75 | "```sql\n", 76 | "CREATE TABLE customer_email_changes (\n", 77 | " old_id integer,\n", 78 | " new_id integer,\n", 79 | "\told_email text,\n", 80 | "\tnew_email text,\n", 81 | "\tuser_action text,\n", 82 | "\tcreated_at text\n", 83 | ");\n", 84 | "```\n", 85 | "\n", 86 | "- let's create AFTER UPDATE Trigger\n", 87 | "\n", 88 | "```sql\n", 89 | "CREATE TRIGGER log_customer_after_update \n", 90 | " AFTER UPDATE ON customers\n", 91 | " WHEN OLD.Email <> New.Email\n", 92 | "BEGIN\n", 93 | "\tINSERT INTO customer_email_changes (\n", 94 | " old_id,\n", 95 | " new_id,\n", 96 | "\t\told_email,\n", 97 | "\t\tnew_email,\n", 98 | "\t\tuser_action,\n", 99 | "\t\tcreated_at\n", 100 | "\t)\n", 101 | " VALUES\n", 102 | "\t(\n", 103 | " OLD.CustomerID,\n", 104 | " NEW.CustomerID,\n", 105 | "\t\tOLD.email,\n", 106 | "\t\tNEW.email,\n", 107 | "\t\t'UPDATE',\n", 108 | "\t\tDATETIME('NOW')\n", 109 | "\t) ;\n", 110 | "END;\n", 111 | "```\n", 112 | "\n", 113 | "- now update some customer's email\n", 114 | "\n", 115 | "```sql\n", 116 | "UPDATE customers set Email='newemail@gmail.com' where CustomerId=1;\n", 117 | "```\n", 118 | "\n", 119 | "- after executing the update statement check the customer_email_changes table\n" 120 | ] 121 | }, 122 | { 123 | "cell_type": "markdown", 124 | "id": "b7b56aca", 125 | "metadata": {}, 126 | "source": [ 127 | "## DROP TRIGGER\n", 128 | "\n", 129 | "- https://sqlite.org/lang_droptrigger.html\n", 130 | "- e.g.,\n", 131 | "\n", 132 | "```sql\n", 133 | "DROP TRIGGER IF EXISTS log_customer_after_update;\n", 134 | "```\n", 135 | "\n", 136 | "## Check Trigger Information\n", 137 | "\n", 138 | "```sql\n", 139 | "SELECT * FROM sqlite_master WHERE type = 'trigger' and name='';\n", 140 | "```" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 1, 146 | "id": "764c18b7", 147 | "metadata": {}, 148 | "outputs": [], 149 | "source": [ 150 | "from python import db" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": 2, 156 | "id": "aabae965", 157 | "metadata": {}, 158 | "outputs": [], 159 | 
"source": [ 160 | "db_file = 'data/chinook.sqlite'" 161 | ] 162 | }, 163 | { 164 | "cell_type": "code", 165 | "execution_count": 3, 166 | "id": "2f6ff16b", 167 | "metadata": {}, 168 | "outputs": [], 169 | "source": [ 170 | "sql_check_trigger = \"SELECT * FROM sqlite_master WHERE type = 'trigger' and name='update_order_date';\"" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": 5, 176 | "id": "47f5c84b", 177 | "metadata": {}, 178 | "outputs": [], 179 | "source": [ 180 | "row = db.select_one_row(db_file, sql_check_trigger, ())" 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": 6, 186 | "id": "175ead03", 187 | "metadata": {}, 188 | "outputs": [ 189 | { 190 | "name": "stdout", 191 | "output_type": "stream", 192 | "text": [ 193 | "('trigger', 'update_order_date', 'orders', 0, 'CREATE TRIGGER update_order_date\\nAFTER INSERT ON orders\\nFOR EACH ROW\\nBEGIN\\n UPDATE orders\\n SET order_date = CURRENT_TIMESTAMP\\n WHERE order_id = NEW.order_id;\\nEND')\n" 194 | ] 195 | } 196 | ], 197 | "source": [ 198 | "print(row)" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": null, 204 | "id": "9a2295d8", 205 | "metadata": {}, 206 | "outputs": [], 207 | "source": [] 208 | } 209 | ], 210 | "metadata": { 211 | "kernelspec": { 212 | "display_name": "Python 3 (ipykernel)", 213 | "language": "python", 214 | "name": "python3" 215 | }, 216 | "language_info": { 217 | "codemirror_mode": { 218 | "name": "ipython", 219 | "version": 3 220 | }, 221 | "file_extension": ".py", 222 | "mimetype": "text/x-python", 223 | "name": "python", 224 | "nbconvert_exporter": "python", 225 | "pygments_lexer": "ipython3", 226 | "version": "3.10.8" 227 | } 228 | }, 229 | "nbformat": 4, 230 | "nbformat_minor": 5 231 | } 232 | -------------------------------------------------------------------------------- /UPDATE.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "dd57dea5", 6 | "metadata": {}, 7 | "source": [ 8 | "# UPDATE\n", 9 | "\n", 10 | "- the \"U\" in \"CRUD\" operations refers to the action of updating/modifying existing data in a DBMS\n", 11 | "- allows you to change the values of one or more columns in one or more rows based on specified conditions\n", 12 | "- helps you keep your database up to date by allowing you to make changes to existing data\n", 13 | "- https://sqlite.org/lang_update.html\n", 14 | "- syntax:\n", 15 | "\n", 16 | "```\n", 17 | "UPDATE table_name\n", 18 | "SET column1 = value1, column2 = value2, ...\n", 19 | "WHERE condition;\n", 20 | "```\n", 21 | "- optional OREDER BY and LIMI clause can be used but rare\n", 22 | "- e.g. 
query:\n", 23 | "\n", 24 | "```sql\n", 25 | "UPDATE employees\n", 26 | "SET salary = 55000;\n", 27 | "```\n", 28 | "- what do you think will be the result of the above query?\n", 29 | "- NOTE: the UPDATE statement is a powerful tool for modifying data in a table, so be cautious when using it, especially with the WHERE clause, to ensure that you update the correct records and values\n", 30 | "- once UPDATE is executed, you can't undo the operation\n", 31 | "\n", 32 | "## WHERE\n", 33 | "\n", 34 | "- optional clause, but used almost all the time\n", 35 | "- used to provide a filter/search condition for rows to be updated by the query\n", 36 | " - WHERE clause should uniquely identify the records to be updated\n", 37 | " - Primary Key is primarily used in the WHERE clause\n", 38 | "- e.g.:\n", 39 | "\n", 40 | "```sql\n", 41 | "UPDATE employees\n", 42 | "SET salary = 55000\n", 43 | "WHERE employee_id = 123;\n", 44 | "```\n", 45 | "\n", 46 | "- run the following query on the chinook sqlite db with DBeaver\n", 47 | "\n", 48 | "```\n", 49 | "UPDATE employees\n", 50 | "SET Title = 'IT Manager'\n", 51 | "WHERE EmployeeId = 8;\n", 52 | "```\n" 53 | ] 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "id": "b99318f9", 58 | "metadata": {}, 59 | "source": [ 60 | "## UPDATE with Python\n", 61 | "\n", 62 | "- UPDATE doesn't return records but updates values in place in the table columns\n", 63 | "- use the cursor's execute() method to update one or many records based on the WHERE clause\n", 64 | "- use a parameterized query with the ? placeholder to safely include untrusted data in the query\n" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 1, 70 | "id": "443f3508", 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "from python import db" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 2, 80 | "id": "601414cf", 81 | "metadata": {}, 82 | "outputs": [], 83 | "source": [ 84 | "db_file = 'data/chinook.sqlite'" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 3, 90 | "id": "3bdf0b41", 91 | "metadata": {}, 92 | "outputs": [ 93 | { 94 | "name": "stdout", 95 | "output_type": "stream", 96 | "text": [ 97 | "Enter employee id: 1\n" 98 | ] 99 | } 100 | ], 101 | "source": [ 102 | "emp_id = int(input('Enter employee id: '))" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": 4, 108 | "id": "404b6eb9", 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "query = \"\"\"\n", 113 | " UPDATE employees\n", 114 | " SET Title = 'IT Specialist'\n", 115 | " WHERE EmployeeId = ?;\n", 116 | " \"\"\"" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": 5, 122 | "id": "1b2d24fe", 123 | "metadata": {}, 124 | "outputs": [], 125 | "source": [ 126 | "row_count = db.update_record(db_file, query, (emp_id,))" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": 6, 132 | "id": "d44b75f0", 133 | "metadata": {}, 134 | "outputs": [ 135 | { 136 | "name": "stdout", 137 | "output_type": "stream", 138 | "text": [ 139 | "1\n" 140 | ] 141 | } 142 | ], 143 | "source": [ 144 | "print(row_count)" 145 | ] 146 | }, 147 | { 148 | "cell_type": "markdown", 149 | "id": "de23719b", 150 | "metadata": {}, 151 | "source": [] 152 | } 153 | ], 154 | "metadata": { 155 | "kernelspec": { 156 | "display_name": "Python 3 (ipykernel)", 157 | "language": "python", 158 | "name": "python3" 159 | }, 160 | "language_info": { 161 | "codemirror_mode": { 162 | "name": "ipython", 163 | "version": 3 164 | }, 165 | "file_extension": ".py", 166 | 
"mimetype": "text/x-python", 167 | "name": "python", 168 | "nbconvert_exporter": "python", 169 | "pygments_lexer": "ipython3", 170 | "version": "3.10.8" 171 | } 172 | }, 173 | "nbformat": 4, 174 | "nbformat_minor": 5 175 | } 176 | -------------------------------------------------------------------------------- /ci-cd-requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | mypy 3 | flake8 4 | hypothesis 5 | -------------------------------------------------------------------------------- /command.sql: -------------------------------------------------------------------------------- 1 | SELECT albumid, title 2 | FROM albums 3 | ORDER BY title 4 | LIMIT 10; 5 | -------------------------------------------------------------------------------- /data/bank.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rambasnet/Intro-Database/94409d947b1e9d00af4fdc94c49fdaec0ae2bd8f/data/bank.db -------------------------------------------------------------------------------- /data/chinook.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rambasnet/Intro-Database/94409d947b1e9d00af4fdc94c49fdaec0ae2bd8f/data/chinook.sqlite -------------------------------------------------------------------------------- /kattis-cli/.gitignore: -------------------------------------------------------------------------------- 1 | .kattisrc 2 | -------------------------------------------------------------------------------- /kattis-cli/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2006-2015 Kattis 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /kattis-cli/kattis: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | python "${KATTIS_CLI}/submit.py" "$@" 3 | #python "submit.py" "$@" 4 | -------------------------------------------------------------------------------- /kattis-cli/kattis.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | python %~dp0\submit.py %* 3 | -------------------------------------------------------------------------------- /kattis-cli/submit.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | import argparse 4 | import os 5 | import re 6 | import sys 7 | import time 8 | 9 | import requests 10 | import requests.exceptions 11 | 12 | from lxml.html import fragment_fromstring 13 | 14 | # Python 2/3 compatibility 15 | if sys.version_info[0] >= 3: 16 | import configparser 17 | else: 18 | # Python 2, import modules with Python 3 names 19 | import ConfigParser as configparser 20 | 21 | # End Python 2/3 compatibility 22 | 23 | _DEFAULT_CONFIG = '/usr/local/etc/kattisrc' 24 | _LANGUAGE_GUESS = { 25 | '.c': 'C', 26 | '.c++': 'C++', 27 | '.cc': 'C++', 28 | '.c#': 'C#', 29 | '.cpp': 'C++', 30 | '.cs': 'C#', 31 | '.cxx': 'C++', 32 | '.cbl': 'COBOL', 33 | '.cob': 'COBOL', 34 | '.cpy': 'COBOL', 35 | '.fs': 'F#', 36 | '.go': 'Go', 37 | '.hs': 'Haskell', 38 | '.java': 'Java', 39 | '.js': 'JavaScript (Node.js)', 40 | '.ts': 'TypeScript', 41 | '.kt': 'Kotlin', 42 | '.lisp': 'Common Lisp', 43 | '.cl': 'Common Lisp', 44 | '.m': 'Objective-C', 45 | '.ml': 'OCaml', 46 | '.pas': 'Pascal', 47 | '.php': 'PHP', 48 | '.pl': 'Prolog', 49 | '.py': 'Python 3', 50 | '.pyc': 'Python 3', 51 | '.rb': 'Ruby', 52 | '.rs': 'Rust', 53 | '.scala': 'Scala', 54 | '.f90': 'Fortran', 55 | '.f': 'Fortran', 56 | '.for': 'Fortran', 57 | '.sh': 'Bash', 58 | '.apl': 'APL', 59 | '.ss': 'Gerbil', 60 | '.jl': 'Julia', 61 | '.vb': 'Visual Basic', 62 | '.dart': 'Dart', 63 | '.zig': 'Zig', 64 | '.swift': 'Swift', 65 | '.nim': 'Nim', 66 | } 67 | 68 | _GUESS_MAINCLASS = {'Java', 'Kotlin', 'Scala'} 69 | _GUESS_MAINFILE = {'APL', 'Bash', 'Dart', 'Gerbil', 'JavaScript (Node.js)', 'Julia', 'Common Lisp', 'Pascal', 'PHP', 'Python 2', 'Python 3', 'Ruby', 'Rust', 'TypeScript', 'Zig'} 70 | 71 | _HEADERS = {'User-Agent': 'kattis-cli-submit'} 72 | 73 | _RUNNING_STATUS = 5 74 | _COMPILE_ERROR_STATUS = 8 75 | _ACCEPTED_STATUS = 16 76 | _STATUS_MAP = { 77 | 0: 'New', # 78 | 1: 'New', 79 | 2: 'Waiting for compile', 80 | 3: 'Compiling', 81 | 4: 'Waiting for run', 82 | _RUNNING_STATUS: 'Running', 83 | 6: 'Judge Error', 84 | 7: 'Submission Error', 85 | _COMPILE_ERROR_STATUS: 'Compile Error', 86 | 9: 'Run Time Error', 87 | 10: 'Memory Limit Exceeded', 88 | 11: 'Output Limit Exceeded', 89 | 12: 'Time Limit Exceeded', 90 | 13: 'Illegal Function', 91 | 14: 'Wrong Answer', 92 | # 15: '', 93 | _ACCEPTED_STATUS: 'Accepted', 94 | } 95 | 96 | 97 | class ConfigError(Exception): 98 | pass 99 | 100 | 101 | def get_url(cfg, option, default): 102 | if cfg.has_option('kattis', option): 103 | return cfg.get('kattis', option) 104 | else: 105 | return 'https://%s/%s' % (cfg.get('kattis', 'hostname'), default) 106 | 107 | 108 | def get_config(): 109 | """Returns a ConfigParser object for the .kattisrc file(s) 110 | """ 111 | cfg = configparser.ConfigParser() 112 | if os.path.exists(_DEFAULT_CONFIG): 113 | 
cfg.read(_DEFAULT_CONFIG) 114 | 115 | if not cfg.read([os.path.join(os.path.expanduser("~"), '.kattisrc'), 116 | os.path.join(os.path.dirname(sys.argv[0]), '.kattisrc')]): 117 | raise ConfigError('''\ 118 | I failed to read in a config file from your home directory or from the 119 | same directory as this script. To download a .kattisrc file please visit 120 | https:///download/kattisrc 121 | 122 | The file should look something like this: 123 | [user] 124 | username: yourusername 125 | token: ********* 126 | 127 | [kattis] 128 | hostname: 129 | loginurl: https:///login 130 | submissionurl: https:///submit 131 | submissionsurl: https:///submissions''') 132 | return cfg 133 | 134 | 135 | def is_python2(files): 136 | python2 = re.compile(r'^\s*\bprint\b *[^ \(\),\]]|\braw_input\b') 137 | for filename in files: 138 | try: 139 | with open(filename) as f: 140 | for index, line in enumerate(f): 141 | if index == 0 and line.startswith('#!'): 142 | if 'python2' in line: 143 | return True 144 | if 'python3' in line: 145 | return False 146 | if python2.search(line.split('#')[0]): 147 | return True 148 | except IOError: 149 | return False 150 | return False 151 | 152 | 153 | def guess_language(ext, files): 154 | if ext == ".C": 155 | return "C++" 156 | ext = ext.lower() 157 | if ext == ".h": 158 | if any(f.endswith(".c") for f in files): 159 | return "C" 160 | else: 161 | return "C++" 162 | if ext == ".py": 163 | if is_python2(files): 164 | return "Python 2" 165 | else: 166 | return "Python 3" 167 | return _LANGUAGE_GUESS.get(ext, None) 168 | 169 | 170 | def guess_mainfile(language, files): 171 | for filename in files: 172 | if os.path.splitext(os.path.basename(filename))[0] in ['main', 'Main']: 173 | return filename 174 | for filename in files: 175 | try: 176 | with open(filename) as f: 177 | conts = f.read() 178 | if language in ['Java', 'Rust', 'Scala', 'Kotlin'] and re.search(r' main\s*\(', conts): 179 | return filename 180 | if language == 'Pascal' and re.match(r'^\s*[Pp]rogram\b', conts): 181 | return filename 182 | except IOError: 183 | pass 184 | return files[0] 185 | 186 | 187 | def guess_mainclass(language, files): 188 | if language in _GUESS_MAINFILE and len(files) > 1: 189 | return os.path.basename(guess_mainfile(language, files)) 190 | if language in _GUESS_MAINCLASS: 191 | mainfile = os.path.basename(guess_mainfile(language, files)) 192 | name = os.path.splitext(mainfile)[0] 193 | if language == 'Kotlin': 194 | return name[0].upper() + name[1:] + 'Kt' 195 | return name 196 | return None 197 | 198 | 199 | def login(login_url, username, password=None, token=None): 200 | """Log in to Kattis. 201 | 202 | At least one of password or token needs to be provided. 
203 | 204 | Returns a requests.Response with cookies needed to be able to submit 205 | """ 206 | login_args = {'user': username, 'script': 'true'} 207 | if password: 208 | login_args['password'] = password 209 | if token: 210 | login_args['token'] = token 211 | 212 | return requests.post(login_url, data=login_args, headers=_HEADERS) 213 | 214 | 215 | def login_from_config(cfg): 216 | """Log in to Kattis using the access information in a kattisrc file 217 | 218 | Returns a requests.Response with cookies needed to be able to submit 219 | """ 220 | username = cfg.get('user', 'username') 221 | password = token = None 222 | try: 223 | password = cfg.get('user', 'password') 224 | except configparser.NoOptionError: 225 | pass 226 | try: 227 | token = cfg.get('user', 'token') 228 | except configparser.NoOptionError: 229 | pass 230 | if password is None and token is None: 231 | raise ConfigError('''\ 232 | Your .kattisrc file appears corrupted. It must provide a token (or a 233 | KATTIS password). 234 | 235 | Please download a new .kattisrc file''') 236 | 237 | loginurl = get_url(cfg, 'loginurl', 'login') 238 | return login(loginurl, username, password, token) 239 | 240 | 241 | def submit(submit_url, cookies, problem, language, files, mainclass='', tag=''): 242 | """Make a submission. 243 | 244 | The url_opener argument is an OpenerDirector object to use (as 245 | returned by the login() function) 246 | 247 | Returns the requests.Result from the submission 248 | """ 249 | 250 | data = {'submit': 'true', 251 | 'submit_ctr': 2, 252 | 'language': language, 253 | 'mainclass': mainclass, 254 | 'problem': problem, 255 | 'tag': tag, 256 | 'script': 'true'} 257 | 258 | sub_files = [] 259 | for f in files: 260 | with open(f) as sub_file: 261 | sub_files.append(('sub_file[]', 262 | (os.path.basename(f), 263 | sub_file.read(), 264 | 'application/octet-stream'))) 265 | 266 | return requests.post(submit_url, data=data, files=sub_files, cookies=cookies, headers=_HEADERS) 267 | 268 | 269 | def confirm_or_die(problem, language, files, mainclass, tag): 270 | print('Problem:', problem) 271 | print('Language:', language) 272 | print('Files:', ', '.join(files)) 273 | if mainclass: 274 | if language in _GUESS_MAINFILE: 275 | print('Main file:', mainclass) 276 | else: 277 | print('Mainclass:', mainclass) 278 | if tag: 279 | print('Tag:', tag) 280 | print('Submit (y/N)?') 281 | if sys.stdin.readline().upper()[:-1] != 'Y': 282 | print('Cancelling') 283 | sys.exit(1) 284 | 285 | 286 | def get_submission_url(submit_response, cfg): 287 | m = re.search(r'Submission ID: (\d+)', submit_response) 288 | if m: 289 | submissions_url = get_url(cfg, 'submissionsurl', 'submissions') 290 | submission_id = m.group(1) 291 | return '%s/%s' % (submissions_url, submission_id) 292 | 293 | 294 | def get_submission_status(submission_url, cookies): 295 | reply = requests.get(submission_url + '?json', cookies=cookies, headers=_HEADERS) 296 | return reply.json() 297 | 298 | 299 | _RED_COLOR = 31 300 | _GREEN_COLOR = 32 301 | def color(s, c): 302 | return '\x1b[%sm%s\x1b[0m' % (c, s) 303 | 304 | 305 | def show_judgement(submission_url, cfg): 306 | print() 307 | login_reply = login_from_config(cfg) 308 | while True: 309 | status = get_submission_status(submission_url, login_reply.cookies) 310 | status_id = status['status_id'] 311 | testcases_done = status['testcase_index'] 312 | testcases_total = status['row_html'].count(' _RUNNING_STATUS: 352 | # Done 353 | print() 354 | success = status_id == _ACCEPTED_STATUS 355 | try: 356 | root = 
fragment_fromstring(status['row_html'], create_parent=True) 357 | cpu_time = root.find('.//*[@data-type="cpu"]').text 358 | status_text += " (" + cpu_time + ")" 359 | except: 360 | pass 361 | if status_id != _COMPILE_ERROR_STATUS: 362 | print(color(status_text, _GREEN_COLOR if success else _RED_COLOR)) 363 | return success 364 | 365 | time.sleep(0.25) 366 | 367 | 368 | def main(): 369 | parser = argparse.ArgumentParser(prog='kattis', description='Submit a solution to Kattis') 370 | parser.add_argument('-p', '--problem', 371 | help=''''Which problem to submit to. 372 | Overrides default guess (first part of first filename)''') 373 | parser.add_argument('-m', '--mainclass', 374 | help='''Sets mainclass. 375 | Overrides default guess (first part of first filename)''') 376 | parser.add_argument('-l', '--language', 377 | help='''Sets language. 378 | Overrides default guess (based on suffix of first filename)''') 379 | parser.add_argument('-t', '--tag', 380 | help=argparse.SUPPRESS) 381 | parser.add_argument('-f', '--force', 382 | help='Force, no confirmation prompt before submission', 383 | action='store_true') 384 | parser.add_argument('files', nargs='+') 385 | 386 | args = parser.parse_args() 387 | files = args.files 388 | 389 | try: 390 | cfg = get_config() 391 | except ConfigError as exc: 392 | print(exc) 393 | sys.exit(1) 394 | 395 | problem, ext = os.path.splitext(os.path.basename(files[0])) 396 | language = guess_language(ext, files) 397 | mainclass = guess_mainclass(language, files) 398 | tag = args.tag 399 | 400 | problem = problem.lower() 401 | 402 | if args.problem: 403 | problem = args.problem 404 | 405 | if args.mainclass is not None: 406 | mainclass = args.mainclass 407 | 408 | if args.language: 409 | language = args.language 410 | 411 | if language is None: 412 | print('''\ 413 | No language specified, and I failed to guess language from filename 414 | extension "%s"''' % (ext,)) 415 | sys.exit(1) 416 | 417 | files = sorted(list(set(args.files))) 418 | 419 | try: 420 | login_reply = login_from_config(cfg) 421 | except ConfigError as exc: 422 | print(exc) 423 | sys.exit(1) 424 | except requests.exceptions.RequestException as err: 425 | print('Login connection failed:', err) 426 | sys.exit(1) 427 | 428 | if not login_reply.status_code == 200: 429 | print('Login failed.') 430 | if login_reply.status_code == 403: 431 | print('Incorrect username or password/token (403)') 432 | elif login_reply.status_code == 404: 433 | print('Incorrect login URL (404)') 434 | else: 435 | print('Status code:', login_reply.status_code) 436 | sys.exit(1) 437 | 438 | submit_url = get_url(cfg, 'submissionurl', 'submit') 439 | 440 | if not args.force: 441 | confirm_or_die(problem, language, files, mainclass, tag) 442 | 443 | try: 444 | result = submit(submit_url, 445 | login_reply.cookies, 446 | problem, 447 | language, 448 | files, 449 | mainclass, 450 | tag) 451 | except requests.exceptions.RequestException as err: 452 | print('Submit connection failed:', err) 453 | sys.exit(1) 454 | 455 | if result.status_code != 200: 456 | print('Submission failed.') 457 | if result.status_code == 403: 458 | print('Access denied (403)') 459 | elif result.status_code == 404: 460 | print('Incorrect submit URL (404)') 461 | else: 462 | print('Status code:', result.status_code) 463 | sys.exit(1) 464 | 465 | plain_result = result.content.decode('utf-8').replace('
', '\n') 466 | print(plain_result) 467 | 468 | submission_url = None 469 | try: 470 | submission_url = get_submission_url(plain_result, cfg) 471 | except configparser.NoOptionError: 472 | pass 473 | 474 | if submission_url: 475 | print(submission_url) 476 | if not show_judgement(submission_url, cfg): 477 | sys.exit(1) 478 | 479 | 480 | if __name__ == '__main__': 481 | main() 482 | -------------------------------------------------------------------------------- /python/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rambasnet/Intro-Database/94409d947b1e9d00af4fdc94c49fdaec0ae2bd8f/python/__init__.py -------------------------------------------------------------------------------- /python/db.py: -------------------------------------------------------------------------------- 1 | """Sqlite Database wrapper. 2 | 3 | This module provides a wrapper for sqlite3 database operations. 4 | 5 | Example: 6 | import db 7 | 8 | # create a database connection 9 | conn = db.create_connection("sqlite.db") 10 | db.close_connection(conn) 11 | 12 | """ 13 | 14 | 15 | import sqlite3 16 | from sqlite3 import Error 17 | from typing import Any, Optional, Tuple, List 18 | 19 | 20 | def create_connection(db_file: str) -> sqlite3.Connection: 21 | """Create sqlite3 connection and return it. 22 | 23 | Args: 24 | db_file (str): sqlite filename to open or create. 25 | 26 | Raises: 27 | err: sqlite3.Error as an exception. 28 | 29 | Returns: 30 | sqlite3.Connection: sqlite3 connection object. 31 | """ 32 | 33 | try: 34 | conn = sqlite3.connect(db_file) 35 | return conn 36 | except Error as err: 37 | raise err 38 | 39 | 40 | def close_connection(conn: sqlite3.Connection) -> None: 41 | """Close a database connection to a SQLite database. 42 | Args: 43 | conn (Connection): Connection object 44 | """ 45 | if conn: 46 | conn.close() 47 | 48 | 49 | def create_table(db_file: str, create_table_sql: str) -> None: 50 | """Create a table from the create_table_sql statement 51 | Args: 52 | db_file (str): database file path 53 | create_table_sql (str): a CREATE TABLE statement 54 | 55 | Raises: 56 | err: sqlite3.Error as an exception. 57 | 58 | Return: 59 | None 60 | """ 61 | conn = create_connection(db_file) 62 | # with conn: 63 | try: 64 | cursor = conn.cursor() 65 | cursor.execute(create_table_sql) 66 | conn.commit() 67 | except Error as err: 68 | raise err 69 | # Successful, conn.commit() is called automatically afterwards 70 | finally: 71 | # close most be called explictly 72 | close_connection(conn) 73 | 74 | 75 | def insert_one_row(db_file: str, insert_row_sql: str, 76 | row: Tuple[Any, ...]) -> Optional[int]: 77 | """Insert data into a table from the insert_data_sql statement 78 | Args: 79 | db_file (str): database file path 80 | insert_data_sql (str): an INSERT INTO statement 81 | row (tuple): row as tuple to be inserted 82 | 83 | Raises: 84 | err: sqlite3.Error as an exception. 
85 | 86 | Return: 87 | row_id (int): row id of the last inserted row 88 | """ 89 | conn = create_connection(db_file) 90 | 91 | try: 92 | cursor = conn.cursor() 93 | cursor.execute(insert_row_sql, row) 94 | conn.commit() 95 | return cursor.lastrowid 96 | except Error as err: 97 | raise err 98 | # Successful, conn.commit() is called automatically afterwards 99 | finally: 100 | # close most be called explictly 101 | close_connection(conn) 102 | 103 | 104 | def insert_many_rows(db_file: str, insert_rows_sql: str, 105 | rows: List[Any]) -> Optional[int]: 106 | """Insert data into a table from the insert_data_sql statement 107 | Args: 108 | db_file (str): database file path 109 | insert_data_sql (str): an INSERT INTO statement 110 | rows (list[tuple]): list of tuples as rows to be inserted 111 | 112 | Raises: 113 | err: sqlite3.Error as an exception. 114 | 115 | Return: 116 | row_id (int): row id of the last inserted row 117 | """ 118 | conn = create_connection(db_file) 119 | try: 120 | cursor = conn.cursor() 121 | cursor.executemany(insert_rows_sql, rows) 122 | conn.commit() 123 | return cursor.lastrowid 124 | except Error as err: 125 | raise err 126 | # Successful, conn.commit() is called automatically afterwards 127 | finally: 128 | # close most be called explictly 129 | close_connection(conn) 130 | 131 | 132 | def select_one_row(db_file: str, select_row_sql: str, 133 | where: Tuple[Any, ...]) -> Any: 134 | """API to select one row from a table from the select_data_sql statement. 135 | 136 | Args: 137 | db_file (str): database file path 138 | select_row_sql (str): a SELECT statement 139 | where (tuple[str]): where clause as tuple for ? placeholder 140 | 141 | Raises: 142 | err: sqlite3.Error as an exception. 143 | 144 | Returns: 145 | tuple[str]: row as tuple or None 146 | """ 147 | conn = create_connection(db_file) 148 | with conn: 149 | try: 150 | cursor = conn.cursor() 151 | cursor.execute(select_row_sql, where) 152 | return cursor.fetchone() 153 | except Error as err: 154 | raise err 155 | 156 | 157 | def select_many_rows(db_file: str, select_rows_sql: str, 158 | where: Tuple[Any, ...]) -> Any: 159 | """Select all rows from a table from the select_data_sql statement 160 | Args: 161 | db_file (str): database file path 162 | select_data_sql (str): an SELECT statement 163 | where (tuple): where clause as tuple for ? placeholder 164 | 165 | Raises: 166 | err: sqlite3.Error as an exception. 167 | 168 | Return: 169 | rows (Any): list of tuples as rows or None 170 | """ 171 | conn = create_connection(db_file) 172 | with conn: 173 | try: 174 | cursor = conn.cursor() 175 | cursor.execute(select_rows_sql, where) 176 | return cursor.fetchall() 177 | except Error as err: 178 | raise err 179 | 180 | 181 | def update_record(db_file: str, update_sql: str, 182 | where: Tuple[Any, ...]) -> Optional[int]: 183 | """Update a table from the update_sql statement 184 | Args: 185 | db_file (str): database file path 186 | update_sql (str): an UPDATE statement 187 | where (tuple): where clause as tuple for ? placeholder 188 | 189 | Raises: 190 | err: sqlite3.Error as an exception. 
191 | 192 | Return: 193 | rows_affected (int): number of rows affected 194 | """ 195 | conn = create_connection(db_file) 196 | with conn: 197 | try: 198 | cursor = conn.cursor() 199 | cursor.execute(update_sql, where) 200 | return cursor.rowcount 201 | except Error as err: 202 | raise err 203 | 204 | 205 | def delete_record(db_file: str, delete_sql: str, where: Tuple[Any]) -> int: 206 | """Delete row(s) from the delete_sql statement 207 | Args: 208 | db_file (str): database file path 209 | delete_sql (str): a DELETE statement 210 | where (tuple): where clause as tuple for ? placeholder 211 | 212 | Raises: 213 | err: sqlite3.Error as an exception. 214 | 215 | Return: 216 | rows_affected (int): number of rows affected 217 | """ 218 | conn = create_connection(db_file) 219 | with conn: 220 | try: 221 | cursor = conn.cursor() 222 | cursor.execute(delete_sql, where) 223 | return cursor.rowcount 224 | except Error as err: 225 | raise err 226 | 227 | 228 | def execute_non_query(db_file: str, sql: str) -> None: 229 | """Execute a non query statement 230 | Args: 231 | db_file (str): database file path 232 | sql (str): a non query statement that doesn't return rows 233 | 234 | Raises: 235 | err: sqlite3.Error as an exception. 236 | 237 | Return: 238 | None 239 | """ 240 | conn = create_connection(db_file) 241 | with conn: 242 | try: 243 | cursor = conn.cursor() 244 | cursor.execute(sql) 245 | conn.commit() 246 | except Error as err: 247 | raise err 248 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | jupyter 3 | pymongo[srv] 4 | mypy 5 | hypothesis 6 | flake8 7 | autopep8 8 | lxml 9 | requests 10 | -------------------------------------------------------------------------------- /run-jupyter.sh: -------------------------------------------------------------------------------- 1 | # get directory of this script 2 | # https://stackoverflow.com/questions/59895/how-can-i-get-the-source-directory-of-a-bash-script-from-within-the-script-itsel 3 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 4 | if ! cd "$SCRIPT_DIR" 5 | then 6 | echo "Could not change directory to script directory '$SCRIPT_DIR'" 7 | exit 1 8 | fi 9 | echo "Working directory is $SCRIPT_DIR" 10 | 11 | if which docker > /dev/null 12 | then 13 | container=docker 14 | elif which podman > /dev/null 15 | then 16 | container=podman 17 | else 18 | echo "Docker or Podman Desktop needs to be installed!" 19 | exit 1 20 | fi 21 | 22 | args=() 23 | 24 | while [ $# -gt 0 ] 25 | do 26 | case "$1" 27 | in 28 | --podman) container=podman;; 29 | --docker) container=docker;; 30 | *) args+=("$1");; 31 | esac 32 | shift 33 | done 34 | echo "Using $container..." 35 | 36 | is_windows() { 37 | if which cygpath > /dev/null # git-bash/msys/cygwin 38 | then 39 | return 0 40 | fi 41 | if [ -r /proc/version ] && grep -q Microsoft /proc/version 42 | then 43 | return 0 44 | fi 45 | return 1 46 | } 47 | 48 | is_selinux() { 49 | if which sestatus > /dev/null 50 | then 51 | if sestatus | grep -q enabled 52 | then 53 | return 0 54 | fi 55 | fi 56 | return 1 57 | } 58 | 59 | optZ() { 60 | if [ "$container" = "podman" ] 61 | then 62 | if is_selinux 63 | then 64 | echo -n ":Z" 65 | fi 66 | fi 67 | } 68 | 69 | winenv() { 70 | if is_windows 71 | then 72 | winpty "$@" 73 | else 74 | "$@" 75 | fi 76 | } 77 | 78 | # Try to convert for git-bash/msys/cygwin/wsl... 
79 | winpath() { 80 | if which cygpath > /dev/null # git-bash/msys/cygwin 81 | then 82 | cygpath -w "$@" 83 | elif [ -r /proc/version ] && grep -q Microsoft /proc/version 84 | then 85 | echo readlink -m "$@" | sed 's|^/mnt/\([a-z]\)|\U\1:|' | sed 's|/|\\|g' 86 | else 87 | echo "$@" 88 | fi 89 | } 90 | 91 | # use the directory name as the tag name for podman 92 | PARENT_DIR=$(basename "$SCRIPT_DIR") 93 | CONTAINER_TAG=$container-$PARENT_DIR 94 | CONTAINER_TAG=$(echo "$CONTAINER_TAG" | tr '[:upper:]' '[:lower:]') 95 | # replace spaces with hypens 96 | CONTAINER_TAG=$(echo "$CONTAINER_TAG" | sed 's/ /-/g') 97 | 98 | # Host (source) directory to mount in container 99 | HOST_DIR="$(winpath "$SCRIPT_DIR")" 100 | 101 | # Guest (target) directory where host directoy is mounted 102 | GUEST_DIR=/home/user/$PARENT_DIR 103 | 104 | # Configure container git to use the host's .gitconfig 105 | g_dir=$(cat ~/.gitconfig | grep "$GUEST_DIR") 106 | if [ -z "$g_dir" ]; then 107 | git config --global --add safe.directory "$GUEST_DIR" 108 | fi 109 | git config core.hooksPath .githooks 110 | 111 | if [ ! -f Dockerfile ] 112 | then 113 | echo "'$SCRIPT_DIR' does not contain a Dockerfile." 114 | exit 1 115 | fi 116 | 117 | echo "$container build '$SCRIPT_DIR/Dockerfile' with tag '$CONTAINER_TAG'..." 118 | if $container build -t "$CONTAINER_TAG" . 119 | then 120 | echo "$container build ok." 121 | else 122 | echo "$container build failed." 123 | exit 1 124 | fi 125 | 126 | if [ ${#args[@]} -eq 0 ] 127 | then 128 | args=("bash" "-c" "cd $GUEST_DIR; jupyter notebook --ip 0.0.0.0 --port 9999 --no-browser") 129 | fi 130 | 131 | echo "$container run '$CONTAINER_TAG' (mounting host '$HOST_DIR' as '$GUEST_DIR'):" \ 132 | "${args[@]}" 133 | 134 | winenv $container run -it --rm \ 135 | -v "$HOST_DIR:$GUEST_DIR$(optZ)" \ 136 | -h debian \ 137 | -p 9999:9999 \ 138 | "$CONTAINER_TAG" \ 139 | "${args[@]}" 140 | 141 | -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | # get directory of this script 2 | # https://stackoverflow.com/questions/59895/how-can-i-get-the-source-directory-of-a-bash-script-from-within-the-script-itsel 3 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 4 | if ! cd "$SCRIPT_DIR" 5 | then 6 | echo "Could not change directory to script directory '$SCRIPT_DIR'" 7 | exit 1 8 | fi 9 | echo "Working directory is $SCRIPT_DIR" 10 | 11 | if which docker > /dev/null 12 | then 13 | container=docker 14 | elif which podman > /dev/null 15 | then 16 | container=podman 17 | else 18 | echo "Docker or Podman Desktop needs to be installed!" 19 | exit 1 20 | fi 21 | 22 | args=() 23 | 24 | while [ $# -gt 0 ] 25 | do 26 | case "$1" 27 | in 28 | --podman) container=podman;; 29 | --docker) container=docker;; 30 | *) args+=("$1");; 31 | esac 32 | shift 33 | done 34 | echo "Using $container..." 
35 | 36 | is_windows() { 37 | if which cygpath > /dev/null # git-bash/msys/cygwin 38 | then 39 | return 0 40 | fi 41 | if [ -r /proc/version ] && grep -q Microsoft /proc/version 42 | then 43 | return 0 44 | fi 45 | return 1 46 | } 47 | 48 | is_selinux() { 49 | if which sestatus > /dev/null 50 | then 51 | if sestatus | grep -q enabled 52 | then 53 | return 0 54 | fi 55 | fi 56 | return 1 57 | } 58 | 59 | optZ() { 60 | if [ "$container" = "podman" ] 61 | then 62 | if is_selinux 63 | then 64 | echo -n ":Z" 65 | fi 66 | fi 67 | } 68 | 69 | winenv() { 70 | if is_windows 71 | then 72 | winpty "$@" 73 | else 74 | "$@" 75 | fi 76 | } 77 | 78 | # Try to convert for git-bash/msys/cygwin/wsl... 79 | winpath() { 80 | if which cygpath > /dev/null # git-bash/msys/cygwin 81 | then 82 | cygpath -w "$@" 83 | elif [ -r /proc/version ] && grep -q Microsoft /proc/version 84 | then 85 | echo readlink -m "$@" | sed 's|^/mnt/\([a-z]\)|\U\1:|' | sed 's|/|\\|g' 86 | else 87 | echo "$@" 88 | fi 89 | } 90 | 91 | # use the directory name as the tag name for podman 92 | PARENT_DIR=$(basename "$SCRIPT_DIR") 93 | CONTAINER_TAG=$container-$PARENT_DIR 94 | CONTAINER_TAG=$(echo "$CONTAINER_TAG" | tr '[:upper:]' '[:lower:]') 95 | # replace spaces with hypens 96 | CONTAINER_TAG=$(echo "$CONTAINER_TAG" | sed 's/ /-/g') 97 | 98 | # Host (source) directory to mount in container 99 | HOST_DIR="$(winpath "$SCRIPT_DIR")" 100 | 101 | # Guest (target) directory where host directoy is mounted 102 | GUEST_DIR=/home/user/$PARENT_DIR 103 | 104 | # Configure container git to use the host's .gitconfig 105 | g_dir=$(cat ~/.gitconfig | grep "$GUEST_DIR") 106 | if [ -z "$g_dir" ]; then 107 | git config --global --add safe.directory "$GUEST_DIR" 108 | fi 109 | git config core.hooksPath .githooks 110 | 111 | if [ ! -f Dockerfile ] 112 | then 113 | echo "'$SCRIPT_DIR' does not contain a Dockerfile." 114 | exit 1 115 | fi 116 | 117 | echo "$container build '$SCRIPT_DIR/Dockerfile' with tag '$CONTAINER_TAG'..." 118 | if $container build -t "$CONTAINER_TAG" . 119 | then 120 | echo "$container build ok." 121 | else 122 | echo "$container build failed." 123 | exit 1 124 | fi 125 | 126 | 127 | if [ ${#args[@]} -eq 0 ] 128 | then 129 | args=("bash" "-c" "cd $GUEST_DIR; sudo bash script.sh; zsh") 130 | fi 131 | 132 | echo "$container run '$CONTAINER_TAG' (mounting host '$HOST_DIR' as '$GUEST_DIR'):" \ 133 | "${args[@]}" 134 | 135 | winenv $container run -it --rm \ 136 | -v "$HOST_DIR:$GUEST_DIR$(optZ)" \ 137 | -v "$HOME/.ssh:/home/user/.ssh" \ 138 | -v "$HOME/.gnupg:/home/user/.gnupg" \ 139 | -v "$HOME/.gitconfig:/home/user/.gitconfig" \ 140 | -v "$HOME/.zsh_history:/home/user/.zsh_history" \ 141 | -v "./kattis-cli/:/home/user/kattis-cli" \ 142 | -h debian \ 143 | "$CONTAINER_TAG" \ 144 | "${args[@]}" 145 | -------------------------------------------------------------------------------- /script.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env bash 2 | 3 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 4 | echo "Working directory is $SCRIPT_DIR" 5 | 6 | chown user:users /home/user/.zsh_history 7 | chown user:users /home/user/.gitconfig 8 | # chown user:users --recursive /home/user 9 | PATH='${SCRIPT_DIR}:$PATH' 10 | export PATH 11 | -------------------------------------------------------------------------------- /setup.sh: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env bash 2 | 3 | # download required files 4 | git clone https://github.com/rambasnet/course-container.git 5 | rm -rf course-container/.git 6 | rm course-container/README.md 7 | rm course-container/setup.sh 8 | rm course-container/.gitignore 9 | rm course-container/LICENSE 10 | rm -rf course-container/hello 11 | rm -rf course-container/cold 12 | cp -r course-container/. ./ 13 | rm -rf course-container 14 | git config core.hooksPath .githooks 15 | echo "Downloaded required files" 16 | -------------------------------------------------------------------------------- /test.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rambasnet/Intro-Database/94409d947b1e9d00af4fdc94c49fdaec0ae2bd8f/test.db -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rambasnet/Intro-Database/94409d947b1e9d00af4fdc94c49fdaec0ae2bd8f/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_db.py: -------------------------------------------------------------------------------- 1 | """Test module for db.py 2 | """ 3 | 4 | 5 | import os 6 | import unittest 7 | import sqlite3 8 | from typing import Tuple 9 | from python import db 10 | 11 | 12 | class TestDB(unittest.TestCase): 13 | """Test class for db.py 14 | """ 15 | 16 | def setUp(self) -> None: 17 | """Setup 18 | """ 19 | self.db_file = "sqlite.db" 20 | 21 | def tearDown(self) -> None: 22 | """Teardown 23 | """ 24 | if os.path.exists(self.db_file): 25 | os.remove(self.db_file) 26 | 27 | def test_create_connection(self) -> None: 28 | """Test create_connection function. 29 | """ 30 | conn = db.create_connection(self.db_file) 31 | self.assertIsInstance(conn, sqlite3.Connection) 32 | db.close_connection(conn) 33 | 34 | def test_close_connection(self) -> None: 35 | """Test close_connection function. 36 | """ 37 | conn = db.create_connection(self.db_file) 38 | db.close_connection(conn) 39 | self.assertRaises(sqlite3.ProgrammingError, 40 | conn.execute, "SELECT 1") 41 | 42 | def test_create_table(self) -> None: 43 | """Test create_table function. 44 | """ 45 | sql = """CREATE TABLE IF NOT EXISTS test ( 46 | id integer PRIMARY KEY, 47 | name text NOT NULL, 48 | age integer 49 | );""" 50 | db.create_table(self.db_file, sql) 51 | self.assertTrue(os.path.exists(self.db_file)) 52 | 53 | def test_create_table1(self) -> None: 54 | """Test create_table function. 55 | """ 56 | sql = """CREATE TABLE IF NOT EXISTS test ( 57 | id integer PRIMARY KEY, 58 | f_name text NOT NULL, 59 | l_name text NOT NULL, 60 | age integer 61 | );""" 62 | db.create_table(self.db_file, sql) 63 | sql_check = """SELECT name FROM sqlite_master \ 64 | WHERE type = 'table' \ 65 | AND name = 'test';""" 66 | row = db.select_one_row(self.db_file, sql_check, ()) 67 | self.assertEqual("test", row[0]) 68 | 69 | def test_insert_one_row(self) -> None: 70 | """Test insert_one_row function. 
71 | """ 72 | sql = """CREATE TABLE IF NOT EXISTS test ( 73 | id integer PRIMARY KEY, 74 | name text NOT NULL, 75 | age integer 76 | );""" 77 | db.create_table(self.db_file, sql) 78 | sql = """INSERT INTO test (name, age) VALUES (?, ?);""" 79 | data_in = ("John", 20) 80 | db.insert_one_row(self.db_file, sql, data_in) 81 | conn = db.create_connection(self.db_file) 82 | cursor = conn.cursor() 83 | cursor.execute("SELECT * FROM test") 84 | data_out = cursor.fetchone() 85 | self.assertEqual(data_in, data_out[1:]) 86 | db.close_connection(conn) 87 | 88 | def test_insert_one_row_error(self) -> None: 89 | """Test insert_one_row function. 90 | """ 91 | sql = """CREATE TABLE IF NOT EXISTS test ( 92 | id integer PRIMARY KEY, 93 | name text NOT NULL, 94 | age integer 95 | );""" 96 | db.create_table(self.db_file, sql) 97 | sql = """INSERT INTO test (id, name, age) VALUES (?, ?, ?);""" 98 | data_in = (1, "John", 20) 99 | db.insert_one_row(self.db_file, sql, data_in) 100 | # insert again 101 | self.assertRaises(sqlite3.IntegrityError, 102 | db.insert_one_row, self.db_file, sql, data_in) 103 | 104 | def test_insert_many_rows(self) -> None: 105 | """Test insert_many_rows function. 106 | """ 107 | sql = """CREATE TABLE IF NOT EXISTS test ( 108 | id integer PRIMARY KEY, 109 | name text NOT NULL, 110 | age integer 111 | );""" 112 | db.create_table(self.db_file, sql) 113 | sql = """INSERT INTO test (name, age) VALUES (?, ?);""" 114 | data_in = [("John", 20), ("Jane", 25)] 115 | db.insert_many_rows(self.db_file, sql, data_in) 116 | conn = db.create_connection(self.db_file) 117 | cursor = conn.cursor() 118 | cursor.execute("SELECT * FROM test") 119 | data_out = cursor.fetchall() 120 | self.assertEqual(data_in, [row[1:] for row in data_out]) 121 | db.close_connection(conn) 122 | 123 | def test_insert_many_rows_error(self) -> None: 124 | """Test insert_many_rows function. 125 | """ 126 | sql = """CREATE TABLE IF NOT EXISTS test ( 127 | id integer PRIMARY KEY, 128 | name text NOT NULL, 129 | age integer 130 | );""" 131 | db.create_table(self.db_file, sql) 132 | sql = """INSERT INTO test (id, name, age) VALUES (?, ?, ?);""" 133 | data_in = [(1, "John", 20), (2, "Jane", 25)] 134 | db.insert_many_rows(self.db_file, sql, data_in) 135 | # insert again 136 | self.assertRaises(sqlite3.IntegrityError, 137 | db.insert_many_rows, self.db_file, sql, data_in) 138 | 139 | def test_select_one_row(self) -> None: 140 | """Test select_one_row function. 141 | """ 142 | sql = """CREATE TABLE IF NOT EXISTS test ( 143 | id integer PRIMARY KEY, 144 | name text NOT NULL, 145 | age integer 146 | );""" 147 | db.create_table(self.db_file, sql) 148 | sql = """INSERT INTO test (name, age) VALUES (?, ?);""" 149 | data_in = [("John", 20), ("Jane", 25)] 150 | db.insert_many_rows(self.db_file, sql, data_in) 151 | sql = """SELECT * FROM test WHERE id = ?;""" 152 | data_out = db.select_one_row(self.db_file, sql, (1,)) 153 | self.assertEqual(data_in[0], data_out[1:]) 154 | 155 | def test_select_many_rows(self) -> None: 156 | """Test select_many_rows function. 
157 | """ 158 | sql = """CREATE TABLE IF NOT EXISTS test ( 159 | id integer PRIMARY KEY, 160 | name text NOT NULL, 161 | age integer 162 | );""" 163 | db.create_table(self.db_file, sql) 164 | sql = """INSERT INTO test (name, age) VALUES (?, ?);""" 165 | data_in = [("John", 20), ("Jane", 25)] 166 | db.insert_many_rows(self.db_file, sql, data_in) 167 | sql = """SELECT * FROM test;""" 168 | data_out = db.select_many_rows(self.db_file, sql, ()) 169 | self.assertEqual(data_in, [row[1:] for row in data_out]) 170 | 171 | def test_update_one_row(self) -> None: 172 | """Test update_one_row function. 173 | """ 174 | sql = """CREATE TABLE IF NOT EXISTS test ( 175 | id integer PRIMARY KEY, 176 | name text NOT NULL, 177 | age integer 178 | );""" 179 | db.create_table(self.db_file, sql) 180 | sql = """INSERT INTO test (name, age) VALUES (?, ?);""" 181 | data_in_list = [("John", 20), ("Jane", 25)] 182 | db.insert_many_rows(self.db_file, sql, data_in_list) 183 | sql = """UPDATE test SET name = ? WHERE id = ?;""" 184 | data_in_tuple = ("John Doe", 1) 185 | db.update_record(self.db_file, sql, data_in_tuple) 186 | sql = """SELECT * FROM test WHERE id = ?;""" 187 | data_out = db.select_one_row(self.db_file, sql, (1,)) 188 | self.assertEqual(("John Doe", 20), data_out[1:]) 189 | 190 | def test_update_many_rows(self) -> None: 191 | """Test update_many_rows function. 192 | """ 193 | sql = """CREATE TABLE IF NOT EXISTS test ( 194 | id integer PRIMARY KEY, 195 | name text NOT NULL, 196 | age integer 197 | );""" 198 | db.create_table(self.db_file, sql) 199 | sql = """INSERT INTO test (name, age) VALUES (?, ?);""" 200 | data_in_list = [("John", 20), ("Jane", 25)] 201 | db.insert_many_rows(self.db_file, sql, data_in_list) 202 | sql = """UPDATE test SET name = ? WHERE age <= ?;""" 203 | data_in: Tuple[str, int] = ("John Doe", 25) 204 | db.update_record(self.db_file, sql, data_in) 205 | sql = """SELECT * FROM test;""" 206 | data_out = db.select_many_rows(self.db_file, sql, ()) 207 | self.assertEqual([("John Doe", 20), ("John Doe", 25)], 208 | [row[1:] for row in data_out]) 209 | 210 | def test_delete_one_row(self) -> None: 211 | """Test delete_one_row function. 212 | """ 213 | sql = """CREATE TABLE IF NOT EXISTS test ( 214 | id integer PRIMARY KEY, 215 | name text NOT NULL, 216 | age integer 217 | );""" 218 | db.create_table(self.db_file, sql) 219 | sql = """INSERT INTO test (name, age) VALUES (?, ?);""" 220 | data_in = [("John", 20), ("Jane", 25)] 221 | db.insert_many_rows(self.db_file, sql, data_in) 222 | sql = """DELETE FROM test WHERE id = ?;""" 223 | db.delete_record(self.db_file, sql, (1,)) 224 | sql = """SELECT * FROM test;""" 225 | data_out = db.select_many_rows(self.db_file, sql, ()) 226 | self.assertEqual([("Jane", 25)], [row[1:] for row in data_out]) 227 | 228 | def test_create_index(self) -> None: 229 | """Test execute_non_query function. 
230 | """ 231 | sql = """CREATE TABLE IF NOT EXISTS test ( 232 | id integer PRIMARY KEY, 233 | name text NOT NULL, 234 | age integer 235 | ); 236 | """ 237 | db.create_table(self.db_file, sql) 238 | sql = """ 239 | CREATE INDEX IF NOT EXISTS idx_test_name ON test (name); 240 | """ 241 | db.execute_non_query(self.db_file, sql) 242 | conn = db.create_connection(self.db_file) 243 | cursor = conn.cursor() 244 | cursor.execute("SELECT * FROM sqlite_master \ 245 | WHERE type = 'index' \ 246 | and name='idx_test_name';") 247 | # should return a row with 5 columns: 248 | # type, name, tbl_name, rootpage, sql 249 | data_out = cursor.fetchone() 250 | self.assertEqual(5, len(data_out)) 251 | db.close_connection(conn) 252 | --------------------------------------------------------------------------------
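The tests above exercise the python/db.py wrapper one function at a time; the short sketch below strings the same helpers together end to end. It is illustrative only, assumes it is run from the repository root, and the example.db filename is hypothetical.

```python
# Illustrative end-to-end use of the python/db.py helpers tested above.
# Assumes execution from the repository root; example.db is a throwaway file.
from python import db

db_file = 'example.db'

db.create_table(db_file, """CREATE TABLE IF NOT EXISTS test (
    id integer PRIMARY KEY,
    name text NOT NULL,
    age integer
);""")

# insert with parameterized ? placeholders
db.insert_one_row(db_file, "INSERT INTO test (name, age) VALUES (?, ?);", ("John", 20))
db.insert_many_rows(db_file, "INSERT INTO test (name, age) VALUES (?, ?);",
                    [("Jane", 25), ("Joe", 30)])

# read, update, and delete rows using the same (db_file, sql, params) call pattern
print(db.select_many_rows(db_file, "SELECT * FROM test;", ()))
db.update_record(db_file, "UPDATE test SET age = ? WHERE name = ?;", (21, "John"))
db.delete_record(db_file, "DELETE FROM test WHERE id = ?;", (3,))
print(db.select_one_row(db_file, "SELECT * FROM test WHERE name = ?;", ("John",)))
```

Each helper opens its own connection from the file path, so the sketch needs no explicit connection management.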