├── .gitignore
├── LICENSE
├── README.md
├── ch00_course_introduction.md
├── ch01_database_systems.md
├── ch02_data_models.md
├── ch03_relational_database_model.md
├── ch04_entity_replationship_modeling.md
├── ch05_advanced_data_modeling.md
├── ch06_normalization_of_database_tables.md
├── ch07_introduction_to_SQL.md
├── ch08_advanced_SQL.md
├── ch10_transaction_and_concurrency.md
├── crash_ch2.sql
├── crash_ch3.sql
├── crash_ch4.sql
├── files
├── csv
│ ├── customer.csv
│ ├── emp.csv
│ ├── invoice.csv
│ ├── line.csv
│ ├── product.csv
│ └── vendor.csv
├── image
│ ├── 3_tier_architecture.png
│ ├── DBMS_SQL.jpg
│ ├── Types_of_Keys.jpg
│ ├── UML_class_diagram.png
│ ├── chens-notation-1.png
│ ├── chens-notation-2.png
│ ├── cover_of_database_systems_14e.jpg
│ ├── crows-foot-notation.png
│ ├── database_design_process.jpg
│ ├── employee_manager.drawio
│ ├── employee_manager.jpg
│ ├── fig_data_dictionary.png
│ ├── four_join_types.jpg
│ ├── functional_dependency_example.png
│ ├── hierarchical_model.png
│ ├── index_table_1.gif
│ ├── index_table_2.png
│ ├── index_table_3.png
│ ├── jacky_last_day_in_tsmc.jpg
│ ├── join_example.jpg
│ ├── mysql_join.jpg
│ ├── mysql_wb_toolbar.png
│ ├── qrcode_lecture_database.png
│ ├── relationship-cardinality.png
│ └── workbench.jpg
├── ipynb
│ ├── scores.ipynb
│ ├── social_media.ipynb
│ └── sqls_alternative.ipynb
└── sql
│ ├── create_function_and_procedure.sql
│ ├── p_split_big_ny_counties.sql
│ ├── triggers_after.sql
│ └── triggers_before.sql
└── requirements.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
110 | .pdm.toml
111 | .pdm-python
112 | .pdm-build/
113 |
114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
115 | __pypackages__/
116 |
117 | # Celery stuff
118 | celerybeat-schedule
119 | celerybeat.pid
120 |
121 | # SageMath parsed files
122 | *.sage.py
123 |
124 | # Environments
125 | .env
126 | .venv
127 | env/
128 | venv/
129 | ENV/
130 | env.bak/
131 | venv.bak/
132 |
133 | # Spyder project settings
134 | .spyderproject
135 | .spyproject
136 |
137 | # Rope project settings
138 | .ropeproject
139 |
140 | # mkdocs documentation
141 | /site
142 |
143 | # mypy
144 | .mypy_cache/
145 | .dmypy.json
146 | dmypy.json
147 |
148 | # Pyre type checker
149 | .pyre/
150 |
151 | # pytype static type analyzer
152 | .pytype/
153 |
154 | # Cython debug symbols
155 | cython_debug/
156 |
157 | # PyCharm
158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
160 | # and can be added to the global gitignore or merged into this file. For a more nuclear
161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
162 | #.idea/
163 |
164 | # Temp files
165 | restricted/
166 | tmp/
167 | test/
168 | z_*.*
169 | .DS_Store
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Lecture-Database
2 | Database course materials offered at NYCU. The materials are subject to modification.
3 | Some contents refer to textbooks and web pages.
4 | Here, I would like to express my thanks to those authors.
--------------------------------------------------------------------------------
/ch00_course_introduction.md:
--------------------------------------------------------------------------------
1 | ---
2 | marp: true
3 | theme: default
4 | class: invert
5 | size: 16:9
6 | paginate: true
7 | footer: 國立陽明交通大學 電子與光子學士學位學程
8 | headingDivider: 1
9 | style: |
10 | section::after {
11 | content: attr(data-marpit-pagination) '/' attr(data-marpit-pagination-total);
12 | }
13 |
14 | .columns {
15 | display: grid;
16 | grid-template-columns: repeat(2, minmax(0, 1fr));
17 | gap: 1rem;
18 | }
19 | .columns img {
20 | width: 50%;
21 | }
22 | .middle-grid {
23 | display: grid;
24 | grid-template-columns: repeat(2, minmax(0, 1fr));
25 | gap: 1rem;
26 | }
27 | .middle-grid img {
28 | width: 75%;
29 | }
30 | .grid {
31 | display: grid;
32 | grid-template-columns: 1fr 1fr;
33 | gap: 10px;
34 | }
35 | .grid img {
36 | width: 100%;
37 | }
38 | .red-text {
39 | color: red;
40 | }
41 |
42 | .blue-text {
43 | color: blue;
44 | }
45 |
46 | .small-text {
47 | font-size: 0.50rem;
48 | }
49 | ---
50 | # 資料庫管理
51 | - 講師:林志偉
52 | - 教材:https://github.com/mingfujacky/Lecture-Database.git
53 | 
54 |
55 | # Textbook
56 | 
57 | - Database Systems Design, Implementation, and Management 14/E
58 | - Coronel & Morris
59 | - Cengage, 2023
60 |
61 | # 課程講師 - 林志偉 (Jacky Lin)
62 | 
63 |
64 | - #### 現職: 陽明交通大學 / 學士後電子與光子學士學位學程 助理教授
65 | - #### 學歷: 交大資訊管理博士
66 | - #### 經歷: 台積電資訊科技(IT)
67 | - #### 專長: 資料工程、程式設計、巨量資料分析
68 | - #### Email: jacky.jw.lin@nycu.edu.tw
69 |
70 | # 課程助教
71 |
199 |

200 |
201 | # Data Dictionary and the System Catalog
202 | - **Data dictionary** describes all tables in the DB created by the user and designer
203 | - **System catalog** describes all objects within the database
204 | - Homonym – same name is used to label different attributes
205 | - Synonym – different names are used to describe the same attribute.
206 | - Both homonym and synonym should be avoided whenever possible
207 | 
208 |
209 | # Relationships within the Relational Database
210 | - The one-to-many (1:M) relationship is the norm for relational databases
211 | - In the one-to-one (1:1) relationship, one entity can be related to one and only one other entity and vice versa
212 | - The many-to-many (M:N) relationship can be implemented by creating a new entity in 1:M relationships with the original entities
213 |
214 | # 1:M Relationship
215 |
216 |

217 |

218 |

219 |

220 |
221 |
222 | # 1:1 Relationship
223 | - 1:1 a professor chairs only one department
224 | - 1:M a department employs many professors
225 |
226 |

227 |

228 |
229 |
230 | # M:N Relationship
231 | - An M:N relationship is not supported directly in the relational environment.
232 | - M:N relationship can be implemented by creating a new entity in 1:M relationships with the original entities
233 | - In Fig 3.24, the tables create many data redundancies, and relational operations become complex and less efficient
234 | -
235 |

236 |

237 |
238 |
239 | # Introduce Composite Entity into M:N Relationship
240 | Table ENROLL is a composite entity (bridge entity, associative entity, link table) to help convert M:N to 1:M
241 | -
242 |

243 |

244 |
245 |
246 | # Data Redundancy Revisited
247 | - The relational database control of data redundancies through use of foreign keys
248 | - Data redundancy should be controlled, except for performance or historical-data purposes
249 |
250 |

251 |

252 |
253 |
254 | # Index to Increase Performance
255 | - An index is an orderly arrangement to logically access rows in a table
256 | - The index key is the reference point that leads to data location identified by the key
257 | - A table can have many indexes, but each index is associated with only one table
258 | - The index key can have multiple attributes
259 |
260 |
261 |

262 |
263 |
264 | # Review Questions
265 | - What are the integrity rules in an RDBMS?
266 | - Describe relational database operators to manipulate relational table contents.
267 | - Describe how to deal with M:N relationship.
--------------------------------------------------------------------------------
/ch04_entity_replationship_modeling.md:
--------------------------------------------------------------------------------
1 | ---
2 | marp: true
3 | theme: default
4 | class: invert
5 | size: 16:9
6 | paginate: true
7 | footer: 國立陽明交通大學 電子與光子學士學位學程
8 | headingDivider: 1
9 | style: |
10 | section::after {
11 | content: attr(data-marpit-pagination) '/' attr(data-marpit-pagination-total);
12 | }
13 |
14 | .middle-grid {
15 | display: grid;
16 | grid-template-columns: repeat(2, minmax(0, 1fr));
17 | gap: 1rem;
18 | }
19 | .middle-grid img {
20 | width: 75%;
21 | }
22 | .grid {
23 | display: grid;
24 | grid-template-columns: 1fr 1fr;
25 | gap: 10px;
26 | }
27 | .grid img {
28 | width: 100%;
29 | }
30 | .red-text {
31 | color: red;
32 | }
33 | .brown-text {
34 | color: brown;
35 | }
36 | .blue-text {
37 | color: lightskyblue;
38 | }
39 |
40 | .small-text {
41 | font-size: 0.50rem;
42 | }
43 | ---
44 | # Chapter4: Entity Relationship (ER) Modeling
45 | - Data modeling is the first step in database design, as a bridge between real-world objects and the database model implemented in the computer.
46 | - It is important to illustrate data-modeling details graphically through entity relationship diagrams (ERDs) to facilitate communication.
47 |
48 |

49 |
50 |
51 | # Entity Relationship Model
52 | - The entity relationship model (ERM) generates an ERD (ER diagram)
53 | - The ERD represents the
external model as viewed by end users
54 | - The ERD represents the
conceptual model as viewed by database designer
55 | - ERDs describe the database’s main components:
56 | - Entities
57 | - Attributes
58 | - Relationships
59 |
60 | # Entity
61 | - An entity is an object of interest to the end user
62 | - An entity in the ERM is a table (relation)
63 | - In Crow’s Foot notation, an entity is represented by a rectangle
64 | - contains entity name
65 | - entity name is a noun usually written in all capital letters. It would depend on your naming convention
66 |
67 | # Attributes
68 | - **Attributes** are characteristics of entities
69 | - **Required attribute** (not null) and **optional attribute** (allow null)
70 | - Attributes must have a **domain**, the set of possible values for a given attribute
71 | - **Identifier** and **composite identifier** is one or more attributes that uniquely identify each row.
72 | - **Simple attribute** (age, sex) and **composite attribute** (address, phone_number)
73 | - **Single-valued attribute** (emp_id) and **multi-valued attributes** (car_color, emp_habit)
74 | - **Derived attribute** whose value is calculated from other attributes (working_years)
75 |
76 | # Identifier and Composite identifier
77 |
78 |

79 |
80 |
81 | - CLASS_CODE is an identifier
82 | - (CRS_CODE, CLASS_SECTION) is a composite identifier
83 |
84 | # Entity's Notation
85 |
86 |

87 |

88 |
89 |
90 | Required attribute: bold font
91 | PK: in a separated cell with bold and underline font
92 |
93 | # Implementing Multi-valued Attributes
94 | - If necessary, replace a multi-valued attribute by creating several new attributes
95 | - If necessary, replace a multi-valued attribute by creating a new entity
96 |
97 |

98 |

99 |
100 |
101 |
Q: What are the pros and cons of the two replacement approaches?
102 |
103 | # Derived Attributes
104 |
105 |

106 |
107 |
108 |
Q: Which attribute is proper to implement as a derived attribute, working_year or total_amount?
109 |
110 | # Relationship
111 | - The entities that participate in a relationship are also known as
participants
112 | - A relationship is identified by a name that describes the relationship
113 | - The relationship name is an active or passive
verb
114 | -
Connectivity describes the relationship classification: 1:1, 1:M, and M:N
115 | -
Cardinality expresses the minimum and maximum number of entity occurrences associated with one occurrence of the related entity
116 |
117 | # Relationship's Notation
118 |
119 |

120 |

121 |
122 |
123 | - (1, 4): one professor teach at least one and no more than four classes
124 | - (1, 1): each class is taught by one and only one professor
125 |
126 | # Existence Dependence
127 | - Entity can be
strong or
weak depending on whether the entity can exist independently or not.
128 | - A strong entity can exist apart from all of its related entities, it is
existence-independent
129 | - A weak entity is
existence-dependent on another related entity occurrence
130 | - Relationship 'EMPLOYEE claims DEPENDENT', the DEPENDENT entity is existence dependent on the EMPLOYEE entity. That is, DEPENDENT has a mandatory (NOT NULL) foreign key, EMP_NUM to link with EMPLOYEE.
131 |
132 | # Weak Entity
133 | - A weak entity is existence-dependent on a strong entity with a strong (identifying) relationship - requires a non-null FK from the related strong entity and form a composite PK.
134 | - DEPENDENT(
EMP_NUM, DEP_SID, DEP_NAME, DEP_DOB), EMP_NUM is FK,
135 | - A weak entity always has a **mandatory participation** to a strong entity (every row of the weak entity must be associated with one row of a strong entity because of non-null FK).
136 |
137 | # Strong Entity
138 | - A strong entity has a PK that uniquely identifies each record without depending on other entity.
139 | - EMPLOYEE(
EMP_NUM, EMP_LNAME, EMP_FNAME, EMP_INITIAL, EMP_DOB, EMP_HIREDATE)
140 |
141 | # Example of Strong and Weak Entities
142 | Considering two entities: EMPLOYEE (strong) and DEPENDENT (weak)
143 | - DEPENDENT is weak because it has no sufficient PK by itself at the beginning
144 | - DEPENDENT(
DEP_SID, DEP_NAME, DEP_DOB, EMP_NUM), EMP_NUM is FK, but when two employees are a couple, their children will be duplicated
145 | - DEP_SID alone cannot uniquely identify a dependent.
146 | - Need expand PK of DEPENDENT by combining EMP_NUM
147 | - DEPENDENT(
EMP_NUM, DEP_SID, DEP_NAME, DEP_DOB) to build a strong (identifying) relationship with EMPLOYEE to uniquely identify each dependent.
148 | - DEP_NUM is better than DEP_SID in terms of privacy
149 | - DEPENDENT(
EMP_NUM, DEP_NUM, DEP_NAME, DEP_DOB), EMP_NUM is non-null FK
150 |
151 | # Illustrate Relationship Between Weak & Strong Entity
152 |
153 |

154 |

155 |
156 |
157 | # Relationship Strength
158 | -
Relationship strength can be strong or weak based on how to define PK of a related entity.
159 | - To implement a relationship, the PK of one entity (parent entity, normally on the “one” side of 1:M relationship) appears as a FK in the related entity (child entity, mostly the entity on the “many” side of 1:M relationship) to link two entities.
160 | -
Non-identifying(weak) Relationships : if the PK of the "M side" entity does
NOT contain a PK of the '1 side' entity
161 | -
Identifying (strong) Relationships: when the PK of the "M side" entity contains the PK of the "1 side" entity
162 |
163 | # Illustration of Relationship Strength
164 |
165 |
166 |

167 |

168 |
169 |
170 | - dotted line shows weak relationships; solid line shows strong relationships
171 |
172 | # Implementation Strong / Weak Relationship in DBMS
173 | - Use a strong relationship when:
174 | • The M-side entity is conceptually a part of the 1-side.
175 | • The M-side object should be destroyed when the 1-side is destroyed (e.g., an employee’s dependant).
176 | - Use a weak relationship when:
177 | • The M-side entity can exist independently of the 1-side.
178 | • The M-side object should not be deleted if the 1-side is deleted (e.g., an employee and a department).
179 |
180 | # The Order to Load Tables Under 1:M Relationship
181 | - Keep in mind that the order in which the tables are created and loaded is very important.
182 | - In the “COURSE generates CLASS” relationship, the COURSE table must be created before the CLASS table. After all, it would not be acceptable to have the CLASS table’s foreign key refer to a COURSE table that did not yet exist.
183 | - Load the data of the “1” side first in a 1:M relationship to avoid the possibility of referential integrity errors.
184 |
185 | # Relationship Participation
186 | - Relationship participation is either
optional or mandatory.
187 | - Because of the bidirectional nature of relationships, it is necessary to determine the connectivity as well as max and min cardinalities of the relationship from COURSE to CLASS and from CLASS to COURSE.
188 | - **Optional participation** means that some rows may not participate in the relationship
189 | - **Mandatory participation** means that each row must participate in the relationship
190 |
191 | # Illustration of Relationship Participation
192 |
193 |
198 |
199 | # Relationship Degree
200 |
201 |

202 |
203 |
204 | # Recursive Relationship
205 |
206 |

207 |

208 |
209 |
210 | # Associative (Composite) Entities
211 | - The ER model uses the associative entity to represent an M:N relationship between two or more entities
212 | - It is also called a composite or bridge entity and is a 1:M relationship with the parent entities
213 | - It is composed of the primary key attributes of each parent entity
214 | - The composite entity may also contain additional attributes that play no role in connective process
215 |
216 | # Illustration of Associative Entities
217 | STUDENT has CLASS is a M:N relationship
218 |
219 |

220 |

221 |
222 |
223 | # Developing an ER Diagram
224 | Building an ERD usually involves the following activities in an
iterative process:
225 | - Create a detailed description of the organization’s operations
226 | - Interview users
227 | - Investigate SOPs, Forms, Reports
228 | - Identify business rules based on the description of operations
229 | - Identify main entities and relationships from the business rules
230 | - Develop the initial ERD
231 | - Identify the attributes and primary keys that adequately describe entities
232 | - Revise and review the ERD
233 |
234 | # Tiny College (TC) (1,2/10)
235 | - Tiny College is divided into several schools.
236 | - A school is managed by a professor.
237 | - Each professor can be the dean of only one school, or not be the dean of any school.
238 | - Each school has several departments.
239 | - A school operates at least one and possibly many departments
240 | - Each department belongs to only a single school
241 | 
242 |
243 | # Tiny College (TC) (3/10)
244 | - Each department may offer courses.
245 | - Some departments that were classified as "research only," they would not offer courses; therefore, the COURSE entity would be optional to the DEPARTMENT entity.
246 | 
247 |
248 | # Tiny College (TC) (4/10)
249 | - A course can be taught in several classes.
250 | - A course may not be taught in some semester
251 | - A class is offered during a given semester. SEMESTER defines the year and the term that the class will be offered.
252 | - CLASS is optional to SEMESTER.
253 | - CLASS is optional to COURSE.
254 | 
255 |
256 | # Tiny College (TC) (5/10)
257 | - Each department should have one or more professors assigned to it.
258 | - One and only one of those professors chairs the department
259 | - Not all professors are required to chair a department.
260 | - DEPARTMENT is optional to PROFESSOR in the "chairs" relationship.
261 | 
262 |
263 | # Tiny College (TC) (6/10)
264 | - Each professor may teach up to four classes; each class belongs to a course.
265 | - A professor may also be on a research contract and teach no classes at all.
266 | 
267 |
268 | # Tiny College (TC) (7/10)
269 | - A student may enroll in several classes but take each class only once.
270 | - Each student may enroll in up to six classes, and each class may have up to 35 students, (STUDENT and CLASS is M:N relationship ).
271 | - This M:N relationship must be divided into two 1:M relationships by ENROLL entity
272 | 
273 |
274 | # Tiny College (TC) (8/10)
275 | - Each department has several students whose major is offered by that department.
(VAGUE!!)
276 | - Each student has only a single major associated with a single department.
277 | - It is possible for a student not to declare a major field of study.
278 | 
279 |
280 | # Tiny College (TC) (9/10)
281 | - Each student has an advisor in his or her department
282 | - Each advisor counsels several students.
283 | - An advisor is also a professor, but not all professors advise students.
284 | 
285 |
286 | # Tiny College (TC) (10/10)
287 | - A class is taught in a room.
288 | - Each room is located in a building.
289 | - A building can contain many rooms.
290 | - Some buildings do not contain (class) rooms.
291 | 
292 |
293 | # Tiny College (TC) (Summary: Entities)
294 | PROFESSOR
295 | COURSE
296 | STUDENT
297 | SCHOOL
298 | CLASS
299 | BUILDING
300 | DEPARTMENT
301 | SEMESTER
302 | ROOM
303 | ENROLL (the associative entity between STUDENT and CLASS)
304 |
305 | # Summary: Components of ERM
306 | 
307 |
308 | # Summary: Completed ERD
309 | 
310 |
311 | # Database Design Challenges: Conflicting Goals
312 | - Database designers must often make design compromises that are triggered by conflicting
GOALS
313 | - Database design must conform to design standards
314 | - High processing speed may limit the number and complexity of logically desirable relationships
315 | - However, a design that meets all requirements and design conventions is the most important goal
316 |
317 | # Review Questions
318 | - What is the difference between weak entity and strong entity?
319 | - What is the difference between weak (non-identifying) and identifying (strong) relationship?
320 | - How to translate M:N relationship in ERM?
321 |
322 | # Homework #B
323 | 資料庫課程作業(B)
324 |
325 | # Present Final Project Progress
326 | - Date: 04/15 (專班) and 04/16 (資財)
327 | - Duration: 5 minutes per team
328 | - Approach: oral presentation and one Word page only
329 | - Agenda:
330 | - Project briefing and teaming
331 | - Expected deliverables
332 | - Current progress
333 | - Support needed (if any)
--------------------------------------------------------------------------------
/ch05_advanced_data_modeling.md:
--------------------------------------------------------------------------------
1 | ---
2 | marp: true
3 | theme: default
4 | class: invert
5 | size: 16:9
6 | paginate: true
7 | footer: 國立陽明交通大學 電子與光子學士學位學程
8 | headingDivider: 1
9 | style: |
10 | section::after {
11 | content: attr(data-marpit-pagination) '/' attr(data-marpit-pagination-total);
12 | }
13 |
14 | .middle-grid {
15 | display: grid;
16 | grid-template-columns: repeat(2, minmax(0, 1fr));
17 | gap: 1rem;
18 | }
19 | .middle-grid img {
20 | width: 75%;
21 | }
22 | .grid {
23 | display: grid;
24 | grid-template-columns: 1fr 1fr;
25 | gap: 10px;
26 | }
27 | .grid img {
28 | width: 100%;
29 | }
30 | .red-text {
31 | color: red;
32 | }
33 |
34 | .blue-text {
35 | color: blue;
36 | }
37 |
38 | .small-text {
39 | font-size: 0.80rem;
40 | }
41 | ---
42 | # Chapter5: Advanced Data Modeling
43 | - Illustrate extended entity relationship (EER) model.
44 | - Describe the characteristics of good primary keys and how to select them.
45 | - Data-modeling design cases
46 |
47 | # Nulls Created by Unique Attributes
48 | 
49 |
50 | # Extended (Enhanced) Entity Relationship Model (EERM)
51 | - EERM is the result of adding more object-oriented concepts to the original ER model
52 | - A diagram that uses the EERM is called EER diagram (EERD)
53 |
54 | # Entity Supertypes and Subtypes
55 | - The grouping of employees into various types provides the following two benefits:
56 | - It avoids unnecessary nulls in attributes when some employees have characteristics that are not shared by other employees
57 | - It enables a particular employee type to participate in relationships that are unique to that employee type
58 | - The entity supertype (EMPLOYEE) contains common characteristics
59 | - The entity subtype (PILOT, MECHANIC, ACCOUNTANT) contains unique characteristics of each entity subtype
60 |
61 | # Characteristics of EERD
62 | - Support attribute **inheritance**
63 | - Subtypes inherit primary key from supertype
64 | - Subtypes inherit all attributes and relationships from its supertypes
65 | - Have a special supertype attribute as the **subtype discriminator**, commonly use equality comparison
66 |
67 | # Specialization Hierarchy Example
68 | 
69 |
70 | # Entity Clustering (封裝的概念)
71 | - OFFERING: SEMESTER + COURSE + CLASS
72 | - LOCATION: ROOM + BUILDING
73 | 
74 |
75 | # Entity Integrity: Selecting Primary Keys
76 | - The most important characteristic of an entity is its primary key (a single attribute or a combination of attributes), which uniquely identifies each entity instance.
77 | - The primary key’s function is to guarantee entity integrity
78 | - Primary keys and foreign keys work together to implement relationships in the relational model
79 | - The importance of properly selecting the primary key has a direct bearing on the efficiency and effectiveness of database implementation
80 |
81 | # Natural Keys and Primary Keys
82 | - A natural key is a real-world identifier used to uniquely identify real-world objects, which forms part of end user day-to-day business vocabulary
83 | - Usually, if an entity has a natural identifier, a data modeler uses it as the primary key of the entity being modeled
84 |
85 | Q: Guess the pros and cons of using a natural key
86 | [7 Database Design Mistake](https://youtu.be/s6m8Aby2at8?si=LsJyqtws-hEz2UyN)
87 |
88 | # Primary Key Guidelines
89 | - Unique values
90 | - No change over time
91 | - Preferably single-attribute
92 | - Preferably numeric: auto-numbering
93 | - Security-compliant: Social Security number is not a good choice
94 |
95 | # When to Use Composite Primary Keys
96 | - As identifiers of composite (bridge, associate) entities, in which each primary key combination is allowed once in M:N relationship
97 | - As identifiers of weak entities, in which the weak entity has a strong identifying relationship with the parent entity
98 |
99 | 
100 |
101 | # When to Use Surrogate Primary Keys (代理鍵)
102 | - A surrogate key is a primary key created by the database designer to simplify the identification of entity instances
103 | - Surrogate key has no business meaning, with advantages like unique, stability, performance
104 | 
105 |
106 | # Design Case 1: Implementing 1:1 Relationships
107 | - Foreign keys work with primary keys to properly implement relationships in the relational model
108 | - The basic rule is to put the primary key of the parent entity on the dependent entity as a foreign key
109 | - Options for selecting and placing the foreign key include the following:
110 | - Place a foreign key in both entities
111 | - Place a foreign key in one of the entities
112 |
113 | # Design Case 1: Illustration
114 | A 1:1 relationship:
115 | - An EMPLOYEE manages zero or one DEPARTMENT
116 | - Each DEPARTMENT is managed by one EMPLOYEE
117 | 
118 |
119 | Design comparison
120 | - Fig 1: proper design
121 | - Fig 2: generate many null values
122 | - Fig 3: duplicated work
123 |
124 | # Design Case 2: Maintaining Salary History of Time-Variant Data
125 | -
Time-variant data refers to data whose values change over time and the data changes must be retained
126 | -
Modeling time-variant data, need a new entity with 1:M relationship to the original entity
127 | -
This new entity contains the new value, the date of the change, and any other pertinent attribute
128 | -
Question: What is (1) current salary and (2) salary raise history of an employee within a time period
129 | -
Discussion: in relationship emp_sal_hist, what cardinality salary_hist is? (0,M) or (1,M)
130 | 
131 |
132 | # Design Case 3: Fan Traps
133 | - A design **trap** occurs when a relationship is improperly or incompletely identified, which is not consistent with the real world
134 | - The most common design trap is fan trap, a type of join path between three tables when a "1-to-M" join links a table which is in turn linked by another "1-to-M" join
135 | - It produces an association among other entities not expressed in the model
136 | 
137 | - Question: Which team the player Jordan belongs to ?
138 |
139 | # Illustration of Design Case 3
140 | 
141 | Exists a **transitive** relationship between DIVISION and PLAYER via the TEAM entity
142 |
143 | # Design Case 4: Redundant Relationships
144 | - Redundant relationships occur when there are multiple relationship paths between related entities
145 | - The main concern is that they remain consistent across the model
146 | - Some designs use redundant relationships as a way to simplify the design
147 | 
148 |
149 | # Review Questions
150 | - What is an entity supertype, and why is it used?
151 | - What is the most common design trap, and how does it occur?
152 | - Describe the characteristics of good primary keys and how to select them
--------------------------------------------------------------------------------
/ch06_normalization_of_database_tables.md:
--------------------------------------------------------------------------------
1 | ---
2 | marp: true
3 | theme: default
4 | class: invert
5 | size: 16:9
6 | paginate: true
7 | footer: 國立陽明交通大學 電子與光子學士學位學程
8 | headingDivider: 1
9 | style: |
10 | section::after {
11 | content: attr(data-marpit-pagination) '/' attr(data-marpit-pagination-total);
12 | }
13 |
14 | .middle-grid {
15 | display: grid;
16 | grid-template-columns: repeat(2, minmax(0, 1fr));
17 | gap: 1rem;
18 | }
19 | .middle-grid img {
20 | width: 75%;
21 | }
22 | .grid {
23 | display: grid;
24 | grid-template-columns: 1fr 1fr;
25 | gap: 10px;
26 | }
27 | .grid img {
28 | width: 100%;
29 | }
30 | .red-text {
31 | color: red;
32 | }
33 |
34 | .blue-text {
35 | color: lightskyblue;
36 | }
37 |
38 | .brown-text {
39 | color: brown;
40 | }
41 |
42 | .small-text {
43 | font-size: 0.80rem;
44 | }
45 | ---
46 | # Chapter6: Normalization of Database Tables
47 | - Good database design must be matched to good table structures.
48 | - Learn to evaluate and design good table structures to **control data redundancies** thereby avoiding data anomalies.
49 | - The process that yields such desirable results is known as **normalization**.
50 |
51 | # Self-Taught Resource
52 | https://www.databasestar.com/mysql-database/
53 | https://www.databasestar.com/database-design/
54 | https://www.databasestar.com/programmer-jokes/
55 | https://www.databasestar.com/vip/
56 | https://youtube.com/@decomplexify?si=I5sEMNUZOJcSpiCX
57 |
58 |
59 | # Database Tables and Normalization
60 | -
**Normalization** is a process for adjusting table structures to
**minimize data redundancies**
61 | - Reduce data anomalies
62 | - Assigns attributes to tables based on functional dependency
63 | - Normalization goes through a series of stages called normal forms
64 | 
65 |
66 | # Why Normalize a Database
67 | - Prevent the same data from being stored in more than one place (insert anomaly)
68 | - Prevent updates being made to some data but not others (update anomaly)
69 | - Prevent data not being deleted when it is supposed to be, or from data being lost when it is not supposed to be (delete anomaly)
70 | - Ensure the data is accurate
71 | - Reduce the storage space that a database takes up
72 | - Ensure the queries on a database run as fast as possible
73 |
74 | # Data Redundancies Issues - A Sample Table
75 |
80 | Student ID|Student Name|Fees Paid|Course Name|Class 1|Class 2|Class 3
81 | ----------|------------|---------|-----------|-------|-------|-------
82 | 1|John Smith|200|Economics|Economics 1|Biology 1
83 | 2|Maria Griffin|500|Computer Science|Biology 1|Business Intro|Programming 2
84 | 3|Susan Johnson|400|Medicine|Biology 2
85 | 4|Matt Long|850|Dentistry
86 |
87 | - Attributes: student names, paid fees, registered classes
88 | - It is not a normalized table, and there are a few issues with this
89 |
90 | # Data Redundancies Issues - Insert Anomaly
91 | - If we want to add a new student but did not know their course name
92 |
97 | Student ID|Student Name|Fees Paid|Course Name|Class 1|Class 2|Class 3
98 | ----------|------------|---------|-----------|-------|-------|-------
99 | 1|John Smith|200|Economics|Economics 1|Biology 1
100 | 2|Maria Griffin|500|Computer Science|Biology 1|Business Intro|Programming 2
101 | 3|Susan Johnson|400|Medicine|Biology 2
102 | 4|Matt Long|850|Dentistry
103 | 5|Jared Oldham|0|?
104 |
105 | - We would be adding incomplete data to our table, which can cause issues when trying to analyze this data.
106 |
107 | # Data Redundancies Issues - Update Anomaly
108 | - If the class Biology 1 was changed to “Intro to Biology”. We would have to query all of the columns that could have this Class field and rename each one that was found.
109 |
110 |
115 | Student ID|Student Name|Fees Paid|Course Name|Class 1|Class 2|Class 3
116 | ----------|------------|---------|-----------|-------|-------|-------
117 | 1|John Smith|200|Economics|Economics 1|Biology 1
118 | 2|Maria Griffin|500|Computer Science|Biology 1|Business Intro|Programming 2
119 | 3|Susan Johnson|400|Medicine|Biology 2
120 | 4|Matt Long|850|Dentistry
121 |
122 | - There’s a risk that we miss out on a value, which would cause issues.
123 | - Ideally, we would only update the value once, in one location.
124 |
125 | # Data Redundancies Issues - Delete Anomaly
126 | - If Susan Johnson quits and her record needs to be deleted from the system. We could delete her row
127 |
128 |
133 | Student ID|Student Name|Fees Paid|Course Name|Class 1|Class 2|Class 3
134 | ----------|------------|---------|-----------|-------|-------|-------
135 | 1|John Smith|200|Economics|Economics 1|Biology 1
136 | 2|Maria Griffin|500|Computer Science|Biology 1|Business Intro|Programming 2
137 | **3**|**Susan Johnson**|**400**|**Medicine**|**Biology 2**
138 | 4|Matt Long|850|Dentistry
139 |
140 | - But, if we delete this row, we lose the record of the Biology 2 class, because it’s not stored anywhere else. The same can be said for the Medicine course.
141 | - We should be able to delete one type of data or one record without having impacts on other records we don’t want to delete.
142 |
143 | # A Sample Report Layout
144 | 
145 |
146 | # Poor Table Structure
147 | 
148 | - Data inconsistency
149 | - Difficult to update
150 | - Data redundant
151 |
152 | # Enough Normalization
153 | - From a structural point of view, higher normal forms are better than lower normal forms
154 | - For most purposes in business database design, 3NF is as high as you need to go in the normalization process
155 | - Denormalization produces a lower normal form to increase performance but greater data redundancy
156 |
157 | # The Need for Normalization
158 | - Database designers commonly use normalization in the following two situations:
159 | - When designing a new database structure
160 | - To analyze the relationship among the attributes within each entity and determine if the structure can be improved through normalization
161 | - The main goal of normalization is to eliminate data anomalies by eliminating unnecessary
data redundancies
162 | - Normalization uses the concept of
functional dependencies, FD to identify which attribute determines other attributes
163 |
164 | # The Objectives of Normalization
165 | - Each table represents a single subject
166 | - Each row/column intersection contains only one value and not a group of values
167 | - No data item will be unnecessarily stored in more than one table.
168 | - Data is updated in only one place
169 | - All non-prime attributes in a table are dependent on the primary key
170 | - Each table has no insertion, update, or deletion anomalies
171 | - Ensure that all tables are in at least in 3NF in business environment
172 | - Work one table at a time, identifying FD of a table
173 |
174 | # Normal Forms
175 | 
176 |
177 | # Normalization Base: Functional Dependency (FD)
178 | - Normalization starts by identifying **functional dependencies** of a given table
179 | -
FD X→Y: the values of Y are determined by the values of X. (X, Y is a set of attributes)
180 | -
X→Y is full FD , if no attribute can be removed from X and still keep the dependency.
181 | - Example:
182 | - PROJ_NUM → PROJ_ NAME (read as PROJ_ NUM functionally determines PROJ_NAME)
183 | - Attribute PROJ_NUM is known as the determinant attribute
184 | - Attribute PROJ_NAME is known as the dependent attribute.
185 |
186 | # FD Type: Partial Functional Dependency
187 | When there is a FD in which the determinant is only part of the PK
188 | -
X→Y is a partial FD if X is a subset of PK.
189 |
190 | Example
191 | - Given a table having PK (A, B), there is a FD (B → C), we say it is a partial FD because B is a subset of the PK
192 |
193 | # FD Type: Transitive Functional Dependency
194 | When an attribute is dependent on another attribute which is not part of the PK
195 | - Transitive FD is more difficult to identify among a set of data
196 | - They occur only when a FD exists among non-prime attributes
197 | Example
198 | - Given primary key: X, there are two FDs X → Z and X → Y
199 | - After investigating, we find that there is a FD Y → Z, which can support X determine Z because (X → Y) + (Y → Z) can make (X → Z) that is, x can determine the value of Z via Y.
200 | - Y → Z signals that there is a
transitive FD because Y is not a PK.
201 |
202 | # Why Do We Do Database Normalization?
203 | 
204 |
205 | # Conversion to First Normal Form (1NF)
206 | A table in 1NF means
207 | - All key attributes are well defined
208 | - There are no repeating groups
209 | - All attributes are dependent on the primary key
210 |
211 | Converting to 1NF starts with three steps
212 | 1. Eliminate the repeating groups
213 | 2. Identify the primary key
214 | 3. Identify all dependencies
215 |
216 | # Conversion to First Normal Form (1NF) - Supplement
217 | - Row order does not convey any information
218 | - There is no mixed data types within a column
219 |
220 | # 1NF Step1 - Eliminate Repeating Groups
221 | Repeating group: a group of entries existing for a single key value
222 |
223 |
224 |

225 |

226 |
227 |
228 | # 1NF Step2 - Identify PK
229 | PK: an identifier composed of one or more attributes that uniquely identifies a row
230 |
PROJ_NUM + EMP_NUM
231 |
232 | 
233 |
234 | # 1NF Step3 - Identify all Dependencies
235 | According to PK (PROJ_NUM, EMP_NUM), we can find a dependency exist (PROJ_NUM, EMP_NUM) → (PROJ_NAME, EMP_NAME, JOB_CLASS, CHG_HOUR, HOURS) and derive it into two partial FD and one transitive FD
236 | - Partial FD: PROJ_NUM → PROJ_NAME (because PROJ_NUM is a part of PK)
237 | - Partial FD: EMP_NUM → EMP_NAME, JOB_CLASS, CHG_HOUR,
(not HOURS) (because EMP_NUM is a part of PK)
238 | - Transitive FD: JOB_CLASS → CHG_HOUR (because JOB_CLASS is not part of PK )
239 |
240 | # Dependency Diagram
241 | Dependency diagram shows all dependencies found within given table structure
242 | 
243 |
244 | # After 1NF
245 | - All relational tables satisfy 1NF requirements
246 | - All key attributes are defined
247 | - There are no repeating groups in the table
248 | - All attributes are dependent on the primary key
249 | - Some tables may contain partial and transitive FDs
250 |
251 | # Conversion to Second Normal Form (2NF)
252 | A table in the second normal form means
253 | - it is in 1NF
254 | - it does not include partial FD
255 |
256 | Conversion to 2NF occurs only when the 1NF has a composite primary key
257 | - If the 1NF has a single-attribute primary key, then the table is automatically in 2NF
258 |
259 | Converting to 2NF starts with two steps
260 | 1. Make new tables to eliminate partial FD
261 | 2. Reassign corresponding dependent attributes
262 |
263 | # 2NF Step1 - Make New Tables to Eliminate Partial FD
264 | - Separate composite PK (PROJ_NUM + EMP_NUM) into different PKs
265 | - PK1: PROJ_NUM
266 | - PK2: EMP_NUM
267 | - PK3: PROJ_NUM + EMP_NUM
268 | - Create tables based on new PK
269 | - Table1: PROJECT, PK is PROJ_NUM
270 | - Table2: EMPLOYEE, PK is EMP_NUM
271 | - Table3: ASSIGNMENT, PK is PROJ_NUM + EMP_NUM
272 |
273 | # 2NF Step2 - Reassign Corresponding Dependent Attributes
274 | - Table PROJECT(**PROJ_NUM**, PROJ_NAME)
275 | - Table EMPLOYEE(**EMP_NUM**, EMP_NAME, JOB_CLASS, CHG_HOUR)
276 | - Table ASSIGNMENT(**PROJ_NUM**, **EMP_NUM**,
ASSIGN_HOUR)
277 | (any attributes that are not dependent in partial FD will remain in the original table)
278 |
279 | # Dependency Diagram
280 | 
281 |
282 | # After 2NF
283 | - All relational tables satisfy 2NF requirements
284 | - it is in 1NF
285 | - it does not include partial FD
286 | - If the 1NF has a single-attribute primary key, then the table is automatically in 2NF
287 | - Some tables may contain transitive FD
288 |
289 | # Conversion to Third Normal Form (3NF)
290 | A table in the third normal form means
291 | - it is in 2NF
292 | - it does not include transitive FD
293 |
294 | Converting to 3NF starts with two steps
295 | 1. Make new tables to eliminate transitive FD
296 | 2. Reassign corresponding dependent attributes
297 |
298 | # 3NF Step1 - Make New Tables to Eliminate Transitive FD
299 | A transitive FD: JOB_CLASS → CHG_HOUR
300 | - Make determinant (JOB_CLASS) as a PK of a new table
301 | - Create tables based on new PK
302 | - Table JOB(**JOB_CLASS**, CHG_HOUR)
303 |
304 | # 3NF Step2 - Reassign Corresponding Dependent Attributes
305 | - Table EMPLOYEE(
**EMP_NUM**, EMP_NAME, JOB_CLASS)
306 | - Table JOB(
**JOB_CLASS**, CHG_HOUR)
307 | - Table PROJECT(
**PROJ_NUM**, PROJ_NAME)
308 | - Table ASSIGNMENT(
**PROJ_NUM**, **EMP_NUM**, ASSIGN_HOUR)
309 |
310 | # Dependency Diagram
311 | 
312 |
313 | # After 3NF
314 | - it is in 2NF
315 | - it does not include transitive FD
316 |
317 | # Improving the design
318 | - Normalization form only focus on avoiding data redundancy
319 | - Beyond normalization, there are still various issues we need to address
320 | 1. Minimize data entry errors
321 | 2. Evaluate naming conventions
322 | 3. Refine attribute atomicity
323 | 4. Identify new attributes
324 | 5. Identify new relationships
325 | 6. Refine primary keys as required for data granularity
326 | 7. Maintain historical accuracy
327 | 8. Evaluate using derived attributes
328 |
329 | # Minimize data entry errors
330 | - When a new database designer comes on board, we need to insert a record into the EMPLOYEE table. Thus, we enter data into JOB_CLASS.
331 | - However, sometimes we may enter either Database Designer, DB Designer or database designer. This easily leads to data entry errors
332 | - Reduce the data enter errors by adding a
surrogate key JOB_CODE
JOB_CODE → JOB_CLASS, CHG_HOUR
333 | - Table JOB(
**JOB_CODE**, JOB_CLASS, CHG_HOUR)
334 | - Surrogate key is an artificial key introduced by DB designer
335 | - simplify PK design
336 | - usually numeric
337 | - often generated automatically by DBMS
338 |
339 | # Evaluate Naming Conventions
340 | - CHG_HOUR changed to JOB_CHG_HOUR
341 | - JOB_CLASS changed to JOB_DESCRIPTION
342 | - HOURS changed to ASSIGN_HOURS
343 |
344 | # Refine Attribute atomicity
345 | - Atomicity: not being able to be divided into small units
346 | - An atomic attribute is an attribute that cannot be further subdivided
347 | - EMP_NAME divided into EMP_LNAME, EMP_FNAME, EMP_INITIAL
348 |
349 | # Identify New attributes
350 | - Consider if any other attributes could be added into table
351 | - Social Security Number, Hire Date,....
352 |
353 | # Identify New Relationships
354 | - Add EMP_NUM attribute into PROJECT as a foreign key to keep project manager information
355 |
356 | # Refine PKs as Required for Data Granularity (1/3)
357 | - How often an employee reports hours work on a project and at what level of granularity (many times per day, once a day, once a week, at the end of project)
358 | -
Granularity refers to the level of detail represented by the values stored in a table’s row
359 |
360 |
361 | - After 3NF
362 | ASSIGNMENT(
**PROJ_NUM**, **EMP_NUM**, ASSIGN_HOUR)
363 |
368 | PROJ_NUM|EMP_NUM|ASSIGN_HOUR
369 | --------|-------|-----------
370 | 15|103|2.6
371 | 18|118|1.4
372 |
373 | Report hours at the end of project
374 |
375 | # Refine PKs as Required for Data Granularity (2/3)
376 |
377 | - Add ASSIGN_DATE attribute
378 | ASSIGNMENT(
**PROJ_NUM**, **EMP_NUM**, **ASSIGN_DATE**, ASSIGN_HOUR)
379 |
380 |
385 | PROJ_NUM|EMP_NUM|ASSIGN_DATE|ASSIGN_HOUR
386 | --------|-------|-----------|-----------
387 | 15|103|06-Mar-22|2.6
388 | 18|118|06-Mar-22|1.4
389 |
390 | Report hours once a day
391 |
392 | # Refine PKs as Required for Data Granularity (3/3)
393 | - Add ASSIGN_NUM as a surrogate key
394 | ASSIGNMENT(
**ASSIGN_NUM**, PROJ_NUM, EMP_NUM, ASSIGN_DATE, ASSIGN_HOUR)
395 |
396 |
401 | ASSIGN_NUM|PROJ_NUM|EMP_NUM|ASSIGN_DATE|ASSIGN_HOUR
402 | --------|--------|-------|-----------|-----------
403 | 1001|15|103|06-Mar-22|2.6
404 | 1002|18|118|06-Mar-22|1.4
405 |
406 | - Report hours anytime
407 | - Lower granularity yields greater flexibility
408 |
409 | # Maintain historical accuracy
410 | - Add job charge per hour (ASSIGN_CHG_HOUR) into ASSIGNMENT table is important to maintain historical accuracy of the data
411 | - JOB_CHG_HOUR in JOB and ASSIGN_CHG_HOUR in ASSIGNMENT may be the same within a time period.
412 | - Due to salary raise, JOB_CHG_HOUR will be changed
413 | - ASSIGN_CHG_HOUR keeps historical data and only reflects the charge per hour when the employee reports hours
414 |
415 | # Evaluate Using Derived Attributes
416 | - To simplify coding or improve performance, database designers will introduce derived attributes
417 | - The derived attribute ASSIGN_CHARGE comes from a transitive dependency
418 | - (ASSIGN_HOURS + ASSIGN_CHG_HOUR) → ASSIGN_CHARGE
419 |
420 | # The Completed Database After Design Improvement
421 |
422 |

423 |

424 |
425 |
426 | # Surrogate Key Considerations
427 | - Surrogate keys are used by designers when the primary key is considered to be unsuitable
428 | - A surrogate key is a system-defined attribute generally created and managed via the DBMS
429 | - Usually it is a numeric value which is automatically incremented for each new row
430 |
431 | # Higher-Level Normal Forms
432 | - Tables in 3NF will perform suitably for business transactional databases
433 | - Higher normal forms are sometimes useful for theoretical interest or statistical research
434 | - Higher-level normal forms: Boyce-Codd normal form (BCNF), 4NF and 5NF
435 |
436 | # Normalization and Database Design
437 | - Normalization should be part of the design process
438 | - Proposed entities must meet the required normal form before table structures are created
439 | - Principles and normalization procedures should be written when redesigning and modifying databases
440 | - ERD should be updated through the iterative process
441 |
442 | # Denormalization
443 | - Important database design goals include the following:
444 | - Creation of normalized relations
445 | - Considering processing requirements and speed
446 | - A problem with normalization is that joining a larger number of tables takes additional input/output (I/O) operations, thereby reducing system speed
447 | - Defects in unnormalized tables include the following:
448 | - Data updates are less efficient because tables are larger
449 | - Lead to data redundancy
450 |
451 | # Examples of Denormalization
452 | 
453 | - STU_HRS = Credit hours earned
454 | - STU_CLASS = Student classification (fr, so, jr, sr)
455 |
456 | # Denormalization - Wide and Narrow Table
457 |
458 |

459 |

460 |
461 |
462 | # Data Modeling Checklist
463 | 
464 |
465 | # Review Questions
466 | - What is normalization
467 | - When is a table in 1NF
468 | - When is a table in 2NF
469 | - When is a table in 3NF
470 |
471 | # Homework #C
472 | 1. Read section 6-7 Normalization and Database Design
473 | 2. Design database schemas for Contracting Company, including but not limited to
474 | - Business rules
475 | - Evolving history of ER diagram in terms of normal forms
476 | - 1NF, 2NF, 3NF conversion, dependency diagram and reason
477 | - At least 3 sample records of each table to illustrate PK and FK among tables to demonstrate their relationships.
478 | - Check your design by Table 6.7 Data Modeling Checklist
--------------------------------------------------------------------------------
/ch10_transaction_and_concurrency.md:
--------------------------------------------------------------------------------
1 | ---
2 | marp: true
3 | theme: default
4 | class: invert
5 | size: 16:9
6 | paginate: true
7 | footer: 國立陽明交通大學 電子與光子學士學位學程
8 | headingDivider: 1
9 | style: |
10 | section::after {
11 | content: attr(data-marpit-pagination) '/' attr(data-marpit-pagination-total);
12 | }
13 | .columns {
14 | display: grid;
15 | grid-template-columns: repeat(2, minmax(0, 1fr));
16 | gap: 1rem;
17 | }
18 | .middle-grid {
19 | display: grid;
20 | grid-template-columns: repeat(2, minmax(0, 1fr));
21 | gap: 1rem;
22 | }
23 | .middle-grid img {
24 | width: 75%;
25 | }
26 | .grid {
27 | display: grid;
28 | grid-template-columns: 1fr 1fr;
29 | gap: 10px;
30 | }
31 | .grid img {
32 | width: 100%;
33 | }
34 | .red-text {
35 | color: red;
36 | }
37 |
38 | .blue-text {
39 | color: lightskyblue;
40 | }
41 |
42 | .brown-text {
43 | color: brown;
44 | }
45 |
46 | .small-text {
47 | font-size: 0.70rem;
48 | }
49 | ---
50 | # Chapter10: RDBMS Advanced Topics
51 | - Transaction management process and four transaction properties
52 | - Concurrency control and locking methods
53 | - Database recovery management
54 | - Database security (access control list)
55 | - Performance tuning
56 | - No SQL
57 |
58 |
59 |
60 | # A Transaction
61 |
62 | 
63 |
64 |
--------------------------------------------------------------------------------
/crash_ch2.sql:
--------------------------------------------------------------------------------
1 | show databases; -- list existing databases
2 |
3 | create database music; -- create then immediately drop a scratch database
4 | drop database music;
5 |
6 | -- operate database land
7 | create database if not exists land;
8 | use land;
9 | create table continent
10 | (
11 | continent_id int,
12 | continent_name varchar(20),
13 | population bigint
14 | );
15 |
16 | -- operate database customer
17 | create database if not exists customer;
18 | use customer;
19 | create table if not exists customer
20 | (
21 | customer_id int,
22 | first_name varchar(50),
23 | last_name varchar(50),
24 | address varchar(100),
25 | primary key (customer_id)
26 | );
27 | create table if not exists complaint
28 | (
29 | complaint_id int,
30 | customer_id int,
31 | complaint varchar(200),
32 | primary key (complaint_id),
33 | foreign key (customer_id) references customer(customer_id) -- each complaint must reference an existing customer
34 | );
35 |
36 | insert into customer (customer_id, first_name, last_name, address)
37 | values
38 | (1, 'Bob', 'Smith', '12 Dreary Lane'),
39 | (2, 'Sally', 'Johns', '76 Boulevard Meugler'),
40 | (3, 'Karen', 'Bellyacher', '354 Main Street');
41 |
42 | insert into complaint (complaint_id, customer_id, complaint)
43 | values
44 | (1, 3, 'I want to speak to your manager');
45 |
46 | select A.customer_id, A.first_name, A.last_name, B.complaint from customer A, complaint B where A.customer_id = B.customer_id; -- implicit inner join on customer_id
47 |
48 | create table if not exists contact
49 | (
50 | contact_id int,
51 | name varchar(50) not null,
52 | city varchar(50),
53 | phone varchar(20),
54 | email_address varchar(50) unique,
55 | primary key (contact_id)
56 | );
57 | insert into contact(contact_id, name, city, phone, email_address)
58 | values
59 | (1, 'Steve Chen', '北京', '123-3123', 'steve@schen21.org'),
60 | (2, 'Joan Field', '紐約', '321-4321', 'jfield@gmail.com'),
61 | (3, 'Bill Bashful', '台北',null,'bb@gmail.com'); -- phone is NULL (unknown)
62 |
63 | insert into contact(contact_id, name, city, phone, email_address)
64 | values
65 | (4, 'Jacky Lin', '新竹', '', 'jacky@gmail.com'); -- phone is an empty string, which is distinct from NULL
66 |
67 |
68 | -- operate table city
69 | use land;
70 | create table if not exists temperature
71 | (
72 | city varchar(50),
73 | year int,
74 | high_temperature int,
75 | primary key (city, year) -- composite primary key
76 | );
77 | insert into temperature (city, year, high_temperature)
78 | values
79 | ('New York, NY', 2020, 96), -- single quotes for portability; double-quoted strings break under ANSI_QUOTES sql_mode
80 | ('Death Valley, CA', 2021, 128),
81 | ('International Falls, MN', 2021, 77),
82 | ('New York, NY', 2021, 98);
83 |
84 | select * from temperature;
85 |
86 | create table if not exists temperature_with_constrain_default
87 | (
88 | city varchar(50),
89 | year int,
90 | high_temperature int,
91 | reporter varchar(10) default 'jacky',
92 | constraint check (year between 1880 and 2200), -- anonymous CHECK constraint
93 | constraint check (high_temperature < 200),
94 | primary key (city, year)
95 | );
96 | insert into temperature_with_constrain_default (city, year, high_temperature, reporter)
97 | values
98 | ('New York, NY', 2024, 100, 'john');
99 | insert into temperature_with_constrain_default (city, year, high_temperature, reporter)
100 | values
101 | ("New York, NY", 2023, 100, default);
102 | create index temperature_repoter on temperature_with_constrain_default(reporter);
103 | alter table temperature_with_constrain_default add column day_of_year varchar(50);
104 | alter table temperature_with_constrain_default drop column day_of_year;
105 | alter table temperature_with_constrain_default rename column reporter to reporters;
106 | alter table temperature_with_constrain_default rename to temperature_constrain_default;
107 |
108 |
109 |
--------------------------------------------------------------------------------
/crash_ch3.sql:
--------------------------------------------------------------------------------
1 | -- Comment in a standalone line
2 | create database if not exists crash_ch3; -- Comment after statement
3 | use crash_ch3;
4 | create table if not exists continent
5 | (
6 | continent_id int,
7 | continent_name varchar(30),
8 | population bigint
9 | );
10 |
11 | insert into continent (continent_id, continent_name, population)
12 | values
13 | (1, 'Asia', 4641054775),
14 | (2, 'Africa', 1340598147),
15 | (3, 'Europe', 747636026),
16 | (4, 'North America', 592072212),
17 | (5, 'South America', 430759766),
18 | (6, 'Australia', 43111704),
19 | (7, 'Antarctica', 0);
20 |
21 | select continent_id,
22 | continent_name,
23 | population
24 | from continent
25 | order by population desc;
26 |
27 | /* comments with multiple lines
28 | line1
29 | line2
30 | */
31 |
32 | select population
33 | from continent
34 | where continent_name = 'Asia';
35 |
36 | select *
37 | from continent;
38 |
39 | -- null value check
40 | create table unemployed
41 | (
42 | region int,
43 | unemployed int
44 | );
45 | insert into unemployed (region, unemployed)
46 | values
47 | (1, 2218547),
48 | (2, 137455),
49 | (3, null);
50 | select * from unemployed;
51 | select * from unemployed where unemployed is not null;
--------------------------------------------------------------------------------
/crash_ch4.sql:
--------------------------------------------------------------------------------
1 | create database crash_ch4;
2 | use crash_ch4;
3 | CREATE TABLE solar_eclipse (
4 | eclipse_date DATE,
5 | time_of_greatest_eclipse TIME,
6 | eclipse_type VARCHAR(10),
7 | magnitude DECIMAL(4 , 3 )
8 | );
9 | insert into solar_eclipse (eclipse_date, time_of_greatest_eclipse, eclipse_type, magnitude)
10 | values
11 | ('2022-04-30', '20:42:36','Partial', 0.640),
12 | ('2022-10-25', '11:01:20','Partial', 0.862),
13 | ('2023-04-20', '04:17:56', 'Hybrid', 1.013);
14 |
15 | CREATE TABLE country_code (
16 | country_code CHAR(3)
17 | );
18 | insert into country_code (country_code)
19 | values
20 | ('JPN'),
21 | ('JP'),
22 | ('J'),
23 | ('');
24 | CREATE TABLE test_varchar_size (
25 | huge_column VARCHAR(16384)
26 | );
27 |
28 | CREATE TABLE student (
29 | student_id INT,
30 | student_class ENUM('Freshman', 'Sophomore', 'Junior', 'Senior')
31 | );
32 | insert into student(student_id, student_class)
33 | values
34 | (1, 'Freshman');
35 |
36 | CREATE TABLE interpreter (
37 | interpreter_id INT,
38 | language_spoken SET('English', 'German', 'French', 'Spanish')
39 | );
40 | insert into interpreter(interpreter_id, language_spoken)
41 | values
42 | (1, ''),
43 | (2, 'English'),
44 | (3, 'English,Spanish'),
45 | (4, 'Chinese');
46 |
47 | create table book
48 | (
49 | book_id int,
50 | author_bio tinytext, -- 256 characters
51 | book_proposal text, -- 64KB
52 | entire_book mediumtext -- 16MB
53 | );
54 | insert into book (book_id, author_bio, book_proposal, entire_book)
55 | values
56 | (1, "Tom's birthday is 1939/10/03", null, null),
57 | (2, 'John\'s birthday is 1940/01/01', null, null);
58 | -- (3, 'Peter's birthday is 1968/02/02', null, null);
59 |
60 | create table encryption
61 | (
62 | key_id int,
63 | encryption_key binary(50)
64 | );
65 |
66 | create table signature
67 | (
68 | signature_id int,
69 | signature varbinary(400)
70 | );
71 |
72 | create table planet_stat
73 | (
74 | planet varchar(20),
75 | miles_from_earth bigint,
76 | diameter_km mediumint
77 | );
78 | insert into planet_stat (planet, miles_from_earth, diameter_km)
79 | values
80 | ('Mars', 48678219, 6792),
81 | ('Jupiter', 3906747712, 142984),
82 | ('Staurn', 792248279, 120536),
83 | ('Uranus', 1692662533, 51118),
84 | ('Neptune', 2703959966, 49528);
85 |
86 | create table planet_stat_v2
87 | (
88 | planet varchar(20),
89 | miles_from_earth int unsigned,
90 | diameter_km mediumint
91 | );
92 | insert into planet_stat_v2 (planet, miles_from_earth, diameter_km)
93 | values
94 | ('Mars', 48678219, 6792),
95 | ('Jupiter', 3906747712, 142984),
96 | ('Staurn', 792248279, 120536),
97 | ('Uranus', 1692662533, 51118),
98 | ('Neptune', 2703959966, 49528);
99 |
100 | create table food
101 | (
102 | food varchar(30),
103 | organic_flag bool,
104 | gluten_free_flag bool
105 | );
106 | desc food;
107 |
--------------------------------------------------------------------------------
/files/csv/customer.csv:
--------------------------------------------------------------------------------
1 | CUS_CODE;CUS_LNAME;CUS_FNAME;CUS_INITIAL;CUS_AREACODE;CUS_PHONE;CUS_BALANCE
2 | 10010;Ramas;Alfred;A;615;844-2573;0.00
3 | 10011;Dunne;Leona;K;713;894-1238;0.00
4 | 10012;Smith;Kathy;W;615;894-2285;345.86
5 | 10013;Olowski;Paul;F;615;894-2180;536.75
6 | 10014;Orlando;Myron;NULL;615;222-1672;0.00
7 | 10015;O'Brian;Amy;B;713;442-3381;0.00
8 | 10016;Brown;James;G;615;297-1228;221.19
9 | 10017;Williams;George;NULL;615;290-2556;768.93
10 | 10018;Farriss;Anne;G;713;382-7185;216.55
11 | 10019;Smith;Olette;K;615;297-3809;0.00
12 |
--------------------------------------------------------------------------------
/files/csv/emp.csv:
--------------------------------------------------------------------------------
1 | EMP_NUM;EMP_TITLE;EMP_LNAME;EMP_FNAME;EMP_INITIAL;EMP_DOB;EMP_HIRE_DATE;EMP_AREACODE;EMP_PHONE;EMP_MGR
2 | 100;Mr.;Kolmycz;George;D;"1967-06-15 00:00:00";"2010-03-15 00:00:00";615;324-5456;NULL
3 | 101;Ms.;Lewis;Rhonda;G;"1990-03-19 00:00:00";"2011-04-25 00:00:00";615;324-4472;100
4 | 102;Mr.;Vandam;Rhett;NULL;"1983-11-14 00:00:00";"2015-12-20 00:00:00";901;675-8993;100
5 | 103;Ms.;Jones;Anne;M;"1999-10-16 00:00:00";"2019-08-28 00:00:00";615;898-3456;100
6 | 104;Mr.;Lange;John;P;"1996-11-08 00:00:00";"2019-10-20 00:00:00";901;504-4430;105
7 | 105;Mr.;Williams;Robert;D;"2000-03-14 00:00:00";"2020-11-08 00:00:00";615;890-3220;NULL
8 | 106;Mrs.;Smith;Jeanine;K;"1993-02-12 00:00:00";"2014-01-05 00:00:00";615;324-7883;105
9 | 107;Mr.;Diante;Jorge;D;"1999-08-21 00:00:00";"2019-07-02 00:00:00";615;890-4567;105
10 | 108;Mr.;Wiesenbach;Paul;R;"1991-02-14 00:00:00";"2017-11-18 00:00:00";615;897-4358;NULL
11 | 109;Mr.;Smith;George;K;"1986-06-18 00:00:00";"2014-04-14 00:00:00";901;504-3339;108
12 | 110;Mrs.;Genkazi;Leighla;W;"1995-05-19 00:00:00";"2015-12-01 00:00:00";901;569-0093;108
13 | 111;Mr.;Washington;Rupert;E;"1991-01-03 00:00:00";"2018-06-21 00:00:00";615;890-4925;105
14 | 112;Mr.;Johnson;Edward;E;"1986-05-14 00:00:00";"2008-12-01 00:00:00";615;898-4387;100
15 | 113;Ms.;Smythe;Melanie;P;"1995-09-15 00:00:00";"2020-05-11 00:00:00";615;324-9006;105
16 | 114;Ms.;Brandon;Marie;G;"1981-11-02 00:00:00";"2004-11-15 00:00:00";901;882-0845;108
17 | 115;Mrs.;Saranda;Hermine;R;"1997-07-25 00:00:00";"2018-04-23 00:00:00";615;324-5505;105
18 | 116;Mr.;Smith;George;A;"1990-11-08 00:00:00";"2013-12-10 00:00:00";615;890-2984;108
19 |
--------------------------------------------------------------------------------
/files/csv/invoice.csv:
--------------------------------------------------------------------------------
1 | INV_NUMBER;CUS_CODE;INV_DATE
2 | 1001;10014;"2022-01-16 00:00:00"
3 | 1002;10011;"2022-01-16 00:00:00"
4 | 1003;10012;"2022-01-16 00:00:00"
5 | 1004;10011;"2022-01-17 00:00:00"
6 | 1005;10018;"2022-01-17 00:00:00"
7 | 1006;10014;"2022-01-17 00:00:00"
8 | 1007;10015;"2022-01-17 00:00:00"
9 | 1008;10011;"2022-01-17 00:00:00"
10 |
--------------------------------------------------------------------------------
/files/csv/line.csv:
--------------------------------------------------------------------------------
1 | INV_NUMBER;LINE_NUMBER;P_CODE;LINE_UNITS;LINE_PRICE
2 | 1001;1;13-Q2/P2;1.00;14.99
3 | 1001;2;23109-HB;1.00;9.95
4 | 1002;1;54778-2T;2.00;4.99
5 | 1003;1;2238/QPD;1.00;38.95
6 | 1003;2;1546-QQ2;1.00;39.95
7 | 1003;3;13-Q2/P2;5.00;14.99
8 | 1004;1;54778-2T;3.00;4.99
9 | 1004;2;23109-HB;2.00;9.95
10 | 1005;1;PVC23DRT;12.00;5.87
11 | 1006;1;SM-18277;3.00;6.99
12 | 1006;2;2232/QTY;1.00;109.92
13 | 1006;3;23109-HB;1.00;9.95
14 | 1006;4;89-WRE-Q;1.00;256.99
15 | 1007;1;13-Q2/P2;2.00;14.99
16 | 1007;2;54778-2T;1.00;4.99
17 | 1008;1;PVC23DRT;5.00;5.87
18 | 1008;2;WR3/TT3;3.00;119.95
19 | 1008;3;23109-HB;1.00;9.95
20 |
--------------------------------------------------------------------------------
/files/csv/product.csv:
--------------------------------------------------------------------------------
1 | P_CODE;P_DESCRIPT;P_INDATE;P_QOH;P_MIN;P_PRICE;P_DISCOUNT;V_CODE
2 | 11QER/31;"Power painter, 15 psi., 3-nozzle";"2021-11-03 00:00:00";8;5;109.99;0.00;25595
3 | 13-Q2/P2;"7.25-in. pwr. saw blade";"2021-12-13 00:00:00";32;15;14.99;0.05;21344
4 | 14-Q1/L3;"9.00-in. pwr. saw blade";"2021-11-13 00:00:00";18;12;17.49;0.00;21344
5 | 1546-QQ2;"Hrd. cloth, 1/4-in., 2x50";"2022-01-15 00:00:00";15;8;39.95;0.00;23119
6 | 1558-QW1;"Hrd. cloth, 1/2-in., 3x50";"2022-01-15 00:00:00";23;5;43.99;0.00;23119
7 | 2232/QTY;"B&D jigsaw, 12-in. blade";"2021-12-30 00:00:00";8;5;109.92;0.05;24288
8 | 2232/QWE;"B&D jigsaw, 8-in. blade";"2021-12-24 00:00:00";6;5;99.87;0.05;24288
9 | 2238/QPD;"B&D cordless drill, 1/2-in.";"2022-01-20 00:00:00";12;5;38.95;0.05;25595
10 | 23109-HB;"Claw hammer";"2022-01-20 00:00:00";23;10;9.95;0.10;21225
11 | 23114-AA;"Sledge hammer, 12 lb.";"2022-01-02 00:00:00";8;5;14.40;0.05;NULL
12 | 54778-2T;"Rat-tail file, 1/8-in. fine";"2021-12-15 00:00:00";43;20;4.99;0.00;21344
13 | 89-WRE-Q;"Hicut chain saw, 16 in.";"2022-02-07 00:00:00";11;5;256.99;0.05;24288
14 | PVC23DRT;"PVC pipe, 3.5-in., 8-ft";"2022-02-20 00:00:00";188;75;5.87;0.00;NULL
15 | SM-18277;"1.25-in. metal screw, 25";"2022-03-01 00:00:00";172;75;6.99;0.00;21225
16 | SW-23116;"2.5-in. wd. screw, 50";"2022-02-24 00:00:00";237;100;8.45;0.00;21231
17 | WR3/TT3;"Steel matting, 4'x8'x1/6", .5" mesh";"2022-01-17 00:00:00";18;5;119.95;0.10;25595
18 |
--------------------------------------------------------------------------------
/files/csv/vendor.csv:
--------------------------------------------------------------------------------
1 | V_CODE,V_NAME,V_CONTACT,V_AREACODE,V_PHONE,V_STATE,V_ORDER
2 | 21225,"Bryson, Inc.",Smithson,615,223-3234,TN,Y
3 | 21226,"SuperLoo, Inc.",Flushing,904,215-8995,FL,N
4 | 21231,"D&E Supply",Singh,615,228-3245,TN,Y
5 | 21344,"Gomez Bros.",Ortega,615,889-2546,KY,N
6 | 22567,"Dome Supply",Smith,901,678-1419,GA,N
7 | 23119,"Randsets Ltd.",Anderson,901,678-3998,GA,Y
8 | 24004,"Brackman Bros.",Browning,615,228-1410,TN,N
9 | 24288,"ORDVA, Inc.",Hakford,615,898-1234,TN,Y
10 | 25443,"B&K, Inc.",Smith,904,227-0093,FL,N
11 | 25501,"Damal Supplies",Smythe,615,890-3529,TN,N
12 | 25595,"Rubicon Systems",Orton,904,456-0092,FL,Y
13 |
--------------------------------------------------------------------------------
/files/image/3_tier_architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/3_tier_architecture.png
--------------------------------------------------------------------------------
/files/image/DBMS_SQL.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/DBMS_SQL.jpg
--------------------------------------------------------------------------------
/files/image/Types_of_Keys.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/Types_of_Keys.jpg
--------------------------------------------------------------------------------
/files/image/UML_class_diagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/UML_class_diagram.png
--------------------------------------------------------------------------------
/files/image/chens-notation-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/chens-notation-1.png
--------------------------------------------------------------------------------
/files/image/chens-notation-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/chens-notation-2.png
--------------------------------------------------------------------------------
/files/image/cover_of_database_systems_14e.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/cover_of_database_systems_14e.jpg
--------------------------------------------------------------------------------
/files/image/crows-foot-notation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/crows-foot-notation.png
--------------------------------------------------------------------------------
/files/image/database_design_process.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/database_design_process.jpg
--------------------------------------------------------------------------------
/files/image/employee_manager.drawio:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
--------------------------------------------------------------------------------
/files/image/employee_manager.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/employee_manager.jpg
--------------------------------------------------------------------------------
/files/image/fig_data_dictionary.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/fig_data_dictionary.png
--------------------------------------------------------------------------------
/files/image/four_join_types.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/four_join_types.jpg
--------------------------------------------------------------------------------
/files/image/functional_dependency_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/functional_dependency_example.png
--------------------------------------------------------------------------------
/files/image/hierarchical_model.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/hierarchical_model.png
--------------------------------------------------------------------------------
/files/image/index_table_1.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/index_table_1.gif
--------------------------------------------------------------------------------
/files/image/index_table_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/index_table_2.png
--------------------------------------------------------------------------------
/files/image/index_table_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/index_table_3.png
--------------------------------------------------------------------------------
/files/image/jacky_last_day_in_tsmc.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/jacky_last_day_in_tsmc.jpg
--------------------------------------------------------------------------------
/files/image/join_example.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/join_example.jpg
--------------------------------------------------------------------------------
/files/image/mysql_join.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/mysql_join.jpg
--------------------------------------------------------------------------------
/files/image/mysql_wb_toolbar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/mysql_wb_toolbar.png
--------------------------------------------------------------------------------
/files/image/qrcode_lecture_database.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/qrcode_lecture_database.png
--------------------------------------------------------------------------------
/files/image/relationship-cardinality.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/relationship-cardinality.png
--------------------------------------------------------------------------------
/files/image/workbench.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mingfujacky/Lecture-Database/69fc8eba9e9b1ac45400570faf89606908175265/files/image/workbench.jpg
--------------------------------------------------------------------------------
/files/ipynb/scores.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "### Import Library"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 1,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "import sys\n",
17 | "sys.path.append(\n",
18 | " \"/Users/jacky/Library/Mobile Documents/com~apple~CloudDocs/交大教學/DB/Lecture-Database/.venv/lib/python3.13/site-packages\")"
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": 6,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "import mysql.connector"
28 | ]
29 | },
30 | {
31 | "cell_type": "markdown",
32 | "metadata": {},
33 | "source": [
34 | "### Build Connection"
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": 7,
40 | "metadata": {},
41 | "outputs": [],
42 | "source": [
43 | "conn = mysql.connector.connect(\n",
44 | " host=\"127.0.0.1\", # Change to your MySQL server host\n",
45 | " user=\"dbms_demo\", # Change to your username\n",
46 | " password=\"12345\", # Change to your password\n",
47 | " database=\"demo\" # Change to your database name\n",
48 | ")"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "metadata": {},
54 | "source": [
55 | "### Create Tables "
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 8,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "# create a cursor object when working with a database connection.\n",
65 | "# cursor object is the primary way to execute SQL queries and fetch data from DB\n",
66 | "cursor = conn.cursor()\n",
67 | "\n",
68 | "# 1. make a SQL statement as a string to create a table\n",
69 | "create_scores_table = \"\"\"\n",
70 | "CREATE TABLE IF NOT EXISTS score (\n",
71 | " id VARCHAR(10) PRIMARY KEY,\n",
72 | " name VARCHAR(20) NOT NULL,\n",
73 | " chinese INT NOT NULL,\n",
74 | " math INT NOT NULL,\n",
75 | " english INT NOT NULL\n",
76 | ");\n",
77 | "\"\"\"\n",
78 | "# 2. execute SQL statement with cursor object\n",
79 | "cursor.execute(create_scores_table)\n",
80 | "# 3. commit the transaction\n",
81 | "conn.commit()"
82 | ]
83 | },
84 | {
85 | "cell_type": "markdown",
86 | "metadata": {},
87 | "source": [
88 | "### [C]reate Records"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": 5,
94 | "metadata": {},
95 | "outputs": [],
96 | "source": [
97 | "insert_score = \"\"\"\n",
98 | "INSERT INTO\n",
99 | " score (id, name, chinese, math, english)\n",
100 | "VALUES\n",
101 | " (\"001\", 'John', 90, 80, 70),\n",
102 | " (\"002\", 'Mary', 55, 60, 75),\n",
103 | " (\"003\", 'Tom', 90, 95, 100);\n",
104 | "\"\"\" \n",
105 | "cursor.execute(insert_score)\n",
106 | "conn.commit()"
107 | ]
108 | },
109 | {
110 | "cell_type": "markdown",
111 | "metadata": {},
112 | "source": [
113 | "### [R]ead Records"
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": 8,
119 | "metadata": {},
120 | "outputs": [
121 | {
122 | "name": "stdout",
123 | "output_type": "stream",
124 | "text": [
125 | "[('001', 'John', 90, 80, 70), ('002', 'Mary', 55, 60, 75), ('003', 'Tom', 90, 95, 100)]\n",
126 | "ID: 001, Name: John, Chinese: 90, Math: 80, English: 70\n",
127 | "ID: 002, Name: Mary, Chinese: 55, Math: 60, English: 75\n",
128 | "ID: 003, Name: Tom, Chinese: 90, Math: 95, English: 100\n"
129 | ]
130 | }
131 | ],
132 | "source": [
133 | "read_score = \"\"\"\n",
134 | "SELECT * FROM score;\n",
135 | "\"\"\"\n",
136 | "# the cursor object will execute the SELECT * FROM score query on the database\n",
137 | "# and prepare the results for further processing.\n",
138 | "cursor.execute(read_score)\n",
139 | "\n",
140 | "# fetchall() method to get all the rows from the result set\n",
141 | "# fetchone() method to get the first row from the result set\n",
142 | "# fetchmany() method to get a specific number of rows from the result set\n",
143 | "rows = cursor.fetchall()\n",
144 | "print(rows)\n",
145 | "for row in rows:\n",
146 | " print(f'ID: {row[0]}, Name: {row[1]}, Chinese: {row[2]}, Math: {row[3]}, English: {row[4]}')"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": 9,
152 | "metadata": {},
153 | "outputs": [
154 | {
155 | "name": "stdout",
156 | "output_type": "stream",
157 | "text": [
158 | "[('003', 'Tom', 90, 95, 100)]\n",
159 | "ID: 003, Name: Tom, Math: 95\n"
160 | ]
161 | }
162 | ],
163 | "source": [
164 | "read_score = \"\"\"\n",
165 | "SELECT * FROM score WHERE math > 90;\n",
166 | "\"\"\"\n",
167 | "cursor.execute(read_score)\n",
168 | "rows = cursor.fetchall()\n",
169 | "print(rows)\n",
170 | "for row in rows:\n",
171 | " print(f'ID: {row[0]}, Name: {row[1]}, Math: {row[3]}')"
172 | ]
173 | },
174 | {
175 | "cell_type": "markdown",
176 | "metadata": {},
177 | "source": [
178 | "### [U]pdate Records"
179 | ]
180 | },
181 | {
182 | "cell_type": "code",
183 | "execution_count": 10,
184 | "metadata": {},
185 | "outputs": [],
186 | "source": [
187 | "update_score = \"\"\"\n",
188 | "UPDATE\n",
189 | " score\n",
190 | "SET\n",
191 | " math = 83\n",
192 | "WHERE\n",
193 | " id = '002' \n",
194 | "\"\"\"\n",
195 | "cursor.execute(update_score)\n",
196 | "conn.commit()"
197 | ]
198 | },
199 | {
200 | "cell_type": "markdown",
201 | "metadata": {},
202 | "source": [
203 | "### [D]elete Records"
204 | ]
205 | },
206 | {
207 | "cell_type": "code",
208 | "execution_count": 11,
209 | "metadata": {},
210 | "outputs": [],
211 | "source": [
212 | "delete_record = \"DELETE FROM score WHERE id = '002';\"\n",
213 | "cursor.execute(delete_record)\n",
214 | "conn.commit()"
215 | ]
216 | },
217 | {
218 | "cell_type": "markdown",
219 | "metadata": {},
220 | "source": [
221 | "### Close Connection"
222 | ]
223 | },
224 | {
225 | "cell_type": "code",
226 | "execution_count": 12,
227 | "metadata": {},
228 | "outputs": [],
229 | "source": [
230 | "conn.close()"
231 | ]
232 | },
233 | {
234 | "cell_type": "markdown",
235 | "metadata": {},
236 | "source": []
237 | },
238 | {
239 | "cell_type": "markdown",
240 | "metadata": {},
241 | "source": [
242 | "### Drop Tables"
243 | ]
244 | },
245 | {
246 | "cell_type": "code",
247 | "execution_count": 13,
248 | "metadata": {},
249 | "outputs": [],
250 | "source": [
251 | "conn = mysql.connector.connect(\n",
252 | " host=\"127.0.0.1\", # Change to your MySQL server host\n",
253 | " user=\"dbms_demo\", # Change to your username\n",
254 | " password=\"12345\", # Change to your password\n",
255 | " database=\"demo\" # Change to your database name\n",
256 | ")\n",
257 | "cursor = conn.cursor()\n",
258 | "cursor.execute(\"DROP TABLE score\")\n",
259 | "conn.commit()\n",
260 | "conn.close()"
261 | ]
262 | }
263 | ],
264 | "metadata": {
265 | "kernelspec": {
266 | "display_name": ".venv",
267 | "language": "python",
268 | "name": "python3"
269 | },
270 | "language_info": {
271 | "codemirror_mode": {
272 | "name": "ipython",
273 | "version": 3
274 | },
275 | "file_extension": ".py",
276 | "mimetype": "text/x-python",
277 | "name": "python",
278 | "nbconvert_exporter": "python",
279 | "pygments_lexer": "ipython3",
280 | "version": "3.13.2"
281 | }
282 | },
283 | "nbformat": 4,
284 | "nbformat_minor": 2
285 | }
286 |
--------------------------------------------------------------------------------
/files/ipynb/social_media.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "### Import Library"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": null,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "import sys\n",
17 | "sys.path.append(\n",
18 | " \"/Users/jacky/Library/Mobile Documents/com~apple~CloudDocs/交大教學/DB/Lecture-Database/.venv/lib/python3.13/site-packages\")\n",
19 | "import mysql.connector"
20 | ]
21 | },
22 | {
23 | "cell_type": "markdown",
24 | "metadata": {},
25 | "source": [
26 | "### Build Functions"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": null,
32 | "metadata": {},
33 | "outputs": [],
34 | "source": [
35 | "def build_connection(host, user, password, database):\n",
36 | " connection = None\n",
37 | " try:\n",
38 | " connection = mysql.connector.connect(\n",
39 | " host=host,\n",
40 | " user=user,\n",
41 | " password=password,\n",
42 | " database=database\n",
43 | " )\n",
44 | " print(f'Connection to MySQL {database} database successfully!!')\n",
45 | " except mysql.connector.OperationalError as err:\n",
46 | " print(f\"❌ OperationalError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
47 | " except mysql.connector.DatabaseError as err:\n",
48 | " print(f\"❌ DatabaseError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
49 | " except mysql.connector.InterfaceError as err:\n",
50 | " print(f\"❌ InterfaceError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
51 | " return connection"
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "execution_count": null,
57 | "metadata": {},
58 | "outputs": [],
59 | "source": [
60 | "def execute_sql(connection, sql):\n",
61 | " cursor = connection.cursor()\n",
62 | " try:\n",
63 | " cursor.execute(sql)\n",
64 | " connection.commit()\n",
65 | " print(\"Query executed successfully\")\n",
66 | " except mysql.connector.IntegrityError as err:\n",
67 | " print(f\"❌ IntegrityError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
68 | " except mysql.connector.ProgrammingError as err:\n",
69 | " print(f\"❌ ProgrammingError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
70 | " except mysql.connector.OperationalError as err:\n",
71 | " print(f\"❌ OperationalError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
72 | " except mysql.connector.DataError as err:\n",
73 | " print(f\"❌ DataError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
74 | " except mysql.connector.NotSupportedError as err:\n",
75 | " print(f\"❌ NotSupportedError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
76 | " except mysql.connector.InternalError as err:\n",
77 | " print(f\"❌ InternalError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")"
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "execution_count": null,
83 | "metadata": {},
84 | "outputs": [],
85 | "source": [
86 | "def execute_read(connection, sql):\n",
87 | " cursor = connection.cursor()\n",
88 | " result = None\n",
89 | " try:\n",
90 | " cursor.execute(sql)\n",
91 | " result = cursor.fetchall()\n",
92 | " return result\n",
93 | " except mysql.connector.ProgrammingError as err:\n",
94 | " print(f\"❌ ProgrammingError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
95 | " except mysql.connector.OperationalError as err:\n",
96 | " print(f\"❌ OperationalError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
97 | " except mysql.connector.DataError as err:\n",
98 | " print(f\"❌ DataError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
99 | " except mysql.connector.NotSupportedError as err:\n",
100 | " print(f\"❌ NotSupportedError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")\n",
101 | " except mysql.connector.InternalError as err:\n",
102 | " print(f\"❌ InternalError: {err.errno} | SQLSTATE: {err.sqlstate} | Message: {err.msg}\")"
103 | ]
104 | },
105 | {
106 | "cell_type": "markdown",
107 | "metadata": {},
108 | "source": [
109 | "### Build Connection"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "metadata": {},
116 | "outputs": [],
117 | "source": [
118 | "connection = build_connection('localhost', 'dbms_demo', 'NYCUdbms314', 'demo')"
119 | ]
120 | },
121 | {
122 | "cell_type": "markdown",
123 | "metadata": {},
124 | "source": [
125 | "### Create Tables"
126 | ]
127 | },
128 | {
129 | "cell_type": "code",
130 | "execution_count": null,
131 | "metadata": {},
132 | "outputs": [],
133 | "source": [
134 | "create_user_table = \"\"\"\n",
135 | "CREATE TABLE IF NOT EXISTS user (\n",
136 | " id INT PRIMARY KEY AUTO_INCREMENT,\n",
137 | " name VARCHAR(20) NOT NULL,\n",
138 | " age INT,\n",
139 | " gender VARCHAR(10),\n",
140 | " nationality VARCHAR(20)\n",
141 | ");\n",
142 | "\"\"\"\n",
143 | "create_post_table = \"\"\"\n",
144 | "CREATE TABLE IF NOT EXISTS post (\n",
145 | " id INT PRIMARY KEY AUTO_INCREMENT,\n",
146 | " title VARCHAR(100) NOT NULL,\n",
147 | " description TEXT,\n",
148 | " user_id INT NOT NULL,\n",
149 | " CONSTRAINT FOREIGN KEY (user_id) REFERENCES user (id)\n",
150 | ");\n",
151 | "\"\"\"\n",
152 | "create_comment_table = \"\"\"\n",
153 | "CREATE TABLE IF NOT EXISTS comment (\n",
154 | " id INT PRIMARY KEY AUTO_INCREMENT,\n",
155 | " text TEXT NOT NULL,\n",
156 | " user_id INT NOT NULL,\n",
157 | " post_id INT NOT NULL,\n",
158 | " CONSTRAINT FOREIGN KEY (user_id) REFERENCES user (id),\n",
159 | " CONSTRAINT FOREIGN KEY (post_id) REFERENCES post (id)\n",
160 | ");\n",
161 | "\"\"\"\n",
162 | "create_like_table = \"\"\"\n",
163 | "CREATE TABLE IF NOT EXISTS like_ (\n",
164 | " id INT PRIMARY KEY AUTO_INCREMENT,\n",
165 | " user_id INT NOT NULL,\n",
166 | "\tpost_id\tINT NOT NULL,\n",
167 | "\tCONSTRAINT FOREIGN KEY(user_id) REFERENCES user (id),\n",
168 | "\tCONSTRAINT FOREIGN KEY(post_id) REFERENCES post (id)\n",
169 | ");\n",
170 | "\"\"\"\n",
171 | "execute_sql(connection, create_user_table)\n",
172 | "execute_sql(connection, create_post_table)\n",
173 | "execute_sql(connection, create_comment_table)\n",
174 | "execute_sql(connection, create_like_table)"
175 | ]
176 | },
177 | {
178 | "cell_type": "markdown",
179 | "metadata": {},
180 | "source": [
181 | "### Create Records"
182 | ]
183 | },
184 | {
185 | "cell_type": "code",
186 | "execution_count": null,
187 | "metadata": {},
188 | "outputs": [],
189 | "source": [
190 | "insert_user = \"\"\"\n",
191 | "INSERT INTO user (name, age, gender, nationality)\n",
192 | "VALUES\n",
193 | " ('James', 25, 'male', 'USA'),\n",
194 | " ('Leila', 32, 'female', 'France'),\n",
195 | " ('Brigitte', 35, 'female', 'England'),\n",
196 | " ('Mike', 40, 'male', 'Denmark'),\n",
197 | " ('Elizabeth', 21, 'female', 'Canada');\n",
198 | "\"\"\"\n",
199 | "execute_sql(connection, insert_user)"
200 | ]
201 | },
202 | {
203 | "cell_type": "code",
204 | "execution_count": null,
205 | "metadata": {},
206 | "outputs": [],
207 | "source": [
208 | "insert_post = \"\"\"\n",
209 | "INSERT INTO post (title, description, user_id)\n",
210 | "VALUES\n",
211 | " ('Happy', 'I am feeling very happy today', 1),\n",
212 | " ('Hot Weather', 'The weather is very hot today', 2),\n",
213 | " ('Help', 'I need some help with my work', 2),\n",
214 | " ('Great News', 'I am getting married', 1),\n",
215 | " ('Interesting Game', 'It was a fantastic game of tennis', 5),\n",
216 | " ('Party', 'Anyone up for a late-night party today?', 3);\n",
217 | "\"\"\"\n",
218 | "execute_sql(connection, insert_post) "
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": null,
224 | "metadata": {},
225 | "outputs": [],
226 | "source": [
227 | "insert_comment = \"\"\"\n",
228 | "INSERT INTO comment (text, user_id, post_id)\n",
229 | "VALUES\n",
230 | " ('Count me in', 1, 6),\n",
231 | " ('What sort of help?', 5, 3),\n",
232 | " ('Congrats buddy', 2, 4),\n",
233 | " ('I was rooting for Nadal though', 4, 5),\n",
234 | " ('Help with your thesis?', 2, 3),\n",
235 | " ('Many congratulations', 5, 4);\n",
236 | "\"\"\"\n",
237 | "execute_sql(connection, insert_comment) "
238 | ]
239 | },
240 | {
241 | "cell_type": "code",
242 | "execution_count": null,
243 | "metadata": {},
244 | "outputs": [],
245 | "source": [
246 | "insert_like_ = \"\"\"\n",
247 | "INSERT INTO like_ (user_id, post_id)\n",
248 | "VALUES\n",
249 | " (1, 6),\n",
250 | " (2, 3),\n",
251 | " (1, 5),\n",
252 | " (5, 4),\n",
253 | " (2, 4),\n",
254 | " (4, 2),\n",
255 | " (3, 6);\n",
256 | "\"\"\"\n",
257 | "execute_sql(connection, insert_like_)"
258 | ]
259 | },
260 | {
261 | "cell_type": "markdown",
262 | "metadata": {},
263 | "source": [
264 | "### Read Records"
265 | ]
266 | },
267 | {
268 | "cell_type": "code",
269 | "execution_count": null,
270 | "metadata": {},
271 | "outputs": [],
272 | "source": [
273 | "select_user = \"SELECT * FROM user\"\n",
274 | "users = execute_read(connection, select_user)\n",
275 | "for user in users:\n",
276 | " print(user)"
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "execution_count": null,
282 | "metadata": {},
283 | "outputs": [],
284 | "source": [
285 | "select_post = \"SELECT * FROM post\"\n",
286 | "posts = execute_read(connection, select_post)\n",
287 | "for post in posts:\n",
288 | " print(post)"
289 | ]
290 | },
291 | {
292 | "cell_type": "code",
293 | "execution_count": null,
294 | "metadata": {},
295 | "outputs": [],
296 | "source": [
297 | "select_comment = \"SELECT * FROM comment\"\n",
298 | "comments = execute_read(connection, select_comment)\n",
299 | "for comment in comments:\n",
300 | " print(comment)"
301 | ]
302 | },
303 | {
304 | "cell_type": "code",
305 | "execution_count": null,
306 | "metadata": {},
307 | "outputs": [],
308 | "source": [
309 | "select_likes = \"SELECT * FROM like_\"\n",
310 | "likes = execute_read(connection, select_likes)\n",
311 | "for like in likes:\n",
312 | " print(like)"
313 | ]
314 | },
315 | {
316 | "cell_type": "code",
317 | "execution_count": null,
318 | "metadata": {},
319 | "outputs": [],
320 | "source": [
321 | "# return the user ids and names, along with the description of the posts that these users posted\n",
322 | "select_user_post = \"\"\"\n",
323 | "SELECT user.id, user.name, post.description\n",
324 | "FROM post \n",
325 | "INNER JOIN user ON post.user_id = user.id;\n",
326 | "\"\"\"\n",
327 | "users_posts = execute_read(connection, select_user_post)\n",
328 | "for user_post in users_posts:\n",
329 | " print(user_post)"
330 | ]
331 | },
332 | {
333 | "cell_type": "code",
334 | "execution_count": null,
335 | "metadata": {},
336 | "outputs": [],
337 | "source": [
338 | "# returns all posts, along with the comments on the posts and \n",
339 | "# the names of the users who posted the comments\n",
340 | "select_post_comment_user = \"\"\"\n",
341 | "SELECT post.description as post, comment.text as comment, user.name as name\n",
342 | "FROM post \n",
343 | "INNER JOIN comment ON post.id = comment.post_id\n",
344 | "INNER JOIN user ON user.id = comment.user_id;\n",
345 | "\"\"\"\n",
346 | "posts_comments_users = execute_read(connection, select_post_comment_user)\n",
347 | "for post_comment_user in posts_comments_users:\n",
348 | " print(post_comment_user)"
349 | ]
350 | },
351 | {
352 | "cell_type": "code",
353 | "execution_count": null,
354 | "metadata": {},
355 | "outputs": [],
356 | "source": [
357 | "# returns the post, along with the total number of likes that the post received\n",
358 | "# Implicit joins use a comma-separated list of tables in the FROM clause, \n",
359 | "# and the join conditions are specified in the WHERE clause. \n",
360 | "# Implicit join is a deprecated syntax and is not recommended for use.\n",
361 | "# cursor.close()\n",
362 | "# cursor = connection.cursor()\n",
363 | "\n",
364 | "select_post_like_ = \"\"\"\n",
365 | "SELECT post.description as post_desc, COUNT(like_.id) as likes\n",
366 | "FROM post, like_\n",
367 | "WHERE post.id = like_.post_id\n",
368 | "GROUP BY like_.post_id;\n",
369 | "\"\"\"\n",
370 | "posts_likes = execute_read(connection, select_post_like_)\n",
371 | "for post_like in posts_likes:\n",
372 | " print(post_like)"
373 | ]
374 | },
375 | {
376 | "cell_type": "code",
377 | "execution_count": null,
378 | "metadata": {},
379 | "outputs": [],
380 | "source": [
381 | "# returns the post, along with the total number of likes that the post received\n",
382 | "# Explicit joins use the JOIN keyword to specify the join operation, \n",
383 | "# and the join conditions are specified in the ON clause. \n",
384 | "# This is a recommended syntax.\n",
385 | "\n",
386 | "select_posts_likes = \"\"\"\n",
387 | "SELECT \n",
388 | " post.description as post, COUNT(like_.id) as like_\n",
389 | "FROM \n",
390 | " post\n",
391 | "INNER JOIN like_ ON post.id = like_.post_id\n",
392 | "GROUP BY \n",
393 | " like_.post_id;\n",
394 | "\"\"\"\n",
395 | "posts_likes = execute_read(connection, select_posts_likes)\n",
396 | "for post_like in posts_likes:\n",
397 | " print(post_like)"
398 | ]
399 | },
400 | {
401 | "cell_type": "markdown",
402 | "metadata": {},
403 | "source": [
404 | "### Update Records"
405 | ]
406 | },
407 | {
408 | "cell_type": "code",
409 | "execution_count": null,
410 | "metadata": {},
411 | "outputs": [],
412 | "source": [
413 | "select_post_description = \"SELECT description FROM post WHERE id = 2\"\n",
414 | "posts_description = execute_read(connection, select_post_description)\n",
415 | "for post_description in posts_description:\n",
416 | " print(post_description)\n",
417 | "\n",
418 | "update_post_description = \"\"\"\n",
419 | "UPDATE\n",
420 | " post\n",
421 | "SET\n",
422 | " description = 'The weather has become pleasant now' \n",
423 | "WHERE\n",
424 | " id = 2; \n",
425 | "\"\"\"\n",
426 | "execute_sql(connection, update_post_description)"
427 | ]
428 | },
429 | {
430 | "cell_type": "markdown",
431 | "metadata": {},
432 | "source": [
433 | "### Delete Records"
434 | ]
435 | },
436 | {
437 | "cell_type": "code",
438 | "execution_count": null,
439 | "metadata": {},
440 | "outputs": [],
441 | "source": [
442 | "delete_comment = \"DELETE FROM comment WHERE id = 5;\"\n",
443 | "execute_sql(connection, delete_comment)"
444 | ]
445 | },
446 | {
447 | "cell_type": "markdown",
448 | "metadata": {},
449 | "source": [
450 | "### Close Connection"
451 | ]
452 | },
453 | {
454 | "cell_type": "code",
455 | "execution_count": null,
456 | "metadata": {},
457 | "outputs": [],
458 | "source": [
459 | "try:\n",
460 | " connection.commit()\n",
461 | " print(\"connection commit successfully.\")\n",
462 | " if 'cursor' in locals():\n",
463 | " cursor.close()\n",
464 | " print(\"cursor close successfully.\")\n",
465 | " if 'connection' in locals() and connection.is_connected():\n",
466 | " connection.close()\n",
467 | " print(\"connection close successfully.\")\n",
468 | "except mysql.connector.Error as e:\n",
469 | " print(f\"❌ MySQL Error occurred: {e}\")\n",
470 | "\n",
471 | "except Exception as e:\n",
472 | " print(f\"❌ An unexpected error occurred: {e}\")\n",
473 | "\n",
474 | "finally:\n",
475 | " # Ensure the connection is closed if it was opened\n",
476 | " if 'connection' in locals() and connection.is_connected():\n",
477 | " connection.close()\n",
478 | " print(\"🔒 Connection closed in finally block.\")"
479 | ]
480 | }
481 | ],
482 | "metadata": {
483 | "kernelspec": {
484 | "display_name": ".venv",
485 | "language": "python",
486 | "name": "python3"
487 | },
488 | "language_info": {
489 | "codemirror_mode": {
490 | "name": "ipython",
491 | "version": 3
492 | },
493 | "file_extension": ".py",
494 | "mimetype": "text/x-python",
495 | "name": "python",
496 | "nbconvert_exporter": "python",
497 | "pygments_lexer": "ipython3",
498 | "version": "3.13.2"
499 | }
500 | },
501 | "nbformat": 4,
502 | "nbformat_minor": 2
503 | }
504 |
--------------------------------------------------------------------------------
/files/ipynb/sqls_alternative.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 3,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import sys\n",
10 | "sys.path.append(\n",
11 | " \"/Users/jacky/Library/Mobile Documents/com~apple~CloudDocs/交大教學/DB/Lecture-Database/.venv/lib/python3.13/site-packages\")\n",
12 | "import mysql.connector\n",
13 | "conn = mysql.connector.connect(\n",
14 | " host=\"localhost\", # Change to your MySQL server host\n",
15 | " user=\"dbms_demo\", # Change to your username\n",
16 | " password=\"NYCUdbms314\", # Change to your password\n",
17 | " database=\"demo\" # Change to your database name\n",
18 | ")\n",
19 | "cur = conn.cursor()"
20 | ]
21 | },
22 | {
23 | "cell_type": "markdown",
24 | "metadata": {},
25 | "source": [
26 | "### Alt1 execute SQL cur.execute()"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": 4,
32 | "metadata": {},
33 | "outputs": [
34 | {
35 | "data": {
36 | "text/plain": [
    "<bound method CMySQLConnection.commit of <mysql.connector.connection_cext.CMySQLConnection object at 0x104f3e150>>"
38 | ]
39 | },
40 | "execution_count": 4,
41 | "metadata": {},
42 | "output_type": "execute_result"
43 | }
44 | ],
45 | "source": [
46 | "cur.execute(\"\"\"\n",
47 | " CREATE TABLE IF NOT EXISTS Name\n",
48 | " (first_name TEXT, last_name TEXT)\n",
49 | "\"\"\")\n",
    "conn.commit()"
51 | ]
52 | },
53 | {
54 | "cell_type": "markdown",
55 | "metadata": {},
56 | "source": [
57 | "### Alt2 execute SQL cur.executemany()"
58 | ]
59 | },
60 | {
61 | "cell_type": "code",
62 | "execution_count": 6,
63 | "metadata": {},
64 | "outputs": [],
65 | "source": [
66 | "name_list = [\n",
67 | " ('Smith', 'John'),\n",
68 | " ('Johnson', 'Jane'),\n",
69 | " ('Lee', 'Samantha'),\n",
70 | " ('Patel', 'Raj'),\n",
71 | " ('Hernandez', 'Maria'),\n",
72 | "]\n",
73 | "cur.executemany(\"\"\"\n",
74 | " INSERT INTO Name (first_name, last_name) VALUES (%s, %s)\n",
75 | " \"\"\", name_list)\n",
76 | "conn.commit()"
77 | ]
78 | },
79 | {
80 | "cell_type": "code",
81 | "execution_count": 9,
82 | "metadata": {},
83 | "outputs": [],
84 | "source": [
85 | "cur.close()\n",
86 | "conn.close()"
87 | ]
88 | }
89 | ],
90 | "metadata": {
91 | "kernelspec": {
92 | "display_name": ".venv",
93 | "language": "python",
94 | "name": "python3"
95 | },
96 | "language_info": {
97 | "codemirror_mode": {
98 | "name": "ipython",
99 | "version": 3
100 | },
101 | "file_extension": ".py",
102 | "mimetype": "text/x-python",
103 | "name": "python",
104 | "nbconvert_exporter": "python",
105 | "pygments_lexer": "ipython3",
106 | "version": "3.13.2"
107 | }
108 | },
109 | "nbformat": 4,
110 | "nbformat_minor": 2
111 | }
112 |
--------------------------------------------------------------------------------
/files/sql/create_function_and_procedure.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- MySQL Crash Course
3 | --
4 | -- Chapter 11 – Creating Functions and Procedures
5 | --
6 | -- You can copy and paste any of these commands into your favorite MySQL tool
7 | -- (like MySQL Workbench) and run them in your own MySQL environment.
8 | --
9 |
-- Create the tables for the chapter.
-- CREATE DATABASE IF NOT EXISTS / DROP TABLE IF EXISTS make this setup
-- script re-runnable, matching the style already used for the
-- county_population table below.
create database if not exists population;

use population;

drop table if exists state_population;

-- One row per state: state name and total population.
create table state_population
(
    state      varchar(100),
    population int
);

-- Seed a handful of states (2021-era census figures).
insert into state_population (state, population)
values ('New York',      19299981),
       ('Texas',         29730311),
       ('California',    39613493),
       ('Florida',       21944577),
       ('New Jersey',     9267130),
       ('Massachusetts',  6893000),
       ('Rhode Island',   1097379);
28 |
drop table if exists county_population;

-- One row per county: owning state, county name, and population.
create table county_population (state char(50), county varchar(100), population int);

-- New York county populations. Two names in the original data were
-- mangled by a bad "NY" -> "New York" text replacement and are fixed
-- here: 'AlbaNew York' -> 'Albany' and 'Allega' -> 'Allegany'.
insert into county_population values ('New York', 'Kings', 2736074);
insert into county_population values ('New York', 'Queens', 2405464);
insert into county_population values ('New York', 'New York', 1694251);
insert into county_population values ('New York', 'Suffolk', 1525920);
insert into county_population values ('New York', 'Bronx', 1472654);
insert into county_population values ('New York', 'Nassau', 1395774);
insert into county_population values ('New York', 'Westchester', 1004457);
insert into county_population values ('New York', 'Erie', 954236);
insert into county_population values ('New York', 'Monroe', 759443);
insert into county_population values ('New York', 'Richmond', 495747);
insert into county_population values ('New York', 'Onondaga', 476516);
insert into county_population values ('New York', 'Orange', 401310);
insert into county_population values ('New York', 'Rockland', 338329);
insert into county_population values ('New York', 'Albany', 314848);
insert into county_population values ('New York', 'Dutchess', 295911);
insert into county_population values ('New York', 'Saratoga', 235509);
insert into county_population values ('New York', 'Oneida', 232125);
insert into county_population values ('New York', 'Niagara', 212666);
insert into county_population values ('New York', 'Broome', 198683);
insert into county_population values ('New York', 'Ulster', 181851);
insert into county_population values ('New York', 'Rensselaer', 161130);
insert into county_population values ('New York', 'Schenectady', 158061);
insert into county_population values ('New York', 'Chautauqua', 127657);
insert into county_population values ('New York', 'Oswego', 117525);
insert into county_population values ('New York', 'Jefferson', 116721);
insert into county_population values ('New York', 'Ontario', 112458);
insert into county_population values ('New York', 'St. Lawrence', 108505);
insert into county_population values ('New York', 'Tompkins', 105740);
insert into county_population values ('New York', 'Putnam', 97668);
insert into county_population values ('New York', 'Steuben', 93584);
insert into county_population values ('New York', 'Wayne', 91283);
insert into county_population values ('New York', 'Chemung', 84148);
insert into county_population values ('New York', 'Clinton', 79843);
insert into county_population values ('New York', 'Sullivan', 78624);
insert into county_population values ('New York', 'Cattaraugus', 77042);
insert into county_population values ('New York', 'Cayuga', 76248);
insert into county_population values ('New York', 'Madison', 68016);
insert into county_population values ('New York', 'Warren', 65737);
insert into county_population values ('New York', 'Livingston', 61834);
insert into county_population values ('New York', 'Columbia', 61570);
insert into county_population values ('New York', 'Washington', 61302);
insert into county_population values ('New York', 'Herkimer', 60139);
insert into county_population values ('New York', 'Otsego', 58524);
insert into county_population values ('New York', 'Genesee', 58388);
insert into county_population values ('New York', 'Fulton', 53324);
insert into county_population values ('New York', 'Montgomery', 49532);
insert into county_population values ('New York', 'Tioga', 48455);
insert into county_population values ('New York', 'Greene', 47931);
insert into county_population values ('New York', 'Franklin', 47555);
insert into county_population values ('New York', 'Chenango', 47220);
insert into county_population values ('New York', 'Cortland', 46809);
insert into county_population values ('New York', 'Allegany', 46456);
insert into county_population values ('New York', 'Delaware', 44308);
insert into county_population values ('New York', 'Wyoming', 40531);
insert into county_population values ('New York', 'Orleans', 40343);
insert into county_population values ('New York', 'Essex', 37381);
insert into county_population values ('New York', 'Seneca', 33814);
insert into county_population values ('New York', 'Schoharie', 29714);
insert into county_population values ('New York', 'Lewis', 26582);
insert into county_population values ('New York', 'Yates', 24774);
insert into county_population values ('New York', 'Schuyler', 17898);
insert into county_population values ('New York', 'Hamilton', 5107);
95 |
-- Create the f_get_state_population() function
use population;

drop function if exists f_get_state_population;

delimiter //
-- Returns the stored population for the given state, or NULL when the
-- state has no row in state_population.
create function f_get_state_population (
    state_param varchar(100)
)
returns int
deterministic reads sql data
begin
    declare v_population int;

    select population
    into   v_population
    from   state_population
    where  state = state_param;

    return v_population;
end//

delimiter ;

-- Call the f_get_state_population() function
select f_get_state_population('New York');

-- Call the f_get_state_population() function from a WHERE clause
select *
from state_population
where population > f_get_state_population('New York');
128 |
-- Create the p_set_state_population() procedure.
-- BUG FIX: in the original script the f_get_world_population section was
-- spliced into the middle of this delimiter block, so DROP FUNCTION ran
-- under the // delimiter and CREATE PROCEDURE ... end// ran after the
-- delimiter had been reset to ';' — both fail. Each routine now sits in
-- its own complete DELIMITER block.
use population;

drop procedure if exists p_set_state_population;

delimiter //
-- Replaces the state_population row for the given state with the sum of
-- that state's county populations.
create procedure p_set_state_population(
    in state_param varchar(100)
)
begin
    delete from state_population
    where state = state_param;

    insert into state_population
    (
        state,
        population
    )
    select state,
           sum(population)
    from county_population
    where state = state_param
    group by state;

end//
delimiter ;

-- Call the p_set_state_population() procedure
call p_set_state_population('New York');

drop function if exists f_get_world_population;

-- f_get_world_population() with delimiters
delimiter //
create function f_get_world_population()
returns bigint
deterministic no sql
begin
    return(7978759141);
end//
delimiter ;

drop function if exists f_get_world_population;

-- f_get_world_population() without delimiters: a single-statement body
-- needs no DELIMITER change.
create function f_get_world_population()
returns bigint
deterministic no sql
return(7978759141);

select f_get_world_population();
187 |
-- Create the p_set_and_show_state_population() procedure
use population;

drop procedure if exists p_set_and_show_state_population;

delimiter //

-- Recomputes the given state's population from its county rows, stores
-- it in state_population, and reports the value that was set.
create procedure p_set_and_show_state_population(
    in state_param varchar(100)
)
begin
    declare v_population int;

    -- Discard any stale row for this state.
    delete from state_population
    where state = state_param;

    -- Total the county populations for this state.
    select sum(population)
    into   v_population
    from   county_population
    where  state = state_param;

    insert into state_population (state, population)
    values (state_param, v_population);

    -- Echo what was just stored.
    select concat(
        'Setting the population for ',
        state_param,
        ' of ',
        v_population
    );
end//

delimiter ;

-- Call the p_set_and_show_state_population() procedure
call p_set_and_show_state_population('New York');

-- Show all procedures and functions in the population database
select routine_type,
       routine_name
from information_schema.routines
where routine_schema = 'population';
238 |
-- Create the weird_math database and the f_math_trick() function
create database if not exists weird_math;

use weird_math;

drop function if exists f_math_trick;

delimiter //

-- Math trick: ((n * 3) + 6) / 3 - n always equals 2.
-- FIX: the original used session user variables (@a..@e), so calling the
-- function silently clobbered the caller's session state. DECLAREd local
-- variables keep the computation private to the function.
create function f_math_trick(
    input_param int
)
returns int
no sql
begin
    declare v_tripled  int;
    declare v_plus_six int;
    declare v_third    int;
    declare v_result   int;

    set v_tripled  = input_param * 3;
    set v_plus_six = v_tripled + 6;
    -- Exact division: v_plus_six is always a multiple of 3.
    set v_third    = v_plus_six / 3;
    set v_result   = v_third - input_param;

    return v_result;
end//

delimiter ;

-- Call the f_math_trick() function. I hope you like 2's. ;-)
select f_math_trick(12);

select f_math_trick(-28),
       f_math_trick(0),
       f_math_trick(175);
271 |
-- Create the p_compare_population() procedure
use population;

drop procedure if exists p_compare_population;

delimiter //

-- Compares the stored state population against the sum of that state's
-- county populations and reports whether the two values agree.
create procedure p_compare_population(
    in state_param varchar(100)
)
begin
    declare v_state_pop  int;
    declare v_county_pop int;

    select population
    into   v_state_pop
    from   state_population
    where  state = state_param;

    select sum(population)
    into   v_county_pop
    from   county_population
    where  state = state_param;

    if (v_state_pop = v_county_pop) then
        select 'The population values match';
    else
        select 'The population values are different';
    end if;

    -- To display one of THREE messages instead, replace the if/else
    -- above with this code (remove the leading comment dashes first):

    -- if (v_state_pop = v_county_pop) then
    --     select 'The population values match';
    -- elseif (v_state_pop > v_county_pop) then
    --     select 'State population is more than the sum of county population';
    -- else
    --     select 'The sum of county population is more than the state population';
    -- end if;
end//

delimiter ;

-- Call the p_compare_population() procedure
call p_compare_population('New York');
318 |
-- Create the p_population_group() procedure
use population;

drop procedure if exists p_population_group;

delimiter //

-- Buckets the given state's population into one of three size groups.
-- (IF/ELSEIF chain is equivalent to the searched CASE used elsewhere.)
create procedure p_population_group(
    in state_param varchar(100)
)
begin
    declare v_population int;

    select population
    into   v_population
    from   state_population
    where  state = state_param;

    if v_population > 30000000 then
        select 'Over 30 Million';
    elseif v_population > 10000000 then
        select 'Between 10M and 30M';
    else
        select 'Under 10 Million';
    end if;

end//

delimiter ;

-- Call the p_population_group() procedure three times
call p_population_group('California');
call p_population_group('New York');
call p_population_group('Rhode Island');
351 |
-- Create the p_endless_loop() procedure. This creates an endless loop.
drop procedure if exists p_endless_loop;

delimiter //
-- WARNING: intentionally loops forever; kill the session to stop it.
create procedure p_endless_loop()
begin
    while true do
        select 'Looping Again';
    end while;
end;
//
delimiter ;

-- Call the p_endless_loop() procedure. Warning: This kicks off an endless loop.
call p_endless_loop();
367 |
-- Create the procedure p_more_sensible_loop()
drop procedure if exists p_more_sensible_loop;

delimiter //
-- Prints 'Looping Again' ten times, exiting a labeled LOOP via LEAVE.
-- Leaves @cnt = 10 in the session, like the original.
create procedure p_more_sensible_loop()
begin
    set @cnt = 0;
    counted: loop
        select 'Looping Again';
        set @cnt = @cnt + 1;
        if @cnt >= 10 then
            leave counted;
        end if;
    end loop counted;
end;
//
delimiter ;

-- Call the procedure p_more_sensible_loop()
call p_more_sensible_loop();
388 |
-- Create the procedure p_repeat_until_loop()
drop procedure if exists p_repeat_until_loop;

delimiter //
-- Prints 'Looping Again' ten times using REPEAT ... UNTIL.
-- Note: the UNTIL condition takes NO trailing semicolon —
-- "until @cnt = 10" and "end repeat;" form a single construct.
create procedure p_repeat_until_loop()
begin
set @cnt = 0;
repeat
select 'Looping Again';
set @cnt = @cnt + 1;
until @cnt = 10
end repeat;
end;
//
delimiter ;

-- Call the procedure p_repeat_until_loop()
call p_repeat_until_loop();
407 |
-- Create procedure p_while_loop()
drop procedure if exists p_while_loop;

delimiter //
-- Prints 'Looping Again' ten times with a WHILE loop; @cnt ends at 10.
create procedure p_while_loop()
begin
    set @cnt = 0;
    while @cnt <= 9 do
        select 'Looping Again';
        set @cnt = 1 + @cnt;
    end while;
end;
//
delimiter ;

-- Call procedure p_while_loop()
call p_while_loop();
425 |
-- Create procedure p_get_county_population()
use population;

drop procedure if exists p_get_county_population;

delimiter //

-- Lists the counties of the given state with their populations
-- (digit-grouped via FORMAT), largest county first.
create procedure p_get_county_population(
in state_param varchar(100)
)
begin
select county,
format(population,0)
from county_population
where state = state_param
order by population desc;
end//

delimiter ;

-- Call procedure p_get_county_population()
call p_get_county_population('New York');
448 |
449 | -- Create procedure p_split_big_ny_counties()
450 | drop procedure if exists p_split_big_ny_counties;
451 |
452 | delimiter //
453 |
454 | create procedure p_split_big_ny_counties()
455 | begin
456 | declare v_state varchar(100);
457 | declare v_county varchar(100);
458 | declare v_population int;
459 |
460 | declare done bool default false;
461 |
462 | declare county_cursor cursor for
463 | select state,
464 | county,
465 | population
466 | from county_population
467 | where state = 'New York'
468 | and population > 2000000;
469 |
470 | declare continue handler for not found set done = true;
471 |
472 | open county_cursor;
473 |
474 | fetch_loop: loop
475 | fetch county_cursor into v_state, v_county, v_population;
476 |
477 | if done then
478 | leave fetch_loop;
479 | end if;
480 |
481 | set @cnt = 1;
482 |
483 | split_loop: loop
484 |
485 | insert into county_population
486 | (
487 | state,
488 | county,
489 | population
490 | )
491 | values
492 | (
493 | v_state,
494 | concat(v_county,'-',@cnt),
495 | round(v_population/2)
496 | );
497 |
498 | set @cnt = @cnt + 1;
499 |
500 | if @cnt > 2 then
501 | leave split_loop;
502 | end if;
503 |
504 | end loop split_loop;
505 |
506 | -- delete the original county
507 | delete from county_population where state = v_state and county = v_county;
508 |
509 | end loop fetch_loop;
510 |
511 | close county_cursor;
512 | end;
513 | //
514 |
515 | delimiter ;
516 |
517 | -- Call procedure p_split_big_ny_counties()
518 | call p_split_big_ny_counties();
519 |
520 | -- How do those counties look now? Did they get split?
521 | select *
522 | from county_population
523 | order by population desc;
524 |
525 | -- Create procedure p_return_state_population()
526 | use population;
527 |
528 | drop procedure if exists p_return_state_population;
529 |
530 | delimiter //
531 |
532 | create procedure p_return_state_population(
533 | in state_param varchar(100),
534 | out current_pop_param int
535 | )
536 | begin
537 | select population
538 | into current_pop_param
539 | from state_population
540 | where state = state_param;
541 | end//
542 |
543 | delimiter ;
544 |
545 | -- Call procedure p_return_state_population()
546 | call p_return_state_population('New York', @pop_ny);
547 |
548 | -- What value did the procedure return in the @pop_ny user variable?
549 | select @pop_ny;
550 |
551 | -- Create procedure p_population_caller()
552 | use population;
553 |
554 | drop procedure if exists p_population_caller;
555 |
556 | delimiter //
557 |
558 | create procedure p_population_caller()
559 | begin
560 | call p_return_state_population('New York',@pop_ny);
561 | call p_return_state_population('New Jersey',@pop_nj);
562 |
563 | set @pop_ny_and_nj = @pop_ny + @pop_nj;
564 |
565 | select concat(
566 | 'The population of the NY and NJ area is ',
567 | @pop_ny_and_nj);
568 |
569 | end//
570 |
571 | delimiter ;
572 |
573 | -- Call procedure p_population_caller()
574 | call p_population_caller();
575 |
576 |
577 |
578 | -- Try It Yourself Exercises
579 |
580 | -- Setup for exercise 11-1
581 | create database diet;
582 |
583 | use diet;
584 |
585 | create table calorie
586 | (
587 | food varchar(100),
588 | calorie_count int
589 | );
590 |
591 | insert into calorie
592 | (
593 | food,
594 | calorie_count
595 | )
596 | values
597 | ('banana', 110),
598 | ('pizza', 700),
599 | ('apple', 185);
600 |
601 | -- Exercise 11-1: Create the f_get_calorie_count() function
602 | use diet;
603 |
604 | drop function if exists f_get_calorie_count;
605 |
606 | delimiter //
607 | create function f_get_calorie_count (
608 | food_param varchar(100)
609 | )
610 | returns int
611 | deterministic reads sql data
612 | begin
613 | declare calorie_count_var int;
614 |
615 | select calorie_count
616 | into calorie_count_var
617 | from calorie
618 | where food = food_param;
619 |
620 | return(calorie_count_var);
621 |
622 | end//
623 |
624 | delimiter ;
625 |
626 | -- Call the f_get_calorie_count() function to get the calories for pizza
627 | select f_get_calorie_count('pizza');
628 |
629 | -- Setup for exercise 11-2
630 | create database age;
631 |
632 | use age;
633 |
634 | create table family_member_age
635 | (
636 | person varchar(100),
637 | age int
638 | );
639 |
640 | insert into family_member_age
641 | values
642 | ('Junior', 7),
643 | ('Ricky', 16),
644 | ('Grandpa', 102);
645 |
646 | -- Exercise 11-2: Create the p_get_age_group() procedure to get a family member's age group
647 | drop procedure if exists p_get_age_group;
648 |
649 | delimiter //
650 |
651 | create procedure p_get_age_group(
652 | in family_member varchar(100)
653 | )
654 | begin
655 | declare age_var int;
656 |
657 | select age
658 | into age_var
659 | from family_member_age
660 | where person = family_member;
661 |
662 | case
663 | when age_var < 13 then select 'Child';
664 | when age_var < 20 then select 'Teenager';
665 | else select 'Adult';
666 | end case;
667 |
668 | end//
669 |
670 | delimiter ;
671 |
672 | -- Call the p_get_age_group() procedure to get a family member's age group
673 | call p_get_age_group('Ricky');
674 | call p_get_age_group('Junior');
675 | call p_get_age_group('Grandpa');
676 |
677 | -- Exercise 11-3: Create the p_get_food() procedure
678 | use diet;
679 |
680 | drop procedure if exists p_get_food;
681 |
682 | delimiter //
683 | create procedure p_get_food()
684 | begin
685 | select *
686 | from calorie
687 | order by calorie_count desc;
688 |
689 | end//
690 |
691 | delimiter ;
692 |
693 | -- Call the procedure to get the list of food and calories
694 | call p_get_food();
695 |
--------------------------------------------------------------------------------
/files/sql/p_split_big_ny_counties.sql:
--------------------------------------------------------------------------------
1 | -- prepare state_population table
2 | drop table if exists state_population;
3 | create table state_population
4 | (
5 | state varchar(100),
6 | population int
7 | );
8 |
9 | insert into state_population values ('New York', 19299981);
10 | insert into state_population values ('Texas', 29730311);
11 | insert into state_population values ('California', 39613493);
12 | insert into state_population values ('Florida', 21944577);
13 | insert into state_population values ('New Jersey', 9267130);
14 | insert into state_population values ('Massachusetts', 6893000);
15 | insert into state_population values ('Rhode Island', 1097379);
16 |
17 | -- prepare county_population table
18 | drop table if exists county_population;
19 |
20 | create table county_population (state char(50), county varchar(100), population int);
21 |
22 | insert into county_population values ('New York', 'Kings', 2736074);
23 | insert into county_population values ('New York', 'Queens', 2405464);
24 | insert into county_population values ('New York', 'New York', 1694251);
25 | insert into county_population values ('New York', 'Suffolk', 1525920);
26 | insert into county_population values ('New York', 'Bronx', 1472654);
27 | insert into county_population values ('New York', 'Nassau', 1395774);
28 | insert into county_population values ('New York', 'Westchester', 1004457);
29 | insert into county_population values ('New York', 'Erie', 954236);
30 | insert into county_population values ('New York', 'Monroe', 759443);
31 | insert into county_population values ('New York', 'Richmond', 495747);
32 | insert into county_population values ('New York', 'Onondaga', 476516);
33 | insert into county_population values ('New York', 'Orange', 401310);
34 | insert into county_population values ('New York', 'Rockland', 338329);
35 | insert into county_population values ('New York', 'AlbaNew York', 314848);
36 | insert into county_population values ('New York', 'Dutchess', 295911);
37 | insert into county_population values ('New York', 'Saratoga', 235509);
38 | insert into county_population values ('New York', 'Oneida', 232125);
39 | insert into county_population values ('New York', 'Niagara', 212666);
40 | insert into county_population values ('New York', 'Broome', 198683);
41 | insert into county_population values ('New York', 'Ulster', 181851);
42 | insert into county_population values ('New York', 'Rensselaer', 161130);
43 | insert into county_population values ('New York', 'Schenectady', 158061);
44 | insert into county_population values ('New York', 'Chautauqua', 127657);
45 | insert into county_population values ('New York', 'Oswego', 117525);
46 | insert into county_population values ('New York', 'Jefferson', 116721);
47 | insert into county_population values ('New York', 'Ontario', 112458);
48 | insert into county_population values ('New York', 'St. Lawrence', 108505);
49 | insert into county_population values ('New York', 'Tompkins', 105740);
50 | insert into county_population values ('New York', 'Putnam', 97668);
51 | insert into county_population values ('New York', 'Steuben', 93584);
52 | insert into county_population values ('New York', 'Wayne', 91283);
53 | insert into county_population values ('New York', 'Chemung', 84148);
54 | insert into county_population values ('New York', 'Clinton', 79843);
55 | insert into county_population values ('New York', 'Sullivan', 78624);
56 | insert into county_population values ('New York', 'Cattaraugus', 77042);
57 | insert into county_population values ('New York', 'Cayuga', 76248);
58 | insert into county_population values ('New York', 'Madison', 68016);
59 | insert into county_population values ('New York', 'Warren', 65737);
60 | insert into county_population values ('New York', 'Livingston', 61834);
61 | insert into county_population values ('New York', 'Columbia', 61570);
62 | insert into county_population values ('New York', 'Washington', 61302);
63 | insert into county_population values ('New York', 'Herkimer', 60139);
64 | insert into county_population values ('New York', 'Otsego', 58524);
65 | insert into county_population values ('New York', 'Genesee', 58388);
66 | insert into county_population values ('New York', 'Fulton', 53324);
67 | insert into county_population values ('New York', 'Montgomery', 49532);
68 | insert into county_population values ('New York', 'Tioga', 48455);
69 | insert into county_population values ('New York', 'Greene', 47931);
70 | insert into county_population values ('New York', 'Franklin', 47555);
71 | insert into county_population values ('New York', 'Chenango', 47220);
72 | insert into county_population values ('New York', 'Cortland', 46809);
73 | insert into county_population values ('New York', 'Allega', 46456);
74 | insert into county_population values ('New York', 'Delaware', 44308);
75 | insert into county_population values ('New York', 'Wyoming', 40531);
76 | insert into county_population values ('New York', 'Orleans', 40343);
77 | insert into county_population values ('New York', 'Essex', 37381);
78 | insert into county_population values ('New York', 'Seneca', 33814);
79 | insert into county_population values ('New York', 'Schoharie', 29714);
80 | insert into county_population values ('New York', 'Lewis', 26582);
81 | insert into county_population values ('New York', 'Yates', 24774);
82 | insert into county_population values ('New York', 'Schuyler', 17898);
83 | insert into county_population values ('New York', 'Hamilton', 5107);
84 |
85 | -- Imagine you're a database developer working for the State of New York.
86 | -- You've been asked to break up counties that have over 2 million people into two smaller counties, each containing half of the original county's population.
87 | -- For example, Kings and Queens
88 |
89 | drop procedure if exists p_split_big_ny_counties;
90 |
91 | delimiter //
92 | create procedure p_split_big_ny_counties()
93 | begin
94 | declare v_state varchar(100);
95 | declare v_county varchar(100);
96 | declare v_population int;
97 |
98 | declare done bool default false;
99 |
100 | declare county_cursor cursor for
101 | select state,
102 | county,
103 | population
104 | from county_population
105 | where state = 'New York'
106 | and population > 2000000;
107 |
108 | declare continue handler for not found set done = true;
109 |
110 | open county_cursor;
111 |
112 | fetch_loop: loop
113 | fetch county_cursor into v_state, v_county, v_population;
114 |
115 | if done then
116 | leave fetch_loop;
117 | end if;
118 |
119 | set @cnt = 1;
120 |
121 | split_loop: loop
122 |
123 | insert into county_population
124 | (
125 | state,
126 | county,
127 | population
128 | )
129 | values
130 | (
131 | v_state,
132 | concat(v_county,'-',@cnt),
133 | round(v_population/2)
134 | );
135 |
136 | set @cnt = @cnt + 1;
137 |
138 | if @cnt > 2 then
139 | leave split_loop;
140 | end if;
141 |
142 | end loop split_loop;
143 |
144 | -- delete the original county
145 | delete from county_population where state = v_state and county = v_county;
146 |
147 | end loop fetch_loop;
148 | close county_cursor;
149 | end;
150 |
151 | //
152 |
153 | delimiter ;
154 |
155 | -- Call procedure p_split_big_ny_counties()
156 | set sql_safe_updates = 0;
157 | call p_split_big_ny_counties();
158 | set sql_safe_updates = 1;
159 |
--------------------------------------------------------------------------------
/files/sql/triggers_after.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- MySQL Crash Course
3 | --
4 | -- Chapter 12 – Creating Triggers
5 | --
6 | -- You can copy and paste any of these commands into your favorite MySQL tool
7 | -- (like MySQL Workbench) and run them in your own MySQL environment.
8 | --
9 |
10 | -- Create the tables for the chapter
11 | create database accounting;
12 |
13 | use accounting;
14 |
15 | -- Create a table for account payable data for a company
16 | create table payable
17 | (
18 | payable_id int,
19 | company varchar(100),
20 | amount numeric(8,2),
21 | service varchar(100)
22 | );
23 |
24 | insert into payable
25 | (
26 | payable_id,
27 | company,
28 | amount,
29 | service
30 | )
31 | values
32 | (1, 'Acme HVAC', 123.32, 'Repair of Air Conditioner'),
33 | (2, 'Initech Printers', 1459.00, 'New Printers'),
34 | (3, 'Hooli Cleaning', 4398.55, 'Janitorial Services');
35 |
36 | -- Create the payable_audit table that will track changes to the payable table
37 | create table payable_audit
38 | (
39 | audit_datetime datetime,
40 | audit_user varchar(50),
41 | audit_change varchar(500)
42 | );
43 |
44 | -- Create an after insert trigger
45 | drop trigger if exists tr_payable_ai;
46 |
47 | delimiter //
48 |
49 | create trigger tr_payable_ai
50 | after insert on payable
51 | for each row
52 | begin
53 | insert into payable_audit
54 | (
55 | audit_datetime,
56 | audit_user,
57 | audit_change
58 | )
59 | values
60 | (
61 | now(),
62 | user(),
63 | concat(
64 | 'New row for payable_id ',
65 | new.payable_id,
66 | '. Company: ',
67 | new.company,
68 | '. Amount: ',
69 | new.amount,
70 | '. Service: ',
71 | new.service
72 | )
73 | );
74 | end//
75 |
76 | delimiter ;
77 |
78 | -- Insert a row into the payable table to test the insert trigger
79 | insert into payable
80 | (
81 | payable_id,
82 | company,
83 | amount,
84 | service
85 | )
86 | values
87 | (
88 | 4,
89 | 'Sirius Painting',
90 | 451.45,
91 | 'Painting the lobby'
92 | );
93 |
94 | -- Did a row get logged in the payable_audit table showing what was inserted into the payable table?
95 | select * from payable_audit;
96 |
97 | -- Create an after delete trigger
98 | use accounting;
99 |
100 | drop trigger if exists tr_payable_ad;
101 |
102 | delimiter //
103 |
104 | create trigger tr_payable_ad
105 | after delete on payable
106 | for each row
107 | begin
108 | insert into payable_audit
109 | (
110 | audit_datetime,
111 | audit_user,
112 | audit_change
113 | )
114 | values
115 | (
116 | now(),
117 | user(),
118 | concat(
119 | 'Deleted row for payable_id ',
120 | old.payable_id,
121 | '. Company: ',
122 | old.company,
123 | '. Amount: ',
124 | old.amount,
125 | '. Service: ',
126 | old.service
127 | )
128 | );
129 | end//
130 |
131 | delimiter ;
132 |
133 | -- Delete a row from the payable table to test the delete trigger
134 | set sql_safe_updates = 0;
135 | delete from payable where company = 'Sirius Painting';
136 | set sql_safe_updates = 1;
137 |
138 |
139 | -- Is there a row in the payable_audit table that logs the deleted row from the payable table?
140 | select * from payable_audit;
141 |
142 | -- Create an after update trigger
143 | delimiter //
144 |
145 | create trigger tr_payable_au
146 | after update on payable
147 | for each row
148 | begin
149 | set @change_msg =
150 | concat(
151 | 'Updated row for payable_id ',
152 | old.payable_id
153 | );
154 |
155 | if (old.company != new.company) then
156 | set @change_msg =
157 | concat(
158 | @change_msg,
159 | '. Company changed from ',
160 | old.company,
161 | ' to ',
162 | new.company
163 | );
164 | end if;
165 |
166 | if (old.amount != new.amount) then
167 | set @change_msg =
168 | concat(
169 | @change_msg,
170 | '. Amount changed from ',
171 | old.amount,
172 | ' to ',
173 | new.amount
174 | );
175 | end if;
176 |
177 | if (old.service != new.service) then
178 | set @change_msg =
179 | concat(
180 | @change_msg,
181 | '. Service changed from ',
182 | old.service,
183 | ' to ',
184 | new.service
185 | );
186 | end if;
187 |
188 | insert into payable_audit
189 | (
190 | audit_datetime,
191 | audit_user,
192 | audit_change
193 | )
194 | values
195 | (
196 | now(),
197 | user(),
198 | @change_msg
199 | );
200 |
201 | end//
202 |
203 | delimiter ;
204 |
205 | -- Test the trigger by updating a row
206 | update payable
207 | set amount = 100000,
208 | company = 'House of Larry'
209 | where payable_id = 3;
210 |
211 | -- Did the update get logged?
212 | select * from payable_audit;
213 |
--------------------------------------------------------------------------------
/files/sql/triggers_before.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- MySQL Crash Course
3 | --
4 | -- Chapter 12 – Creating Triggers
5 | --
6 | -- You can copy and paste any of these commands into your favorite MySQL tool
7 | -- (like MySQL Workbench) and run them in your own MySQL environment.
8 | --
9 |
10 | -- Create the tables for the chapter
11 |
12 | create database bank;
13 |
14 | use bank;
15 |
16 | create table credit
17 | (
18 | customer_id int,
19 | customer_name varchar(100),
20 | credit_score int
21 | );
22 |
23 | -- Create a before insert trigger
24 | drop trigger if exists tr_credit_bi;
25 |
26 | delimiter //
27 |
28 | create trigger tr_credit_bi
29 | before insert on credit
30 | for each row
31 | begin
32 | if (new.credit_score < 300) then
33 | set new.credit_score = 300;
34 | end if;
35 |
36 | if (new.credit_score > 850) then
37 | set new.credit_score = 850;
38 | end if;
39 |
40 | end//
41 |
42 | delimiter ;
43 |
44 | -- Test the trigger by inserting some values into the credit table
45 | insert into credit
46 | (
47 | customer_id,
48 | customer_name,
49 | credit_score
50 | )
51 | values
52 | (1, 'Milton Megabucks', 987),
53 | (2, 'Patty Po', 145),
54 | (3, 'Vinny Middle-Class', 702);
55 |
56 | -- Create the before delete trigger
57 | use bank;
58 |
59 | delimiter //
60 |
61 | create trigger tr_credit_bd
62 | before delete on credit
63 | for each row
64 | begin
65 | if (old.credit_score > 750) then
66 | signal sqlstate '45000'
67 | set message_text = 'Cannot delete scores over 750';
68 | end if;
69 |
70 | end//
71 |
72 | delimiter ;
73 |
74 | -- Test the trigger
75 | delete from credit where customer_id = 1;
76 | delete from credit where customer_id = 2;
77 |
78 | -- Try it Yourself Exercises
79 |
80 | -- Set up for chapter 12 exercises
81 | create database jail;
82 |
83 | use jail;
84 |
85 | create table alcatraz_prisoner
86 | (
87 | prisoner_id int,
88 | prisoner_name varchar(100)
89 | );
90 |
91 | insert into alcatraz_prisoner
92 | (
93 | prisoner_id,
94 | prisoner_name
95 | )
96 | values
97 | (85, 'Al Capone'),
98 | (594, 'Robert Stroud'),
99 | (1476, 'John Anglin');
100 |
101 | -- Exercise 12-1: Create the audit table
102 | create table alcatraz_prisoner_audit
103 | (
104 | audit_datetime datetime,
105 | audit_user varchar(100),
106 | audit_change varchar(200)
107 | );
108 |
109 | -- Exercise 12-2: Create and test an after insert audit trigger
110 | use jail;
111 |
112 | drop trigger if exists tr_alcatraz_prisoner_ai;
113 |
114 | -- Create the trigger
115 | delimiter //
116 |
117 | create trigger tr_alcatraz_prisoner_ai
118 | after insert on alcatraz_prisoner
119 | for each row
120 | begin
121 | insert into alcatraz_prisoner_audit
122 | (
123 | audit_datetime,
124 | audit_user,
125 | audit_change
126 | )
127 | values
128 | (
129 | now(),
130 | user(),
131 | concat(
132 | 'New row for Prisoner ID ',
133 | new.prisoner_id,
134 | '. Prisoner Name: ',
135 | new.prisoner_name
136 | )
137 | );
138 | end//
139 |
140 | delimiter ;
141 |
142 | -- Test the trigger by inserting a new prisoner
143 | insert into alcatraz_prisoner
144 | (
145 | prisoner_id,
146 | prisoner_name
147 | )
148 | values
149 | (
150 | 117,
151 | 'Machine Gun Kelly'
152 | );
153 |
154 | -- Did the row get inserted into the alcatraz_prisoner table?
155 | select * from alcatraz_prisoner;
156 |
157 | -- Did the new prisoner get logged in the audit table?
158 | select * from alcatraz_prisoner_audit;
159 |
160 | -- Set up for exercise 12-3
161 | create database exam;
162 |
163 | use exam;
164 |
165 | create table grade
166 | (
167 | student_name varchar(100),
168 | score int
169 | );
170 |
171 | insert into grade
172 | (
173 | student_name,
174 | score
175 | )
176 | values
177 | ('Billy',79),
178 | ('Jane', 87),
179 | ('Paul', 93);
180 |
181 | -- Exercise 12-3: Create the trigger
182 | use exam;
183 |
184 | delimiter //
185 |
186 | create trigger tr_grade_bu
187 | before update on grade
188 | for each row
189 | begin
190 | if (new.score < 50) then
191 | set new.score = 50;
192 | end if;
193 |
194 | if (new.score > 100) then
195 | set new.score = 100;
196 | end if;
197 |
198 | end//
199 |
200 | delimiter ;
201 |
202 | -- Test the trigger by updating some grades
203 | update grade set score = 38 where student_name = 'Billy';
204 | update grade set score = 107 where student_name = 'Jane';
205 | update grade set score = 95 where student_name = 'Paul';
206 |
207 | -- Are there no grades lower than 50 or higher than 100?
208 | select * from grade;
209 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohappyeyeballs==2.4.4
2 | aiohttp==3.11.10
3 | aiosignal==1.3.1
4 | appnope==0.1.4
5 | asttokens==3.0.0
6 | attrs==24.2.0
7 | beautifulsoup4==4.12.3
8 | certifi==2024.8.30
9 | charset-normalizer==3.4.0
10 | comm==0.2.2
11 | contourpy==1.3.1
12 | cycler==0.12.1
13 | debugpy==1.8.9
14 | decorator==5.1.1
15 | executing==2.1.0
16 | fonttools==4.55.3
17 | frozenlist==1.5.0
18 | idna==3.10
19 | ipykernel==6.29.5
20 | ipython==8.30.0
21 | jedi==0.19.2
22 | jupyter_client==8.6.3
23 | jupyter_core==5.7.2
24 | kiwisolver==1.4.7
25 | lxml==5.3.0
26 | matplotlib==3.9.3
27 | matplotlib-inline==0.1.7
28 | multidict==6.1.0
29 | nest-asyncio==1.6.0
30 | numpy==2.2.0
31 | openai==0.28.0
32 | packaging==24.2
33 | pandas==2.2.3
34 | parso==0.8.4
35 | pexpect==4.9.0
36 | pillow==11.0.0
37 | platformdirs==4.3.6
38 | prompt_toolkit==3.0.48
39 | propcache==0.2.1
40 | psutil==6.1.0
41 | ptyprocess==0.7.0
42 | pure_eval==0.2.3
43 | Pygments==2.18.0
44 | pyparsing==3.2.0
45 | python-dateutil==2.9.0.post0
46 | pytz==2024.2
47 | pyzmq==26.2.0
48 | requests==2.32.3
49 | six==1.17.0
50 | soupsieve==2.6
51 | stack-data==0.6.3
52 | tornado==6.5
53 | tqdm==4.67.1
54 | traitlets==5.14.3
55 | twstock==1.4.0
56 | tzdata==2024.2
57 | urllib3==2.2.3
58 | wcwidth==0.2.13
59 | yarl==1.18.3
60 | mysql-connector-python==9.2.0
61 |
--------------------------------------------------------------------------------