>', self.get_selected_row)\n",
563 | " \n",
564 | " b1 = Button(window, text=\"View all\", width=12, command=self.view_command)\n",
565 | " b1.grid(row=2, column=3)\n",
566 | " \n",
567 | " b2 = Button(window, text=\"Search entry\", width=12, command=self.search_command)\n",
568 | " b2.grid(row=3, column=3)\n",
569 | " \n",
570 | " b3 = Button(window, text=\"Add entry\", width=12, command=self.add_command)\n",
571 | " b3.grid(row=4, column=3)\n",
572 | " \n",
573 | " b4 = Button(window, text=\"Update selected\", width=12, command=self.update_command)\n",
574 | " b4.grid(row=5, column=3)\n",
575 | " \n",
576 | " b5 = Button(window, text=\"Delete selected\", width=12, command=self.delete_command)\n",
577 | " b5.grid(row=6, column=3)\n",
578 | " \n",
579 | " b6 = Button(window, text=\"Close\", width=12, command=window.destroy)\n",
580 | " b6.grid(row=7, column=3)\n",
581 | " \n",
582 | " def get_selected_row(self,event):\n",
583 | " index = self.list1.curselection()[0]\n",
584 | " self.selected_tuple = self.list1.get(index)\n",
585 | " self.e1.delete(0, END)\n",
586 | " self.e1.insert(END, self.selected_tuple[1])\n",
587 | " self.e2.delete(0, END)\n",
588 | " self.e2.insert(END, self.selected_tuple[2])\n",
589 | " self.e3.delete(0, END)\n",
590 | " self.e3.insert(END, self.selected_tuple[3])\n",
591 | " self.e4.delete(0, END)\n",
592 | " self.e4.insert(END, self.selected_tuple[4])\n",
593 | " \n",
594 | " def view_command(self):\n",
595 | " self.list1.delete(0, END)\n",
596 | " for row in database.view():\n",
597 | " self.list1.insert(END, row)\n",
598 | " \n",
599 | " def search_command(self):\n",
600 | " self.list1.delete(0, END)\n",
601 | " for row in database.search(self.title_text.get(), self.author_text.get(), self.year_text.get(), self.isbn_text.get()):\n",
602 | " self.list1.insert(END, row)\n",
603 | " \n",
604 | " def add_command(self):\n",
605 | " database.insert(self.title_text.get(), self.author_text.get(), self.year_text.get(), self.isbn_text.get())\n",
606 | " self.list1.delete(0, END)\n",
607 | " self.list1.insert(END, (self.title_text.get(), self.author_text.get(), self.year_text.get(), self.isbn_text.get()))\n",
608 | " \n",
609 | " def delete_command(self):\n",
610 | " database.delete(self.selected_tuple[0])\n",
611 | " \n",
612 | " def update_command(self):\n",
613 | " database.update(self.selected_tuple[0], self.title_text.get(), self.author_text.get(), self.year_text.get(), self.isbn_text.get())\n",
614 | " \n",
615 | "window = Tk()\n",
616 | "Window(window)\n",
617 | "window.mainloop()\n"
618 | ]
619 | },
620 | {
621 | "cell_type": "code",
622 | "execution_count": null,
623 | "metadata": {
624 | "collapsed": true
625 | },
626 | "outputs": [],
627 | "source": [
628 | "#backend.py\n",
629 | "\n",
630 | "import sqlite3\n",
631 | "\n",
632 | "class Database:\n",
633 | " def __init__(self, db):\n",
634 | " self.conn = sqlite3.connect(db)\n",
635 | " self.cur = self.conn.cursor()\n",
636 | " self.cur.execute(\"CREATE TABLE IF NOT EXISTS book (id INTEGER PRIMARY KEY, title text, author text, year integer, isbn integer)\")\n",
637 | " self.conn.commit()\n",
638 | " def insert(self, title, author, year, isbn):\n",
639 | " self.cur.execute(\"INSERT INTO book VALUES (NULL,?,?,?,?)\",(title, author, year, isbn))\n",
640 | " self.conn.commit()\n",
641 | " def view(self):\n",
642 | " self.cur.execute(\"SELECT * FROM book\")\n",
643 | " rows = self.cur.fetchall()\n",
644 | " return rows\n",
645 | " def search(self, title=\"\", author=\"\", year=\"\", isbn=\"\"):\n",
646 | " self.cur.execute(\"SELECT * FROM book WHERE title=? OR author=? OR year=? OR isbn=?\", (title,author,year,isbn))\n",
647 | " rows = self.cur.fetchall()\n",
648 | " return rows\n",
649 | " def delete(self, id):\n",
650 | " self.cur.execute(\"DELETE FROM book WHERE id=?\",(id,))\n",
651 | " self.conn.commit()\n",
652 | " def update(self, id, title, author, year, isbn):\n",
653 | " self.cur.execute(\"UPDATE book SET title=?, author=?, year=?, isbn=? WHERE id=?\",(title, author, year, isbn, id))\n",
654 | " self.conn.commit()\n",
655 | " def __del__(self):\n",
656 | " self.conn.close()"
657 | ]
658 | }
659 | ],
660 | "metadata": {
661 | "kernelspec": {
662 | "display_name": "Python 3",
663 | "language": "python",
664 | "name": "python3"
665 | },
666 | "language_info": {
667 | "codemirror_mode": {
668 | "name": "ipython",
669 | "version": 3
670 | },
671 | "file_extension": ".py",
672 | "mimetype": "text/x-python",
673 | "name": "python",
674 | "nbconvert_exporter": "python",
675 | "pygments_lexer": "ipython3",
676 | "version": "3.6.3"
677 | }
678 | },
679 | "nbformat": 4,
680 | "nbformat_minor": 2
681 | }
682 |
--------------------------------------------------------------------------------
/S16-App-6-Build-a-Webcam-Motion-Detector.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# The Python Mega Course: Build 10 Real World Applications\n",
8 | "---"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {},
14 | "source": [
15 | "This notebook is a summary of [The Python Mega Course: Build 10 Real World Applications](https://www.udemy.com/the-python-mega-course), a comprehensive online Python course taught by Ardit Sulce. Each lecture name is clickable and takes you to the video lecture in the course."
16 | ]
17 | },
18 | {
19 | "cell_type": "markdown",
20 | "metadata": {},
21 | "source": [
22 | "# Section 16: Application 6: Build a Webcam Motion Detector\n",
23 | "***"
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "**Lecture:** [Program Demonstration](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/4775504?start=0)\n",
31 | "---"
32 | ]
33 | },
34 | {
35 | "cell_type": "markdown",
36 | "metadata": {},
37 | "source": [
38 | "This video lecture shows the finished version of the motion detector program in action."
39 | ]
40 | },
41 | {
42 | "cell_type": "markdown",
43 | "metadata": {},
44 | "source": [
45 | "**Lecture:** [Detecting Objects on Camera](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/4775502?start=0)\n",
46 | "---"
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": null,
52 | "metadata": {
53 | "collapsed": true
54 | },
55 | "outputs": [],
56 | "source": [
57 | "import cv2, time, pandas\n",
58 | "\n",
59 | "first_frame = None\n",
60 | "\n",
61 | "video=cv2.VideoCapture(0)\n",
62 | "\n",
63 | "while True:\n",
64 | " check, frame = video.read()\n",
65 | " status = 0\n",
66 | " gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n",
67 | " gray = cv2.GaussianBlur(gray,(21,21), 0)\n",
68 | "\n",
69 | " if first_frame is None:\n",
70 | " first_frame = gray\n",
71 | " continue\n",
72 | "\n",
73 | " delta_frame = cv2.absdiff(first_frame,gray)\n",
74 | " thresh_frame = cv2.threshold(delta_frame, 30, 255, cv2.THRESH_BINARY)[1]\n",
75 | " thresh_frame = cv2.dilate(thresh_frame, None, iterations=2)\n",
76 | "\n",
77 | " (_,cnts,_) = cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
78 | "\n",
79 | " for contour in cnts:\n",
80 | " if cv2.contourArea(contour) < 10000:\n",
81 | " continue\n",
82 | " (x, y, w, h) = cv2.boundingRect(contour)\n",
83 | " cv2.rectangle(frame, (x, y), (x+w, y+h), (0,255,0), 3)\n",
84 | "\n",
85 | " cv2.imshow(\"Gray Frame\", gray)\n",
86 | " cv2.imshow(\"Delta Frame\", delta_frame)\n",
87 | " cv2.imshow(\"Threshold Frame\", thresh_frame)\n",
88 | " cv2.imshow(\"Color Frame\", frame)\n",
89 | "\n",
90 | " key=cv2.waitKey(1)\n",
91 | "\n",
92 | " if key == ord('q'):\n",
93 | " if status == 1:\n",
94 | " times.append(datetime.now())\n",
95 | " break\n",
96 | "\n",
97 | "print(status_list)\n",
98 | "print(times)\n",
99 | "\n",
100 | "for i in range(0,len(times), 2):\n",
101 | " df=df.append({\"Start\":times[i], \"End\":times[i+1]}, ignore_index=True)\n",
102 | "\n",
103 | "df.to_csv(\"Times.csv\")\n",
104 | "\n",
105 | "video.release()\n",
106 | "cv2.destroyAllWindows()"
107 | ]
108 | },
109 | {
110 | "cell_type": "markdown",
111 | "metadata": {},
112 | "source": [
113 | "**Lecture:** [Capturing Motion Time](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/4775506?start=0)\n",
114 | "---"
115 | ]
116 | },
117 | {
118 | "cell_type": "markdown",
119 | "metadata": {},
120 | "source": [
121 | "The following code detects the time that an object entered and exited the frame and stores that time as a row in a CSV file."
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": null,
127 | "metadata": {
128 | "collapsed": true
129 | },
130 | "outputs": [],
131 | "source": [
132 | "# motion_detector.py\n",
133 | "\n",
134 | "import cv2, time, pandas\n",
135 | "from datetime import datetime\n",
136 | "\n",
137 | "first_frame = None\n",
138 | "status_list = [None,None]\n",
139 | "times = []\n",
140 | "df = pandas.DataFrame(columns=[\"Start\",\"End\"])\n",
141 | "\n",
142 | "video=cv2.VideoCapture(0)\n",
143 | "\n",
144 | "while True:\n",
145 | " check, frame = video.read()\n",
146 | " status = 0\n",
147 | " gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n",
148 | " gray = cv2.GaussianBlur(gray,(21,21), 0)\n",
149 | "\n",
150 | " if first_frame is None:\n",
151 | " first_frame = gray\n",
152 | " continue\n",
153 | "\n",
154 | " delta_frame = cv2.absdiff(first_frame,gray)\n",
155 | " thresh_frame = cv2.threshold(delta_frame, 30, 255, cv2.THRESH_BINARY)[1]\n",
156 | " thresh_frame = cv2.dilate(thresh_frame, None, iterations=2)\n",
157 | "\n",
158 | " (_,cnts,_) = cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
159 | "\n",
160 | " for contour in cnts:\n",
161 | " if cv2.contourArea(contour) < 10000:\n",
162 | " continue\n",
163 | " status = 1\n",
164 | "\n",
165 | " (x, y, w, h) = cv2.boundingRect(contour)\n",
166 | " reci = cv2.rectangle(frame, (x, y), (x+w, y+h), (0,255,0), 3)\n",
167 | " status_list.append(status)\n",
168 | "\n",
169 | " status_list=status_list[-2:]\n",
170 | "\n",
171 | "\n",
172 | " if status_list[-1] == 1 and status_list[-2] == 0:\n",
173 | " times.append(datetime.now())\n",
174 | " if status_list[-1] == 0 and status_list[-2] == 1:\n",
175 | " times.append(datetime.now())\n",
176 | "\n",
177 | "\n",
178 | " cv2.imshow(\"Gray Frame\", gray)\n",
179 | " cv2.imshow(\"Delta Frame\", delta_frame)\n",
180 | " cv2.imshow(\"Threshold Frame\", thresh_frame)\n",
181 | " cv2.imshow(\"Color Frame\", frame)\n",
182 | "\n",
183 | " key=cv2.waitKey(1)\n",
184 | "\n",
185 | " if key == ord('q'):\n",
186 | " if status == 1:\n",
187 | " times.append(datetime.now())\n",
188 | " break\n",
189 | "\n",
190 | "print(status_list)\n",
191 | "print(times)\n",
192 | "\n",
193 | "for i in range(0,len(times), 2):\n",
194 | " df=df.append({\"Start\":times[i], \"End\":times[i+1]}, ignore_index=True)\n",
195 | "\n",
196 | "df.to_csv(\"Times.csv\")\n",
197 | "\n",
198 | "video.release()\n",
199 | "cv2.destroyAllWindows()"
200 | ]
201 | },
202 | {
203 | "cell_type": "markdown",
204 | "metadata": {},
205 | "source": [
206 | "**Lecture:** [Final Code for Application 6]()\n",
207 | "---"
208 | ]
209 | },
210 | {
211 | "cell_type": "markdown",
212 | "metadata": {},
213 | "source": [
214 | "The code below is the final code for `motion_detector.py` and for the `plotting.py` which we'll build in the next section. The `plotting.py` file is responsible for plotting the time data. Run `python plotting.py` to execute the program. "
215 | ]
216 | },
217 | {
218 | "cell_type": "markdown",
219 | "metadata": {},
220 | "source": [
221 | "**Note**: Your computer needs a camera for this to work."
222 | ]
223 | },
224 | {
225 | "cell_type": "code",
226 | "execution_count": null,
227 | "metadata": {
228 | "collapsed": true
229 | },
230 | "outputs": [],
231 | "source": [
232 | "# motion_detector.py\n",
233 | "\n",
234 | "import cv2, time, pandas\n",
235 | "from datetime import datetime\n",
236 | "\n",
237 | "first_frame = None\n",
238 | "status_list = [None,None]\n",
239 | "times = []\n",
240 | "df = pandas.DataFrame(columns=[\"Start\",\"End\"])\n",
241 | "\n",
242 | "video=cv2.VideoCapture(0)\n",
243 | "\n",
244 | "while True:\n",
245 | " check, frame = video.read()\n",
246 | " status = 0\n",
247 | " gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n",
248 | " gray = cv2.GaussianBlur(gray,(21,21), 0)\n",
249 | "\n",
250 | " if first_frame is None:\n",
251 | " first_frame = gray\n",
252 | " continue\n",
253 | "\n",
254 | " delta_frame = cv2.absdiff(first_frame,gray)\n",
255 | " thresh_frame = cv2.threshold(delta_frame, 30, 255, cv2.THRESH_BINARY)[1]\n",
256 | " thresh_frame = cv2.dilate(thresh_frame, None, iterations=2)\n",
257 | "\n",
258 | " (_,cnts,_) = cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
259 | "\n",
260 | " for contour in cnts:\n",
261 | " if cv2.contourArea(contour) < 10000:\n",
262 | " continue\n",
263 | " status = 1\n",
264 | "\n",
265 | " (x, y, w, h) = cv2.boundingRect(contour)\n",
266 | " reci = cv2.rectangle(frame, (x, y), (x+w, y+h), (0,255,0), 3)\n",
267 | " status_list.append(status)\n",
268 | "\n",
269 | " status_list=status_list[-2:]\n",
270 | "\n",
271 | "\n",
272 | " if status_list[-1] == 1 and status_list[-2] == 0:\n",
273 | " times.append(datetime.now())\n",
274 | " if status_list[-1] == 0 and status_list[-2] == 1:\n",
275 | " times.append(datetime.now())\n",
276 | "\n",
277 | "\n",
278 | " cv2.imshow(\"Gray Frame\", gray)\n",
279 | " cv2.imshow(\"Delta Frame\", delta_frame)\n",
280 | " cv2.imshow(\"Threshold Frame\", thresh_frame)\n",
281 | " cv2.imshow(\"Color Frame\", frame)\n",
282 | "\n",
283 | " key=cv2.waitKey(1)\n",
284 | "\n",
285 | " if key == ord('q'):\n",
286 | " if status == 1:\n",
287 | " times.append(datetime.now())\n",
288 | " break\n",
289 | "\n",
290 | "print(status_list)\n",
291 | "print(times)\n",
292 | "\n",
293 | "for i in range(0,len(times), 2):\n",
294 | " df=df.append({\"Start\":times[i], \"End\":times[i+1]}, ignore_index=True)\n",
295 | "\n",
296 | "df.to_csv(\"Times.csv\")\n",
297 | "\n",
298 | "video.release()\n",
299 | "cv2.destroyAllWindows()"
300 | ]
301 | },
302 | {
303 | "cell_type": "code",
304 | "execution_count": null,
305 | "metadata": {
306 | "collapsed": true
307 | },
308 | "outputs": [],
309 | "source": [
310 | "# plotting.py\n",
311 | "\n",
312 | "from motion_detector import df\n",
313 | "from bokeh.plotting import figure, show, output_file\n",
314 | "from bokeh.models import HoverTool, ColumnDataSource\n",
315 | "\n",
316 | "df[\"Start_string\"] = df[\"Start\"].dt.strftime(\"%Y-%m-%d %H:%M:%S\")\n",
317 | "df[\"End_string\"] = df[\"End\"].dt.strftime(\"%Y-%m-%d %H:%M:%S\")\n",
318 | "print(df)\n",
319 | "\n",
320 | "cds = ColumnDataSource(df)\n",
321 | "\n",
322 | "p = figure(x_axis_type='datetime', height=100, width=500, sizing_mode=\"scale_width\", title=\"Motion Graph\")\n",
323 | "p.yaxis.minor_tick_line_color = None\n",
324 | "p.ygrid[0].ticker.desired_num_ticks = 1\n",
325 | "\n",
326 | "hover = HoverTool(tooltips=[(\"Start\",\"@Start_string\"), (\"End\",\"@End_string\")])\n",
327 | "p.add_tools(hover)\n",
328 | "\n",
329 | "q = p.quad(left=\"Start\", right=\"End\", bottom=0, top=1, color=\"green\", source=cds)\n",
330 | "\n",
331 | "output_file(\"Graph1.html\")\n",
332 | "show(p)"
333 | ]
334 | }
335 | ],
336 | "metadata": {
337 | "kernelspec": {
338 | "display_name": "Python 3",
339 | "language": "python",
340 | "name": "python3"
341 | },
342 | "language_info": {
343 | "codemirror_mode": {
344 | "name": "ipython",
345 | "version": 3
346 | },
347 | "file_extension": ".py",
348 | "mimetype": "text/x-python",
349 | "name": "python",
350 | "nbconvert_exporter": "python",
351 | "pygments_lexer": "ipython3",
352 | "version": "3.6.3"
353 | }
354 | },
355 | "nbformat": 4,
356 | "nbformat_minor": 2
357 | }
358 |
--------------------------------------------------------------------------------
/S18-Webscraping-with-BeautifulSoup.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# The Python Mega Course: Build 10 Real World Applications\n",
8 | "---"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {},
14 | "source": [
15 | "This notebook is a summary of [The Python Mega Course: Build 10 Real World Applications](https://www.udemy.com/the-python-mega-course), a comprehensive online Python course taught by Ardit Sulce. Each lecture name is clickable and takes you to the video lecture in the course."
16 | ]
17 | },
18 | {
19 | "cell_type": "markdown",
20 | "metadata": {},
21 | "source": [
22 | "# Section 18: Webscraping with BeautifulSoup\n",
23 | "***"
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "**Lecture:** [Section Introduction](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
31 | "---"
32 | ]
33 | },
34 | {
35 | "cell_type": "markdown",
36 | "metadata": {},
37 | "source": [
38 | "Webscraping is the process of extracting data from webpages. Typically webscraping deals with data that are not in a well structured format."
39 | ]
40 | },
41 | {
42 | "cell_type": "markdown",
43 | "metadata": {},
44 | "source": [
45 | "**Lecture:** [The Concept Behind Webscraping](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
46 | "---"
47 | ]
48 | },
49 | {
50 | "cell_type": "markdown",
51 | "metadata": {},
52 | "source": [
53 | "Webscraping technically is the process of loading the webpage source code (which could be HTML, Javascript, and CSS) into Python and then using specific Python code to parse and extract data values from the webpage source code."
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "**Lecture:** [Webscraping Example](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
61 | "---"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 3,
67 | "metadata": {
68 | "collapsed": true
69 | },
70 | "outputs": [],
71 | "source": [
72 | "import requests\n",
73 | "from bs4 import BeautifulSoup"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": null,
79 | "metadata": {
80 | "collapsed": true
81 | },
82 | "outputs": [],
83 | "source": [
84 | "r = requests.get(\"http://pythonhow.com/example.html\")\n",
85 | "c = r.content"
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": null,
91 | "metadata": {
92 | "collapsed": true
93 | },
94 | "outputs": [],
95 | "source": [
96 | "print(c)"
97 | ]
98 | },
99 | {
100 | "cell_type": "code",
101 | "execution_count": null,
102 | "metadata": {
103 | "collapsed": true
104 | },
105 | "outputs": [],
106 | "source": [
107 | "soup = BeautifulSoup(c, \"html.parser\")\n",
108 | "all = soup.find_all(\"div\", {\"class\":\"cities\"})"
109 | ]
110 | },
111 | {
112 | "cell_type": "code",
113 | "execution_count": null,
114 | "metadata": {
115 | "collapsed": true
116 | },
117 | "outputs": [],
118 | "source": [
119 | "all"
120 | ]
121 | },
122 | {
123 | "cell_type": "code",
124 | "execution_count": null,
125 | "metadata": {
126 | "collapsed": true
127 | },
128 | "outputs": [],
129 | "source": [
130 | "all[0]"
131 | ]
132 | },
133 | {
134 | "cell_type": "markdown",
135 | "metadata": {},
136 | "source": [
137 | "Find all titles that have a `h2` heading tag:"
138 | ]
139 | },
140 | {
141 | "cell_type": "code",
142 | "execution_count": null,
143 | "metadata": {
144 | "collapsed": true
145 | },
146 | "outputs": [],
147 | "source": [
148 | "all[0].find_all(\"h2\")"
149 | ]
150 | },
151 | {
152 | "cell_type": "markdown",
153 | "metadata": {},
154 | "source": [
155 | "That found the list of tags. You can now extract the text of the first tag:"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": null,
161 | "metadata": {
162 | "collapsed": true
163 | },
164 | "outputs": [],
165 | "source": [
166 | "all[0].find_all(\"h2\")[0].text"
167 | ]
168 | },
169 | {
170 | "cell_type": "markdown",
171 | "metadata": {},
172 | "source": [
173 | "Let's loop through all `h2` tags and extract their paragraph `p` tags:"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": null,
179 | "metadata": {
180 | "collapsed": true
181 | },
182 | "outputs": [],
183 | "source": [
184 | "for item in all:\n",
185 | " print(item.find_all(\"p\")[0].text)"
186 | ]
187 | }
188 | ],
189 | "metadata": {
190 | "kernelspec": {
191 | "display_name": "Python 3",
192 | "language": "python",
193 | "name": "python3"
194 | },
195 | "language_info": {
196 | "codemirror_mode": {
197 | "name": "ipython",
198 | "version": 3
199 | },
200 | "file_extension": ".py",
201 | "mimetype": "text/x-python",
202 | "name": "python",
203 | "nbconvert_exporter": "python",
204 | "pygments_lexer": "ipython3",
205 | "version": "3.6.3"
206 | }
207 | },
208 | "nbformat": 4,
209 | "nbformat_minor": 2
210 | }
211 |
--------------------------------------------------------------------------------
/S19-App-7-Scrape-Real-Estate-Property-Data-from-the-Web.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# The Python Mega Course: Build 10 Real World Applications\n",
8 | "---"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {},
14 | "source": [
15 | "This notebook is a summary of [The Python Mega Course: Build 10 Real World Applications](https://www.udemy.com/the-python-mega-course), a comprehensive online Python course taught by Ardit Sulce. Each lecture name is clickable and takes you to the video lecture in the course."
16 | ]
17 | },
18 | {
19 | "cell_type": "markdown",
20 | "metadata": {},
21 | "source": [
22 | "# Section 19: Application 7: Scrape Real Estate Property Data from the Web\n",
23 | "***"
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "**Lecture:** [Program Demonstration](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
31 | "---"
32 | ]
33 | },
34 | {
35 | "cell_type": "markdown",
36 | "metadata": {},
37 | "source": [
38 | "This video lecture shows the finished version of the webscraping program in action."
39 | ]
40 | },
41 | {
42 | "cell_type": "markdown",
43 | "metadata": {},
44 | "source": [
45 | "**Lecture:** [Loading the Webpage in Python](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
46 | "---"
47 | ]
48 | },
49 | {
50 | "cell_type": "markdown",
51 | "metadata": {},
52 | "source": [
53 | "This code loads the webpage source code into Python ready for extracting information from it."
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": null,
59 | "metadata": {
60 | "collapsed": true
61 | },
62 | "outputs": [],
63 | "source": [
64 | "import requests, re\n",
65 | "from bs4 import BeautifulSoup\n",
66 | "\n",
67 | "r = requests.get(\"http://www.pythonhow.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/\")\n",
68 | "c = r.content\n",
69 | "\n",
70 | "soup=BeautifulSoup(c, \"html.parser\")\n",
71 | "print(soup.prettify())"
72 | ]
73 | },
74 | {
75 | "cell_type": "markdown",
76 | "metadata": {},
77 | "source": [
78 | "**Lecture:** [Extracting \"div\" Tags](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
79 | "---"
80 | ]
81 | },
82 | {
83 | "cell_type": "markdown",
84 | "metadata": {},
85 | "source": [
86 | "We start extracting HTML tags starting from `div` tags."
87 | ]
88 | },
89 | {
90 | "cell_type": "code",
91 | "execution_count": null,
92 | "metadata": {
93 | "collapsed": true
94 | },
95 | "outputs": [],
96 | "source": [
97 | "import requests, re\n",
98 | "from bs4 import BeautifulSoup\n",
99 | "\n",
100 | "r = requests.get(\"http://www.pythonhow.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/\")\n",
101 | "c = r.content\n",
102 | "\n",
103 | "soup = BeautifulSoup(c,\"html.parser\")\n",
104 | "\n",
105 | "all = soup.find_all(\"div\", {\"class\":\"propertyRow\"})\n",
106 | "all[0].find(\"h4\", {\"class\":\"propPrice\"}).text.replace(\"\\n\", \"\").replace(\" \", \"\")"
107 | ]
108 | },
109 | {
110 | "cell_type": "markdown",
111 | "metadata": {},
112 | "source": [
113 | "**Lecture:** [Extracting Addresses and Property Details](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
114 | "---"
115 | ]
116 | },
117 | {
118 | "cell_type": "markdown",
119 | "metadata": {},
120 | "source": [
121 | "Most of the data are stored inside `span` tags so we extract those data in this code."
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": null,
127 | "metadata": {
128 | "collapsed": true
129 | },
130 | "outputs": [],
131 | "source": [
132 | "import requests, re\n",
133 | "from bs4 import BeautifulSoup\n",
134 | "\n",
135 | "r = requests.get(\"http://www.pythonhow.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/\")\n",
136 | "c = r.content\n",
137 | "\n",
138 | "soup = BeautifulSoup(c,\"html.parser\")\n",
139 | "\n",
140 | "all = soup.find_all(\"div\", {\"class\":\"propertyRow\"})\n",
141 | "all[0].find(\"h4\", {\"class\":\"propPrice\"}).text.replace(\"\\n\", \"\").replace(\" \", \"\")"
142 | ]
143 | },
144 | {
145 | "cell_type": "code",
146 | "execution_count": null,
147 | "metadata": {
148 | "collapsed": true
149 | },
150 | "outputs": [],
151 | "source": [
152 | "for item in all:\n",
153 | " print(item.find(\"h4\", {\"class\", \"propPrice\"}).text.replace(\"\\n\",\"\").replace(\" \", \"\"))\n",
154 | " print(item.find_all(\"span\", {\"class\",\"propAddressCollapse\"})[0].text)\n",
155 | " print(item.find_all(\"span\", {\"class\",\"propAddressCollapse\"})[1].text)\n",
156 | "\n",
157 | " try:\n",
158 | " print(item.find(\"span\", {\"class\", \"infoBed\"}).find(\"b\").text)\n",
159 | " except:\n",
160 | " print(None)\n",
161 | "\n",
162 | " try:\n",
163 | " print(item.find(\"span\", {\"class\", \"infoSqFt\"}).find(\"b\").text)\n",
164 | " except:\n",
165 | " print(None)\n",
166 | "\n",
167 | " try:\n",
168 | " print(item.find(\"span\", {\"class\", \"infoValueFullBath\"}).find(\"b\").text)\n",
169 | " except:\n",
170 | " print(None)\n",
171 | "\n",
172 | " try:\n",
173 | " print(item.find(\"span\", {\"class\", \"infoValueHalfBath\"}).find(\"b\").text)\n",
174 | " except:\n",
175 | " print(None)\n",
176 | " \n",
177 | " print(\" \")"
178 | ]
179 | },
180 | {
181 | "cell_type": "markdown",
182 | "metadata": {},
183 | "source": [
184 | "**Lecture:** [Extracting Elements without Unique Identifiers](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
185 | "---"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "metadata": {},
191 | "source": [
192 | "Here we extract some more elements."
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": null,
198 | "metadata": {
199 | "collapsed": true
200 | },
201 | "outputs": [],
202 | "source": [
203 | "for item in all:\n",
204 | " print(item.find(\"h4\", {\"class\", \"propPrice\"}).text.replace(\"\\n\",\"\").replace(\" \", \"\"))\n",
205 | " print(item.find_all(\"span\", {\"class\",\"propAddressCollapse\"})[0].text)\n",
206 | " print(item.find_all(\"span\", {\"class\",\"propAddressCollapse\"})[1].text)\n",
207 | "\n",
208 | " try:\n",
209 | " print(item.find(\"span\", {\"class\", \"infoBed\"}).find(\"b\").text)\n",
210 | " except:\n",
211 | " print(None)\n",
212 | "\n",
213 | " try:\n",
214 | " print(item.find(\"span\", {\"class\", \"infoSqFt\"}).find(\"b\").text)\n",
215 | " except:\n",
216 | " print(None)\n",
217 | "\n",
218 | " try:\n",
219 | " print(item.find(\"span\", {\"class\", \"infoValueFullBath\"}).find(\"b\").text)\n",
220 | " except:\n",
221 | " print(None)\n",
222 | "\n",
223 | " try:\n",
224 | " print(item.find(\"span\", {\"class\", \"infoValueHalfBath\"}).find(\"b\").text)\n",
225 | " except:\n",
226 | " print(None)\n",
227 | " \n",
228 | " for column_group in item.find_all(\"div\", {\"class\":\"columnGroup\"}):\n",
229 | " for feature_group, feature_name in zip(column_group.find_all(\"span\", {\"class\":\"featureGroup\"}), column_group.find_all(\"span\", {\"class\":\"featureName\"})):\n",
230 | " if \"Lot Size\" in feature_group.text:\n",
231 | " print(feature_name.text)\n",
232 | "\n",
233 | " print(\" \")"
234 | ]
235 | },
236 | {
237 | "cell_type": "markdown",
238 | "metadata": {},
239 | "source": [
240 | "**Lecture:** [Saving the Extracted Data in CSV Files](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
241 | "---"
242 | ]
243 | },
244 | {
245 | "cell_type": "markdown",
246 | "metadata": {},
247 | "source": [
248 | "Finally, we save the extracted data into a CSV file."
249 | ]
250 | },
251 | {
252 | "cell_type": "code",
253 | "execution_count": null,
254 | "metadata": {
255 | "collapsed": true
256 | },
257 | "outputs": [],
258 | "source": [
259 | "import requests, re\n",
260 | "from bs4 import BeautifulSoup\n",
261 | "\n",
262 | "r = requests.get(\"http://www.pythonhow.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/\")\n",
263 | "c = r.content\n",
264 | "\n",
265 | "soup = BeautifulSoup(c,\"html.parser\")\n",
266 | "\n",
267 | "all = soup.find_all(\"div\",{\"class\":\"propertyRow\"})\n",
268 | "\n",
269 | "all[0].find(\"h4\", {\"class\":\"propPrice\"}).text.replace(\"\\n\", \"\").replace(\" \", \"\")"
270 | ]
271 | },
272 | {
273 | "cell_type": "code",
274 | "execution_count": null,
275 | "metadata": {
276 | "collapsed": true
277 | },
278 | "outputs": [],
279 | "source": [
280 | "l = []\n",
281 | "for item in all:\n",
282 | " d = {}\n",
283 | "    d[\"Address\"] = item.find_all(\"span\", {\"class\", \"propAddressCollapse\"})[0].text\n",
284 | "    d[\"Locality\"] = item.find_all(\"span\", {\"class\", \"propAddressCollapse\"})[1].text\n",
285 | "    d[\"Price\"] = item.find(\"h4\", {\"class\", \"propPrice\"}).text.replace(\"\\n\",\"\").replace(\" \", \"\")\n",
286 | " \n",
287 | " try:\n",
288 | " d[\"Beds\"] = item.find(\"span\", {\"class\", \"infoBed\"}).find(\"b\").text\n",
289 | " except:\n",
290 | " d[\"Beds\"] = None\n",
291 | "\n",
292 | " try:\n",
293 | " d[\"Area\"] = item.find(\"span\", {\"class\", \"infoSqFt\"}).find(\"b\").text\n",
294 | " except:\n",
295 | " d[\"Area\"] = None\n",
296 | "\n",
297 | " try:\n",
298 | " d[\"Full Baths\"] = item.find(\"span\", {\"class\", \"infoValueFullBath\"}).find(\"b\").text\n",
299 | " except:\n",
300 | " d[\"Full Baths\"] = None\n",
301 | "\n",
302 | " try:\n",
303 | " d[\"Half Baths\"] = item.find(\"span\", {\"class\", \"infoValueHalfBath\"}).find(\"b\").text\n",
304 | " except:\n",
305 | " d[\"Half Baths\"] = None\n",
306 | "\n",
307 | " for column_group in item.find_all(\"div\", {\"class\":\"columnGroup\"}):\n",
308 | " for feature_group, feature_name in zip(column_group.find_all(\"span\", {\"class\":\"featureGroup\"}), column_group.find_all(\"span\", {\"class\":\"featureName\"})):\n",
309 | " if \"Lot Size\" in feature_group.text:\n",
310 | " print(feature_name.text)\n",
311 | " d[\"Lot Size\"] = feature_name.text\n",
312 | " l.append(d)"
313 | ]
314 | },
315 | {
316 | "cell_type": "code",
317 | "execution_count": null,
318 | "metadata": {
319 | "collapsed": true
320 | },
321 | "outputs": [],
322 | "source": [
323 | "import pandas\n",
324 | "df = pandas.DataFrame(l)\n",
325 | "df"
326 | ]
327 | },
328 | {
329 | "cell_type": "code",
330 | "execution_count": null,
331 | "metadata": {
332 | "collapsed": true
333 | },
334 | "outputs": [],
335 | "source": [
336 | "df.to_csv(\"Output.csv\")"
337 | ]
338 | },
339 | {
340 | "cell_type": "markdown",
341 | "metadata": {},
342 | "source": [
343 | "**Lecture:** [Crawling Through Webpages](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
344 | "---"
345 | ]
346 | },
347 | {
348 | "cell_type": "markdown",
349 | "metadata": {},
350 | "source": [
351 | "In case you need to extract data from multiple pages, here is how to do it."
352 | ]
353 | },
354 | {
355 | "cell_type": "code",
356 | "execution_count": null,
357 | "metadata": {
358 | "collapsed": true
359 | },
360 | "outputs": [],
361 | "source": [
362 | "import requests, re\n",
363 | "from bs4 import BeautifulSoup\n",
364 | "\n",
365 | "r = requests.get(\"http://www.pythonhow.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/\")\n",
366 | "c = r.content\n",
367 | "\n",
368 | "soup = BeautifulSoup(c,\"html.parser\")\n",
369 | "\n",
370 | "all = soup.find_all(\"div\",{\"class\":\"propertyRow\"})\n",
371 | "\n",
372 | "all[0].find(\"h4\", {\"class\":\"propPrice\"}).text.replace(\"\\n\", \"\").replace(\" \", \"\")\n",
373 | "\n",
374 | "page_nr = soup.find_all(\"a\",{\"class\":\"Page\"})[-1].text\n",
375 | "print(page_nr, \"number of pages were found\")\n",
376 | "\n",
377 | "l = []\n",
378 | "base_url = \"http://www.pythonhow.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/t=0&s=\"\n",
379 | "for page in range(0, int(page_nr)*10, 10):\n",
380 | " print( )\n",
381 | " r = requests.get(base_url + str(page) + \".html\")\n",
382 | " c = r.content\n",
383 | " soup = BeautifulSoup(c, \"html.parser\")\n",
384 | " all = soup.find_all(\"div\", {\"class\":\"propertyRow\"})\n",
385 | " for item in all:\n",
386 | " d = {}\n",
387 | " d[\"Address\"] = item.find_all(\"span\", {\"class\",\"propAddressCollapse\"})[0].text\n",
388 | " \n",
389 | " try:\n",
390 | " d[\"Locality\"] = item.find_all(\"span\",{\"class\",\"propAddressCollapse\"})[1].text\n",
391 | " except:\n",
392 | " d[\"Locality\"] = None\n",
393 | " d[\"Price\"] = item.find(\"h4\", {\"class\", \"propPrice\"}).text.replace(\"\\n\",\"\").replace(\" \", \"\")\n",
394 | " \n",
395 | " try:\n",
396 | " d[\"Beds\"] = item.find(\"span\", {\"class\", \"infoBed\"}).find(\"b\").text\n",
397 | " except:\n",
398 | " d[\"Beds\"] = None\n",
399 | "\n",
400 | " try:\n",
401 | " d[\"Area\"] = item.find(\"span\", {\"class\", \"infoSqFt\"}).find(\"b\").text\n",
402 | " except:\n",
403 | " d[\"Area\"] = None\n",
404 | " \n",
405 | " try:\n",
406 | " d[\"Full Baths\"] = item.find(\"span\", {\"class\", \"infoValueFullBath\"}).find(\"b\").text\n",
407 | " except:\n",
408 | " d[\"Full Baths\"] = None\n",
409 | "\n",
410 | " try:\n",
411 | " d[\"Half Baths\"] = item.find(\"span\", {\"class\", \"infoValueHalfBath\"}).find(\"b\").text\n",
412 | " except:\n",
413 | " d[\"Half Baths\"] = None\n",
414 | " \n",
415 | " for column_group in item.find_all(\"div\", {\"class\":\"columnGroup\"}):\n",
416 | " for feature_group, feature_name in zip(column_group.find_all(\"span\", {\"class\":\"featureGroup\"}), column_group.find_all(\"span\", {\"class\":\"featureName\"})):\n",
417 | " if \"Lot Size\" in feature_group.text:\n",
418 | " print(feature_name.text)\n",
419 | " d[\"Lot Size\"] = feature_name.text\n",
420 | " l.append(d)"
421 | ]
422 | },
423 | {
424 | "cell_type": "markdown",
425 | "metadata": {},
426 | "source": [
427 | "**Lecture:** [Final Code of Application 7]()\n",
428 | "---"
429 | ]
430 | },
431 | {
432 | "cell_type": "markdown",
433 | "metadata": {},
434 | "source": [
435 | "This is the final code. It accesses a webpage and it extracts data from that webpage and save those data in a CSV file."
436 | ]
437 | },
438 | {
439 | "cell_type": "markdown",
440 | "metadata": {},
441 | "source": [
442 | "**Note**: You need internet connection for the code to work."
443 | ]
444 | },
445 | {
446 | "cell_type": "code",
447 | "execution_count": null,
448 | "metadata": {
449 | "collapsed": true
450 | },
451 | "outputs": [],
452 | "source": [
453 | "import requests, re\n",
454 | "from bs4 import BeautifulSoup\n",
455 | "import pandas\n",
456 | "\n",
457 | "r = requests.get(\"http://www.pythonhow.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/\")\n",
458 | "c = r.content\n",
459 | "\n",
460 | "soup = BeautifulSoup(c,\"html.parser\")\n",
461 | "\n",
462 | "all = soup.find_all(\"div\",{\"class\":\"propertyRow\"})\n",
463 | "\n",
464 | "all[0].find(\"h4\", {\"class\":\"propPrice\"}).text.replace(\"\\n\", \"\").replace(\" \", \"\")\n",
465 | "\n",
466 | "page_nr = soup.find_all(\"a\",{\"class\":\"Page\"})[-1].text\n",
467 | "print(page_nr, \"number of pages were found\")\n",
468 | "\n",
469 | "l = []\n",
470 | "base_url = \"http://www.pythonhow.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/t=0&s=\"\n",
471 | "for page in range(0, int(page_nr)*10, 10):\n",
472 | " print( )\n",
473 | " r = requests.get(base_url + str(page) + \".html\")\n",
474 | " c = r.content\n",
475 | " soup = BeautifulSoup(c, \"html.parser\")\n",
476 | " all = soup.find_all(\"div\", {\"class\":\"propertyRow\"})\n",
477 | " for item in all:\n",
478 | " d = {}\n",
479 | " d[\"Address\"] = item.find_all(\"span\", {\"class\",\"propAddressCollapse\"})[0].text\n",
480 | " \n",
481 | " try:\n",
482 | " d[\"Locality\"] = item.find_all(\"span\",{\"class\",\"propAddressCollapse\"})[1].text\n",
483 | " except:\n",
484 | " d[\"Locality\"] = None\n",
485 | " d[\"Price\"] = item.find(\"h4\", {\"class\", \"propPrice\"}).text.replace(\"\\n\",\"\").replace(\" \", \"\")\n",
486 | " \n",
487 | " try:\n",
488 | " d[\"Beds\"] = item.find(\"span\", {\"class\", \"infoBed\"}).find(\"b\").text\n",
489 | " except:\n",
490 | " d[\"Beds\"] = None\n",
491 | "\n",
492 | " try:\n",
493 | " d[\"Area\"] = item.find(\"span\", {\"class\", \"infoSqFt\"}).find(\"b\").text\n",
494 | " except:\n",
495 | " d[\"Area\"] = None\n",
496 | " \n",
497 | " try:\n",
498 | " d[\"Full Baths\"] = item.find(\"span\", {\"class\", \"infoValueFullBath\"}).find(\"b\").text\n",
499 | " except:\n",
500 | " d[\"Full Baths\"] = None\n",
501 | "\n",
502 | " try:\n",
503 | " d[\"Half Baths\"] = item.find(\"span\", {\"class\", \"infoValueHalfBath\"}).find(\"b\").text\n",
504 | " except:\n",
505 | " d[\"Half Baths\"] = None\n",
506 | " \n",
507 | " for column_group in item.find_all(\"div\", {\"class\":\"columnGroup\"}):\n",
508 | " for feature_group, feature_name in zip(column_group.find_all(\"span\", {\"class\":\"featureGroup\"}), column_group.find_all(\"span\", {\"class\":\"featureName\"})):\n",
509 | " if \"Lot Size\" in feature_group.text:\n",
510 | " print(feature_name.text)\n",
511 | " d[\"Lot Size\"] = feature_name.text\n",
512 | " l.append(d)\n",
513 | " \n",
514 | "df = pandas.DataFrame(l)\n",
515 | "df.to_csv(\"Output.csv\")"
516 | ]
517 | }
518 | ],
519 | "metadata": {
520 | "kernelspec": {
521 | "display_name": "Python 3",
522 | "language": "python",
523 | "name": "python3"
524 | },
525 | "language_info": {
526 | "codemirror_mode": {
527 | "name": "ipython",
528 | "version": 3
529 | },
530 | "file_extension": ".py",
531 | "mimetype": "text/x-python",
532 | "name": "python",
533 | "nbconvert_exporter": "python",
534 | "pygments_lexer": "ipython3",
535 | "version": "3.6.3"
536 | }
537 | },
538 | "nbformat": 4,
539 | "nbformat_minor": 2
540 | }
541 |
--------------------------------------------------------------------------------
/S20-App-8-Build-a-Web-Based-Financial-Graph.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# The Python Mega Course: Build 10 Real World Applications\n",
8 | "---"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {},
14 | "source": [
15 | "This notebook is a summary of [The Python Mega Course: Build 10 Real World Applciations](https://www.udemy.com/the-python-mega-course), a comprehensive online Python course taught by Ardit Sulce. Each lecture name is clickable and takes you to the video lecture in the course."
16 | ]
17 | },
18 | {
19 | "cell_type": "markdown",
20 | "metadata": {},
21 | "source": [
22 | "# Section 20: Application 8: Build a Web-Based Financial Graph\n",
23 | "***"
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "**Lecture:** [Program Demonstration](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
31 | "---"
32 | ]
33 | },
34 | {
35 | "cell_type": "markdown",
36 | "metadata": {},
37 | "source": [
38 | "This video lecture shows the finished version of the website running on a browser."
39 | ]
40 | },
41 | {
42 | "cell_type": "markdown",
43 | "metadata": {},
44 | "source": [
45 | "**Lecture:** [Note](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
46 | "---"
47 | ]
48 | },
49 | {
50 | "cell_type": "markdown",
51 | "metadata": {},
52 | "source": [
53 | "In the next video I install the pandas_datareader library with pip install pandas_datareader. There's currently a bug with that version of pandas_datareader, but the bug has been fixed in the development version of the library.\n",
54 | "\n",
55 | "Therefore, instead of doing pip install pandas_datareader please do the following instead:\n",
56 | "\n",
57 | "`pip install git+https://github.com/pydata/pandas-datareader.git`\n",
58 | "\n",
59 | "That will install the development version which works just fine."
60 | ]
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "**Lecture:** [Downloading Datasets with Python](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
67 | "---"
68 | ]
69 | },
70 | {
71 | "cell_type": "code",
72 | "execution_count": null,
73 | "metadata": {
74 | "collapsed": true
75 | },
76 | "outputs": [],
77 | "source": [
78 | "from pandas_datareader import data\n",
79 | "import datetime\n",
80 | "\n",
81 | "start = datetime.datetime(2016, 1, 1)\n",
82 | "end = datetime.datetime(2016, 3, 10)\n",
83 | "\n",
84 | "df = data.DataReader(name=\"GOOG\", data_source=\"google\", start=start, end=end)\n",
85 | "df"
86 | ]
87 | },
88 | {
89 | "cell_type": "markdown",
90 | "metadata": {},
91 | "source": [
92 | "**Lecture:** [Stock Market Data](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
93 | "---"
94 | ]
95 | },
96 | {
97 | "cell_type": "markdown",
98 | "metadata": {},
99 | "source": [
100 | "This video lecture explains what stock market data are and how to read them."
101 | ]
102 | },
103 | {
104 | "cell_type": "markdown",
105 | "metadata": {},
106 | "source": [
107 | "**Lecture:** [Stock Market Data Candlestick Charts](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
108 | "---"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "metadata": {},
114 | "source": [
115 | "This video lecture explains how to read a stock market candlestick chart."
116 | ]
117 | },
118 | {
119 | "cell_type": "markdown",
120 | "metadata": {},
121 | "source": [
122 | "**Lecture:** [Candlestick Charts with Bokeh Quadrants](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
123 | "---"
124 | ]
125 | },
126 | {
127 | "cell_type": "markdown",
128 | "metadata": {},
129 | "source": [
130 | "To create charts usign quadrant glyphs you can use `figure.quad`, but it's better to use `figure.rect`. We will actually use figure.rect in the next lecture."
131 | ]
132 | },
133 | {
134 | "cell_type": "markdown",
135 | "metadata": {},
136 | "source": [
137 | "**Lecture:** [Candlestick Charts with Bokeh Rectangles](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
138 | "---"
139 | ]
140 | },
141 | {
142 | "cell_type": "markdown",
143 | "metadata": {},
144 | "source": [
145 | "Here we start creating the figure object, and apply quadrant glyphs to the figure."
146 | ]
147 | },
148 | {
149 | "cell_type": "code",
150 | "execution_count": null,
151 | "metadata": {
152 | "collapsed": true
153 | },
154 | "outputs": [],
155 | "source": [
156 | "from pandas_datareader import data\n",
157 | "import datetime\n",
158 | "\n",
159 | "start = datetime.datetime(2016, 1, 1)\n",
160 | "end = datetime.datetime(2016, 3, 10)\n",
161 | "\n",
162 | "df = data.DataReader(name=\"GOOG\", data_source=\"google\", start=start, end=end)"
163 | ]
164 | },
165 | {
166 | "cell_type": "code",
167 | "execution_count": null,
168 | "metadata": {
169 | "collapsed": true
170 | },
171 | "outputs": [],
172 | "source": [
173 | "def inc_dec(c, o): \n",
174 | " if c > o:\n",
175 | " value = \"Increase\"\n",
176 | " elif c < o:\n",
177 | " value = \"Decrease\"\n",
178 | " else:\n",
179 | " value = \"Equal\"\n",
180 | " return value\n",
181 | "\n",
182 | "df[\"Status\"] = [inc_dec(c, o) for c, o in zip(df.Close, df.Open)]\n",
183 | "df[\"Middle\"] = (df.Open + df.Close) / 2\n",
184 | "df[\"Height\"] = abs(df.Close - df.Open)"
185 | ]
186 | },
187 | {
188 | "cell_type": "code",
189 | "execution_count": null,
190 | "metadata": {
191 | "collapsed": true
192 | },
193 | "outputs": [],
194 | "source": [
195 | "from bokeh.plotting import figure, show, output_file\n",
196 | "\n",
197 | "p = figure(x_axis_type='datetime', width=1000, height=300)\n",
198 | "p.title.text = \"Candlestick Chart\"\n",
199 | "\n",
200 | "hours_12 = 12 * 60 * 60 * 1000\n",
201 | "\n",
202 | "p.rect(df.index[df.Status==\"Increase\"], df.Middle[df.Status==\"Increase\"],\n",
203 | " hours_12, df.Height[df.Status==\"Increase\"], fill_color=\"green\", line_color=\"black\")\n",
204 | "\n",
205 | "p.rect(df.index[df.Status==\"Decrease\"], df.Middle[df.Status==\"Decrease\"],\n",
206 | " hours_12, df.Height[df.Status==\"Decrease\"], fill_color=\"red\", line_color=\"black\")\n",
207 | "\n",
208 | "output_file(\"CS.html\")\n",
209 | "show(p)"
210 | ]
211 | },
212 | {
213 | "cell_type": "markdown",
214 | "metadata": {},
215 | "source": [
216 | "**Lecture:** [Candlestick Segments](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
217 | "---"
218 | ]
219 | },
220 | {
221 | "cell_type": "code",
222 | "execution_count": null,
223 | "metadata": {
224 | "collapsed": true
225 | },
226 | "outputs": [],
227 | "source": [
228 | "from pandas_datareader import data\n",
229 | "import datetime\n",
230 | "\n",
231 | "start = datetime.datetime(2016, 1, 1)\n",
232 | "end = datetime.datetime(2016, 3, 10)\n",
233 | "\n",
234 | "df = data.DataReader(name=\"GOOG\", data_source=\"google\", start=start, end=end)"
235 | ]
236 | },
237 | {
238 | "cell_type": "code",
239 | "execution_count": null,
240 | "metadata": {
241 | "collapsed": true
242 | },
243 | "outputs": [],
244 | "source": [
245 | "def inc_dec(c, o): \n",
246 | " if c > o:\n",
247 | " value = \"Increase\"\n",
248 | " elif c < o:\n",
249 | " value = \"Decrease\"\n",
250 | " else:\n",
251 | " value = \"Equal\"\n",
252 | " return value\n",
253 | "\n",
254 | "df[\"Status\"] = [inc_dec(c, o) for c, o in zip(df.Close, df.Open)]\n",
255 | "df[\"Middle\"] = (df.Open + df.Close) / 2\n",
256 | "df[\"Height\"] = abs(df.Close - df.Open)"
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": null,
262 | "metadata": {
263 | "collapsed": true
264 | },
265 | "outputs": [],
266 | "source": [
267 | "from bokeh.plotting import figure, show, output_file\n",
268 | "\n",
269 | "p = figure(x_axis_type='datetime', width=1000, height=300)\n",
270 | "p.title.text = \"Candlestick Chart\"\n",
271 | "p.grid.grid_line_alpha = 0.3\n",
272 | "\n",
273 | "hours_12 = 12 * 60 * 60 * 1000\n",
274 | "\n",
275 | "p.segment(df.index, df.High, df.index, df.Low, color=\"Black\")\n",
276 | "\n",
277 | "p.rect(df.index[df.Status==\"Increase\"], df.Middle[df.Status==\"Increase\"],\n",
278 | " hours_12, df.Height[df.Status==\"Increase\"], fill_color=\"green\", line_color=\"black\")\n",
279 | "\n",
280 | "p.rect(df.index[df.Status==\"Decrease\"], df.Middle[df.Status==\"Decrease\"],\n",
281 | " hours_12, df.Height[df.Status==\"Decrease\"], fill_color=\"red\", line_color=\"black\")\n",
282 | "\n",
283 | "output_file(\"CS.html\")\n",
284 | "show(p)"
285 | ]
286 | },
287 | {
288 | "cell_type": "markdown",
289 | "metadata": {},
290 | "source": [
291 | "**Lecture:** [Styling the Chart](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
292 | "---"
293 | ]
294 | },
295 | {
296 | "cell_type": "code",
297 | "execution_count": 3,
298 | "metadata": {},
299 | "outputs": [],
300 | "source": [
301 | "from pandas_datareader import data\n",
302 | "import datetime\n",
303 | "from bokeh.plotting import figure, show, output_file\n",
304 | "\n",
305 | "start = datetime.datetime(2015, 11, 1)\n",
306 | "end = datetime.datetime(2016, 3, 10)\n",
307 | "\n",
308 | "df = data.DataReader(name=\"GOOG\", data_source=\"yahoo\", start=start, end=end)\n",
309 | "\n",
310 | "def inc_dec(c, o): \n",
311 | " if c > o:\n",
312 | " value = \"Increase\"\n",
313 | " elif c < o:\n",
314 | " value = \"Decrease\"\n",
315 | " else:\n",
316 | " value = \"Equal\"\n",
317 | " return value\n",
318 | "\n",
319 | "df[\"Status\"] = [inc_dec(c, o) for c, o in zip(df.Close, df.Open)]\n",
320 | "df[\"Middle\"] = (df.Open + df.Close) / 2\n",
321 | "df[\"Height\"] = abs(df.Close - df.Open)\n",
322 | "\n",
323 | "p = figure(x_axis_type='datetime', width=1000, height=300, sizing_mode='scale_width')\n",
324 | "p.title.text = \"Candlestick Chart\"\n",
325 | "p.grid.grid_line_alpha = 0.3\n",
326 | "\n",
327 | "hours_12 = 12 * 60 * 60 * 1000\n",
328 | "\n",
329 | "p.segment(df.index, df.High, df.index, df.Low, color=\"Black\")\n",
330 | "\n",
331 | "p.rect(df.index[df.Status==\"Increase\"], df.Middle[df.Status==\"Increase\"],\n",
332 | " hours_12, df.Height[df.Status==\"Increase\"], fill_color=\"#CCFFFF\", line_color=\"black\")\n",
333 | "\n",
334 | "p.rect(df.index[df.Status==\"Decrease\"], df.Middle[df.Status==\"Decrease\"],\n",
335 | " hours_12, df.Height[df.Status==\"Decrease\"], fill_color=\"#FF3333\", line_color=\"black\")\n",
336 | "\n",
337 | "output_file(\"CS.html\")\n",
338 | "show(p)"
339 | ]
340 | },
341 | {
342 | "cell_type": "markdown",
343 | "metadata": {},
344 | "source": [
345 | "**Lecture:** [The Concept Behind Embedding Bokeh Charts in a Flask Webpage](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
346 | "---"
347 | ]
348 | },
349 | {
350 | "cell_type": "markdown",
351 | "metadata": {},
352 | "source": [
353 | "Since Bokeh charts are can embed Bokeh charts, you can embed them in your websites. The next lecture contains the code of a Flask web app with the Bokeh candlestick chart embedded in it."
354 | ]
355 | },
356 | {
357 | "cell_type": "markdown",
358 | "metadata": {},
359 | "source": [
360 | "**Lecture:** [Embedding the Bokeh Chart in a Webpage](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
361 | "---"
362 | ]
363 | },
364 | {
365 | "cell_type": "markdown",
366 | "metadata": {},
367 | "source": [
368 | "Please see the lecture \"Final Code for Application 8\" for the code."
369 | ]
370 | },
371 | {
372 | "cell_type": "markdown",
373 | "metadata": {},
374 | "source": [
375 | "**Lecture:** [Deploying the Chart Website to a Live Server](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
376 | "---"
377 | ]
378 | },
379 | {
380 | "cell_type": "markdown",
381 | "metadata": {},
382 | "source": [
383 | "This lecture covers how to deploy the app on Heroku. Below is a summary of the deployment steps:"
384 | ]
385 | },
386 | {
387 | "cell_type": "markdown",
388 | "metadata": {},
389 | "source": [
390 | "1. Create an account on www.heroku.com if you don't have one already.\n",
391 | "2. Download and install Heroku CLI from www.heroku.com/\n",
392 | "3. Install gunicorn with \"pip install gunicorn\". Make sure you're using pip from your virtual environment if you have one.\n",
393 | "4. Create a requirement.txt file in the main app directory where the main Python app file is located. You can create that file by running \"pip freeze > requirements.txt\" in the command line. Make sure you're using pip from your virtual environment if you have one. The requirement.txt file should now contain a list of Python packages.\n",
394 | "5. Create a file named \"Procfile\" in the main app directory. The file should not contain any extension. Then type in this line inside: \"web: gunicorn script1:app\" where \"script1\" should be replaced with the name of your Python script and \"app\" with the name of the variable holding your Flask app.\n",
395 | "6. Create a runtime.txt file in the main app directory and type \"python-3.5.1\" inside, or even better go to the \"Python runtimes for Heroku\" webpage to see the current supported runtime and put that in the file.\n",
396 | "7. Open your computer terminal/command line to point to the directory where the Python file containing your app code is located.\n",
397 | "8. Using the terminal, log in to Heroku with \"heroku login\" \n",
398 | "9. Enter your Heroku email address\n",
399 | "10. Enter your Heroku password \n",
400 | "11. Create a new Heroku app with \"heroku create myawesomeappname\"\n",
401 | "17. Initialize a local git repository with \"git init\"\n",
402 | "18. Add your local application files to git with \"git add .\"\n",
403 | "19. Tell git your email address with \"git config --global user.email \"myemail@hotmail.com\"\". Make sure the email address is inside quotes here.\n",
404 | "20. Tell git your username (just pick whatever username) with \"config --global user.name \"whateverusername\"\". The username should be in quotes.\n",
405 | "21. Commit the changes with \"git commit -m \"first commit\"\". Make sure \"first commit\" is inside quotes.\n",
406 | "22. Before pushing the changes to Heroku, tell heroku the name of the app you want to use with heroku \"git:remote --app myawesomeappname\"\n",
407 | "23. Push the changes to Heroku with \"git push heroku master\"\n",
408 | "26. That should do it. Go ahead and open your app with \"heroku open\"."
409 | ]
410 | },
411 | {
412 | "cell_type": "markdown",
413 | "metadata": {},
414 | "source": [
415 | "**Lecture:** [Final Code for Application 8]()\n",
416 | "---"
417 | ]
418 | },
419 | {
420 | "cell_type": "markdown",
421 | "metadata": {},
422 | "source": [
423 | "To run this web app, make sure the directory structure is like the one you see below and then execute `python script1.py` and then visit `localhost:5000` on your browser."
424 | ]
425 | },
426 | {
427 | "cell_type": "markdown",
428 | "metadata": {},
429 | "source": [
430 | "Project directory tree:"
431 | ]
432 | },
433 | {
434 | "cell_type": "markdown",
435 | "metadata": {},
436 | "source": [
437 | "`templates\n",
438 | " layout.html\n",
439 | " home.html\n",
440 | " about.html\n",
441 | " plot.html\n",
442 | "static\n",
443 | " css\n",
444 | " main.css\n",
445 | "script1.py\n",
446 | "plot.py\n",
447 | "runtime.txt\n",
448 | "Procfile\n",
449 | "requirements.txt`"
450 | ]
451 | },
452 | {
453 | "cell_type": "markdown",
454 | "metadata": {},
455 | "source": [
456 | "script1.py"
457 | ]
458 | },
459 | {
460 | "cell_type": "code",
461 | "execution_count": null,
462 | "metadata": {
463 | "collapsed": true
464 | },
465 | "outputs": [],
466 | "source": [
467 | "from flask import Flask, render_template\n",
468 | "\n",
469 | "app=Flask(__name__)\n",
470 | "\n",
471 | "@app.route('/plot/')\n",
472 | "def plot():\n",
473 | " from pandas_datareader import data\n",
474 | " import datetime\n",
475 | " import fix_yahoo_finance as yf\n",
476 | " yf.pdr_override()\n",
477 | " from bokeh.plotting import figure, show, output_file\n",
478 | " from bokeh.embed import components\n",
479 | " from bokeh.resources import CDN \n",
480 | "\n",
481 | " start=datetime.datetime(2015,11,1)\n",
482 | " end=datetime.datetime(2016,3,10)\n",
483 | "\n",
484 | " df=data.get_data_yahoo(tickers=\"GOOG\", start=start, end=end)\n",
485 | "\n",
486 | "\n",
487 | " def inc_dec(c, o): \n",
488 | " if c > o:\n",
489 | " value = \"Increase\"\n",
490 | " elif c < o:\n",
491 | " value = \"Decrease\"\n",
492 | " else:\n",
493 | " value = \"Equal\"\n",
494 | " return value\n",
495 | "\n",
496 | " df[\"Status\"] = [inc_dec(c, o) for c, o in zip(df.Close, df.Open)]\n",
497 | " df[\"Middle\"] = (df.Open + df.Close) / 2\n",
498 | " df[\"Height\"] = abs(df.Close - df.Open)\n",
499 | "\n",
500 | " p = figure(x_axis_type='datetime', width=1000, height=300)\n",
501 | " p.title.text = \"Candlestick Chart\"\n",
502 | " p.grid.grid_line_alpha = 0.3\n",
503 | "\n",
504 | " hours_12 = 12 * 60 * 60 * 1000\n",
505 | "\n",
506 | " p.segment(df.index, df.High, df.index, df.Low, color=\"Black\")\n",
507 | "\n",
508 | " p.rect(df.index[df.Status==\"Increase\"], df.Middle[df.Status==\"Increase\"],\n",
509 | " hours_12, df.Height[df.Status==\"Increase\"], fill_color=\"#CCFFFF\", line_color=\"black\")\n",
510 | "\n",
511 | " p.rect(df.index[df.Status==\"Decrease\"], df.Middle[df.Status==\"Decrease\"],\n",
512 | " hours_12, df.Height[df.Status==\"Decrease\"], fill_color=\"#FF3333\", line_color=\"black\")\n",
513 | "\n",
514 | " script1, div1 = components(p)\n",
515 | " cdn_js = CDN.js_files[0]\n",
516 | " cdn_css = CDN.css_files[0]\n",
517 | " return render_template(\"plot.html\",\n",
518 | " script1 = script1,\n",
519 | " div1 = div1,\n",
520 | " cdn_css = cdn_css,\n",
521 | " cdn_js = cdn_js )\n",
522 | "\n",
523 | "@app.route('/')\n",
524 | "def home():\n",
525 | " return render_template(\"home.html\")\n",
526 | "\n",
527 | "@app.route('/about/')\n",
528 | "def about():\n",
529 | " return render_template(\"about.html\")\n",
530 | "\n",
531 | "if __name__ == \"__main__\":\n",
532 | " app.run(debug=True)"
533 | ]
534 | },
535 | {
536 | "cell_type": "markdown",
537 | "metadata": {},
538 | "source": [
539 | "templates/layout.html"
540 | ]
541 | },
542 | {
543 | "cell_type": "code",
544 | "execution_count": null,
545 | "metadata": {},
546 | "outputs": [],
547 | "source": [
548 | "\n",
549 | "\n",
550 | " \n",
551 | " Flask App\n",
552 | " \n",
554 | " \n",
555 | " \n",
556 | " \n",
557 | "
Ardit's web app
\n",
558 | " \n",
565 | " \n",
566 | " \n",
567 | " \n",
568 | " {%block content%}\n",
569 | " {%endblock%}\n",
570 | "
\n",
571 | " \n",
572 | ""
573 | ]
574 | },
575 | {
576 | "cell_type": "markdown",
577 | "metadata": {},
578 | "source": [
579 | "templates/home.html"
580 | ]
581 | },
582 | {
583 | "cell_type": "code",
584 | "execution_count": null,
585 | "metadata": {},
586 | "outputs": [],
587 | "source": [
588 | "{%extends \"layout.html\"%}\n",
589 | "{%block content%}\n",
590 | "\n",
591 | "
My homepage
\n",
592 | "
This is a test website
\n",
593 | "
\n",
594 | "{%endblock%}"
595 | ]
596 | },
597 | {
598 | "cell_type": "markdown",
599 | "metadata": {},
600 | "source": [
601 | "templates/about.html"
602 | ]
603 | },
604 | {
605 | "cell_type": "code",
606 | "execution_count": null,
607 | "metadata": {},
608 | "outputs": [],
609 | "source": [
610 | "{%extends \"layout.html\"%}\n",
611 | "{%block content%}\n",
612 | "\n",
613 | "
My about page
\n",
614 | "
This is a test website again
\n",
615 | "
\n",
616 | "{%endblock%} "
617 | ]
618 | },
619 | {
620 | "cell_type": "markdown",
621 | "metadata": {},
622 | "source": [
623 | "templates/plot.html"
624 | ]
625 | },
626 | {
627 | "cell_type": "code",
628 | "execution_count": null,
629 | "metadata": {},
630 | "outputs": [],
631 | "source": [
632 | "{%extends \"layout.html\"%}\n",
633 | "{%block content%}\n",
634 | "\n",
635 | "\n",
636 | "\n",
637 | "\n",
638 | "
My about page
\n",
639 | "
This is a test website again
\n",
640 | "
\n",
641 | "{{script1 | safe}}\n",
642 | "{{div1 | safe}}\n",
643 | "{%endblock%}"
644 | ]
645 | },
646 | {
647 | "cell_type": "markdown",
648 | "metadata": {},
649 | "source": [
650 | "static/css/main.css"
651 | ]
652 | },
653 | {
654 | "cell_type": "code",
655 | "execution_count": null,
656 | "metadata": {},
657 | "outputs": [],
658 | "source": [
659 | "body {\n",
660 | " margin: 0;\n",
661 | " padding: 0;\n",
662 | " font-family: \"Helvetica Neue\", Helvetica, Arial, sans-serif;\n",
663 | " color: #444;\n",
664 | "}\n",
665 | "\n",
666 | "/*\n",
667 | " * Formatting the header area\n",
668 | " */\n",
669 | "\n",
670 | "header {\n",
671 | " background-color: #DFB887;\n",
672 | " height: 35px;\n",
673 | " width: 100%;\n",
674 | " opacity: .9;\n",
675 | " margin-bottom: 10px;\n",
676 | "}\n",
677 | "\n",
678 | "header h1.logo {\n",
679 | " margin: 0;\n",
680 | " font-size: 1.7em;\n",
681 | " color: #fff;\n",
682 | " text-transform: uppercase;\n",
683 | " float: left;\n",
684 | "}\n",
685 | "\n",
686 | "header h1.logo:hover {\n",
687 | " color: #fff;\n",
688 | " text-decoration: none;\n",
689 | "}\n",
690 | "\n",
691 | "/*\n",
692 | " * Center the body content\n",
693 | " */\n",
694 | "\n",
695 | ".container {\n",
696 | " width: 1200px;\n",
697 | " margin: 0 auto;\n",
698 | "}\n",
699 | "\n",
700 | "div.home {\n",
701 | " padding: 10px 0 30px 0;\n",
702 | " background-color: #E6E6FA;\n",
703 | " -webkit-border-radius: 6px;\n",
704 | " -moz-border-radius: 6px;\n",
705 | " border-radius: 6px;\n",
706 | "}\n",
707 | "\n",
708 | "div.about {\n",
709 | " padding: 10px 0 30px 0;\n",
710 | " background-color: #E6E6FA;\n",
711 | " -webkit-border-radius: 6px;\n",
712 | " -moz-border-radius: 6px;\n",
713 | " border-radius: 6px;\n",
714 | "}\n",
715 | "\n",
716 | "h2 {\n",
717 | " font-size: 3em;\n",
718 | " margin-top: 40px;\n",
719 | " text-align: center;\n",
720 | " letter-spacing: -2px;\n",
721 | "}\n",
722 | "\n",
723 | "h3 {\n",
724 | " font-size: 1.7em;\n",
725 | " font-weight: 100;\n",
726 | " margin-top: 30px;\n",
727 | " text-align: center;\n",
728 | " letter-spacing: -1px;\n",
729 | " color: #999;\n",
730 | "}\n",
731 | "\n",
732 | ".menu {\n",
733 | " float: right;\n",
734 | " margin-top: 8px;\n",
735 | "}\n",
736 | "\n",
737 | ".menu li {\n",
738 | " display: inline;\n",
739 | "}\n",
740 | "\n",
741 | ".menu li + li {\n",
742 | " margin-left: 35px;\n",
743 | "}\n",
744 | "\n",
745 | ".menu li a {\n",
746 | " color: #444;\n",
747 | " text-decoration: none;\n",
748 | "}"
749 | ]
750 | },
751 | {
752 | "cell_type": "markdown",
753 | "metadata": {},
754 | "source": [
755 | "The following files are only necessary if you deploy your app on Heroku. "
756 | ]
757 | },
758 | {
759 | "cell_type": "markdown",
760 | "metadata": {},
761 | "source": [
762 | "Procfile"
763 | ]
764 | },
765 | {
766 | "cell_type": "markdown",
767 | "metadata": {
768 | "collapsed": true
769 | },
770 | "source": [
771 | "`web: gunicorn script1:app`"
772 | ]
773 | },
774 | {
775 | "cell_type": "markdown",
776 | "metadata": {},
777 | "source": [
778 | "runtime.txt (Go to the [Heroku Python runtime webpage](https://devcenter.heroku.com/articles/python-runtimes#supported-python-runtimes) to see the latest version of Python used by Heroku and put that version in runtime.txt instead of the one below)"
779 | ]
780 | },
781 | {
782 | "cell_type": "markdown",
783 | "metadata": {},
784 | "source": [
785 | "`python-3.5.1`"
786 | ]
787 | },
788 | {
789 | "cell_type": "markdown",
790 | "metadata": {},
791 | "source": [
792 | "requirements.txt"
793 | ]
794 | },
795 | {
796 | "cell_type": "markdown",
797 | "metadata": {
798 | "collapsed": true
799 | },
800 | "source": [
801 | "`bokeh==0.11.1\n",
802 | "Flask==0.10.1\n",
803 | "gunicorn==19.6.0\n",
804 | "itsdangerous==0.24\n",
805 | "Jinja2==2.8\n",
806 | "MarkupSafe==0.23\n",
807 | "numpy==1.11.0\n",
808 | "pandas==0.18.0\n",
809 | "pandas-datareader==0.2.1\n",
810 | "python-dateutil==2.5.2\n",
811 | "pytz==2016.3\n",
812 | "PyYAML==3.11\n",
813 | "requests==2.9.1\n",
814 | "requests-file==1.4\n",
815 | "six==1.10.0\n",
816 | "tornado==4.3\n",
817 | "Werkzeug==0.11.5`"
818 | ]
819 | },
820 | {
821 | "cell_type": "code",
822 | "execution_count": null,
823 | "metadata": {},
824 | "outputs": [],
825 | "source": []
826 | }
827 | ],
828 | "metadata": {
829 | "kernelspec": {
830 | "display_name": "Python 3",
831 | "language": "python",
832 | "name": "python3"
833 | },
834 | "language_info": {
835 | "codemirror_mode": {
836 | "name": "ipython",
837 | "version": 3
838 | },
839 | "file_extension": ".py",
840 | "mimetype": "text/x-python",
841 | "name": "python",
842 | "nbconvert_exporter": "python",
843 | "pygments_lexer": "ipython3",
844 | "version": "3.6.3"
845 | }
846 | },
847 | "nbformat": 4,
848 | "nbformat_minor": 2
849 | }
850 |
--------------------------------------------------------------------------------
/S22-App-10-Student-Project-on-Building-a-Geocoder-Web-Service.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# The Python Mega Course: Build 10 Real World Applications\n",
8 | "---"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {},
14 | "source": [
15 | "This notebook is a summary of [The Python Mega Course: Build 10 Real World Applications](https://www.udemy.com/the-python-mega-course), a comprehensive online Python course taught by Ardit Sulce. Each lecture name is clickable and takes you to the video lecture in the course."
16 | ]
17 | },
18 | {
19 | "cell_type": "markdown",
20 | "metadata": {},
21 | "source": [
22 | "# Section 22: Application 10: Student Project on Building a Geocoder Web Service\n",
23 | "***"
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "**Lecture:** [Program Demonstration](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
31 | "---"
32 | ]
33 | },
34 | {
35 | "cell_type": "markdown",
36 | "metadata": {},
37 | "source": [
38 | "This video lecture shows the finished version of the website running on a browser. The goal for this section is that students try to build this web app on their own and then they can compare their solution with the one provided here."
39 | ]
40 | },
41 | {
42 | "cell_type": "markdown",
43 | "metadata": {},
44 | "source": [
45 | "**Lecture:** [Solution, Part 1](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
46 | "---"
47 | ]
48 | },
49 | {
50 | "cell_type": "markdown",
51 | "metadata": {},
52 | "source": [
53 | "For the final solution please see the lecture titled \"Final Code of Application 10\" down below."
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "**Lecture:** [Solution, Part 2](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
61 | "---"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {},
67 | "source": [
68 | "For the final solution please see the lecture titled \"Final Code of Application 10\" down below."
69 | ]
70 | },
71 | {
72 | "cell_type": "markdown",
73 | "metadata": {},
74 | "source": [
75 | "**Lecture:** [End of the Course](https://www.udemy.com/the-python-mega-course/learn/v4/t/lecture/9439078?start=0)\n",
76 | "---"
77 | ]
78 | },
79 | {
80 | "cell_type": "markdown",
81 | "metadata": {},
82 | "source": [
83 | "This video lecture covers some final remarks about the course and what to do next."
84 | ]
85 | },
86 | {
87 | "cell_type": "markdown",
88 | "metadata": {},
89 | "source": [
90 | "**Lecture:** [Final Code of Application 10]()\n",
91 | "---"
92 | ]
93 | },
94 | {
95 | "cell_type": "markdown",
96 | "metadata": {},
97 | "source": [
98 | "Project directory tree:"
99 | ]
100 | },
101 | {
102 | "cell_type": "markdown",
103 | "metadata": {
104 | "collapsed": true
105 | },
106 | "source": [
107 | "`templates\n",
108 | " index.html\n",
109 | " download.html\n",
110 | "static\n",
111 | " main.css\n",
112 | "app.py\n",
113 | "Procfile\n",
114 | "requirements.txt\n",
115 | "runtime.txt`"
116 | ]
117 | },
118 | {
119 | "cell_type": "markdown",
120 | "metadata": {},
121 | "source": [
122 | "app.py"
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": null,
128 | "metadata": {
129 | "collapsed": true
130 | },
131 | "outputs": [],
132 | "source": [
133 | "from flask import Flask, render_template, request, send_file\n",
134 | "from geopy.geocoders import ArcGIS\n",
135 | "import pandas\n",
136 | "import datetime\n",
137 | "\n",
138 | "app=Flask(__name__)\n",
139 | "\n",
140 | "@app.route(\"/\")\n",
141 | "def index():\n",
142 | " return render_template(\"index.html\")\n",
143 | "\n",
144 | "@app.route('/success-table', methods=['POST'])\n",
145 | "def success_table():\n",
146 | " global filename\n",
147 | " if request.method == \"POST\":\n",
148 | " file=request.files['file']\n",
149 | " try:\n",
150 | " df = pandas.read_csv(file)\n",
151 | " gc = ArcGIS(scheme='http')\n",
152 | " df[\"coordinates\"] = df[\"Address\"].apply(gc.geocode)\n",
153 | "            df['Latitude'] = df['coordinates'].apply(lambda x: x.latitude if x is not None else None)\n",
154 | "            df['Longitude'] = df['coordinates'].apply(lambda x: x.longitude if x is not None else None)\n",
155 | "            df = df.drop(\"coordinates\", axis=1)\n",
156 | " filename = datetime.datetime.now().strftime(\"sample_files/%Y-%m-%d-%H-%M-%S-%f\"+\".csv\")\n",
157 | " df.to_csv(filename,index=None)\n",
158 | " return render_template(\"index.html\", text=df.to_html(), btn='download.html')\n",
159 | " except Exception as e:\n",
160 | " return render_template(\"index.html\", text=str(e))\n",
161 | "\n",
162 | "@app.route(\"/download-file/\")\n",
163 | "def download():\n",
164 | " return send_file(filename, attachment_filename='yourfile.csv', as_attachment=True)\n",
165 | "\n",
166 | "if __name__==\"__main__\":\n",
167 | " app.run(debug=True)"
168 | ]
169 | },
170 | {
171 | "cell_type": "markdown",
172 | "metadata": {},
173 | "source": [
174 | "templates/index.html"
175 | ]
176 | },
177 | {
178 | "cell_type": "code",
179 | "execution_count": null,
180 | "metadata": {
181 | "collapsed": true
182 | },
183 | "outputs": [],
184 | "source": [
185 | "\n",
186 | "\n",
187 | " Super Geocoder \n",
188 | "\n",
189 | " \n",
190 | "\n",
191 | " \n",
192 | " \n",
193 | "
Super Geocoder
\n",
194 | "
Please upload your CSV file. The values containing addresses should be in a column named address or Address
\n",
195 | "
\n",
199 | "
\n",
200 | " {{text|safe}}\n",
201 | " {% include btn ignore missing %}\n",
202 | "
\n",
203 | "
\n",
204 | " \n",
205 | ""
206 | ]
207 | },
208 | {
209 | "cell_type": "markdown",
210 | "metadata": {},
211 | "source": [
212 | "templates/download.html"
213 | ]
214 | },
215 | {
216 | "cell_type": "code",
217 | "execution_count": null,
218 | "metadata": {
219 | "collapsed": true
220 | },
221 | "outputs": [],
222 | "source": [
223 | "\n",
224 | "\n",
225 | "\n",
228 | ""
229 | ]
230 | },
231 | {
232 | "cell_type": "markdown",
233 | "metadata": {},
234 | "source": [
235 | "static/main.css"
236 | ]
237 | },
238 | {
239 | "cell_type": "code",
240 | "execution_count": null,
241 | "metadata": {
242 | "collapsed": true
243 | },
244 | "outputs": [],
245 | "source": [
246 | "html, body {\n",
247 | " height: 100%;\n",
248 | " margin: 0;\n",
249 | "}\n",
250 | "\n",
251 | ".container {\n",
252 | " margin: 0 auto;\n",
253 | " width: 100%;\n",
254 | " height: 100%;\n",
255 | " background-color: #006666;\n",
256 | " color: #e6ffff;\n",
257 | " overflow:hidden;\n",
258 | " text-align: center;\n",
259 | "}\n",
260 | "\n",
261 | ".container form {\n",
262 | " margin: 20px;\n",
263 | "}\n",
264 | "\n",
265 | ".container h1 {\n",
266 | " font-family: Arial, sans-serif;\n",
267 | " font-size: 30px;\n",
268 | " color: #DDCCEE;\n",
269 | " margin-top: 80px;\n",
270 | "}\n",
271 | "\n",
272 | ".container button {\n",
273 | " width:70px;\n",
274 | " height: 30px;\n",
275 | " background-color: steelblue;\n",
276 | " margin: 3px;\n",
277 | "}\n",
278 | "\n",
279 | ".container input {\n",
280 | " width: 200px;\n",
281 | " height: 15px;\n",
282 | " font-size: 15px;\n",
283 | " margin: 2px;\n",
284 | " padding: 5px;\n",
285 | " transition: all 0.2s ease-in-out;\n",
286 | "}\n",
287 | "\n",
288 | ".output {\n",
289 | " display: inline-block;\n",
290 | "}"
291 | ]
292 | },
293 | {
294 | "cell_type": "markdown",
295 | "metadata": {},
296 | "source": [
297 | "The following files are only necessary if you deploy your app on Heroku."
298 | ]
299 | },
300 | {
301 | "cell_type": "markdown",
302 | "metadata": {},
303 | "source": [
304 | "Procfile"
305 | ]
306 | },
307 | {
308 | "cell_type": "markdown",
309 | "metadata": {
310 | "collapsed": true
311 | },
312 | "source": [
313 | "`web: gunicorn app:app`"
314 | ]
315 | },
316 | {
317 | "cell_type": "markdown",
318 | "metadata": {},
319 | "source": [
320 | "requirements.txt"
321 | ]
322 | },
323 | {
324 | "cell_type": "markdown",
325 | "metadata": {
326 | "collapsed": true
327 | },
328 | "source": [
329 | "`Flask==0.10.1\n",
330 | "gunicorn==19.6.0\n",
331 | "itsdangerous==0.24\n",
332 | "Jinja2==2.8\n",
333 | "MarkupSafe==0.23\n",
334 | "Werkzeug==0.11.10`"
335 | ]
336 | },
337 | {
338 | "cell_type": "markdown",
339 | "metadata": {},
340 | "source": [
341 | "runtime.txt (Go to the [Heroku Python runtime webpage](https://devcenter.heroku.com/articles/python-runtimes#supported-python-runtimes) to see the latest version of Python used by Heroku and put that version in runtime.txt instead of the one below)"
342 | ]
343 | },
344 | {
345 | "cell_type": "markdown",
346 | "metadata": {
347 | "collapsed": true
348 | },
349 | "source": [
350 | "`python-3.5.1`"
351 | ]
352 | }
353 | ],
354 | "metadata": {
355 | "kernelspec": {
356 | "display_name": "Python 3",
357 | "language": "python",
358 | "name": "python3"
359 | },
360 | "language_info": {
361 | "codemirror_mode": {
362 | "name": "ipython",
363 | "version": 3
364 | },
365 | "file_extension": ".py",
366 | "mimetype": "text/x-python",
367 | "name": "python",
368 | "nbconvert_exporter": "python",
369 | "pygments_lexer": "ipython3",
370 | "version": "3.6.3"
371 | }
372 | },
373 | "nbformat": 4,
374 | "nbformat_minor": 2
375 | }
376 |
--------------------------------------------------------------------------------
/Scatter_plotting.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 | Bokeh Plot
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
47 |
82 |
83 |
84 |
85 |
--------------------------------------------------------------------------------
/data/Galaxy_resized.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/Galaxy_resized.jpg
--------------------------------------------------------------------------------
/data/Volcanoes.txt:
--------------------------------------------------------------------------------
1 | VOLCANX020,NUMBER,NAME,LOCATION,STATUS,ELEV,TYPE,TIMEFRAME,LAT,LON
2 | 509.000000000000000,1201-01=,Baker,US-Washington,Historical,3285.000000000000000,Stratovolcanoes,D3,48.7767982,-121.8109970
3 | 511.000000000000000,1201-02-,Glacier Peak,US-Washington,Tephrochronology,3213.000000000000000,Stratovolcano,D4,48.1118011,-121.1110001
4 | 513.000000000000000,1201-03-,Rainier,US-Washington,Dendrochronology,4392.000000000000000,Stratovolcano,D3,46.8698006,-121.7509995
5 | 515.000000000000000,1201-05-,St. Helens,US-Washington,Historical,2549.000000000000000,Stratovolcano,D1,46.1997986,-122.1809998
6 | 516.000000000000000,1201-04-,Adams,US-Washington,Tephrochronology,3742.000000000000000,Stratovolcano,D6,46.2057991,-121.4909973
7 | 517.000000000000000,1201-06-,West Crater,US-Washington,Radiocarbon,1329.000000000000000,Volcanic field,D7,45.8797989,-122.0810013
8 | 518.000000000000000,1201-07-,Indian Heaven,US-Washington,Radiocarbon,1806.000000000000000,Shield volcanoes,D7,45.9297981,-121.8209991
9 | 519.000000000000000,1202-01-,Hood,US-Oregon,Historical,3426.000000000000000,Stratovolcano,D3,45.3737984,-121.6910019
10 | 521.000000000000000,1202-02-,Jefferson,US-Oregon,Varve Count,3199.000000000000000,Stratovolcano,D6,44.6917992,-121.8010025
11 | 522.000000000000000,1202-03-,Blue Lake Crater,US-Oregon,Radiocarbon,1230.000000000000000,Maar,D7,44.4197998,-121.7710037
12 | 523.000000000000000,1202-04-,Sand Mountain Field,US-Oregon,Radiocarbon,1664.000000000000000,Cinder cones,D7,44.3797989,-121.9309998
13 | 524.000000000000000,1202-05-,Washington,US-Oregon,Radiocarbon,2376.000000000000000,Shield volcano,D6,44.3317986,-121.8310013
14 | 525.000000000000000,1202-06-,Belknap,US-Oregon,Radiocarbon,2095.000000000000000,Shield volcanoes,D6,44.2848015,-121.8410034
15 | 526.000000000000000,1202-07-,North Sister Field,US-Oregon,Radiocarbon,3074.000000000000000,Complex volcano,D6,44.1697998,-121.7710037
16 | 527.000000000000000,1202-08-,South Sister,US-Oregon,Radiocarbon,3157.000000000000000,Complex volcano,D7,44.0998001,-121.7710037
17 | 528.000000000000000,1202-09-,Bachelor,US-Oregon,Tephrochronology,2763.000000000000000,Stratovolcano,D7,43.9788017,-121.6809998
18 | 531.000000000000000,1202-11-,Newberry Volcano,US-Oregon,Radiocarbon,2434.000000000000000,Shield volcano,D6,43.7218018,-121.2210007
19 | 532.000000000000000,1202-10-,Davis Lake,US-Oregon,Radiocarbon,2163.000000000000000,Volcanic field,D7,43.5698013,-121.8209991
20 | 534.000000000000000,1202-12-,Devils Garden,US-Oregon,Holocene?,1698.000000000000000,Volcanic field,?,43.5119019,-120.8610001
21 | 535.000000000000000,1202-15-,Cinnamon Butte,US-Oregon,Holocene?,1956.000000000000000,Cinder cones,?,43.2407990,-122.1009979
22 | 536.000000000000000,1202-13-,Squaw Ridge Lava Field,US-Oregon,Holocene?,1711.000000000000000,Volcanic field,?,43.4719009,-120.7509995
23 | 537.000000000000000,1202-14-,Four Craters Lava Field,US-Oregon,Holocene?,1501.000000000000000,Volcanic field,?,43.3609009,-120.6610031
24 | 538.000000000000000,1202-16-,Crater Lake,US-Oregon,Radiocarbon,2487.000000000000000,Caldera,D7,42.9299011,-122.1210022
25 | 539.000000000000000,1205-01-,Yellowstone,US-Wyoming,Tephrochronology,2805.000000000000000,Calderas,D7,44.4299011,-110.6709976
26 | 541.000000000000000,1202-17-,Diamond Craters,US-Oregon,Holocene?,1435.000000000000000,Volcanic field,?,43.0998993,-118.7509995
27 | 542.000000000000000,1202-19-,Jordan Craters,US-Oregon,Radiocarbon,1473.000000000000000,Volcanic field,D7,43.1498985,-117.4710007
28 | 543.000000000000000,1202-18-,Saddle Butte,US-Oregon,Holocene?,1700.000000000000000,Volcanic field,?,42.9999008,-117.8010025
29 | 544.000000000000000,1204-02-,Craters of the Moon,US-Idaho,Radiocarbon,2005.000000000000000,Cinder cones,D7,43.4198990,-113.5009995
30 | 545.000000000000000,1204-04-,Hell's Half Acre,US-Idaho,Radiocarbon,1631.000000000000000,Shield volcano,D7,43.4999008,-112.4509964
31 | 546.000000000000000,1204-01-,Shoshone Lava Field,US-Idaho,Holocene,1478.000000000000000,Shield volcano,D7,43.1799011,-114.3509979
32 | 547.000000000000000,1202-20-,Jackies Butte,US-Oregon,Holocene?,1418.000000000000000,Volcanic field,?,42.6058998,-117.5810013
33 | 548.000000000000000,1203-02-,Medicine Lake,US-California,Radiocarbon,2412.000000000000000,Shield volcano,D6,41.5798988,-121.5709991
34 | 549.000000000000000,1204-03-,Wapi Lava Field,US-Idaho,Radiocarbon,1604.000000000000000,Shield volcano,D7,42.8799019,-113.2210007
35 | 550.000000000000000,1203-01-,Shasta,US-California,Historical,4317.000000000000000,Stratovolcano,D4,41.4198990,-122.2009964
36 | 551.000000000000000,1203-03-,Brushy Butte,US-California,Holocene?,1174.000000000000000,Shield volcano,?,41.1778984,-121.4410019
37 | 552.000000000000000,1203-04-,Big Cave,US-California,Holocene?,1259.000000000000000,Shield volcano,?,40.9548988,-121.3610001
38 | 553.000000000000000,1203-05-,Twin Buttes,US-California,Holocene?,1631.000000000000000,Cinder cones,?,40.7798996,-121.6009979
39 | 554.000000000000000,1203-06-,Tumble Buttes,US-California,Holocene?,2191.000000000000000,Cinder cones,?,40.6799011,-121.5510025
40 | 555.000000000000000,1203-09-,Eagle Lake Field,US-California,Holocene?,1652.000000000000000,Fissure vents,?,40.6299019,-120.8310013
41 | 556.000000000000000,1203-08-,Lassen Volc Center,US-California,Historical,3187.000000000000000,Stratovolcano,D2,40.4919014,-121.5009995
42 | 557.000000000000000,1203-10-,Clear Lake,US-California,Holocene,1439.000000000000000,Volcanic field,U,38.9698982,-122.7710037
43 | 558.000000000000000,1206-01-,Steamboat Springs,US-Nevada,Pleistocene-Fumarolic,1415.000000000000000,Lava domes,Q,39.3749008,-119.7210007
44 | 560.000000000000000,1208-01-,Dotsero,US-Colorado,Radiocarbon,2230.000000000000000,Maar,D7,39.6500015,-107.0309982
45 | 561.000000000000000,1203-11-,Mono Lake Volc Field,US-California,Tephrochronology,2121.000000000000000,Cinder cones,D4,37.9999008,-119.0309982
46 | 562.000000000000000,1207-05-,Black Rock Desert,US-Utah,Radiocarbon,1800.000000000000000,Volcanic field,D6,38.9700012,-112.5009995
47 | 563.000000000000000,1203-12-,Mono Craters,US-California,Radiocarbon,2796.000000000000000,Lava domes,D6,37.8799019,-119.0009995
48 | 564.000000000000000,1203-13-,Inyo Craters,US-California,Radiocarbon,2629.000000000000000,Lava domes,D6,37.6918983,-119.0210037
49 | 565.000000000000000,1203-14-,Long Valley,US-California,Pleistocene-Fumarolic,3390.000000000000000,Caldera,Q,37.6999016,-118.8710022
50 | 566.000000000000000,1203-15-,Red Cones,US-California,Radiocarbon,2748.000000000000000,Cinder cones,D7,37.5798988,-119.0510025
51 | 568.000000000000000,1203-16-,Ubehebe Craters,US-California,Anthropology,752.000000000000000,Maars,D7,37.0199013,-117.4509964
52 | 569.000000000000000,1207-04-,Markagunt Plateau,US-Utah,Dendrochronology,2840.000000000000000,Volcanic field,D6,37.5800018,-112.6709976
53 | 570.000000000000000,1207-01-,Santa Clara,US-Utah,Holocene?,1465.000000000000000,Volcanic field,?,37.2570000,-113.6210022
54 | 571.000000000000000,1207-03-,Bald Knoll,US-Utah,Holocene,2135.000000000000000,Cinder cones,U,37.3279991,-112.4010010
55 | 572.000000000000000,1203-17-,Golden Trout Creek,US-California,Tephrochronology,2886.000000000000000,Volcanic field,D7,36.3578987,-118.3209991
56 | 574.000000000000000,1203-18-,Coso Volc Field,US-California,Holocene?,2400.000000000000000,Lava domes,?,36.0298996,-117.8209991
57 | 576.000000000000000,1209-01-,Uinkaret Field,US-Arizona,Anthropology,1555.000000000000000,Volcanic field,D6,36.3800011,-113.1309967
58 | 579.000000000000000,1203-19-,Lavic Lake,US-California,Holocene?,1495.000000000000000,Volcanic field,?,34.7500000,-116.6210022
59 | 580.000000000000000,1210-03-,Valles Caldera,US-New Mexico,Pleistocene-Fumarolic,3430.000000000000000,Caldera,Q,35.8699989,-106.5709991
60 | 581.000000000000000,1209-02-,Sunset Crater,US-Arizona,Dendrochronology,2447.000000000000000,Cinder cone,D6,35.3699989,-111.5009995
61 | 584.000000000000000,1203-20-,Amboy,US-California,Holocene,288.000000000000000,Cinder cone,U,34.5499992,-115.7809982
62 | 586.000000000000000,1210-02-,Zuni-Bandera,US-New Mexico,Anthropology,2550.000000000000000,Volcanic field,D7,34.7999992,-108.0009995
63 | 588.000000000000000,1210-01-,Carrizozo,US-New Mexico,Holocene,1731.000000000000000,Cinder cone,U,33.7801018,-105.9309998
64 |
--------------------------------------------------------------------------------
/data/balance.txt:
--------------------------------------------------------------------------------
1 | 907
--------------------------------------------------------------------------------
/data/books.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/books.db
--------------------------------------------------------------------------------
/data/circle_markers.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/circle_markers.png
--------------------------------------------------------------------------------
/data/galaxy.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/galaxy.jpg
--------------------------------------------------------------------------------
/data/images/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/images/.DS_Store
--------------------------------------------------------------------------------
/data/images/Lighthouse.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/images/Lighthouse.jpg
--------------------------------------------------------------------------------
/data/images/Moon sinking, sun rising.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/images/Moon sinking, sun rising.jpg
--------------------------------------------------------------------------------
/data/images/galaxy.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/images/galaxy.jpg
--------------------------------------------------------------------------------
/data/images/kangaroos-rain-australia_71370_990x742.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/images/kangaroos-rain-australia_71370_990x742.jpg
--------------------------------------------------------------------------------
/data/lecture_link_demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/lecture_link_demo.gif
--------------------------------------------------------------------------------
/data/lite.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/lite.db
--------------------------------------------------------------------------------
/data/newlite.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/newlite.db
--------------------------------------------------------------------------------
/data/news.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/news.jpg
--------------------------------------------------------------------------------
/data/newsmallgray.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/newsmallgray.png
--------------------------------------------------------------------------------
/data/smallgray.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/smallgray.png
--------------------------------------------------------------------------------
/data/supermarkets-commas.txt:
--------------------------------------------------------------------------------
1 | ID,Address,City,State,Country,Name,Employees
2 | 1,3666 21st St,San Francisco,CA 94114,USA, Madeira,8
3 | 2,735 Dolores St,San Francisco,CA 94119,USA,Bready Shop,15
4 | 3,332 Hill St,San Francisco,California 94114,USA,Super River,25
5 | 4,3995 23rd St,San Francisco,CA 94114,USA,Ben's Shop,10
6 | 5,1056 Sanchez St,San Francisco,California,USA,Sanchez,12
7 | 6,551 Alvarado St,San Francisco,CA 94114,USA,Richvalley,20
8 |
--------------------------------------------------------------------------------
/data/supermarkets-semi-colons.txt:
--------------------------------------------------------------------------------
1 | ID;Address;City;State;Country;Name;Employees
2 | 1;3666 21st St;San Francisco;CA 94114;USA;Madeira;8
3 | 2;735 Dolores St;San Francisco;CA 94119;USA;Bready Shop;15
4 | 3;332 Hill St;San Francisco;California 94114;USA; Super River;25
5 | 4;3995 23rd St;San Francisco;CA 94114;USA;Ben's Shop;10
6 | 5;1056 Sanchez St;San Francisco;California;USA;Sanchez;12
7 | 6;551 Alvarado St;San Francisco;CA 94114;USA;Richvalley;20
8 |
--------------------------------------------------------------------------------
/data/supermarkets.csv:
--------------------------------------------------------------------------------
1 | ID,Address,City,State,Country,Name,Employees
2 | 1,3666 21st St,San Francisco,CA 94114,USA,Madeira,8
3 | 2,735 Dolores St,San Francisco,CA 94119,USA,Bready Shop,15
4 | 3,332 Hill St,San Francisco,California 94114,USA,Super River,25
5 | 4,3995 23rd St,San Francisco,CA 94114,USA,Ben's Shop,10
6 | 5,1056 Sanchez St,San Francisco,California,USA,Sanchez,12
7 | 6,551 Alvarado St,San Francisco,CA 94114,USA,Richvalley,20
8 |
--------------------------------------------------------------------------------
/data/supermarkets.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "ID": 1,
4 | "Address": "3666 21st St",
5 | "City": "San Francisco",
6 | "State": "CA 94114",
7 | "Country": "USA",
8 | "Name": "Madeira",
9 | "Employees": 8
10 | },
11 | {
12 | "ID": 2,
13 | "Address": "735 Dolores St",
14 | "City": "San Francisco",
15 | "State": "CA 94119",
16 | "Country": "USA",
17 | "Name": "Bready Shop",
18 | "Employees": 15
19 | },
20 | {
21 | "ID": 3,
22 | "Address": "332 Hill St",
23 | "City": "San Francisco",
24 | "State": "California 94114",
25 | "Country": "USA",
26 | "Name": "Super River",
27 | "Employees": 25
28 | },
29 | {
30 | "ID": 4,
31 | "Address": "3995 23rd St",
32 | "City": "San Francisco",
33 | "State": "CA 94114",
34 | "Country": "USA",
35 | "Name": "Ben's Shop",
36 | "Employees": 10
37 | },
38 | {
39 | "ID": 5,
40 | "Address": "1056 Sanchez St",
41 | "City": "San Francisco",
42 | "State": "California",
43 | "Country": "USA",
44 | "Name": "Sanchez",
45 | "Employees": 12
46 | },
47 | {
48 | "ID": 6,
49 | "Address": "551 Alvarado St",
50 | "City": "San Francisco",
51 | "State": "CA 94114",
52 | "Country": "USA",
53 | "Name": "Richvalley",
54 | "Employees": 20
55 | }
56 | ]
--------------------------------------------------------------------------------
/data/supermarkets.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/data/supermarkets.xlsx
--------------------------------------------------------------------------------
/scratch1.py:
--------------------------------------------------------------------------------
1 | import json
2 | from difflib import get_close_matches
3 | data = json.load(open("data.json"))
4 | def translate(w):
5 | w = w.lower()
6 | if w in data:
7 | return data[w]
8 | elif w.title() in data:
9 | return data[w.title()]
10 | elif w.upper() in data: #in case user enters words like USA or NATO
11 | return data[w.upper()]
12 | elif len(get_close_matches(w, data.keys())) > 0:
13 | yn = input("Did you mean %s instead? Enter Y if yes, or N if no: " % get_close_matches(w, data.keys())[0])
14 | if yn == "Y":
15 | return data[get_close_matches(w, data.keys())[0]]
16 | elif yn == "N":
17 | return "The word doesn't exist. Please double check it."
18 | else:
19 | return "We didn't understand your entry."
20 | else:
21 | return "The word doesn't exist. Please double check it."
22 | word = input("Enter word: ")
23 | output = translate(word)
24 | if type(output) == list:
25 | for item in output:
26 | print(item)
27 | else:
28 | print(output)
--------------------------------------------------------------------------------
/smallgray.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/smallgray.png
--------------------------------------------------------------------------------
/supermarkets-commas.txt:
--------------------------------------------------------------------------------
1 | ID,Address,City,State,Country,Name,Employees
2 | 1,3666 21st St,San Francisco,CA 94114,USA, Madeira,8
3 | 2,735 Dolores St,San Francisco,CA 94119,USA,Bready Shop,15
4 | 3,332 Hill St,San Francisco,California 94114,USA,Super River,25
5 | 4,3995 23rd St,San Francisco,CA 94114,USA,Ben's Shop,10
6 | 5,1056 Sanchez St,San Francisco,California,USA,Sanchez,12
7 | 6,551 Alvarado St,San Francisco,CA 94114,USA,Richvalley,20
8 |
--------------------------------------------------------------------------------
/supermarkets-semi-colons.txt:
--------------------------------------------------------------------------------
1 | ID;Address;City;State;Country;Name;Employees
2 | 1;3666 21st St;San Francisco;CA 94114;USA;Madeira;8
3 | 2;735 Dolores St;San Francisco;CA 94119;USA;Bready Shop;15
4 | 3;332 Hill St;San Francisco;California 94114;USA; Super River;25
5 | 4;3995 23rd St;San Francisco;CA 94114;USA;Ben's Shop;10
6 | 5;1056 Sanchez St;San Francisco;California;USA;Sanchez;12
7 | 6;551 Alvarado St;San Francisco;CA 94114;USA;Richvalley;20
8 |
--------------------------------------------------------------------------------
/supermarkets.csv:
--------------------------------------------------------------------------------
1 | ID,Address,City,State,Country,Name,Employees
2 | 1,3666 21st St,San Francisco,CA 94114,USA,Madeira,8
3 | 2,735 Dolores St,San Francisco,CA 94119,USA,Bready Shop,15
4 | 3,332 Hill St,San Francisco,California 94114,USA,Super River,25
5 | 4,3995 23rd St,San Francisco,CA 94114,USA,Ben's Shop,10
6 | 5,1056 Sanchez St,San Francisco,California,USA,Sanchez,12
7 | 6,551 Alvarado St,San Francisco,CA 94114,USA,Richvalley,20
8 |
--------------------------------------------------------------------------------
/supermarkets.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "ID": 1,
4 | "Address": "3666 21st St",
5 | "City": "San Francisco",
6 | "State": "CA 94114",
7 | "Country": "USA",
8 | "Name": "Madeira",
9 | "Employees": 8
10 | },
11 | {
12 | "ID": 2,
13 | "Address": "735 Dolores St",
14 | "City": "San Francisco",
15 | "State": "CA 94119",
16 | "Country": "USA",
17 | "Name": "Bready Shop",
18 | "Employees": 15
19 | },
20 | {
21 | "ID": 3,
22 | "Address": "332 Hill St",
23 | "City": "San Francisco",
24 | "State": "California 94114",
25 | "Country": "USA",
26 | "Name": "Super River",
27 | "Employees": 25
28 | },
29 | {
30 | "ID": 4,
31 | "Address": "3995 23rd St",
32 | "City": "San Francisco",
33 | "State": "CA 94114",
34 | "Country": "USA",
35 | "Name": "Ben's Shop",
36 | "Employees": 10
37 | },
38 | {
39 | "ID": 5,
40 | "Address": "1056 Sanchez St",
41 | "City": "San Francisco",
42 | "State": "California",
43 | "Country": "USA",
44 | "Name": "Sanchez",
45 | "Employees": 12
46 | },
47 | {
48 | "ID": 6,
49 | "Address": "551 Alvarado St",
50 | "City": "San Francisco",
51 | "State": "CA 94114",
52 | "Country": "USA",
53 | "Name": "Richvalley",
54 | "Employees": 20
55 | }
56 | ]
--------------------------------------------------------------------------------
/supermarkets.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arditsulceteaching/thepythonmegacourse/836574eeb29b4fbb6192c9e8ade6f63facac6858/supermarkets.xlsx
--------------------------------------------------------------------------------