├── .ipynb_checkpoints ├── Untitled-checkpoint.ipynb ├── Untitled1-checkpoint.ipynb └── test-checkpoint.ipynb ├── Assignment └── Assignment Python Fundamentals.pdf ├── Business Statistics.pptx ├── Module1 ├── 1.py ├── 2.py ├── Code_Test_File.py ├── dictionary.py ├── operators.py ├── sample.ipynb ├── set.py ├── test.ipynb ├── testing.ipynb └── tuple.py ├── Module3 ├── Business Statistics.pptx ├── netflix.csv ├── practical.ipynb ├── scikit-learn-feature.py └── ~$Business Statistics.pptx ├── Module4 ├── Module 4.pptx ├── Notes │ ├── Python_Matplotlib_Cheat_Sheet.pdf │ └── cheatsheets-matplotlib.pdf ├── animated_graph.gif ├── data.csv ├── data.json ├── data.xlsx ├── netflix.csv ├── practical.ipynb └── stock.csv ├── Module5 ├── Module5 - Copy.pptx ├── Module5.pptx ├── cctv.jpg ├── plots.ipynb └── ~$Module5.pptx ├── Module6 ├── Module6.pptx ├── practicale │ └── tensorflow-deeplearning.pptx └── ~$Module6.pptx ├── Module7 ├── 7ppt.pptx ├── chapter 7.pdf ├── practicle.ipynb ├── tensorflow.ipynb └── ~$7ppt.pptx ├── Module8 ├── 1.ipynb ├── 2.ipynb ├── 3.py ├── 4.ipynb ├── 5.ipynb ├── CHAPTER 8 (1).pdf ├── image.jpg ├── stop_data.xml └── z.jpg ├── Myfile1.txt ├── Notes ├── Beginning-Programming-with-Python-For-Dummies-John-Paul-Mueller(www.ebook-dl.com).pdf ├── Best Python Learning Resources.pdf ├── Git and GitHub Datasheet.pdf ├── Hands-on-Machine-Learning-with-Scikit-2E_compressed.pdf ├── Kenoics_curriculum-AI with Python_final.pdf ├── PPT NOTES │ ├── CHAPTER 1.pdf │ ├── CHAPTER 2.pdf │ ├── CHAPTER 4 py (3).pdf │ ├── CHAPTER 5 (1).pdf │ ├── CHAPTER 6 [Autosaved] (1).pdf │ ├── CHAPTER 8 (1).pdf │ ├── chapter 3.pdf │ └── chapter 7.pdf ├── Python Interview Cheatsheet.pdf ├── Python Programming-Book.pdf ├── Python_Fundamentals_01_Control Flows.pdf ├── Python_Fundamentals_01_Data_Types_and_Operators.pdf ├── Python_Fundamentals_01_Python_Fundamentals.pdf ├── Python_Programming_Tutorial.pdf ├── Software requirement for AI with Python.pdf └── Statistics and Machine Learning in Python.pdf ├── README.md ├── Untitled.ipynb ├── Untitled1.ipynb ├── module2 ├── 1.ipynb ├── Myfile1.txt ├── credits.csv ├── datastructures.ipynb ├── email_records.csv ├── email_records1.csv ├── files.py ├── functions.ipynb ├── inh.py ├── kle_university_record.csv ├── module3.ipynb ├── module_test.py ├── myfile.txt ├── netflix.csv ├── ppt │ ├── Advanced Concepts Using Datastructures.pptx │ ├── Functions.pptx │ ├── Python Classes.pptx │ ├── Python Scripting.pptx │ ├── ~$Advanced Concepts Using Datastructures.pptx │ ├── ~$Python Classes.pptx │ └── ~$Python Scripting.pptx ├── record1.csv ├── records.csv ├── set_operators.py ├── sort.py ├── studentrecords.csv └── university_records.csv ├── my_first_file.txt ├── myfile.txt ├── test.ipynb └── utility ├── tensorflow.yml └── tensorflow_tutorial.pdf /.ipynb_checkpoints/Untitled-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [], 3 | "metadata": {}, 4 | "nbformat": 4, 5 | "nbformat_minor": 5 6 | } 7 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Untitled1-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [], 3 | "metadata": {}, 4 | "nbformat": 4, 5 | "nbformat_minor": 5 6 | } 7 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/test-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | 
{ 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "ename": "AttributeError", 10 | "evalue": "module 'tensorflow._api.v2.compat.v2.__internal__' has no attribute 'register_load_context_function'", 11 | "output_type": "error", 12 | "traceback": [ 13 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 14 | "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", 15 | "Input \u001b[1;32mIn [2]\u001b[0m, in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[1;32m----> 1\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtensorflow\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mtf\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28mprint\u001b[39m(tf\u001b[38;5;241m.\u001b[39m__version__)\n", 16 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\tensorflow\\__init__.py:469\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 467\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mhasattr\u001b[39m(_current_module, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mkeras\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[0;32m 468\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 469\u001b[0m \u001b[43m_keras\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_load\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 470\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m:\n\u001b[0;32m 471\u001b[0m \u001b[38;5;28;01mpass\u001b[39;00m\n", 17 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\tensorflow\\python\\util\\lazy_loader.py:41\u001b[0m, in \u001b[0;36mLazyLoader._load\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 39\u001b[0m \u001b[38;5;124;03m\"\"\"Load the module and insert it into the parent's globals.\"\"\"\u001b[39;00m\n\u001b[0;32m 40\u001b[0m \u001b[38;5;66;03m# Import the target module and insert it into the parent's namespace\u001b[39;00m\n\u001b[1;32m---> 41\u001b[0m module \u001b[38;5;241m=\u001b[39m \u001b[43mimportlib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;18;43m__name__\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 42\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_parent_module_globals[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_local_name] \u001b[38;5;241m=\u001b[39m module\n\u001b[0;32m 44\u001b[0m \u001b[38;5;66;03m# Emit a warning if one was specified\u001b[39;00m\n", 18 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\importlib\\__init__.py:127\u001b[0m, in \u001b[0;36mimport_module\u001b[1;34m(name, package)\u001b[0m\n\u001b[0;32m 125\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[0;32m 126\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m--> 127\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n", 19 | "File 
\u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\__init__.py:21\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 15\u001b[0m \u001b[38;5;124;03m\"\"\"Implementation of the Keras API, the high-level API of TensorFlow.\u001b[39;00m\n\u001b[0;32m 16\u001b[0m \n\u001b[0;32m 17\u001b[0m \u001b[38;5;124;03mDetailed documentation and user guides are available at\u001b[39;00m\n\u001b[0;32m 18\u001b[0m \u001b[38;5;124;03m[keras.io](https://keras.io).\u001b[39;00m\n\u001b[0;32m 19\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m distribute\n\u001b[1;32m---> 21\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m models\n\u001b[0;32m 22\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01minput_layer\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Input\n\u001b[0;32m 23\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msequential\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Sequential\n", 20 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\models\\__init__.py:18\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;66;03m# Copyright 2022 The TensorFlow Authors. All Rights Reserved.\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;66;03m#\u001b[39;00m\n\u001b[0;32m 3\u001b[0m \u001b[38;5;66;03m# Licensed under the Apache License, Version 2.0 (the \"License\");\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 13\u001b[0m \u001b[38;5;66;03m# limitations under the License.\u001b[39;00m\n\u001b[0;32m 14\u001b[0m \u001b[38;5;66;03m# ==============================================================================\u001b[39;00m\n\u001b[0;32m 15\u001b[0m \u001b[38;5;124;03m\"\"\"Keras models API.\"\"\"\u001b[39;00m\n\u001b[1;32m---> 18\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mfunctional\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Functional\n\u001b[0;32m 19\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msequential\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Sequential\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mtraining\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Model\n", 21 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\engine\\functional.py:34\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 32\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m input_spec\n\u001b[0;32m 33\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m 
\u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m node \u001b[38;5;28;01mas\u001b[39;00m node_module\n\u001b[1;32m---> 34\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m training \u001b[38;5;28;01mas\u001b[39;00m training_lib\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m training_utils\n\u001b[0;32m 36\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n", 22 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\engine\\training.py:45\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 43\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mexperimental\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m saving_lib\n\u001b[0;32m 44\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m hdf5_format\n\u001b[1;32m---> 45\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m save\n\u001b[0;32m 46\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m saving_utils\n\u001b[0;32m 47\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n", 23 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\saving\\legacy\\save.py:24\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n\u001b[0;32m 23\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m load \u001b[38;5;28;01mas\u001b[39;00m saved_model_load\n\u001b[1;32m---> 24\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m 
\u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m load_context\n\u001b[0;32m 25\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m save \u001b[38;5;28;01mas\u001b[39;00m saved_model_save\n\u001b[0;32m 26\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mutils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m traceback_utils\n", 24 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\saving\\legacy\\saved_model\\load_context.py:68\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 64\u001b[0m \u001b[38;5;124;03m\"\"\"Returns whether under a load context.\"\"\"\u001b[39;00m\n\u001b[0;32m 65\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m _load_context\u001b[38;5;241m.\u001b[39min_load_context()\n\u001b[1;32m---> 68\u001b[0m \u001b[43mtf\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__internal__\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mregister_load_context_function\u001b[49m(in_load_context)\n", 25 | "\u001b[1;31mAttributeError\u001b[0m: module 'tensorflow._api.v2.compat.v2.__internal__' has no attribute 'register_load_context_function'" 26 | ] 27 | } 28 | ], 29 | "source": [ 30 | "import tensorflow as tf\n", 31 | "print(tf.__version__)" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [] 40 | } 41 | ], 42 | "metadata": { 43 | "kernelspec": { 44 | "display_name": "Python 3 (ipykernel)", 45 | "language": "python", 46 | "name": "python3" 47 | }, 48 | "language_info": { 49 | "codemirror_mode": { 50 | "name": "ipython", 51 | "version": 3 52 | }, 53 | "file_extension": ".py", 54 | "mimetype": "text/x-python", 55 | "name": "python", 56 | "nbconvert_exporter": "python", 57 | "pygments_lexer": "ipython3", 58 | "version": "3.8.5" 59 | }, 60 | "vscode": { 61 | "interpreter": { 62 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 63 | } 64 | } 65 | }, 66 | "nbformat": 4, 67 | "nbformat_minor": 2 68 | } 69 | -------------------------------------------------------------------------------- /Assignment/Assignment Python Fundamentals.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Assignment/Assignment Python Fundamentals.pdf -------------------------------------------------------------------------------- /Business Statistics.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Business Statistics.pptx -------------------------------------------------------------------------------- /Module1/1.py: -------------------------------------------------------------------------------- 1 | 2 | List1 = ["Hello1",'Python1',["Hi","Python2"]] 3 | List2 = 
["Hello",'World',10,5.0,List1] 4 | print(List2[0]) 5 | 6 | a = List2[2] + List2[3] 7 | print(a) 8 | 9 | 10 | List3 = List1 + List2 11 | print(List3[2][1]) 12 | 13 | def sqr(str1,str2): 14 | result = str1 + str2 15 | print("Result of ", str1, " * ", str2, "= ",result) 16 | 17 | f1num =2.5 18 | f2num=2 19 | result = str1 * f2num 20 | print("Result of ", f1num, " * ", f2num, "= ",result) 21 | sqr("hemanth","J") 22 | 23 | name="tahir" 24 | 25 | 26 | -------------------------------------------------------------------------------- /Module1/2.py: -------------------------------------------------------------------------------- 1 | # for i in range(10): 2 | # print(i,end=" ") 3 | 4 | # if i== 2: 5 | # print("Break applied") 6 | # break 7 | # print("Outside loop") 8 | 9 | 10 | # for var in 'Python world': 11 | # if var == 't': 12 | # print('\nskipping t\n') 13 | # continue 14 | # print(var, end = " ") 15 | # string1='Python' 16 | # pi=3.14 17 | # print(len(string1)) 18 | # string1= 'World\'s curves is defined by ' + str(pi) 19 | # print(string1) 20 | 21 | 22 | 23 | 24 | # name=['abc','def','ghi'] 25 | # roll_no = [1,4,2] 26 | 27 | 28 | # mapped = zip(roll_no,name) 29 | # print(dict(mapped)) 30 | 31 | 32 | 33 | 34 | # l1=['andy','apple','sam'] 35 | # s1='Python' 36 | 37 | # obj1=enumerate(l1) 38 | # obj2=enumerate(s1) 39 | 40 | # print('Return type:', type(obj1)) 41 | # print(list(enumerate(l1))) 42 | 43 | # print(dict(enumerate(l1,2))) 44 | 45 | a=10 46 | b=20 47 | list1 = ['alpha','beta','gama','delta'] 48 | list2 = ['d','c','b','a'] 49 | list3=[10,20,50,30,40] 50 | list1.append(list2) 51 | print(list1) 52 | print("____concatenate_______") 53 | list4 = list1 + list2 54 | 55 | print(list4) 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | # print(min(list3)) 67 | # print("___________________") 68 | # print(tuple(list1)) 69 | # print("___________________") 70 | # print(set(list1)) 71 | # print("___________________") 72 | # zipped = zip(list2,list1) 73 | # print(dict(zipped)) 74 | # print("___________________") 75 | # print(set(zip(list2,list1))) 76 | -------------------------------------------------------------------------------- /Module1/Code_Test_File.py: -------------------------------------------------------------------------------- 1 | # list3 = [0,0,0] 2 | # list3[0] = "Tirath" 3 | # list3[1] = "Prasad" 4 | # list3[2] = "Nagvekar" 5 | 6 | # print("List3 = ", list3) 7 | 8 | #Tuple 9 | Tuple1 = (1,2,3) 10 | Tuple2 = (0.1,0.2,0.3) 11 | Tuple3 = Tuple1 + Tuple2 12 | print("Tuple 1 = ", Tuple1) 13 | print("resultant tuple is concetenated tuple:", Tuple3) 14 | Tuple4 = (Tuple1, Tuple2) 15 | print("resultant tuple is the nested tuple:", Tuple4) 16 | print("3rd element of tuple2 in typle4 :", Tuple4[1][2]) 17 | Tuple5 = Tuple3 + Tuple4 18 | print("Resultant Tuple5:", Tuple5) 19 | 20 | tuple6 =(0,0,0) 21 | tuple6[0] = "Tirath" 22 | tuple6[1] = "Prasad" 23 | tuple6[2] = "Nagvekar" 24 | 25 | print("tuple6 = ", tuple6) -------------------------------------------------------------------------------- /Module1/dictionary.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | d1={124:"Python",465:"World",300:" "} 4 | d2={"First":"Python","Second":"World",12323:[1,2,'Hello world',[10,20,30]]} 5 | print(d2[12323][3][0]) 6 | 7 | 8 | -------------------------------------------------------------------------------- /Module1/operators.py: -------------------------------------------------------------------------------- 1 | #Operators in python 2 | a= 7 3 | b= 4 4 | d=a%b 
5 | print(d) 6 | if((a%2)==0): 7 | print("even") 8 | else: 9 | print("odd") 10 | 11 | 12 | # powers 13 | a=4; b=2 14 | result = (a**2)+(b**2)+(2*a*b) 15 | print(result) 16 | 17 | a= 13 18 | b=33 19 | 20 | print(ab) 22 | print(a<=b) 23 | print(a>=b) 24 | print(a!=b) 25 | print(a==b) 26 | 27 | 28 | 29 | a = 34 30 | b = 33 31 | print("Check if", a ,">",b," =", a>b) 32 | print("Check if a < b =", a" 13 | ] 14 | }, 15 | "metadata": { 16 | "needs_background": "light" 17 | }, 18 | "output_type": "display_data" 19 | } 20 | ], 21 | "source": [ 22 | "import matplotlib.pyplot as plt\n", 23 | "import matplotlib.patches as patches\n", 24 | "import matplotlib.cbook as cbook\n", 25 | "\n", 26 | "\n", 27 | "with cbook.get_sample_data('G:\\Backup Sep 2 2022\\Desk\\sample\\Module5\\cctv.jpg') as image_file:\n", 28 | " image = plt.imread(image_file)\n", 29 | "\n", 30 | "fig, ax = plt.subplots()\n", 31 | "im = ax.imshow(image)\n", 32 | "patch = patches.Circle((175, 250), radius=200, transform=ax.transData)\n", 33 | "im.set_clip_path(patch)\n", 34 | "\n", 35 | "ax.axis('off')\n", 36 | "plt.show()" 37 | ] 38 | } 39 | ], 40 | "metadata": { 41 | "kernelspec": { 42 | "display_name": "Python 3.8.5 ('base')", 43 | "language": "python", 44 | "name": "python3" 45 | }, 46 | "language_info": { 47 | "codemirror_mode": { 48 | "name": "ipython", 49 | "version": 3 50 | }, 51 | "file_extension": ".py", 52 | "mimetype": "text/x-python", 53 | "name": "python", 54 | "nbconvert_exporter": "python", 55 | "pygments_lexer": "ipython3", 56 | "version": "3.8.5" 57 | }, 58 | "orig_nbformat": 4, 59 | "vscode": { 60 | "interpreter": { 61 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 62 | } 63 | } 64 | }, 65 | "nbformat": 4, 66 | "nbformat_minor": 2 67 | } 68 | -------------------------------------------------------------------------------- /Module5/~$Module5.pptx: -------------------------------------------------------------------------------- 1 | Dell Dell -------------------------------------------------------------------------------- /Module6/Module6.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Module6/Module6.pptx -------------------------------------------------------------------------------- /Module6/practicale/tensorflow-deeplearning.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Module6/practicale/tensorflow-deeplearning.pptx -------------------------------------------------------------------------------- /Module6/~$Module6.pptx: -------------------------------------------------------------------------------- 1 | Dell Dell -------------------------------------------------------------------------------- /Module7/7ppt.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Module7/7ppt.pptx -------------------------------------------------------------------------------- /Module7/chapter 7.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Module7/chapter 7.pdf 
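Note on Module1/1.py above: the helper sqr(str1, str2) concatenates its two string arguments while its print statement shows a "*" sign, and line 19 uses str1 outside the function, so running the file raises a NameError. The short sketch below shows the numeric multiply that the surrounding print statements appear to intend; the function name and the behaviour are assumptions about that intent, not code taken from the repository.

# Illustrative helper (not part of the original file): multiply two numbers and report the result.
def multiply(x, y):
    result = x * y
    print("Result of", x, "*", y, "=", result)
    return result

f1num = 2.5
f2num = 2
multiply(f1num, f2num)       # prints: Result of 2.5 * 2 = 5.0
print("hemanth" + "J")       # for strings, use '+' (concatenation) instead of '*'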
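Note on Module5/plots.ipynb above: the cell clips a photo to a circle by attaching a matplotlib patch as the image's clip path, but it loads the picture through cbook.get_sample_data with an absolute G:\ path, so it only runs on that one machine. A minimal, path-independent sketch of the same idea, assuming cctv.jpg sits in the working directory and reading it with plt.imread directly:

import matplotlib.pyplot as plt
import matplotlib.patches as patches

image = plt.imread("cctv.jpg")   # assumed to be in the working directory

fig, ax = plt.subplots()
im = ax.imshow(image)

# Circle in data coordinates; the image is only drawn inside it.
clip_circle = patches.Circle((175, 250), radius=200, transform=ax.transData)
im.set_clip_path(clip_circle)

ax.axis("off")
plt.show()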
-------------------------------------------------------------------------------- /Module7/practicle.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 36, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "[[-0.00314208 0.00668894]\n", 13 | " [-0.01096473 -0.0089467 ]]\n" 14 | ] 15 | } 16 | ], 17 | "source": [ 18 | "import numpy as np \n", 19 | "w = np.random.randn(2,2) * 0.01\n", 20 | "print(w)" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 37, 26 | "metadata": {}, 27 | "outputs": [ 28 | { 29 | "name": "stdout", 30 | "output_type": "stream", 31 | "text": [ 32 | "[[0]\n", 33 | " [0]]\n" 34 | ] 35 | } 36 | ], 37 | "source": [ 38 | "b=np.zeros([2,1],dtype=int)\n", 39 | "print(b)" 40 | ] 41 | } 42 | ], 43 | "metadata": { 44 | "kernelspec": { 45 | "display_name": "Python 3.8.5 ('base')", 46 | "language": "python", 47 | "name": "python3" 48 | }, 49 | "language_info": { 50 | "codemirror_mode": { 51 | "name": "ipython", 52 | "version": 3 53 | }, 54 | "file_extension": ".py", 55 | "mimetype": "text/x-python", 56 | "name": "python", 57 | "nbconvert_exporter": "python", 58 | "pygments_lexer": "ipython3", 59 | "version": "3.8.5" 60 | }, 61 | "orig_nbformat": 4, 62 | "vscode": { 63 | "interpreter": { 64 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 65 | } 66 | } 67 | }, 68 | "nbformat": 4, 69 | "nbformat_minor": 2 70 | } 71 | -------------------------------------------------------------------------------- /Module7/tensorflow.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "ename": "AttributeError", 10 | "evalue": "module 'tensorflow.compat.v2.__internal__' has no attribute 'register_load_context_function'", 11 | "output_type": "error", 12 | "traceback": [ 13 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 14 | "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", 15 | "\u001b[1;32mg:\\Backup Sep 2 2022\\Desk\\sample\\Module7\\tensorflow.ipynb Cell 1\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[1;32m----> 1\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mtensorflow\u001b[39;00m \u001b[39mas\u001b[39;00m \u001b[39mtf\u001b[39;00m\n\u001b[0;32m 3\u001b[0m const1 \u001b[39m=\u001b[39m tf\u001b[39m.\u001b[39mconstant([[\u001b[39m1\u001b[39m,\u001b[39m2\u001b[39m,\u001b[39m3\u001b[39m], [\u001b[39m1\u001b[39m,\u001b[39m2\u001b[39m,\u001b[39m3\u001b[39m]])\n\u001b[0;32m 4\u001b[0m const2 \u001b[39m=\u001b[39m tf\u001b[39m.\u001b[39mconstant([[\u001b[39m3\u001b[39m,\u001b[39m4\u001b[39m,\u001b[39m5\u001b[39m], [\u001b[39m3\u001b[39m,\u001b[39m4\u001b[39m,\u001b[39m5\u001b[39m]])\n", 16 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\tensorflow\\__init__.py:469\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 467\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mhasattr\u001b[39m(_current_module, \u001b[39m\"\u001b[39m\u001b[39mkeras\u001b[39m\u001b[39m\"\u001b[39m):\n\u001b[0;32m 468\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m--> 469\u001b[0m _keras\u001b[39m.\u001b[39;49m_load()\n\u001b[0;32m 470\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mImportError\u001b[39;00m:\n\u001b[0;32m 
471\u001b[0m \u001b[39mpass\u001b[39;00m\n", 17 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\tensorflow\\python\\util\\lazy_loader.py:41\u001b[0m, in \u001b[0;36mLazyLoader._load\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 39\u001b[0m \u001b[39m\"\"\"Load the module and insert it into the parent's globals.\"\"\"\u001b[39;00m\n\u001b[0;32m 40\u001b[0m \u001b[39m# Import the target module and insert it into the parent's namespace\u001b[39;00m\n\u001b[1;32m---> 41\u001b[0m module \u001b[39m=\u001b[39m importlib\u001b[39m.\u001b[39;49mimport_module(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m\u001b[39m__name__\u001b[39;49m)\n\u001b[0;32m 42\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_parent_module_globals[\u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_local_name] \u001b[39m=\u001b[39m module\n\u001b[0;32m 44\u001b[0m \u001b[39m# Emit a warning if one was specified\u001b[39;00m\n", 18 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\importlib\\__init__.py:127\u001b[0m, in \u001b[0;36mimport_module\u001b[1;34m(name, package)\u001b[0m\n\u001b[0;32m 125\u001b[0m \u001b[39mbreak\u001b[39;00m\n\u001b[0;32m 126\u001b[0m level \u001b[39m+\u001b[39m\u001b[39m=\u001b[39m \u001b[39m1\u001b[39m\n\u001b[1;32m--> 127\u001b[0m \u001b[39mreturn\u001b[39;00m _bootstrap\u001b[39m.\u001b[39;49m_gcd_import(name[level:], package, level)\n", 19 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\__init__.py:21\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 15\u001b[0m \u001b[39m\"\"\"Implementation of the Keras API, the high-level API of TensorFlow.\u001b[39;00m\n\u001b[0;32m 16\u001b[0m \n\u001b[0;32m 17\u001b[0m \u001b[39mDetailed documentation and user guides are available at\u001b[39;00m\n\u001b[0;32m 18\u001b[0m \u001b[39m[keras.io](https://keras.io).\u001b[39;00m\n\u001b[0;32m 19\u001b[0m \u001b[39m\"\"\"\u001b[39;00m\n\u001b[0;32m 20\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m \u001b[39mimport\u001b[39;00m distribute\n\u001b[1;32m---> 21\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m \u001b[39mimport\u001b[39;00m models\n\u001b[0;32m 22\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39minput_layer\u001b[39;00m \u001b[39mimport\u001b[39;00m Input\n\u001b[0;32m 23\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msequential\u001b[39;00m \u001b[39mimport\u001b[39;00m Sequential\n", 20 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\models\\__init__.py:18\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[39m# Copyright 2022 The TensorFlow Authors. 
All Rights Reserved.\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[39m#\u001b[39;00m\n\u001b[0;32m 3\u001b[0m \u001b[39m# Licensed under the Apache License, Version 2.0 (the \"License\");\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 13\u001b[0m \u001b[39m# limitations under the License.\u001b[39;00m\n\u001b[0;32m 14\u001b[0m \u001b[39m# ==============================================================================\u001b[39;00m\n\u001b[0;32m 15\u001b[0m \u001b[39m\"\"\"Keras models API.\"\"\"\u001b[39;00m\n\u001b[1;32m---> 18\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mfunctional\u001b[39;00m \u001b[39mimport\u001b[39;00m Functional\n\u001b[0;32m 19\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msequential\u001b[39;00m \u001b[39mimport\u001b[39;00m Sequential\n\u001b[0;32m 20\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mtraining\u001b[39;00m \u001b[39mimport\u001b[39;00m Model\n", 21 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\engine\\functional.py:34\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 32\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m \u001b[39mimport\u001b[39;00m input_spec\n\u001b[0;32m 33\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m \u001b[39mimport\u001b[39;00m node \u001b[39mas\u001b[39;00m node_module\n\u001b[1;32m---> 34\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m \u001b[39mimport\u001b[39;00m training \u001b[39mas\u001b[39;00m training_lib\n\u001b[0;32m 35\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mengine\u001b[39;00m \u001b[39mimport\u001b[39;00m training_utils\n\u001b[0;32m 36\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m \u001b[39mimport\u001b[39;00m serialization\n", 22 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\engine\\training.py:45\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 43\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mexperimental\u001b[39;00m \u001b[39mimport\u001b[39;00m saving_lib\n\u001b[0;32m 44\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m \u001b[39mimport\u001b[39;00m hdf5_format\n\u001b[1;32m---> 45\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m \u001b[39mimport\u001b[39;00m save\n\u001b[0;32m 46\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m \u001b[39mimport\u001b[39;00m saving_utils\n\u001b[0;32m 47\u001b[0m \u001b[39mfrom\u001b[39;00m 
\u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m \u001b[39mimport\u001b[39;00m serialization\n", 23 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\saving\\legacy\\save.py:24\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m \u001b[39mimport\u001b[39;00m serialization\n\u001b[0;32m 23\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaved_model\u001b[39;00m \u001b[39mimport\u001b[39;00m load \u001b[39mas\u001b[39;00m saved_model_load\n\u001b[1;32m---> 24\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaved_model\u001b[39;00m \u001b[39mimport\u001b[39;00m load_context\n\u001b[0;32m 25\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaving\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlegacy\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39msaved_model\u001b[39;00m \u001b[39mimport\u001b[39;00m save \u001b[39mas\u001b[39;00m saved_model_save\n\u001b[0;32m 26\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mutils\u001b[39;00m \u001b[39mimport\u001b[39;00m traceback_utils\n", 24 | "File \u001b[1;32mc:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\saving\\legacy\\saved_model\\load_context.py:68\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 64\u001b[0m \u001b[39m\"\"\"Returns whether under a load context.\"\"\"\u001b[39;00m\n\u001b[0;32m 65\u001b[0m \u001b[39mreturn\u001b[39;00m _load_context\u001b[39m.\u001b[39min_load_context()\n\u001b[1;32m---> 68\u001b[0m tf\u001b[39m.\u001b[39;49m__internal__\u001b[39m.\u001b[39;49mregister_load_context_function(in_load_context)\n", 25 | "\u001b[1;31mAttributeError\u001b[0m: module 'tensorflow.compat.v2.__internal__' has no attribute 'register_load_context_function'" 26 | ] 27 | } 28 | ], 29 | "source": [ 30 | "import tensorflow as tf\n", 31 | "\n", 32 | "const1 = tf.constant([[1,2,3], [1,2,3]])\n", 33 | "const2 = tf.constant([[3,4,5], [3,4,5]])\n", 34 | "\n", 35 | "result = tf.add(const1, const2)\n", 36 | "\n", 37 | "with tf.Session() as sess:\n", 38 | " output = sess.run(result)\n", 39 | " print(output)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "ename": "", 49 | "evalue": "", 50 | "output_type": "error", 51 | "traceback": [ 52 | "\u001b[1;31mRunning cells with 'Python 3.8.13 ('tf1')' requires ipykernel package.\n", 53 | "\u001b[1;31mRun the following command to install 'ipykernel' into the Python environment. 
\n", 54 | "\u001b[1;31mCommand: 'conda install -n tf1 ipykernel --update-deps --force-reinstall'" 55 | ] 56 | } 57 | ], 58 | "source": [ 59 | "import tensorflow as tf\n", 60 | "\n", 61 | "var1 = tf.Variable([[1, 2], [1, 2]], name=\"variable1\")\n", 62 | "var2 = tf.Variable([[3, 4], [3, 4]], name=\"variable2\")\n", 63 | "\n", 64 | "result = tf.matmul(var1, var2)\n", 65 | "\n", 66 | "with tf.Session() as sess:\n", 67 | " output = sess.run(result)\n", 68 | " print(output)" 69 | ] 70 | } 71 | ], 72 | "metadata": { 73 | "kernelspec": { 74 | "display_name": "Python 3.8.5 ('tf')", 75 | "language": "python", 76 | "name": "python3" 77 | }, 78 | "language_info": { 79 | "codemirror_mode": { 80 | "name": "ipython", 81 | "version": 3 82 | }, 83 | "file_extension": ".py", 84 | "mimetype": "text/x-python", 85 | "name": "python", 86 | "nbconvert_exporter": "python", 87 | "pygments_lexer": "ipython3", 88 | "version": "3.8.5" 89 | }, 90 | "orig_nbformat": 4, 91 | "vscode": { 92 | "interpreter": { 93 | "hash": "78ddfc3686b8b7161f2836984651df038ec9a0366954334fc42499f59ad2b3c8" 94 | } 95 | } 96 | }, 97 | "nbformat": 4, 98 | "nbformat_minor": 2 99 | } 100 | -------------------------------------------------------------------------------- /Module7/~$7ppt.pptx: -------------------------------------------------------------------------------- 1 | Dell Dell -------------------------------------------------------------------------------- /Module8/1.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from sklearn.linear_model import LinearRegression\n", 10 | "import random\n" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 3, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "feature_set = []\n", 20 | "target_set = []" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 4, 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "number_of_rows =10\n", 30 | "random_number_limit =10" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 5, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "for i in range(number_of_rows):\n", 40 | " x= random.randint(0,random_number_limit)\n", 41 | " y= random.randint(0,random_number_limit)\n", 42 | " z= random.randint(0,random_number_limit)\n", 43 | " function = (10*x)+(2*y)+(3*z)+(1*x)\n", 44 | " feature_set.append([x,y,z])\n", 45 | " target_set.append(function)" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 6, 51 | "metadata": {}, 52 | "outputs": [ 53 | { 54 | "data": { 55 | "text/plain": [ 56 | "LinearRegression()" 57 | ] 58 | }, 59 | "execution_count": 6, 60 | "metadata": {}, 61 | "output_type": "execute_result" 62 | } 63 | ], 64 | "source": [ 65 | "model = LinearRegression()\n", 66 | "model.fit(feature_set, target_set)" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": 7, 72 | "metadata": {}, 73 | "outputs": [ 74 | { 75 | "name": "stdout", 76 | "output_type": "stream", 77 | "text": [ 78 | "predicted Value: [75.]Coefficients:[11. 2. 
3.]\n" 79 | ] 80 | } 81 | ], 82 | "source": [ 83 | "test_set= [[6,3,1]]\n", 84 | "\n", 85 | "prediction = model.predict(test_set)\n", 86 | "print('predicted Value: '+str(prediction)+'Coefficients:'+str(model.coef_)) \n" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 8, 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "import cv2\n", 96 | "import imutils\n", 97 | "\n", 98 | "# Initializing the HOG person detector\n", 99 | "hog = cv2.HOGDescriptor()\n", 100 | "hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())\n", 101 | "\n", 102 | "# Reading the Image\n", 103 | "image = cv2.imread('C:/Users/User/Desktop/img.jpg')\n", 104 | "\n", 105 | "# Resizing the Image\n", 106 | "image = imutils.resize(image,width=min(400, image.shape[1]))\n", 107 | "\n", 108 | "# Detecting all the regions in the image that has a pedestrians inside it\n", 109 | "(regions, _) = hog.detectMultiScale(image,winStride=(4, 4),padding=(4, 4),scale=1.05)\n", 110 | "\n", 111 | "# Drawing the regions in the Image\n", 112 | "for (x, y, w, h) in regions:\n", 113 | " cv2.rectangle(image, (x, y),(x + w, y + h),(0, 0, 255), 2)\n", 114 | "\n", 115 | "# Showing the output Image\n", 116 | "cv2.imshow(\"Image\", image)\n", 117 | "cv2.waitKey(0)\n", 118 | "cv2.destroyAllWindows()" 119 | ] 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": 11, 124 | "metadata": {}, 125 | "outputs": [], 126 | "source": [ 127 | "import cv2\n", 128 | "\n", 129 | "image = cv2.imread('C:/Users/User/Desktop/img.jpg')\n", 130 | "# cv2.imshow('Image name',image)\n", 131 | "out=image.copy()\n", 132 | "h, w = image.shape[:2]\n", 133 | "rectangle = cv2.rectangle(out, (w,h),(w//3,h//4), (0, 255, 0), 10)\n", 134 | "cv2.imshow('Rect',rectangle)\n", 135 | "\n", 136 | "# center = (w // 2, h // 2)\n", 137 | "# print(center)\n", 138 | "# matrix =cv2.getRotationMatrix2D(center,0,1.0)\n", 139 | "# rotation = cv2.warpAffine(image,matrix,(w,h))\n", 140 | "# cv2.imshow('Image name',rotation)\n", 141 | "# rectangle = cv2.rectangle(rotation, (w,h),(w//2,h//2), (255, 0, 0), 2)\n", 142 | "cv2.waitKey(0)\n", 143 | "cv2.destroyAllWindows()" 144 | ] 145 | } 146 | ], 147 | "metadata": { 148 | "kernelspec": { 149 | "display_name": "Python 3.8.5 ('base')", 150 | "language": "python", 151 | "name": "python3" 152 | }, 153 | "language_info": { 154 | "codemirror_mode": { 155 | "name": "ipython", 156 | "version": 3 157 | }, 158 | "file_extension": ".py", 159 | "mimetype": "text/x-python", 160 | "name": "python", 161 | "nbconvert_exporter": "python", 162 | "pygments_lexer": "ipython3", 163 | "version": "3.8.5" 164 | }, 165 | "orig_nbformat": 4, 166 | "vscode": { 167 | "interpreter": { 168 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 169 | } 170 | } 171 | }, 172 | "nbformat": 4, 173 | "nbformat_minor": 2 174 | } 175 | -------------------------------------------------------------------------------- /Module8/3.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import matplotlib.pyplot as plt 3 | 4 | img = cv2.imread('z.jpg') 5 | img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB) 6 | 7 | plt.figure(figsize=(10,6)) 8 | # plt.title('Image display example') 9 | plt.imshow('Window for Zebra image',img) 10 | cv2.waitKey(0) 11 | cv2.destroyAllWindows() -------------------------------------------------------------------------------- /Module8/4.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | 
"cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import cv2\n", 11 | "\n", 12 | "cap = cv2.VideoCapture('videoplayback.mp4')\n", 13 | "\n", 14 | "while(cap.isOpened()):\n", 15 | " ret, frame = cap.read()\n", 16 | "\n", 17 | " gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n", 18 | "\n", 19 | " cv2.imshow('frame',gray)\n", 20 | " if cv2.waitKey(1) & 0xFF == ord('q'):\n", 21 | " break\n", 22 | "\n", 23 | "cap.release()\n", 24 | "cv2.destroyAllWindows()" 25 | ] 26 | } 27 | ], 28 | "metadata": { 29 | "kernelspec": { 30 | "display_name": "Python 3.8.5 ('base')", 31 | "language": "python", 32 | "name": "python3" 33 | }, 34 | "language_info": { 35 | "codemirror_mode": { 36 | "name": "ipython", 37 | "version": 3 38 | }, 39 | "file_extension": ".py", 40 | "mimetype": "text/x-python", 41 | "name": "python", 42 | "nbconvert_exporter": "python", 43 | "pygments_lexer": "ipython3", 44 | "version": "3.8.5" 45 | }, 46 | "orig_nbformat": 4, 47 | "vscode": { 48 | "interpreter": { 49 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 50 | } 51 | } 52 | }, 53 | "nbformat": 4, 54 | "nbformat_minor": 2 55 | } 56 | -------------------------------------------------------------------------------- /Module8/5.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 16, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "[[-1.69143208 -0.7527533 ]\n", 13 | " [ 1.45903233 -0.85727482]]\n", 14 | "[[0.]\n", 15 | " [0.]]\n" 16 | ] 17 | } 18 | ], 19 | "source": [ 20 | "import numpy as np \n", 21 | "w= np.random.randn(2,2)*0.01\n", 22 | "print(w)\n", 23 | "b=np.zeros([2,1])\n", 24 | "print(b)" 25 | ] 26 | } 27 | ], 28 | "metadata": { 29 | "kernelspec": { 30 | "display_name": "Python 3.8.5 ('base')", 31 | "language": "python", 32 | "name": "python3" 33 | }, 34 | "language_info": { 35 | "codemirror_mode": { 36 | "name": "ipython", 37 | "version": 3 38 | }, 39 | "file_extension": ".py", 40 | "mimetype": "text/x-python", 41 | "name": "python", 42 | "nbconvert_exporter": "python", 43 | "pygments_lexer": "ipython3", 44 | "version": "3.8.5" 45 | }, 46 | "orig_nbformat": 4, 47 | "vscode": { 48 | "interpreter": { 49 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 50 | } 51 | } 52 | }, 53 | "nbformat": 4, 54 | "nbformat_minor": 2 55 | } 56 | -------------------------------------------------------------------------------- /Module8/CHAPTER 8 (1).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Module8/CHAPTER 8 (1).pdf -------------------------------------------------------------------------------- /Module8/image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Module8/image.jpg -------------------------------------------------------------------------------- /Module8/z.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Module8/z.jpg 
-------------------------------------------------------------------------------- /Myfile1.txt: -------------------------------------------------------------------------------- 1 | Hello 2 | This is Dharwad 3 | This is Hubli 4 | This is London 5 | Hello 6 | This is Dharwad 7 | This is Hubli 8 | This is London 9 | Hello 10 | This is Dharwad 11 | This is Hubli 12 | This is London 13 | Hello 14 | This is Dharwad 15 | This is Hubli 16 | This is London 17 | Hello 18 | This is Dharwad 19 | This is Hubli 20 | This is Belgaum 21 | Hello 22 | This is Dharwad 23 | This is Hubli 24 | This is Belgaum 25 | Hello 26 | This is Dharwad 27 | This is Hubli 28 | This is Belgaum 29 | -------------------------------------------------------------------------------- /Notes/Beginning-Programming-with-Python-For-Dummies-John-Paul-Mueller(www.ebook-dl.com).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Beginning-Programming-with-Python-For-Dummies-John-Paul-Mueller(www.ebook-dl.com).pdf -------------------------------------------------------------------------------- /Notes/Best Python Learning Resources.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Best Python Learning Resources.pdf -------------------------------------------------------------------------------- /Notes/Git and GitHub Datasheet.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Git and GitHub Datasheet.pdf -------------------------------------------------------------------------------- /Notes/Hands-on-Machine-Learning-with-Scikit-2E_compressed.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Hands-on-Machine-Learning-with-Scikit-2E_compressed.pdf -------------------------------------------------------------------------------- /Notes/Kenoics_curriculum-AI with Python_final.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Kenoics_curriculum-AI with Python_final.pdf -------------------------------------------------------------------------------- /Notes/PPT NOTES/CHAPTER 1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/PPT NOTES/CHAPTER 1.pdf -------------------------------------------------------------------------------- /Notes/PPT NOTES/CHAPTER 2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/PPT NOTES/CHAPTER 2.pdf -------------------------------------------------------------------------------- /Notes/PPT NOTES/CHAPTER 4 py (3).pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/PPT NOTES/CHAPTER 4 py (3).pdf -------------------------------------------------------------------------------- /Notes/PPT NOTES/CHAPTER 5 (1).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/PPT NOTES/CHAPTER 5 (1).pdf -------------------------------------------------------------------------------- /Notes/PPT NOTES/CHAPTER 6 [Autosaved] (1).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/PPT NOTES/CHAPTER 6 [Autosaved] (1).pdf -------------------------------------------------------------------------------- /Notes/PPT NOTES/CHAPTER 8 (1).pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/PPT NOTES/CHAPTER 8 (1).pdf -------------------------------------------------------------------------------- /Notes/PPT NOTES/chapter 3.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/PPT NOTES/chapter 3.pdf -------------------------------------------------------------------------------- /Notes/PPT NOTES/chapter 7.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/PPT NOTES/chapter 7.pdf -------------------------------------------------------------------------------- /Notes/Python Interview Cheatsheet.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Python Interview Cheatsheet.pdf -------------------------------------------------------------------------------- /Notes/Python Programming-Book.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Python Programming-Book.pdf -------------------------------------------------------------------------------- /Notes/Python_Fundamentals_01_Control Flows.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Python_Fundamentals_01_Control Flows.pdf -------------------------------------------------------------------------------- /Notes/Python_Fundamentals_01_Data_Types_and_Operators.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Python_Fundamentals_01_Data_Types_and_Operators.pdf -------------------------------------------------------------------------------- /Notes/Python_Fundamentals_01_Python_Fundamentals.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Python_Fundamentals_01_Python_Fundamentals.pdf -------------------------------------------------------------------------------- /Notes/Python_Programming_Tutorial.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Python_Programming_Tutorial.pdf -------------------------------------------------------------------------------- /Notes/Software requirement for AI with Python.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Software requirement for AI with Python.pdf -------------------------------------------------------------------------------- /Notes/Statistics and Machine Learning in Python.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/Notes/Statistics and Machine Learning in Python.pdf -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AI with Python @KeonicsX 2 | ![Python](https://lifewithdata.com/wp-content/uploads/2022/03/python_logo-8.png) 3 | 4 | ## Theory 5 | ## Module 1 - Python Foundation 6 | 7 | ### Python Programming Introduction 8 | - [PPT](https://github.com/tahirmirji/ai_with_python_keonics/blob/main/Notes/Python_Fundamentals_01_Python_Fundamentals.pdf) 9 | 10 | ### Data Types and Operators 11 | - [PPT](https://github.com/tahirmirji/ai_with_python_keonics/blob/main/Notes/Python_Fundamentals_01_Data_Types_and_Operators.pdf) 12 | 13 | ### Control Flow 14 | - [PPT](https://github.com/tahirmirji/ai_with_python_keonics/blob/main/Notes/Python_Fundamentals_01_Control%20Flows.pdf) 15 | 16 | 17 | ## Practice Assignments 18 | ### Python Fundamentals 19 | - [PPT](https://github.com/tahirmirji/ai_with_python_keonics/blob/main/Assignment/Assignment%20Python%20Fundamentals.pdf) 20 | 21 | ## Module 2 - Python Advanced 22 | 23 | ## Theory + Practice 24 | ### Python Functions 25 | - [PPT](https://github.com/tahirmirji/ai_with_python_keonics/blob/main/module2/ppt/Functions.pptx) 26 | 27 | ### Advanced Concepts using Python lists, tuples, sets & dictionaries 28 | - [PPT](https://github.com/tahirmirji/ai_with_python_keonics/blob/main/module2/ppt/Advanced%20Concepts%20Using%20Datastructures.pptx) 29 | 30 | ### Python Scripting 31 | - [PPT]( https://github.com/tahirmirji/ai_with_python_keonics/blob/main/module2/ppt/Python%20Scripting.pptx) 32 | 33 | #### Class assignment: 34 | - Write a program to implement stack data structure using python. 35 | 36 | In the english dictionary the word stack means arranging objects on over another. It is the same way memory is allocated in this data structure. It stores the data elements in a similar fashion as a bunch of plates are stored one above another in the kitchen. So stack data structure allows operations at one end which can be called top of the stack.We can add elements or remove elements only form this en dof the stack. 
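A minimal list-backed sketch of this assignment is given below; it uses the `append()`/`pop()` approach described in the next paragraph, and the class and method names are only illustrative.

```python
# Illustrative stack for the class assignment: add = PUSH, remove = POP (LIFO).
class Stack:
    def __init__(self):
        self.items = []            # empty list that holds the elements

    def add(self, item):
        self.items.append(item)    # place the element on top of the stack

    def remove(self):
        if not self.items:
            return "Stack is empty"
        return self.items.pop()    # take the top element off

s = Stack()
s.add("plate1")
s.add("plate2")
print(s.remove())   # plate2 — the element added last comes out first
```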
37 | 38 | In a stack the element inserted last in sequence will come out first as we can remove only from the top of the stack. Such feature is known as Last in First Out(LIFO) feature. The operations of adding and removing the elements is known as PUSH and POP. In the following program we implement it as add and and remove functions. We declare an empty list and use the append() and pop() methods to add and remove the data elements. 39 | 40 | 41 | ## Surprise Practical Test Questions: 42 | - Python program to find the power of a numbers in a list using recursion 43 | - How to Print Multiple Arguments in Python and call a function to display the arguments? 44 | - Write a program to sort unsorted numbers using list in ascending order. 45 | - Find fibonacci series up to n using lambda expression (Sample Output : 0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584, 4181.) 46 | 47 | 48 | ### Software Installation Reference 49 | 50 | - [Python](https://www.python.org/downloads/windows/) 51 | - [Anaconda](https://www.anaconda.com/) 52 | - [VS Code](https://code.visualstudio.com/download) 53 | 54 | ### Other Editors 55 | - [Jupyter Online](https://jupyter.org/try-jupyter/lab/) 56 | - [PyCharm Community Version](https://www.jetbrains.com/pycharm/download/#section=windows) 57 | 58 | ### Version Control Tools 59 | - [GIT](https://git-scm.com/download/win) 60 | - [Github.com](https://github.com/) 61 | 62 | ### Useful Links 63 | - [Get python libraries](https://pypi.org/) 64 | - [Official Python Tutorials](https://docs.python.org/3/tutorial/index.html) 65 | - [AI with Python Tutorial](https://www.tutorialspoint.com/artificial_intelligence_with_python/index.htm) 66 | - [pip command](https://bootstrap.pypa.io/get-pip.py) 67 | 68 | -------------------------------------------------------------------------------- /Untitled.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [], 3 | "metadata": {}, 4 | "nbformat": 4, 5 | "nbformat_minor": 5 6 | } 7 | -------------------------------------------------------------------------------- /Untitled1.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "5ea4d9b5", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "ename": "AttributeError", 11 | "evalue": "module 'tensorflow.compat.v2.__internal__' has no attribute 'register_load_context_function'", 12 | "output_type": "error", 13 | "traceback": [ 14 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 15 | "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", 16 | "Input \u001b[1;32mIn [1]\u001b[0m, in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[1;32m----> 1\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtensorflow\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mtf\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28mprint\u001b[39m(tf\u001b[38;5;241m.\u001b[39m__version__)\n", 17 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\tensorflow\\__init__.py:469\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 467\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mhasattr\u001b[39m(_current_module, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mkeras\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[0;32m 468\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 469\u001b[0m 
\u001b[43m_keras\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_load\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 470\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m:\n\u001b[0;32m 471\u001b[0m \u001b[38;5;28;01mpass\u001b[39;00m\n", 18 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\tensorflow\\python\\util\\lazy_loader.py:41\u001b[0m, in \u001b[0;36mLazyLoader._load\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 39\u001b[0m \u001b[38;5;124;03m\"\"\"Load the module and insert it into the parent's globals.\"\"\"\u001b[39;00m\n\u001b[0;32m 40\u001b[0m \u001b[38;5;66;03m# Import the target module and insert it into the parent's namespace\u001b[39;00m\n\u001b[1;32m---> 41\u001b[0m module \u001b[38;5;241m=\u001b[39m \u001b[43mimportlib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;18;43m__name__\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 42\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_parent_module_globals[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_local_name] \u001b[38;5;241m=\u001b[39m module\n\u001b[0;32m 44\u001b[0m \u001b[38;5;66;03m# Emit a warning if one was specified\u001b[39;00m\n", 19 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\importlib\\__init__.py:127\u001b[0m, in \u001b[0;36mimport_module\u001b[1;34m(name, package)\u001b[0m\n\u001b[0;32m 125\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[0;32m 126\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m--> 127\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n", 20 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\__init__.py:21\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 15\u001b[0m \u001b[38;5;124;03m\"\"\"Implementation of the Keras API, the high-level API of TensorFlow.\u001b[39;00m\n\u001b[0;32m 16\u001b[0m \n\u001b[0;32m 17\u001b[0m \u001b[38;5;124;03mDetailed documentation and user guides are available at\u001b[39;00m\n\u001b[0;32m 18\u001b[0m \u001b[38;5;124;03m[keras.io](https://keras.io).\u001b[39;00m\n\u001b[0;32m 19\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m distribute\n\u001b[1;32m---> 21\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m models\n\u001b[0;32m 22\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01minput_layer\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Input\n\u001b[0;32m 23\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m 
\u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msequential\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Sequential\n", 21 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\models\\__init__.py:18\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;66;03m# Copyright 2022 The TensorFlow Authors. All Rights Reserved.\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;66;03m#\u001b[39;00m\n\u001b[0;32m 3\u001b[0m \u001b[38;5;66;03m# Licensed under the Apache License, Version 2.0 (the \"License\");\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 13\u001b[0m \u001b[38;5;66;03m# limitations under the License.\u001b[39;00m\n\u001b[0;32m 14\u001b[0m \u001b[38;5;66;03m# ==============================================================================\u001b[39;00m\n\u001b[0;32m 15\u001b[0m \u001b[38;5;124;03m\"\"\"Keras models API.\"\"\"\u001b[39;00m\n\u001b[1;32m---> 18\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mfunctional\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Functional\n\u001b[0;32m 19\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msequential\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Sequential\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mtraining\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Model\n", 22 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\engine\\functional.py:34\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 32\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m input_spec\n\u001b[0;32m 33\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m node \u001b[38;5;28;01mas\u001b[39;00m node_module\n\u001b[1;32m---> 34\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m training \u001b[38;5;28;01mas\u001b[39;00m training_lib\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m training_utils\n\u001b[0;32m 36\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n", 23 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\engine\\training.py:45\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 43\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m 
\u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mexperimental\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m saving_lib\n\u001b[0;32m 44\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m hdf5_format\n\u001b[1;32m---> 45\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m save\n\u001b[0;32m 46\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m saving_utils\n\u001b[0;32m 47\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n", 24 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\saving\\legacy\\save.py:24\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n\u001b[0;32m 23\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m load \u001b[38;5;28;01mas\u001b[39;00m saved_model_load\n\u001b[1;32m---> 24\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m load_context\n\u001b[0;32m 25\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m save \u001b[38;5;28;01mas\u001b[39;00m saved_model_save\n\u001b[0;32m 26\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mutils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m traceback_utils\n", 25 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\saving\\legacy\\saved_model\\load_context.py:68\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 64\u001b[0m \u001b[38;5;124;03m\"\"\"Returns whether under a load context.\"\"\"\u001b[39;00m\n\u001b[0;32m 65\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m _load_context\u001b[38;5;241m.\u001b[39min_load_context()\n\u001b[1;32m---> 68\u001b[0m \u001b[43mtf\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__internal__\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mregister_load_context_function\u001b[49m(in_load_context)\n", 26 | "\u001b[1;31mAttributeError\u001b[0m: module 'tensorflow.compat.v2.__internal__' has no attribute 'register_load_context_function'" 27 | ] 28 | } 29 | ], 30 | "source": [ 31 | "import tensorflow as tf\n", 32 | "print(tf.__version__)" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "id": "cc4f94a4", 39 | "metadata": {}, 40 | "outputs": [], 41 | "source": [] 42 | } 43 | ], 44 | "metadata": { 45 | "kernelspec": { 46 | "display_name": "Python 3 (ipykernel)", 47 | "language": "python", 48 | "name": "python3" 49 | }, 50 | "language_info": { 51 | "codemirror_mode": { 52 | "name": "ipython", 53 | "version": 3 54 | }, 55 | "file_extension": ".py", 56 | "mimetype": "text/x-python", 57 | "name": "python", 58 | "nbconvert_exporter": "python", 59 | "pygments_lexer": "ipython3", 60 | "version": "3.8.5" 61 | } 62 | }, 63 | "nbformat": 4, 64 | "nbformat_minor": 5 65 | } 66 | -------------------------------------------------------------------------------- /module2/1.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "a=10" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [] 18 | } 19 | ], 20 | "metadata": { 21 | "kernelspec": { 22 | "display_name": "Python 3.8.5 ('base')", 23 | "language": "python", 24 | "name": "python3" 25 | }, 26 | "language_info": { 27 | "name": "python", 28 | "version": "3.8.5" 29 | }, 30 | "orig_nbformat": 4, 31 | "vscode": { 32 | "interpreter": { 33 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 34 | } 35 | } 36 | }, 37 | "nbformat": 4, 38 | "nbformat_minor": 2 39 | } 40 | -------------------------------------------------------------------------------- /module2/Myfile1.txt: -------------------------------------------------------------------------------- 1 | Hello 2 | This is Dharwad 3 | This is Hubli 4 | This is London 5 | -------------------------------------------------------------------------------- /module2/email_records.csv: -------------------------------------------------------------------------------- 1 | Name,Branch,Year,CGPA 2 | Nikhil,EC,2,9.0 3 | Sanchit,CE,2,9.1 4 | Yusuf,EC,2,9.3 5 | Sagar,CS,1,9.5 6 | Ijaz,ME,3,7.8 7 | Hemanth,CE,2,9.1 8 | Pushpa,EC,2,9.1 9 | -------------------------------------------------------------------------------- /module2/email_records1.csv: -------------------------------------------------------------------------------- 1 | Name,Email 2 | Nikhil,nikhil.gfg@gmail.com 3 | Sanchit,sanchit.gfg@gmail.com 4 | Aditya,aditya.gfg@gmail.com 5 | Sagar,sagar.gfg@gmail.com 6 | Prateek,prateek.gfg@gmail.com 7 | Sahil,sahil.gfg@gmail.com 8 | -------------------------------------------------------------------------------- /module2/files.py: -------------------------------------------------------------------------------- 1 | # Program to show various ways to read and 2 | # write data in a file. 
3 | file1 = open("Myfile1.txt","a") 4 | 5 | # \n is placed to indicate EOL (End of Line) 6 | file1.write("Hello \n") 7 | L = ["This is Dharwad \n","This is Hubli \n","This is Belgaum \n"] 8 | file1.writelines(L) 9 | file1.close() #to change file access modes 10 | 11 | file1 = open("Myfile1.txt","r+") 12 | print("Output of Read function is ") 13 | print(file1.read()) 14 | # seek(n) takes the file handle to the nth 15 | # bite from the beginning. 16 | file1.seek(0) 17 | print( "Output of Readline function is ") 18 | print(file1.readline()) 19 | 20 | file1.seek(0) 21 | # To show difference between read and readline 22 | print("Output of Read(100) function is ") 23 | print(file1.read(100)) 24 | file1.seek(0) 25 | print("Output of Readline(9) function is ") 26 | print(file1.readline()) 27 | file1.seek(0) 28 | # readlines function 29 | print("Output of Readlines function is ") 30 | print(file1.readlines()[3]) 31 | file1.close() 32 | -------------------------------------------------------------------------------- /module2/functions.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 128, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "10\n", 13 | "20\n", 14 | "30\n", 15 | "40\n" 16 | ] 17 | } 18 | ], 19 | "source": [ 20 | "#function\n", 21 | "def fun(list1):\n", 22 | " result=0\n", 23 | " for index in range(len(list1)):\n", 24 | " # result= list1[index]\n", 25 | " print(list1[index])\n", 26 | "\n", 27 | "list1=[10,20,30,40]\n", 28 | "fun(list1)\n" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 89, 34 | "metadata": {}, 35 | "outputs": [ 36 | { 37 | "name": "stdout", 38 | "output_type": "stream", 39 | "text": [ 40 | "[1, 0, 10, 30, 23, 78, 100, 4, 2, 43, 52, 67]\n", 41 | "0 i updated going for next round\n", 42 | "[100, 0, 1, 10, 23, 30, 78, 4, 2, 43, 52, 67] 1 i updated going for next round\n", 43 | "[0, 100, 1, 10, 23, 30, 78, 4, 2, 43, 52, 67] 2 i updated going for next round\n", 44 | "[0, 1, 100, 10, 23, 30, 78, 4, 2, 43, 52, 67] 3 i updated going for next round\n", 45 | "[0, 1, 10, 100, 23, 30, 78, 4, 2, 43, 52, 67] 4 i updated going for next round\n", 46 | "[0, 1, 10, 23, 100, 30, 78, 4, 2, 43, 52, 67] 5 i updated going for next round\n", 47 | "[0, 1, 10, 23, 30, 100, 78, 4, 2, 43, 52, 67] 6 i updated going for next round\n", 48 | "[0, 1, 10, 23, 30, 78, 100, 4, 2, 43, 52, 67] 7 i updated going for next round\n", 49 | "[0, 1, 4, 10, 23, 30, 78, 100, 2, 43, 52, 67] 8 i updated going for next round\n", 50 | "[0, 1, 2, 4, 10, 23, 30, 78, 100, 43, 52, 67] 9 i updated going for next round\n", 51 | "[0, 1, 2, 4, 10, 23, 30, 43, 78, 100, 52, 67] 10 i updated going for next round\n", 52 | "[0, 1, 2, 4, 10, 23, 30, 43, 52, 78, 100, 67] 11 i updated going for next round\n", 53 | "[0, 1, 2, 4, 10, 23, 30, 43, 52, 67, 78, 100] " 54 | ] 55 | } 56 | ], 57 | "source": [ 58 | "\n", 59 | "#Q3 : Sorting in Ascending order\n", 60 | "a =[1,0,10,30,23,78,100,4,2,43,52,67]\n", 61 | "# a.sort()\n", 62 | "print(a)\n", 63 | "\n", 64 | "# a =[1,0,10,30,23,78,100,4,2,43,52,67]\n", 65 | "# a =[1,10,0,30,23,78,100,4,2,43,52,67]\n", 66 | "# a =[1,10,30,0,23,78,100,4,2,43,52,67]\n", 67 | "# a =[1,10,30,23,0,78,100,4,2,43,52,67]\n", 68 | "# a =[1,10,30,23,78,0,100,4,2,43,52,67]\n", 69 | "# a =[1,10,30,23,78,100,0,4,2,43,52,67]\n", 70 | "# a =[1,10,30,23,78,100,4,0,2,43,52,67]\n", 71 | "# a 
=[1,10,30,23,78,100,4,2,0,43,52,67]\n", 72 | "# a =[1,10,30,23,78,100,4,2,43,0,52,67]\n", 73 | "# a =[1,10,30,23,78,100,4,2,43,52,0,67]\n", 74 | "# a =[1,10,30,23,78,100,4,2,43,52,67,0]\n", 75 | "n = len(a)\n", 76 | "# (0 till 11)\n", 77 | "for i in range(n):\n", 78 | " print(i, 'i updated going for next round')\n", 79 | " for j in range(n):\n", 80 | " # print('i=',i,' j=',j,end=\"\")\n", 81 | " if(a[i] < a[j]):\n", 82 | " s = a[i]\n", 83 | " a[i] = a[j]\n", 84 | " a[j] = s\n", 85 | " print(a, end=\" \")\n" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 90, 91 | "metadata": {}, 92 | "outputs": [ 93 | { 94 | "name": "stdout", 95 | "output_type": "stream", 96 | "text": [ 97 | "{1: 'alpha', 2: 'beta', 3: 'gama'}\n" 98 | ] 99 | } 100 | ], 101 | "source": [ 102 | "#function to merge two lists \n", 103 | "# as group of 1 name and 1 number\n", 104 | "\n", 105 | "def merge_fun(list1,list2):\n", 106 | " merged_list= zip(list1,list2)\n", 107 | " print(dict(merged_list))\n", 108 | "\n", 109 | "list1=[1,2,3]\n", 110 | "list2=['alpha','beta','gama']\n", 111 | "merge_fun(list1,list2)\n" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": 91, 117 | "metadata": {}, 118 | "outputs": [ 119 | { 120 | "name": "stdout", 121 | "output_type": "stream", 122 | "text": [ 123 | "1.0\n" 124 | ] 125 | } 126 | ], 127 | "source": [ 128 | "from math import *\n", 129 | "\n", 130 | "x= sqrt(1)\n", 131 | "\n", 132 | "print(x)" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 92, 138 | "metadata": {}, 139 | "outputs": [ 140 | { 141 | "name": "stdout", 142 | "output_type": "stream", 143 | "text": [ 144 | "Global scope1\n", 145 | "Global scope1 Global scope2\n", 146 | "Enclosing scope Global scope1 Global scope2\n", 147 | "Local scope Enclosing scope Global scope1\n" 148 | ] 149 | } 150 | ], 151 | "source": [ 152 | "x='Global scope1'\n", 153 | "print(x)\n", 154 | "\n", 155 | "def outer_func():\n", 156 | " y='Enclosing scope'\n", 157 | " print(y,x,s)\n", 158 | " def inner_func():\n", 159 | " z='Local scope'\n", 160 | " print(z,y,x)\n", 161 | " inner_func()\n", 162 | "s='Global scope2'\n", 163 | "print(x,s)\n", 164 | "outer_func()" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": 93, 170 | "metadata": {}, 171 | "outputs": [ 172 | { 173 | "name": "stdout", 174 | "output_type": "stream", 175 | "text": [ 176 | "Help on class str in module builtins:\n", 177 | "\n", 178 | "class str(object)\n", 179 | " | str(object='') -> str\n", 180 | " | str(bytes_or_buffer[, encoding[, errors]]) -> str\n", 181 | " | \n", 182 | " | Create a new string object from the given object. 
If encoding or\n", 183 | " | errors is specified, then the object must expose a data buffer\n", 184 | " | that will be decoded using the given encoding and error handler.\n", 185 | " | Otherwise, returns the result of object.__str__() (if defined)\n", 186 | " | or repr(object).\n", 187 | " | encoding defaults to sys.getdefaultencoding().\n", 188 | " | errors defaults to 'strict'.\n", 189 | " | \n", 190 | " | Methods defined here:\n", 191 | " | \n", 192 | " | __add__(self, value, /)\n", 193 | " | Return self+value.\n", 194 | " | \n", 195 | " | __contains__(self, key, /)\n", 196 | " | Return key in self.\n", 197 | " | \n", 198 | " | __eq__(self, value, /)\n", 199 | " | Return self==value.\n", 200 | " | \n", 201 | " | __format__(self, format_spec, /)\n", 202 | " | Return a formatted version of the string as described by format_spec.\n", 203 | " | \n", 204 | " | __ge__(self, value, /)\n", 205 | " | Return self>=value.\n", 206 | " | \n", 207 | " | __getattribute__(self, name, /)\n", 208 | " | Return getattr(self, name).\n", 209 | " | \n", 210 | " | __getitem__(self, key, /)\n", 211 | " | Return self[key].\n", 212 | " | \n", 213 | " | __getnewargs__(...)\n", 214 | " | \n", 215 | " | __gt__(self, value, /)\n", 216 | " | Return self>value.\n", 217 | " | \n", 218 | " | __hash__(self, /)\n", 219 | " | Return hash(self).\n", 220 | " | \n", 221 | " | __iter__(self, /)\n", 222 | " | Implement iter(self).\n", 223 | " | \n", 224 | " | __le__(self, value, /)\n", 225 | " | Return self<=value.\n", 226 | " | \n", 227 | " | __len__(self, /)\n", 228 | " | Return len(self).\n", 229 | " | \n", 230 | " | __lt__(self, value, /)\n", 231 | " | Return self int\n", 273 | " | \n", 274 | " | Return the number of non-overlapping occurrences of substring sub in\n", 275 | " | string S[start:end]. Optional arguments start and end are\n", 276 | " | interpreted as in slice notation.\n", 277 | " | \n", 278 | " | encode(self, /, encoding='utf-8', errors='strict')\n", 279 | " | Encode the string using the codec registered for encoding.\n", 280 | " | \n", 281 | " | encoding\n", 282 | " | The encoding in which to encode the string.\n", 283 | " | errors\n", 284 | " | The error handling scheme to use for encoding errors.\n", 285 | " | The default is 'strict' meaning that encoding errors raise a\n", 286 | " | UnicodeEncodeError. Other possible values are 'ignore', 'replace' and\n", 287 | " | 'xmlcharrefreplace' as well as any other name registered with\n", 288 | " | codecs.register_error that can handle UnicodeEncodeErrors.\n", 289 | " | \n", 290 | " | endswith(...)\n", 291 | " | S.endswith(suffix[, start[, end]]) -> bool\n", 292 | " | \n", 293 | " | Return True if S ends with the specified suffix, False otherwise.\n", 294 | " | With optional start, test S beginning at that position.\n", 295 | " | With optional end, stop comparing S at that position.\n", 296 | " | suffix can also be a tuple of strings to try.\n", 297 | " | \n", 298 | " | expandtabs(self, /, tabsize=8)\n", 299 | " | Return a copy where all tab characters are expanded using spaces.\n", 300 | " | \n", 301 | " | If tabsize is not given, a tab size of 8 characters is assumed.\n", 302 | " | \n", 303 | " | find(...)\n", 304 | " | S.find(sub[, start[, end]]) -> int\n", 305 | " | \n", 306 | " | Return the lowest index in S where substring sub is found,\n", 307 | " | such that sub is contained within S[start:end]. 
Optional\n", 308 | " | arguments start and end are interpreted as in slice notation.\n", 309 | " | \n", 310 | " | Return -1 on failure.\n", 311 | " | \n", 312 | " | format(...)\n", 313 | " | S.format(*args, **kwargs) -> str\n", 314 | " | \n", 315 | " | Return a formatted version of S, using substitutions from args and kwargs.\n", 316 | " | The substitutions are identified by braces ('{' and '}').\n", 317 | " | \n", 318 | " | format_map(...)\n", 319 | " | S.format_map(mapping) -> str\n", 320 | " | \n", 321 | " | Return a formatted version of S, using substitutions from mapping.\n", 322 | " | The substitutions are identified by braces ('{' and '}').\n", 323 | " | \n", 324 | " | index(...)\n", 325 | " | S.index(sub[, start[, end]]) -> int\n", 326 | " | \n", 327 | " | Return the lowest index in S where substring sub is found,\n", 328 | " | such that sub is contained within S[start:end]. Optional\n", 329 | " | arguments start and end are interpreted as in slice notation.\n", 330 | " | \n", 331 | " | Raises ValueError when the substring is not found.\n", 332 | " | \n", 333 | " | isalnum(self, /)\n", 334 | " | Return True if the string is an alpha-numeric string, False otherwise.\n", 335 | " | \n", 336 | " | A string is alpha-numeric if all characters in the string are alpha-numeric and\n", 337 | " | there is at least one character in the string.\n", 338 | " | \n", 339 | " | isalpha(self, /)\n", 340 | " | Return True if the string is an alphabetic string, False otherwise.\n", 341 | " | \n", 342 | " | A string is alphabetic if all characters in the string are alphabetic and there\n", 343 | " | is at least one character in the string.\n", 344 | " | \n", 345 | " | isascii(self, /)\n", 346 | " | Return True if all characters in the string are ASCII, False otherwise.\n", 347 | " | \n", 348 | " | ASCII characters have code points in the range U+0000-U+007F.\n", 349 | " | Empty string is ASCII too.\n", 350 | " | \n", 351 | " | isdecimal(self, /)\n", 352 | " | Return True if the string is a decimal string, False otherwise.\n", 353 | " | \n", 354 | " | A string is a decimal string if all characters in the string are decimal and\n", 355 | " | there is at least one character in the string.\n", 356 | " | \n", 357 | " | isdigit(self, /)\n", 358 | " | Return True if the string is a digit string, False otherwise.\n", 359 | " | \n", 360 | " | A string is a digit string if all characters in the string are digits and there\n", 361 | " | is at least one character in the string.\n", 362 | " | \n", 363 | " | isidentifier(self, /)\n", 364 | " | Return True if the string is a valid Python identifier, False otherwise.\n", 365 | " | \n", 366 | " | Call keyword.iskeyword(s) to test whether string s is a reserved identifier,\n", 367 | " | such as \"def\" or \"class\".\n", 368 | " | \n", 369 | " | islower(self, /)\n", 370 | " | Return True if the string is a lowercase string, False otherwise.\n", 371 | " | \n", 372 | " | A string is lowercase if all cased characters in the string are lowercase and\n", 373 | " | there is at least one cased character in the string.\n", 374 | " | \n", 375 | " | isnumeric(self, /)\n", 376 | " | Return True if the string is a numeric string, False otherwise.\n", 377 | " | \n", 378 | " | A string is numeric if all characters in the string are numeric and there is at\n", 379 | " | least one character in the string.\n", 380 | " | \n", 381 | " | isprintable(self, /)\n", 382 | " | Return True if the string is printable, False otherwise.\n", 383 | " | \n", 384 | " | A string is printable if all of its 
characters are considered printable in\n", 385 | " | repr() or if it is empty.\n", 386 | " | \n", 387 | " | isspace(self, /)\n", 388 | " | Return True if the string is a whitespace string, False otherwise.\n", 389 | " | \n", 390 | " | A string is whitespace if all characters in the string are whitespace and there\n", 391 | " | is at least one character in the string.\n", 392 | " | \n", 393 | " | istitle(self, /)\n", 394 | " | Return True if the string is a title-cased string, False otherwise.\n", 395 | " | \n", 396 | " | In a title-cased string, upper- and title-case characters may only\n", 397 | " | follow uncased characters and lowercase characters only cased ones.\n", 398 | " | \n", 399 | " | isupper(self, /)\n", 400 | " | Return True if the string is an uppercase string, False otherwise.\n", 401 | " | \n", 402 | " | A string is uppercase if all cased characters in the string are uppercase and\n", 403 | " | there is at least one cased character in the string.\n", 404 | " | \n", 405 | " | join(self, iterable, /)\n", 406 | " | Concatenate any number of strings.\n", 407 | " | \n", 408 | " | The string whose method is called is inserted in between each given string.\n", 409 | " | The result is returned as a new string.\n", 410 | " | \n", 411 | " | Example: '.'.join(['ab', 'pq', 'rs']) -> 'ab.pq.rs'\n", 412 | " | \n", 413 | " | ljust(self, width, fillchar=' ', /)\n", 414 | " | Return a left-justified string of length width.\n", 415 | " | \n", 416 | " | Padding is done using the specified fill character (default is a space).\n", 417 | " | \n", 418 | " | lower(self, /)\n", 419 | " | Return a copy of the string converted to lowercase.\n", 420 | " | \n", 421 | " | lstrip(self, chars=None, /)\n", 422 | " | Return a copy of the string with leading whitespace removed.\n", 423 | " | \n", 424 | " | If chars is given and not None, remove characters in chars instead.\n", 425 | " | \n", 426 | " | partition(self, sep, /)\n", 427 | " | Partition the string into three parts using the given separator.\n", 428 | " | \n", 429 | " | This will search for the separator in the string. If the separator is found,\n", 430 | " | returns a 3-tuple containing the part before the separator, the separator\n", 431 | " | itself, and the part after it.\n", 432 | " | \n", 433 | " | If the separator is not found, returns a 3-tuple containing the original string\n", 434 | " | and two empty strings.\n", 435 | " | \n", 436 | " | replace(self, old, new, count=-1, /)\n", 437 | " | Return a copy with all occurrences of substring old replaced by new.\n", 438 | " | \n", 439 | " | count\n", 440 | " | Maximum number of occurrences to replace.\n", 441 | " | -1 (the default value) means replace all occurrences.\n", 442 | " | \n", 443 | " | If the optional argument count is given, only the first count occurrences are\n", 444 | " | replaced.\n", 445 | " | \n", 446 | " | rfind(...)\n", 447 | " | S.rfind(sub[, start[, end]]) -> int\n", 448 | " | \n", 449 | " | Return the highest index in S where substring sub is found,\n", 450 | " | such that sub is contained within S[start:end]. Optional\n", 451 | " | arguments start and end are interpreted as in slice notation.\n", 452 | " | \n", 453 | " | Return -1 on failure.\n", 454 | " | \n", 455 | " | rindex(...)\n", 456 | " | S.rindex(sub[, start[, end]]) -> int\n", 457 | " | \n", 458 | " | Return the highest index in S where substring sub is found,\n", 459 | " | such that sub is contained within S[start:end]. 
Optional\n", 460 | " | arguments start and end are interpreted as in slice notation.\n", 461 | " | \n", 462 | " | Raises ValueError when the substring is not found.\n", 463 | " | \n", 464 | " | rjust(self, width, fillchar=' ', /)\n", 465 | " | Return a right-justified string of length width.\n", 466 | " | \n", 467 | " | Padding is done using the specified fill character (default is a space).\n", 468 | " | \n", 469 | " | rpartition(self, sep, /)\n", 470 | " | Partition the string into three parts using the given separator.\n", 471 | " | \n", 472 | " | This will search for the separator in the string, starting at the end. If\n", 473 | " | the separator is found, returns a 3-tuple containing the part before the\n", 474 | " | separator, the separator itself, and the part after it.\n", 475 | " | \n", 476 | " | If the separator is not found, returns a 3-tuple containing two empty strings\n", 477 | " | and the original string.\n", 478 | " | \n", 479 | " | rsplit(self, /, sep=None, maxsplit=-1)\n", 480 | " | Return a list of the words in the string, using sep as the delimiter string.\n", 481 | " | \n", 482 | " | sep\n", 483 | " | The delimiter according which to split the string.\n", 484 | " | None (the default value) means split according to any whitespace,\n", 485 | " | and discard empty strings from the result.\n", 486 | " | maxsplit\n", 487 | " | Maximum number of splits to do.\n", 488 | " | -1 (the default value) means no limit.\n", 489 | " | \n", 490 | " | Splits are done starting at the end of the string and working to the front.\n", 491 | " | \n", 492 | " | rstrip(self, chars=None, /)\n", 493 | " | Return a copy of the string with trailing whitespace removed.\n", 494 | " | \n", 495 | " | If chars is given and not None, remove characters in chars instead.\n", 496 | " | \n", 497 | " | split(self, /, sep=None, maxsplit=-1)\n", 498 | " | Return a list of the words in the string, using sep as the delimiter string.\n", 499 | " | \n", 500 | " | sep\n", 501 | " | The delimiter according which to split the string.\n", 502 | " | None (the default value) means split according to any whitespace,\n", 503 | " | and discard empty strings from the result.\n", 504 | " | maxsplit\n", 505 | " | Maximum number of splits to do.\n", 506 | " | -1 (the default value) means no limit.\n", 507 | " | \n", 508 | " | splitlines(self, /, keepends=False)\n", 509 | " | Return a list of the lines in the string, breaking at line boundaries.\n", 510 | " | \n", 511 | " | Line breaks are not included in the resulting list unless keepends is given and\n", 512 | " | true.\n", 513 | " | \n", 514 | " | startswith(...)\n", 515 | " | S.startswith(prefix[, start[, end]]) -> bool\n", 516 | " | \n", 517 | " | Return True if S starts with the specified prefix, False otherwise.\n", 518 | " | With optional start, test S beginning at that position.\n", 519 | " | With optional end, stop comparing S at that position.\n", 520 | " | prefix can also be a tuple of strings to try.\n", 521 | " | \n", 522 | " | strip(self, chars=None, /)\n", 523 | " | Return a copy of the string with leading and trailing whitespace removed.\n", 524 | " | \n", 525 | " | If chars is given and not None, remove characters in chars instead.\n", 526 | " | \n", 527 | " | swapcase(self, /)\n", 528 | " | Convert uppercase characters to lowercase and lowercase characters to uppercase.\n", 529 | " | \n", 530 | " | title(self, /)\n", 531 | " | Return a version of the string where each word is titlecased.\n", 532 | " | \n", 533 | " | More specifically, words start with 
uppercased characters and all remaining\n", 534 | " | cased characters have lower case.\n", 535 | " | \n", 536 | " | translate(self, table, /)\n", 537 | " | Replace each character in the string using the given translation table.\n", 538 | " | \n", 539 | " | table\n", 540 | " | Translation table, which must be a mapping of Unicode ordinals to\n", 541 | " | Unicode ordinals, strings, or None.\n", 542 | " | \n", 543 | " | The table must implement lookup/indexing via __getitem__, for instance a\n", 544 | " | dictionary or list. If this operation raises LookupError, the character is\n", 545 | " | left untouched. Characters mapped to None are deleted.\n", 546 | " | \n", 547 | " | upper(self, /)\n", 548 | " | Return a copy of the string converted to uppercase.\n", 549 | " | \n", 550 | " | zfill(self, width, /)\n", 551 | " | Pad a numeric string with zeros on the left, to fill a field of the given width.\n", 552 | " | \n", 553 | " | The string is never truncated.\n", 554 | " | \n", 555 | " | ----------------------------------------------------------------------\n", 556 | " | Static methods defined here:\n", 557 | " | \n", 558 | " | __new__(*args, **kwargs) from builtins.type\n", 559 | " | Create and return a new object. See help(type) for accurate signature.\n", 560 | " | \n", 561 | " | maketrans(...)\n", 562 | " | Return a translation table usable for str.translate().\n", 563 | " | \n", 564 | " | If there is only one argument, it must be a dictionary mapping Unicode\n", 565 | " | ordinals (integers) or characters to Unicode ordinals, strings or None.\n", 566 | " | Character keys will be then converted to ordinals.\n", 567 | " | If there are two arguments, they must be strings of equal length, and\n", 568 | " | in the resulting dictionary, each character in x will be mapped to the\n", 569 | " | character at the same position in y. 
If there is a third argument, it\n", 570 | " | must be a string, whose characters will be mapped to None in the result.\n", 571 | "\n", 572 | "None\n" 573 | ] 574 | } 575 | ], 576 | "source": [ 577 | "import math\n", 578 | "print(help(str))" 579 | ] 580 | }, 581 | { 582 | "cell_type": "code", 583 | "execution_count": 94, 584 | "metadata": {}, 585 | "outputs": [ 586 | { 587 | "name": "stdout", 588 | "output_type": "stream", 589 | "text": [ 590 | "No Python documentation found for 'Am the part of docstr\\n and am multiline string'.\n", 591 | "Use help() to get the interactive help utility.\n", 592 | "Use help(str) for help on the str class.\n", 593 | "\n", 594 | "None\n" 595 | ] 596 | } 597 | ], 598 | "source": [ 599 | "def sq(n):\n", 600 | " '''Am the part of docstr\n", 601 | " and am multiline string\n", 602 | " '''\n", 603 | " # i Am comment\n", 604 | " return n**2\n", 605 | "\n", 606 | "print(help(sq.__doc__))\n", 607 | "\n", 608 | " " 609 | ] 610 | }, 611 | { 612 | "cell_type": "code", 613 | "execution_count": 95, 614 | "metadata": {}, 615 | "outputs": [ 616 | { 617 | "name": "stdout", 618 | "output_type": "stream", 619 | "text": [ 620 | "L1: 6\n" 621 | ] 622 | } 623 | ], 624 | "source": [ 625 | "def cube(y):\n", 626 | " n=y\n", 627 | " return lambda x: x*n\n", 628 | "\n", 629 | "L1=cube(3)\n", 630 | "print('L1: ',L1(2))\n", 631 | "\n", 632 | "# lambda string: string\n", 633 | "\n", 634 | "\n" 635 | ] 636 | }, 637 | { 638 | "cell_type": "code", 639 | "execution_count": 96, 640 | "metadata": {}, 641 | "outputs": [ 642 | { 643 | "name": "stdout", 644 | "output_type": "stream", 645 | "text": [ 646 | "alpha beta " 647 | ] 648 | } 649 | ], 650 | "source": [ 651 | "t1 = ('alpha','beta','gama')\n", 652 | "# it_obj = iter(t1)\n", 653 | "# print(next(it_obj))\n", 654 | "# print(next(it_obj))\n", 655 | "str1=\"banana\"\n", 656 | "for i in t1:\n", 657 | " if i=='gama':\n", 658 | " continue\n", 659 | " print(i,end=\" \")\n", 660 | "\n" 661 | ] 662 | }, 663 | { 664 | "cell_type": "code", 665 | "execution_count": 97, 666 | "metadata": {}, 667 | "outputs": [ 668 | { 669 | "name": "stdout", 670 | "output_type": "stream", 671 | "text": [ 672 | "1\n", 673 | "1 is dispatched and waiting\n", 674 | "2\n", 675 | "2 is dispatched and waiting\n", 676 | "3\n" 677 | ] 678 | } 679 | ], 680 | "source": [ 681 | "def simpleGeneratorFun():\n", 682 | " yield 1\n", 683 | " print('1 is dispatched and waiting')\n", 684 | " yield 2\n", 685 | " print('2 is dispatched and waiting')\n", 686 | " yield 3\n", 687 | " print('3 is dispatched and waiting')\n", 688 | " \n", 689 | "\n", 690 | "x= simpleGeneratorFun()\n", 691 | "print(x.__next__())\n", 692 | "print(x.__next__())\n", 693 | "print(x.__next__())\n", 694 | "\n", 695 | "\n" 696 | ] 697 | }, 698 | { 699 | "cell_type": "code", 700 | "execution_count": 98, 701 | "metadata": {}, 702 | "outputs": [ 703 | { 704 | "name": "stdout", 705 | "output_type": "stream", 706 | "text": [ 707 | "alpha\n", 708 | "beta\n", 709 | "gama\n" 710 | ] 711 | } 712 | ], 713 | "source": [ 714 | "mytuple=('alpha','beta','gama')\n", 715 | "Iterable_object=iter(mytuple)\n", 716 | "\n", 717 | "print(next(Iterable_object))\n", 718 | "print(next(Iterable_object))\n", 719 | "print(next(Iterable_object))\n" 720 | ] 721 | }, 722 | { 723 | "cell_type": "code", 724 | "execution_count": 99, 725 | "metadata": {}, 726 | "outputs": [ 727 | { 728 | "name": "stdout", 729 | "output_type": "stream", 730 | "text": [ 731 | "B a n a " 732 | ] 733 | } 734 | ], 735 | "source": [ 736 | "tuple =('a','b','c')\n", 737 | 
"string1 ='Banana'\n", 738 | "\n", 739 | "for i in range(len(string1)):\n", 740 | " if(i>3):\n", 741 | " continue\n", 742 | " print(string1[i],end=' ')" 743 | ] 744 | }, 745 | { 746 | "cell_type": "code", 747 | "execution_count": 100, 748 | "metadata": {}, 749 | "outputs": [ 750 | { 751 | "name": "stdout", 752 | "output_type": "stream", 753 | "text": [ 754 | "1\n", 755 | "Am here in yield at 2nd position\n", 756 | "3\n" 757 | ] 758 | } 759 | ], 760 | "source": [ 761 | "def simplegen():\n", 762 | " yield 1\n", 763 | " yield \"Am here in yield at 2nd position\"\n", 764 | " yield 3\n", 765 | "for i in simplegen():\n", 766 | " print(i)" 767 | ] 768 | }, 769 | { 770 | "cell_type": "code", 771 | "execution_count": 101, 772 | "metadata": {}, 773 | "outputs": [ 774 | { 775 | "name": "stdout", 776 | "output_type": "stream", 777 | "text": [ 778 | "{10, 20, 30}\n" 779 | ] 780 | } 781 | ], 782 | "source": [ 783 | "l1=[10,20,20,30]\n", 784 | "# l1.sort()\n", 785 | "# print(l1)\n", 786 | "# l1.reverse()\n", 787 | "# print(l1)\n", 788 | "print(set(l1))" 789 | ] 790 | }, 791 | { 792 | "cell_type": "code", 793 | "execution_count": 105, 794 | "metadata": {}, 795 | "outputs": [ 796 | { 797 | "name": "stdout", 798 | "output_type": "stream", 799 | "text": [ 800 | "0\n", 801 | "1\n", 802 | "2\n", 803 | "3\n", 804 | "4\n" 805 | ] 806 | } 807 | ], 808 | "source": [ 809 | "num=5\n", 810 | "res=0; num2=1\n", 811 | "for i in range(5):\n", 812 | " if i == 0:\n", 813 | " print(0)\n", 814 | " else:\n", 815 | " res = res + num2\n", 816 | " Fn = Fn-1 + Fn-2 \n", 817 | " F5 = f4 + f3\n", 818 | " print(res)\n" 819 | ] 820 | }, 821 | { 822 | "cell_type": "code", 823 | "execution_count": null, 824 | "metadata": {}, 825 | "outputs": [], 826 | "source": [] 827 | }, 828 | { 829 | "cell_type": "markdown", 830 | "metadata": {}, 831 | "source": [ 832 | "This program is for comprehension" 833 | ] 834 | }, 835 | { 836 | "cell_type": "code", 837 | "execution_count": 110, 838 | "metadata": {}, 839 | "outputs": [ 840 | { 841 | "name": "stdout", 842 | "output_type": "stream", 843 | "text": [ 844 | "[0, 2, 4, 6, 8, 10]\n", 845 | "[1, 3, 5, 7, 9]\n" 846 | ] 847 | } 848 | ], 849 | "source": [ 850 | "\n", 851 | "\n", 852 | "# evenlist=[i for i in range(11) if i%2 == 0]\n", 853 | "print([i for i in range(11) if i%2 == 0])\n", 854 | "oddlist=[i for i in range(11) if i%2 != 0]\n", 855 | "print(oddlist)\n", 856 | "\n", 857 | "\n" 858 | ] 859 | }, 860 | { 861 | "cell_type": "code", 862 | "execution_count": 23, 863 | "metadata": {}, 864 | "outputs": [ 865 | { 866 | "name": "stdout", 867 | "output_type": "stream", 868 | "text": [ 869 | "[[0, 1, 2], [0, 1, 2], [0, 1, 2]]\n" 870 | ] 871 | } 872 | ], 873 | "source": [ 874 | "mat= [[j for j in range(3)] for i in range(3) if i<3]\n", 875 | "print(mat)" 876 | ] 877 | }, 878 | { 879 | "cell_type": "code", 880 | "execution_count": 24, 881 | "metadata": {}, 882 | "outputs": [ 883 | { 884 | "name": "stdout", 885 | "output_type": "stream", 886 | "text": [ 887 | "['banana', 'grapes', 'mango', 'orange']\n" 888 | ] 889 | } 890 | ], 891 | "source": [ 892 | "fruits =['banana','grapes','apple','mango','orange']\n", 893 | "newlist = [x for x in fruits if x != 'apple']\n", 894 | "print(newlist)\n" 895 | ] 896 | }, 897 | { 898 | "cell_type": "code", 899 | "execution_count": 25, 900 | "metadata": {}, 901 | "outputs": [ 902 | { 903 | "name": "stdout", 904 | "output_type": "stream", 905 | "text": [ 906 | "['sold out', 'grapes', 'apple', 'mango', 'orange']\n" 907 | ] 908 | } 909 | ], 910 | "source": [ 911 | "newlist = [x 
if x != 'banana' else 'sold out' for x in fruits ]\n", 912 | "print(newlist)" 913 | ] 914 | }, 915 | { 916 | "cell_type": "code", 917 | "execution_count": 26, 918 | "metadata": {}, 919 | "outputs": [ 920 | { 921 | "name": "stdout", 922 | "output_type": "stream", 923 | "text": [ 924 | "['G', 'e', 't', ' ', 's', 'e', 't', ' ', 'g', 'o']\n" 925 | ] 926 | } 927 | ], 928 | "source": [ 929 | "list1=[]\n", 930 | "for char in 'Get set go':\n", 931 | " list1.append(char)\n", 932 | "print(list1)" 933 | ] 934 | }, 935 | { 936 | "cell_type": "code", 937 | "execution_count": 33, 938 | "metadata": {}, 939 | "outputs": [ 940 | { 941 | "name": "stdout", 942 | "output_type": "stream", 943 | "text": [ 944 | "['Dharwad', [1, 2], 30, 40]\n" 945 | ] 946 | } 947 | ], 948 | "source": [ 949 | "list1=['Dharwad',[1,2],30,40,50,60,70,80]\n", 950 | "\n", 951 | "new=[list1[i] for i in range(len(list1)) if i<4 ]\n", 952 | "print(new)" 953 | ] 954 | }, 955 | { 956 | "cell_type": "code", 957 | "execution_count": null, 958 | "metadata": {}, 959 | "outputs": [], 960 | "source": [] 961 | } 962 | ], 963 | "metadata": { 964 | "kernelspec": { 965 | "display_name": "Python 3.8.5 ('base')", 966 | "language": "python", 967 | "name": "python3" 968 | }, 969 | "language_info": { 970 | "codemirror_mode": { 971 | "name": "ipython", 972 | "version": 3 973 | }, 974 | "file_extension": ".py", 975 | "mimetype": "text/x-python", 976 | "name": "python", 977 | "nbconvert_exporter": "python", 978 | "pygments_lexer": "ipython3", 979 | "version": "3.8.5" 980 | }, 981 | "orig_nbformat": 4, 982 | "vscode": { 983 | "interpreter": { 984 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 985 | } 986 | } 987 | }, 988 | "nbformat": 4, 989 | "nbformat_minor": 2 990 | } 991 | -------------------------------------------------------------------------------- /module2/inh.py: -------------------------------------------------------------------------------- 1 | # Python code to demonstrate how parent constructors 2 | # are called. 
3 | 4 | # parent class 5 | class Person(object): 6 | 7 | # __init__ is known as the constructor 8 | def __init__(self, name, idnumber): 9 | self.name = name 10 | self.idnumber = idnumber 11 | 12 | def display(self): 13 | print(self.name) 14 | print(self.idnumber) 15 | 16 | def details(self): 17 | print("My name is {}".format(self.name)) 18 | print("IdNumber: {}".format(self.idnumber)) 19 | 20 | # child class 21 | class Employee(Person): 22 | def __init__(self, name, idnumber, salary, post): 23 | self.salary = salary 24 | self.post = post 25 | 26 | # invoking the __init__ of the parent class 27 | Person.__init__(self, name, idnumber) 28 | 29 | def details(self): 30 | print("My name is {}".format(self.name)) 31 | print("IdNumber: {}".format(self.idnumber)) 32 | print("Post: {}".format(self.post)) 33 | 34 | 35 | # creation of an object variable or an instance 36 | a = Employee('Rahul', 886012, 200000, "Intern") 37 | 38 | # calling a function of the class Person using 39 | # its instance 40 | a.display() 41 | a.details() -------------------------------------------------------------------------------- /module2/kle_university_record.csv: -------------------------------------------------------------------------------- 1 | Branch,cgpa,name,year 2 | COE,9.0,Nikhil,2 3 | COE,9.1,Sanchit,2 4 | IT,9.3,Aditya,2 5 | SE,9.5,Sagar,1 6 | MCE,7.8,Prateek,3 7 | EP,9.1,Sahil,2 8 | -------------------------------------------------------------------------------- /module2/module3.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "Module 3 Business Stats" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 11, 13 | "metadata": {}, 14 | "outputs": [ 15 | { 16 | "name": "stdout", 17 | "output_type": "stream", 18 | "text": [ 19 | "The mode of list values:7\n" 20 | ] 21 | } 22 | ], 23 | "source": [ 24 | "import statistics\n", 25 | "\n", 26 | "values= [2,3,7,7,7,7,7,4,2,4,6,4,4,2,4] \n", 27 | "print('The mode of list values:',end='')\n", 28 | "print(statistics.mode(values))\n", 29 | "\n" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": 12, 35 | "metadata": {}, 36 | "outputs": [ 37 | { 38 | "name": "stdout", 39 | "output_type": "stream", 40 | "text": [ 41 | "The median of list values:19.9\n" 42 | ] 43 | } 44 | ], 45 | "source": [ 46 | "\n", 47 | "import statistics\n", 48 | "\n", 49 | "values= [15,19,20.8,21.3] \n", 50 | "print('The median of list values:',end='')\n", 51 | "print(statistics.median(values))\n" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 24, 57 | "metadata": {}, 58 | "outputs": [ 59 | { 60 | "name": "stdout", 61 | "output_type": "stream", 62 | "text": [ 63 | "The median of list values:1.1760204081632653\n" 64 | ] 65 | } 66 | ], 67 | "source": [ 68 | "import statistics\n", 69 | "\n", 70 | "values1= [15,19.2,20.8,21.3,10.5,15.2] \n", 71 | "values2=[2.75, 1.75, 1.25, 0.25, 0.5, 1.25, 3.5]\n", 72 | "print('The median of list values:',end='')\n", 73 | "print(statistics.pvariance(values2))\n" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 25, 79 | "metadata": {}, 80 | "outputs": [ 81 | { 82 | "name": "stdout", 83 | "output_type": "stream", 84 | "text": [ 85 | "The median of list values:19\n" 86 | ] 87 | } 88 | ], 89 | "source": [ 90 | "import statistics\n", 91 | "\n", 92 | "values= [15,19,20.8,21.3] \n", 93 | "print('The median of list values:',end='')\n", 94 | 
"print(statistics.median_low(values))\n" 95 | ] 96 | } 97 | ], 98 | "metadata": { 99 | "kernelspec": { 100 | "display_name": "Python 3.8.5 ('base')", 101 | "language": "python", 102 | "name": "python3" 103 | }, 104 | "language_info": { 105 | "codemirror_mode": { 106 | "name": "ipython", 107 | "version": 3 108 | }, 109 | "file_extension": ".py", 110 | "mimetype": "text/x-python", 111 | "name": "python", 112 | "nbconvert_exporter": "python", 113 | "pygments_lexer": "ipython3", 114 | "version": "3.8.5" 115 | }, 116 | "orig_nbformat": 4, 117 | "vscode": { 118 | "interpreter": { 119 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 120 | } 121 | } 122 | }, 123 | "nbformat": 4, 124 | "nbformat_minor": 2 125 | } 126 | -------------------------------------------------------------------------------- /module2/module_test.py: -------------------------------------------------------------------------------- 1 | import time 2 | from module1 import timestamp 3 | 4 | print("Testing Python sleep()...") 5 | 6 | # modularity 7 | timestamp.Timer() 8 | time.sleep(3) 9 | timestamp.Timer() 10 | -------------------------------------------------------------------------------- /module2/myfile.txt: -------------------------------------------------------------------------------- 1 | Hello 2 | This is Dharwad 3 | This is Dharwad 4 | This is London 5 | -------------------------------------------------------------------------------- /module2/netflix.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/module2/netflix.csv -------------------------------------------------------------------------------- /module2/ppt/Advanced Concepts Using Datastructures.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/module2/ppt/Advanced Concepts Using Datastructures.pptx -------------------------------------------------------------------------------- /module2/ppt/Functions.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/module2/ppt/Functions.pptx -------------------------------------------------------------------------------- /module2/ppt/Python Classes.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/module2/ppt/Python Classes.pptx -------------------------------------------------------------------------------- /module2/ppt/Python Scripting.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/module2/ppt/Python Scripting.pptx -------------------------------------------------------------------------------- /module2/ppt/~$Advanced Concepts Using Datastructures.pptx: -------------------------------------------------------------------------------- 1 | Dell Dell -------------------------------------------------------------------------------- /module2/ppt/~$Python Classes.pptx: -------------------------------------------------------------------------------- 1 | Dell Dell 
-------------------------------------------------------------------------------- /module2/ppt/~$Python Scripting.pptx: -------------------------------------------------------------------------------- 1 | Dell Dell -------------------------------------------------------------------------------- /module2/record1.csv: -------------------------------------------------------------------------------- 1 | branch,cgpa,name,year 2 | COE,9.0,Nikhil,2 3 | COE,9.1,Sanchit,2 4 | IT,9.3,Aditya,2 5 | SE,9.5,Sagar,1 6 | MCE,7.8,Prateek,3 7 | EP,9.1,Sahil,2 8 | -------------------------------------------------------------------------------- /module2/records.csv: -------------------------------------------------------------------------------- 1 | Name,Branch,Year,CGPA 2 | Nikhil,CS,2,9.0 3 | Sanchit,CE,2,9.1 4 | Yusuf,EC,2,9.3 5 | Sagar,CS,1,9.5 6 | Ijaz,ME,3,7.8 7 | Hemanth,CE,2,9.1 8 | Pushpa,EC,2,9.1 9 | -------------------------------------------------------------------------------- /module2/set_operators.py: -------------------------------------------------------------------------------- 1 | # Creating two sets 2 | set1 = set() 3 | set2 = set() 4 | 5 | # Adding elements to set1 6 | for i in range(1, 6): 7 | set1.add(i) 8 | 9 | # Adding elements to set2 10 | for i in range(3, 8): 11 | set2.add(i) 12 | 13 | print("Set1 = ", set1) 14 | print("Set2 = ", set2) 15 | print("\n") 16 | 17 | # Union of set1 and set2 18 | set3 = set1 | set2 # set1.union(set2) 19 | print("Union of Set1 & Set2: Set3 = ", set3) 20 | 21 | # Intersection of set1 and set2 22 | set4 = set1 & set2 # set1.intersection(set2) 23 | print("Intersection of Set1 & Set2: Set4 = ", set4) 24 | print("\n") 25 | 26 | # Checking relation between set3 and set4 27 | if set3 > set4: # set3.issuperset(set4) 28 | print("Set3 is superset of Set4") 29 | elif set3 < set4: # set3.issubset(set4) 30 | print("Set3 is subset of Set4") 31 | else: # set3 == set4 32 | print("Set3 is same as Set4") 33 | 34 | # displaying relation between set4 and set3 35 | if set4 < set3: # set4.issubset(set3) 36 | print("Set4 is subset of Set3") 37 | print("\n") 38 | 39 | # difference between set3 and set4 40 | set5 = set3 - set4 41 | print("Elements in Set3 and not in Set4: Set5 = ", set5) 42 | print("\n") 43 | 44 | # check if set4 and set5 are disjoint sets 45 | if set4.isdisjoint(set5): 46 | print("Set4 and Set5 have nothing in common\n") 47 | 48 | # Removing all the values of set5 49 | set5.clear() 50 | 51 | print("After applying clear on sets Set5: ") 52 | print("Set5 = ", set5) -------------------------------------------------------------------------------- /module2/sort.py: -------------------------------------------------------------------------------- 1 | 2 | #Q3 : Sorting in Ascending order 3 | # a =[1,0,10,30,23,78,100,4,2,43,52,67] 4 | # a.sort() 5 | # print(a) 6 | 7 | a =[1,0,10,30,23,78,100,4,2,43,52,67] 8 | # a =[1,10,0,30,23,78,100,4,2,43,52,67] 9 | # a =[1,10,30,0,23,78,100,4,2,43,52,67] 10 | # a =[1,10,30,23,0,78,100,4,2,43,52,67] 11 | # a =[1,10,30,23,78,0,100,4,2,43,52,67] 12 | # a =[1,10,30,23,78,100,0,4,2,43,52,67] 13 | # a =[1,10,30,23,78,100,4,0,2,43,52,67] 14 | # a =[1,10,30,23,78,100,4,2,0,43,52,67] 15 | # a =[1,10,30,23,78,100,4,2,43,0,52,67] 16 | # a =[1,10,30,23,78,100,4,2,43,52,0,67] 17 | # a =[1,10,30,23,78,100,4,2,43,52,67,0] 18 | n = len(a) 19 | # (0 till 11) 20 | for i in range(n): 21 | print('i updated going for next round') 22 | for j in range(n): 23 | print('i=',i,' j=',j) 24 | if(a[i] < a[j]): 25 | s = a[i] 26 | a[i] = a[j] 27 | a[j] = s 28 |
print(a) 29 | -------------------------------------------------------------------------------- /module2/studentrecords.csv: -------------------------------------------------------------------------------- 1 | Name,Branch,Year,CGPA 2 | Nikhil,EC,2,9.0 3 | Sanchit,CE,2,9.1 4 | Yusuf,EC,2,9.3 5 | Sagar,CS,1,9.5 6 | Ijaz,ME,3,7.8 7 | Hemanth,CE,2,9.1 8 | Pushpa,EC,2,9.1 9 | -------------------------------------------------------------------------------- /module2/university_records.csv: -------------------------------------------------------------------------------- 1 | name,branch,year,cgpa 2 | Nikhil,COE,2,9.0 3 | Sanchit,COE,2,9.1 4 | Aditya,IT,2,9.3 5 | Sagar,SE,1,9.5 6 | Prateek,MCE,3,7.8 7 | Sahil,EP,2,9.1 8 | -------------------------------------------------------------------------------- /my_first_file.txt: -------------------------------------------------------------------------------- 1 | Hello 2 | This is Dharwad 3 | This is Hubli 4 | This is London 5 | Hello 6 | This is Dharwad 7 | This is Hubli 8 | This is London 9 | Hello 10 | This is Dharwad 11 | This is Hubli 12 | This is London 13 | -------------------------------------------------------------------------------- /myfile.txt: -------------------------------------------------------------------------------- 1 | Hello 2 | This is Dharwad 3 | This is Hubli 4 | This is London 5 | -------------------------------------------------------------------------------- /test.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "ename": "AttributeError", 10 | "evalue": "module 'tensorflow._api.v2.compat.v2.__internal__' has no attribute 'register_load_context_function'", 11 | "output_type": "error", 12 | "traceback": [ 13 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 14 | "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", 15 | "Input \u001b[1;32mIn [2]\u001b[0m, in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[1;32m----> 1\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtensorflow\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mtf\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;28mprint\u001b[39m(tf\u001b[38;5;241m.\u001b[39m__version__)\n", 16 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\tensorflow\\__init__.py:469\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 467\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mhasattr\u001b[39m(_current_module, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mkeras\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[0;32m 468\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 469\u001b[0m \u001b[43m_keras\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_load\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 470\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m:\n\u001b[0;32m 471\u001b[0m \u001b[38;5;28;01mpass\u001b[39;00m\n", 17 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\tensorflow\\python\\util\\lazy_loader.py:41\u001b[0m, in \u001b[0;36mLazyLoader._load\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 39\u001b[0m \u001b[38;5;124;03m\"\"\"Load the module and insert it into the parent's globals.\"\"\"\u001b[39;00m\n\u001b[0;32m 40\u001b[0m \u001b[38;5;66;03m# Import the target module and insert 
it into the parent's namespace\u001b[39;00m\n\u001b[1;32m---> 41\u001b[0m module \u001b[38;5;241m=\u001b[39m \u001b[43mimportlib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;18;43m__name__\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 42\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_parent_module_globals[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_local_name] \u001b[38;5;241m=\u001b[39m module\n\u001b[0;32m 44\u001b[0m \u001b[38;5;66;03m# Emit a warning if one was specified\u001b[39;00m\n", 18 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\importlib\\__init__.py:127\u001b[0m, in \u001b[0;36mimport_module\u001b[1;34m(name, package)\u001b[0m\n\u001b[0;32m 125\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[0;32m 126\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m--> 127\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n", 19 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\__init__.py:21\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 15\u001b[0m \u001b[38;5;124;03m\"\"\"Implementation of the Keras API, the high-level API of TensorFlow.\u001b[39;00m\n\u001b[0;32m 16\u001b[0m \n\u001b[0;32m 17\u001b[0m \u001b[38;5;124;03mDetailed documentation and user guides are available at\u001b[39;00m\n\u001b[0;32m 18\u001b[0m \u001b[38;5;124;03m[keras.io](https://keras.io).\u001b[39;00m\n\u001b[0;32m 19\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m distribute\n\u001b[1;32m---> 21\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m models\n\u001b[0;32m 22\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01minput_layer\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Input\n\u001b[0;32m 23\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msequential\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Sequential\n", 20 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\models\\__init__.py:18\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;66;03m# Copyright 2022 The TensorFlow Authors. 
All Rights Reserved.\u001b[39;00m\n\u001b[0;32m 2\u001b[0m \u001b[38;5;66;03m#\u001b[39;00m\n\u001b[0;32m 3\u001b[0m \u001b[38;5;66;03m# Licensed under the Apache License, Version 2.0 (the \"License\");\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 13\u001b[0m \u001b[38;5;66;03m# limitations under the License.\u001b[39;00m\n\u001b[0;32m 14\u001b[0m \u001b[38;5;66;03m# ==============================================================================\u001b[39;00m\n\u001b[0;32m 15\u001b[0m \u001b[38;5;124;03m\"\"\"Keras models API.\"\"\"\u001b[39;00m\n\u001b[1;32m---> 18\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mfunctional\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Functional\n\u001b[0;32m 19\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msequential\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Sequential\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mtraining\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Model\n", 21 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\engine\\functional.py:34\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 32\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m input_spec\n\u001b[0;32m 33\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m node \u001b[38;5;28;01mas\u001b[39;00m node_module\n\u001b[1;32m---> 34\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m training \u001b[38;5;28;01mas\u001b[39;00m training_lib\n\u001b[0;32m 35\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mengine\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m training_utils\n\u001b[0;32m 36\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n", 22 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\engine\\training.py:45\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 43\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mexperimental\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m saving_lib\n\u001b[0;32m 44\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m 
\u001b[38;5;28;01mimport\u001b[39;00m hdf5_format\n\u001b[1;32m---> 45\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m save\n\u001b[0;32m 46\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m saving_utils\n\u001b[0;32m 47\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n", 23 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\saving\\legacy\\save.py:24\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 22\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m serialization\n\u001b[0;32m 23\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m load \u001b[38;5;28;01mas\u001b[39;00m saved_model_load\n\u001b[1;32m---> 24\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m load_context\n\u001b[0;32m 25\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaving\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlegacy\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msaved_model\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m save \u001b[38;5;28;01mas\u001b[39;00m saved_model_save\n\u001b[0;32m 26\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkeras\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mutils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m traceback_utils\n", 24 | "File \u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\tf\\lib\\site-packages\\keras\\saving\\legacy\\saved_model\\load_context.py:68\u001b[0m, in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 64\u001b[0m \u001b[38;5;124;03m\"\"\"Returns whether under a load context.\"\"\"\u001b[39;00m\n\u001b[0;32m 65\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m _load_context\u001b[38;5;241m.\u001b[39min_load_context()\n\u001b[1;32m---> 68\u001b[0m \u001b[43mtf\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__internal__\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mregister_load_context_function\u001b[49m(in_load_context)\n", 25 | "\u001b[1;31mAttributeError\u001b[0m: module 'tensorflow._api.v2.compat.v2.__internal__' has no attribute 'register_load_context_function'" 26 | ] 
27 | } 28 | ], 29 | "source": [ 30 | "import tensorflow as tf\n", 31 | "print(tf.__version__)" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [] 40 | } 41 | ], 42 | "metadata": { 43 | "kernelspec": { 44 | "display_name": "Python 3 (ipykernel)", 45 | "language": "python", 46 | "name": "python3" 47 | }, 48 | "language_info": { 49 | "codemirror_mode": { 50 | "name": "ipython", 51 | "version": 3 52 | }, 53 | "file_extension": ".py", 54 | "mimetype": "text/x-python", 55 | "name": "python", 56 | "nbconvert_exporter": "python", 57 | "pygments_lexer": "ipython3", 58 | "version": "3.8.5" 59 | }, 60 | "vscode": { 61 | "interpreter": { 62 | "hash": "ad2bdc8ecc057115af97d19610ffacc2b4e99fae6737bb82f5d7fb13d2f2c186" 63 | } 64 | } 65 | }, 66 | "nbformat": 4, 67 | "nbformat_minor": 2 68 | } 69 | -------------------------------------------------------------------------------- /utility/tensorflow.yml: -------------------------------------------------------------------------------- 1 | name: tensorflow 2 | 3 | dependencies: 4 | - python=3.8 5 | - pip>=19.0 6 | - jupyter 7 | - scikit-learn 8 | - scipy 9 | - pandas 10 | - pandas-datareader 11 | - matplotlib 12 | - pillow 13 | - tqdm 14 | - requests 15 | - h5py 16 | - pyyaml 17 | - flask 18 | - boto3 19 | - pip: 20 | - tensorflow==2.4 21 | - bayesian-optimization 22 | - gym 23 | - kaggle 24 | 25 | -------------------------------------------------------------------------------- /utility/tensorflow_tutorial.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RashmivernekarN/ai_with_python_keonics/15bb9abfbf27ff47e26f092e91f8050f555fa31b/utility/tensorflow_tutorial.pdf --------------------------------------------------------------------------------