├── .gitignore ├── Notebooks ├── 1. Making Sure Everything Works │ └── Activity 1 - Make Sure Everything is Working!.json ├── 2. Swift Basics │ ├── Generics and Protocols - Complete.ipynb │ ├── Generics and Protocols - Starter.ipynb │ ├── Homework and Reference │ │ └── Swift Basics Reference │ ├── Python Examples - Comparison.ipynb │ ├── Swift Examples - Complete.ipynb │ ├── Swift Examples - Starter.ipynb │ ├── test.csv │ └── test.json ├── 3. Training a Model │ ├── Meet TensorFlow! Training a Model - Complete.ipynb │ └── Meet TensorFlow! Training a Model - Starter.ipynb ├── 4. Swift and Python │ ├── Complete - Swift and Python.json │ └── Starter - Swift and Python.ipynb ├── 5. Building a GAN │ ├── Complete - Building a GAN.ipynb │ └── Starter - Building a GAN.ipynb ├── 6. Bigger Example │ ├── Complete - Linear Regression.ipynb │ └── Starter - Linear Regression.ipynb └── Extras │ ├── Extra 1 - Temperature.ipynb │ └── Extra 2 - Raw.ipynb ├── README.md ├── agenda.md ├── presentation.md └── presentation_images ├── Swift_logo.png ├── Tensorflow_logo.png ├── add-to-drive.png ├── book.jpg ├── book3.png ├── books.png ├── booksold.png ├── connect-colab.jpeg ├── docker.jpg ├── hobart.jpg ├── open-in-colab.png ├── open-in-playground.png ├── open-with-installed-colab.jpeg ├── open-with.jpeg ├── tasmania.png └── team.png /Notebooks/1. Making Sure Everything Works/Activity 1 - Make Sure Everything is Working!.json: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""},"colab":{"name":"Activity 1 - Make Sure Everything is Working!","provenance":[],"private_outputs":true,"collapsed_sections":[]}},"cells":[{"cell_type":"markdown","metadata":{"id":"Vc8EB8_AMYAX","colab_type":"text"},"source":["# Activity 1 - Make Sure Everything is Working!"]},{"cell_type":"markdown","metadata":{"id":"Jhsue-oyMYAa","colab_type":"text"},"source":["First, we want to make sure everything is working correctly for you. Whether you're using Google Colab, or a local install, we want to step through this notebook with you\n","\n","Some of the things you'll see here are _spoilers_ of sorts, for later bits of the session. Never fear, we'll discuss it all!"]},{"cell_type":"markdown","metadata":{"id":"4Jt94qdYMYAd","colab_type":"text"},"source":["First, let's `import TensorFlow` (nothing should happen, but it should run successfully). Once that's done, you have the full Swift for TensorFlow library at your disposal. 
We're going to do that, and then not use it:"]},{"cell_type":"code","metadata":{"id":"dAbNrt3-MYAf","colab_type":"code","colab":{}},"source":["import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"fZ6eIXSaMYAl","colab_type":"text"},"source":["Next, define a `message` `String`, and then `print()` it:"]},{"cell_type":"code","metadata":{"id":"SWq9mST8MYAn","colab_type":"code","colab":{}},"source":["let message = \"Welcome to TensorFlow World 2019!\"\n","print(message)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"X4-NLyuPMYAt","colab_type":"text"},"source":["Then `print()` a little `String`, and define an array of presenters:"]},{"cell_type":"code","metadata":{"id":"lv6fAjIlMYAv","colab_type":"code","colab":{}},"source":["print(\"This session is presented by: \")\n","var presenters = [\"Mars Geldard\", \"Tim Nugent\", \"Paris Buttfield-Addison\"]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"LQSnLm3JMYAz","colab_type":"text"},"source":["And use a `for` loop to `print()` each presenter in the array:"]},{"cell_type":"code","metadata":{"id":"XpY4SIfjMYA0","colab_type":"code","colab":{}},"source":["for presenter in presenters {\n"," print(presenter)\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"PLFq7w3HMYA3","colab_type":"text"},"source":["Now, `import Python`:"]},{"cell_type":"code","metadata":{"id":"HAoxsP9CMYA6","colab_type":"code","colab":{}},"source":["import Python"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"DeLUvbBHMYA9","colab_type":"text"},"source":["And `print()` the `Python.version`:"]},{"cell_type":"code","metadata":{"id":"1BvGKH6aMYA-","colab_type":"code","colab":{}},"source":["print(Python.version)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"a6tNVDk5Ms-M","colab_type":"text"},"source":["That's enough for now! If all the cells above here work, then you're ready to go."]}]} -------------------------------------------------------------------------------- /Notebooks/2. 
Swift Basics/Generics and Protocols - Complete.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Generics and Protocols.ipynb","provenance":[],"collapsed_sections":[]},"kernelspec":{"name":"swift","display_name":"Swift"}},"cells":[{"cell_type":"markdown","metadata":{"id":"Iv7zRuEC_cfA","colab_type":"text"},"source":["# Complete - Generics and Protocols"]},{"cell_type":"markdown","metadata":{"id":"4he_soT3lXtp","colab_type":"text"},"source":["## Generics"]},{"cell_type":"markdown","metadata":{"id":"VfxnpbmYlaXj","colab_type":"text"},"source":["This material is derivative of the Swift documentation."]},{"cell_type":"code","metadata":{"id":"kZRlD4utdPuX","colab_type":"code","colab":{}},"source":["struct IntStack\n","{\n"," var items = [Int]()\n"," mutating func push(_ item: Int) \n"," {\n"," items.append(item)\n"," }\n"," mutating func pop() -> Int \n"," {\n"," return items.removeLast()\n"," }\n","\n"," func peek() -> Int\n"," {\n"," return items[0]\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"MYA_akgHlmyn","colab_type":"code","colab":{}},"source":["var intStack = IntStack()\n","intStack.push(9)\n","print(intStack.pop())"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"p5rPL5QMlrAp","colab_type":"code","colab":{}},"source":["struct Stack \n","{\n"," var items = [Element]()\n"," mutating func push(_ item: Element) \n"," {\n"," items.append(item)\n"," }\n"," mutating func pop() -> Element \n"," {\n"," return items.removeLast()\n"," }\n","\n"," func peek() -> Element\n"," {\n"," return items[0]\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"jgSgpEOSlvmn","colab_type":"code","colab":{}},"source":["var stack = Stack()\n","stack.push(9)\n","print(stack.pop())\n","\n","var stringStack = Stack()\n","stringStack.push(\"hello\")\n","stringStack.push(\"TensorFlow World\")\n","\n","print(stringStack.pop())\n","print(stringStack.pop())"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"f8Pw8uE8lyyi","colab_type":"code","colab":{}},"source":["struct AnyStack\n","{\n"," var items = [Any]()\n"," mutating func push(_ item: Any) \n"," {\n"," items.append(item)\n"," }\n"," mutating func pop() -> Any\n"," {\n"," return items.removeLast()\n"," }\n","\n"," func peek() -> Any\n"," {\n"," return items[0]\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"8JPNn3Bnl0P1","colab_type":"code","colab":{}},"source":["var anyStack = AnyStack()\n","anyStack.push(1)\n","anyStack.push(\"hello\")\n","//print(anyStack.pop())\n","//print(anyStack.pop())\n","\n","let top = anyStack.pop()\n","let bottom = anyStack.pop()\n","\n","if type(of: top) == type(of: bottom)\n","{\n"," print(\"yes\")\n","}\n","else\n","{\n"," print(\"nope\")\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"yRkqyamGif6L","colab_type":"code","outputId":"f76cda5a-078f-4cac-ee89-0dc758f34add","colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["//bottom + 2"],"execution_count":0,"outputs":[{"output_type":"execute_result","data":{"text/plain":["true\n"]},"metadata":{"tags":[]},"execution_count":17}]},{"cell_type":"markdown","metadata":{"id":"BIKqqE1mlegH","colab_type":"text"},"source":["## Protocol-oriented 
Programming"]},{"cell_type":"code","metadata":{"id":"x0isflb5tD3E","colab_type":"code","outputId":"ed08c6c9-b650-4347-f7b5-a24d2d514d05","colab":{"base_uri":"https://localhost:8080/","height":53}},"source":["protocol Car\n","{\n"," var price: Int { get }\n"," func turnOn()\n"," mutating func drive()\n","}\n","\n","protocol Electric\n","{\n"," mutating func recharge()\n"," // percentage of the battery level, 0-100%.\n"," var batteryLevel: Int { get set }\n","}\n","\n","typealias Litre = Int\n","protocol Petrol\n","{\n"," let tankCapacity: Litre\n"," mutating func refill()\n"," // # of liters the car is holding, varies b/w models.\n"," var fuelLevel: Litre { get set }\n","}\n","\n","extension Petrol\n","{\n"," mutating func refill()\n"," {\n"," fuelLevel = tankCapacity\n"," }\n","}\n","\n","struct Corolla: Car, Petrol\n","{\n"," var price: Int\n"," var fuelLevel: Litre\n","\n"," mutating func drive()\n"," {\n"," if fuelLevel > 1\n"," {\n"," fuelLevel -= 2\n"," print(\"driving\")\n"," }\n"," else\n"," {\n"," print(\"cant drive without fuel!\")\n"," }\n"," }\n"," func turnOn()\n"," {\n"," print(\"brm brm\")\n"," }\n","}\n","\n","var corolla = Corolla(price: 15_000, fuelLevel: 0, tankCapacity: 50)\n","corolla.refill()\n","corolla.drive()\n","\n","struct Robot: Electric\n","{\n"," var batteryLevel: Int\n"," mutating func recharge()\n"," {\n"," batteryLevel = 10\n"," }\n","\n"," func takeOverWorld()\n"," {\n"," if batteryLevel > 0\n"," {\n"," print(\"Kill all humans\")\n"," }\n"," else\n"," {\n"," print(\"error, not enough power!\")\n"," }\n"," }\n","}\n","\n","var terminator = Robot(batteryLevel: 0)\n","\n","terminator.takeOverWorld()"],"execution_count":0,"outputs":[{"output_type":"stream","text":["driving\r\n","error, not enough power!\r\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"Qox12MB008lX","colab_type":"code","outputId":"96bf6418-b979-4bee-bcbb-b7c8f79927f9","colab":{"base_uri":"https://localhost:8080/","height":71}},"source":["protocol Loveable\n","{\n"," func loveMe()\n","}\n","extension Int: Loveable\n","{\n"," func loveMe()\n"," {\n"," print(\"You are the best, number \\(self)!\")\n"," }\n","}\n","extension Double: Loveable\n","{\n"," func loveMe()\n"," {\n"," print(\"No one could ever love floating point, damn you IEEE 754!\")\n"," }\n","}\n","\n","6.loveMe()\n","6.1.loveMe()\n","\n","let loving: Loveable = 2\n","loving.loveMe()"],"execution_count":0,"outputs":[{"output_type":"stream","text":["You are the best, number 6!\r\n","No one could ever love floating point, damn you IEEE 754!\r\n","You are the best, number 2!\r\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"40Z272nQ3qqw","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/2. 
Swift Basics/Generics and Protocols - Starter.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Starter - Generics and Protocols.ipynb","provenance":[],"collapsed_sections":[]},"kernelspec":{"name":"swift","display_name":"Swift"}},"cells":[{"cell_type":"markdown","metadata":{"id":"Iv7zRuEC_cfA","colab_type":"text"},"source":["# Starter - Generics and Protocols"]},{"cell_type":"markdown","metadata":{"id":"4he_soT3lXtp","colab_type":"text"},"source":["## Generics"]},{"cell_type":"markdown","metadata":{"id":"VfxnpbmYlaXj","colab_type":"text"},"source":["This material is derivative of the Swift documentation."]},{"cell_type":"code","metadata":{"id":"kZRlD4utdPuX","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"MYA_akgHlmyn","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"p5rPL5QMlrAp","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"jgSgpEOSlvmn","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"f8Pw8uE8lyyi","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"8JPNn3Bnl0P1","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"yRkqyamGif6L","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"BIKqqE1mlegH","colab_type":"text"},"source":["## Protocol-oriented Programming"]},{"cell_type":"code","metadata":{"id":"x0isflb5tD3E","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"Qox12MB008lX","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/2. Swift Basics/Python Examples - Comparison.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Complete - Python Examples.ipynb","provenance":[],"private_outputs":true,"collapsed_sections":[]},"kernelspec":{"name":"python3","display_name":"Python 3"}},"cells":[{"cell_type":"markdown","metadata":{"id":"530ty549xWyh","colab_type":"text"},"source":["# Complete - Python Examples"]},{"cell_type":"markdown","metadata":{"id":"0ZTFRXdSfbmC","colab_type":"text"},"source":["## Basic Python Syntax"]},{"cell_type":"code","metadata":{"id":"SZJABB05gc2-","colab_type":"code","colab":{}},"source":["# Comments! 
(very important)\n","\n","'''\n","Multi-line\n","comments\n","'''"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"NGDn9tOshTAG","colab_type":"code","colab":{}},"source":["# Variables\n","\n","variable = \"value\""],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"XJxzbUQoicUd","colab_type":"code","colab":{}},"source":["# Printing\n","print(\"Hello World\")\n","\n","string1 = \"Hello World\"\n","print(string1)\n","\n","string2 = \"World\"\n","print(\"Hello\", string2)\n","\n","pi = 3.14\n","print(\"The value of pi is \" + str(pi) + \" approximately.\")"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"L0L8jgOjfUwI","colab_type":"code","colab":{}},"source":["# Collections\n","\n","collection = [1, 2, 3, 4, 5]\n","collection += [6, 7]\n","collection.append(8)\n","print(collection)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"YGI5c0DefNAQ","colab_type":"code","colab":{}},"source":["# Functions and optionals\n","\n","def scaled(collection, multiplier):\n"," new_collection = []\n","\n"," for element in collection:\n"," value = element if element is not None else 0\n"," new_collection.append(value * multiplier)\n","\n"," return new_collection\n","\n","values = [1, None, 6, 4, 8, 2]\n","print(scaled(values, 10))"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"zm7PEUPceyAm","colab_type":"code","colab":{}},"source":["# Tuples, dataclasses and classes\n","tuplet = (True, False)\n","tuplet = (False, False)\n","\n","from typing import NamedTuple\n","\n","class Dog(NamedTuple):\n"," name: str\n"," color: str\n"," gender: str\n"," age: int\n","\n","myDog = Dog(\"Argos\", \"Whippet\", \"Male\", 7)\n","\n","# Python 3.7+\n","from dataclasses import dataclass\n","\n","@dataclass\n","class NewDog:\n"," name: str\n"," color: str\n"," gender: str\n"," age: int\n","\n","myDog = NewDog(\"Argos\", \"Whippet\", \"Male\", 7)\n","\n","class Cat:\n"," def __init__(self, name, color, gender, age):\n"," self.name = name\n"," self.color = color\n"," self.gender = gender\n"," self.age = age\n","\n"," def have_birthday(self):\n"," self.age += 1\n","\n","myCat = Cat(\"Lola\", \"Gray\", \"Female\", 5)\n","myCat.have_birthday()\n","print(myCat.age)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"bh7OfBT6HBa2","colab_type":"code","colab":{}},"source":["# Enumerations\n","\n","from enum import Enum\n","\n","class Day(Enum):\n"," SUNDAY = 0\n"," MONDAY = 1\n"," TUESDAY = 2\n"," WEDNESDAY = 3\n"," THURSDAY = 4\n"," FRIDAY = 5\n"," SATURDAY = 6\n"," \n","print(\"MONDAY is day\", Day.MONDAY.value)\n","print(Day(3).name, \"is day 3\")"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"bPv72KkYT2vj","colab_type":"text"},"source":["## Example Activity Break"]},{"cell_type":"code","metadata":{"id":"bZ8bQ59GT0Qu","colab_type":"code","colab":{}},"source":["# define a class called '' with the following properties\n","# name - a string\n","# days - an integer\n","class Conference:\n"," def __init__(self, name, days):\n"," self.name = name\n"," self.days = days\n","\n"," def describe(self):\n"," print(\"A conference called\", self.name, \"that runs for\", str(self.days), \"days.\")\n","\n","# initialise an instance for TensorFlow World with a length of 4 days\n","tfw = Conference(\"TensorFlow World\", days=4)\n","\n","tfw.describe()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"JppoA3CeffIn","colab_type":"text"},"source":["## 
Common Data Tasks"]},{"cell_type":"code","metadata":{"id":"A5J9SCS6yFMC","colab_type":"code","colab":{}},"source":["# Import a JSON file\n","import json\n","\n","with open(\"/test.json\", \"r\") as json_file:\n"," json_objects = json.load(json_file)\n"," for element in json_objects:\n"," print(element)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"dwYk_vPZe6Xa","colab_type":"code","colab":{}},"source":["# Import a CSV file\n","import csv\n","\n","with open(\"/test.csv\", \"r\") as csv_file:\n"," reader = csv.reader(csv_file, delimiter=\",\")\n"," for row in reader:\n"," print(row)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"fGkEfv21yGwO","colab_type":"code","colab":{}},"source":["# Manipulating DataFrames\n","import pandas as pd\n","\n","data_frame = pd.read_csv(\"/test.csv\")\n","print(data_frame)\n","\n","# do things!"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"r0a8CEq9yNzg","colab_type":"code","colab":{}},"source":["# Cleaning Data\n","collection = [0, 6, 2, 5, 5, 1, 8, 10]\n","\n","# lambdas!\n","doubles = list(map(lambda x: x * 2, collection))\n","print(doubles)\n","\n","evens = list(filter(lambda x: x % 2 == 0, collection))\n","print(evens)\n","\n","total = 0\n","for element in collection:\n"," total += element\n","print(total)\n","\n","# function passing\n","def nonsense_function(number):\n"," return number + 4\n","\n","def apply_sporadic(collection, stride, function):\n"," new_collection = collection\n","\n"," for index, element in enumerate(collection):\n"," if index % stride == 0:\n"," new_collection[index] = function(element)\n","\n"," return collection\n","\n","thing = [1, 1, 1, 1, 1, 1, 1, 1]\n","thing = apply_sporadic(thing, 3, nonsense_function)\n","print(thing)"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/2. Swift Basics/Swift Examples - Complete.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Swift Examples - Complete.ipynb","provenance":[],"collapsed_sections":[]},"kernelspec":{"name":"swift","display_name":"Swift"}},"cells":[{"cell_type":"markdown","metadata":{"id":"jz-JORGPxMhG","colab_type":"text"},"source":["# Complete - Swift Examples"]},{"cell_type":"markdown","metadata":{"id":"IRLckjzQgUVU","colab_type":"text"},"source":["## Basic Swift Syntax"]},{"cell_type":"code","metadata":{"id":"zoLpbQ4zgVPu","colab_type":"code","colab":{}},"source":["// Comments! 
(very important)\n","\n","/* \n"," Multi-line \n"," comments\n","*/"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"ynax-4mjhUri","colab_type":"code","colab":{}},"source":["// Variables and constants\n","\n","var variable = \"value\"\n","let constant = \"value\"\n","// ⚠️ constant = \"a different value\"\n","\n","// Implicit and explicit types\n","\n","let implicitString = \"This is a String!\"\n","let implicitInteger = 1\n","let implicitDouble = 1.0\n","let explicitDouble: Double = 1\n","// ⚠️ sum = implicitInteger + implicitDouble"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"o6jnXGp2S-gq","colab_type":"code","colab":{}},"source":["// Printing\n","print(\"Hello World\")\n","\n","let string1 = \"Hello World\"\n","print(string1)\n","\n","let string2 = \"World\"\n","print(\"Hello\", string2)\n","\n","let pi = 3.14\n","print(\"The value of pi is \\(pi) approximately.\")"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"d0HwPVbGka0Y","colab_type":"code","colab":{}},"source":["// Collections\n","\n","var collection = [1, 2, 3, 4, 5]\n","collection += [6, 7]\n","collection.append(8)\n","print(collection)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"UaCceGIkl8S9","colab_type":"code","colab":{}},"source":["// Functions and optionals\n","\n","func scaled(_ collection: [Int?], multiplier: Int) -> [Int]\n","{\n"," var newCollection: [Int] = []\n","\n"," for element in collection\n"," {\n"," let value = element ?? 0\n"," newCollection.append(value * multiplier)\n"," }\n","\n"," return newCollection\n","}\n","\n","let values = [1, nil, 6, 4, 8, 2]\n","print(scaled(values, multiplier: 10))\n","// ⚠️ print(scaled(values, 10))\n","// mention function parameter labels"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"B5dOxad3RjQR","colab_type":"code","colab":{}},"source":["// A bit more on optionals\n","var stringOptional: String? = nil\n","var stringNonOptional: String = \"Hello World!\"\n","// ⚠️ stringNonOptional = nil\n","// will come back to optionals and their power in Swift throughout\n","\n","stringOptional = \"A non-nil value\"\n","\n","if var nonOptional = stringOptional\n","{\n"," print(\"stringOptional was not nil so now it is not optional\")\n","}\n","\n","guard var nonOptional = stringOptional else\n","{\n"," fatalError(\"Have to exit here because stringOptional was nil\")\n"," //print(\"Have to exit here because stringOptional was nil\")\n"," //exit(-1)\n","}\n","\n","//stringOptional = nil\n","nonOptional = stringOptional ?? 
\"New value because stringOptional was nil\"\n","\n","print(\"nonOptional is now: \\(nonOptional)\")"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"Alno5nOAsOMK","colab_type":"code","colab":{}},"source":["// Tuples, structs and classes\n","var tuplet = (true, false)\n","tuplet.0 = false\n","// ⚠️ tuplet = (true, true, true)\n","\n","var anotherTuplet = (firstElement: 0, secondElement: 5, thirdElement: 10)\n","anotherTuplet.firstElement = 1\n","// ⚠️ anotherTuplet.thirdElement = 10.0\n","\n","struct Dog\n","{\n"," let name: String\n"," let breed: String\n"," let gender: String\n"," var age: Int\n","}\n","\n","let myDog = Dog(name: \"Argos\", breed: \"Whippet\", gender: \"Male\", age: 7)\n","\n","class Cat\n","{\n"," let name: String\n"," let color: String\n"," let gender: String\n"," var age: Int\n","\n"," init(name: String, color: String, gender: String, age: Int)\n"," {\n"," self.name = name\n"," self.color = color\n"," self.gender = gender\n"," self.age = age\n"," }\n","\n"," func haveBirthday()\n"," {\n"," self.age += 1\n"," }\n","}\n","\n","let myCat = Cat(name: \"Lola\", color: \"Gray\", gender: \"Female\", age: 6)\n","myCat.haveBirthday()\n","// note \"let\" does not prevent changing components, just the overall object\n","print(\"\\(myCat.name) is \\(myCat.age) years old.\")\n","\n","// Pass by Value versus Pass by Reference\n","var originalDog = Dog(name: \"Apollo\", breed: \"Löwchen\", gender: \"Male\", age: 8)\n","var duplicateDog = originalDog\n","originalDog.age += 1\n","print(\"\\nStruct Object Mutation\\n=====================\")\n","print(\"Original age: \\(originalDog.age)\")\n","print(\"Duplicate age: \\(duplicateDog.age)\")\n","\n","var originalCat = Cat(name: \"Alexis\", color: \"Black Tabby\", gender: \"Female\", age: 4)\n","var duplicateCat = originalCat\n","duplicateCat.haveBirthday()\n","print(\"\\nClass Object Mutation\\n=====================\")\n","print(\"Original age: \\(originalCat.age)\")\n","print(\"Duplicate age: \\(duplicateCat.age)\")"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"DgHkviiTG-2m","colab_type":"code","colab":{}},"source":["// Enumerations\n","\n","enum Day: Int\n","{\n"," case sunday = 0\n"," case monday, tuesday, wednesday, thursday, friday, saturday\n","}\n","\n","print(\"monday is day \\(Day.monday.rawValue)\")\n","print(\"\\(Day(rawValue: 3)!) is day 3\")"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"MTFVMI_3T4rp","colab_type":"text"},"source":["## Example Activity Break"]},{"cell_type":"code","metadata":{"id":"0ZRvG7jeTzBu","colab_type":"code","colab":{}},"source":["// define a struct called '' with the following properties\n","// name - a string\n","// days - an integer\n","struct Conference\n","{\n"," let name: String\n"," let days: Int\n","\n"," func describe() \n"," {\n"," print(\"A conference called \\(self.name) that runs for \\(self.days) days.\")\n"," }\n","}\n","\n","// initialise an instance for TensorFlow World with a length of 4 days\n","let tfw = Conference(name: \"TensorFlow World\", days: 4)\n","\n","tfw.describe()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"EWoedQiB35iH","colab_type":"text"},"source":["## Common Data Tasks"]},{"cell_type":"code","metadata":{"id":"LBBTz1GY4AtF","colab_type":"code","colab":{}},"source":["// Import a JSON file\n","if let url = URL(string: \"file:///test.json\"),\n"," let data = try? Data(contentsOf: url, options: .mappedIfSafe),\n"," let jsonResult = try? 
JSONSerialization.jsonObject(with: data),\n"," let jsonObjects = jsonResult as? Array>\n","{\n"," for element in jsonObjects\n"," {\n"," print(element)\n"," }\n","}\n","\n","// write a function so you never have to figure it out again!\n","func getJSONResult(from path: String) -> Array>\n","{\n"," if let url = URL(string: path),\n"," let data = try? Data(contentsOf: url, options: .mappedIfSafe),\n"," let jsonResult = try? JSONSerialization.jsonObject(with: data)\n"," {\n"," return jsonResult as? Array>\n"," }\n"," \n"," return nil\n","}\n","\n","let easierJSONObjects = getJSONArray(from: \"file:///test.json\") ?? []\n","\n","// write a function with error handling so it's easier to debug\n","func getJSONResult(from path: String) -> Array>\n","{\n"," guard let url = URL(string: path) else \n"," {\n"," fatalError(\"URL could not be formed from path.\")\n"," }\n","\n"," do \n"," {\n"," let data = try Data(contentsOf: url, options: .mappedIfSafe)\n"," let jsonResult = try JSONSerialization.jsonObject(with: data)\n"," guard let jsonResult as? Array> else\n"," {\n"," fatalError(\"JSON result could not be coerced to desired output type.\")\n"," }\n"," }\n"," catch\n"," {\n"," throw error\n"," }\n"," \n"," return nil\n","}\n","\n","// mention JSONDecoder\n","// https://quicktype.io"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"eg6S12Zx398W","colab_type":"code","colab":{}},"source":["// Import a CSV file\n","import Foundation \n","\n","if let url = URL(string: \"file:///test.csv\"),\n"," let data = try? String(contentsOf: url)\n","{\n"," var result: [[String]] = []\n"," let rows = data.components(separatedBy: \"\\n\")\n","\n"," for row in rows\n"," {\n"," let elements = row.components(separatedBy: \",\")\n"," print(elements)\n"," }\n","}\n","\n","// no cleaning, value checking\n","// can't handle extra lines or varying line endings\n","// not a great solution tbh"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"EujBrJhJ4A5W","colab_type":"code","colab":{}},"source":["// Manipulating DataTables\n","// ⚠️ import CoreML\n","\n","// ⚠️ let dataTable = try! 
MLDataTable(contentsOf: csvFile)\n","// print(dataTable)\n","\n","// do things!"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"HqUxb-d74Ay9","colab_type":"code","colab":{}},"source":["// Cleaning Data\n","let collection = [0, 6, 2, 5, 5, 1, 8, 10]\n","\n","// higher-order functions!\n","let doubles = collection.map { x in x * 2}\n","print(doubles)\n","\n","let evens = collection.filter {x in x % 2 == 0 }\n","print(evens)\n","\n","let total = collection.reduce(0) { sum, x in sum + x }\n","print(total)\n","\n","// closures!\n","// typealias IntTransform = (Int) -> Int\n","func applySporadic(_ collection: inout [Int], stride: Int, function: (Int) -> Int)\n","{\n"," for (index, element) in collection.enumerated()\n"," {\n"," if index % stride == 0\n"," {\n"," collection[index] = function(element)\n"," }\n"," }\n","}\n","\n","var thing = [1, 1, 1, 1, 1, 1, 1, 1]\n","applySporadic(&thing, stride: 3)\n","{ element in \n"," return element + 4\n","}\n","print(thing)"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"a20UfIzt2JDN","colab_type":"code","colab":{}},"source":["// but they get better!\n","let creditCardNumbers = [\n"," 4964149475059987,\n"," 4898620401632387,\n"," 4393958570449195,\n"," 4751492711160905,\n"," 4437340772573099,\n","]\n","\n","let valid = creditCardNumbers.filter \n","{ number in\n"," let digits = String(number).compactMap{ $0.wholeNumberValue }\n"," let reversed = digits.reversed()\n"," var sum = 0\n","\n"," for (index, element) in reversed.enumerated()\n"," {\n"," let even = (index % 2 == 0)\n"," let digit = even ? element : element * 2\n"," sum += digit > 9 ? digit - 9 : digit\n"," }\n","\n"," return sum % 10 == 0\n","}\n","\n","print(valid)"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/2. Swift Basics/Swift Examples - Starter.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Swift Examples - Starter.ipynb","provenance":[],"collapsed_sections":[]},"kernelspec":{"name":"swift","display_name":"Swift"}},"cells":[{"cell_type":"markdown","metadata":{"id":"zLWQ5XN8xRyg","colab_type":"text"},"source":["# Starter - Swift Examples"]},{"cell_type":"markdown","metadata":{"id":"IRLckjzQgUVU","colab_type":"text"},"source":["## Basic Swift Syntax"]},{"cell_type":"code","metadata":{"id":"zoLpbQ4zgVPu","colab_type":"code","colab":{}},"source":["// Comments! 
(very important)\n","\n","/* \n"," Multi-line \n"," comments\n","*/"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"ynax-4mjhUri","colab_type":"code","colab":{}},"source":["// Variables"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"o6jnXGp2S-gq","colab_type":"code","colab":{}},"source":["// Printing\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"d0HwPVbGka0Y","colab_type":"code","colab":{}},"source":["// Collections\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"UaCceGIkl8S9","colab_type":"code","colab":{}},"source":["// Functions and optionals\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"B5dOxad3RjQR","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"Alno5nOAsOMK","colab_type":"code","colab":{}},"source":["// Tuples, dataclasses and classes\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"DgHkviiTG-2m","colab_type":"code","colab":{}},"source":["// Enumerations\n"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"MTFVMI_3T4rp","colab_type":"text"},"source":["## Example Activity Break"]},{"cell_type":"code","metadata":{"id":"0ZRvG7jeTzBu","colab_type":"code","colab":{}},"source":["// define a struct called '' with the following properties\n","// name - a string\n","// days - an integer\n","\n","// initialise an instance for TensorFlow World with a length of 4 days\n"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"EWoedQiB35iH","colab_type":"text"},"source":["## Common Data Tasks"]},{"cell_type":"code","metadata":{"id":"LBBTz1GY4AtF","colab_type":"code","colab":{}},"source":["// Import a JSON file\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"eg6S12Zx398W","colab_type":"code","colab":{}},"source":["// Import a CSV file\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"EujBrJhJ4A5W","colab_type":"code","colab":{}},"source":["// Manipulating DataTables\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"HqUxb-d74Ay9","colab_type":"code","colab":{}},"source":["// Cleaning Data\n"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"Wcc36Zh92a6u","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/2. Swift Basics/test.csv: -------------------------------------------------------------------------------- 1 | userId, id, title, completed 2 | 1,1,delectus aut autem,false 3 | 1,2,quis ut nam facilis et officia qui,false 4 | 1,3,fugiat veniam minus,false 5 | 1,4,et porro tempora,true 6 | 1,5,laboriosam mollitia et enim quasi adipisci quia provident illum,false 7 | 1,6,qui ullam ratione quibusdam voluptatem quia omnis,false 8 | 1,7,illo expedita consequatur quia in,false 9 | 1,8,quo adipisci enim quam ut ab,true 10 | 1,9,molestiae perspiciatis ipsa,false 11 | 1,10,illo est ratione doloremque quia maiores aut,true 12 | -------------------------------------------------------------------------------- /Notebooks/2. 
Swift Basics/test.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "userId": 1, 4 | "id": 1, 5 | "title": "delectus aut autem", 6 | "completed": false 7 | }, 8 | { 9 | "userId": 1, 10 | "id": 2, 11 | "title": "quis ut nam facilis et officia qui", 12 | "completed": false 13 | }, 14 | { 15 | "userId": 1, 16 | "id": 3, 17 | "title": "fugiat veniam minus", 18 | "completed": false 19 | }, 20 | { 21 | "userId": 1, 22 | "id": 4, 23 | "title": "et porro tempora", 24 | "completed": true 25 | }, 26 | { 27 | "userId": 1, 28 | "id": 5, 29 | "title": "laboriosam mollitia et enim quasi adipisci quia provident illum", 30 | "completed": false 31 | }, 32 | { 33 | "userId": 1, 34 | "id": 6, 35 | "title": "qui ullam ratione quibusdam voluptatem quia omnis", 36 | "completed": false 37 | }, 38 | { 39 | "userId": 1, 40 | "id": 7, 41 | "title": "illo expedita consequatur quia in", 42 | "completed": false 43 | }, 44 | { 45 | "userId": 1, 46 | "id": 8, 47 | "title": "quo adipisci enim quam ut ab", 48 | "completed": true 49 | }, 50 | { 51 | "userId": 1, 52 | "id": 9, 53 | "title": "molestiae perspiciatis ipsa", 54 | "completed": false 55 | }, 56 | { 57 | "userId": 1, 58 | "id": 10, 59 | "title": "illo est ratione doloremque quia maiores aut", 60 | "completed": true 61 | } 62 | ] 63 | -------------------------------------------------------------------------------- /Notebooks/3. Training a Model/Meet TensorFlow! Training a Model - Complete.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"file_extension":".py","kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""},"mimetype":"text/x-python","name":"python","npconvert_exporter":"python","pygments_lexer":"ipython3","version":3,"colab":{"name":"Meet TensorFlow! Training a Model - Complete.ipynb","provenance":[],"collapsed_sections":[]}},"cells":[{"cell_type":"markdown","metadata":{"id":"qq_n--SKNanf","colab_type":"text"},"source":["# Complete - Meet TensorFlow! Training a XOR Model"]},{"cell_type":"markdown","metadata":{"id":"16937WSdNank","colab_type":"text"},"source":["In this example, we assemble a multilayer peceptron network that can perform XOR. \n","\n","It's not very useful, but it showcases how you build up a model using layers, and how to execute training with that model. \n","\n","It's simple enough that you know whether it's correct... which is why we're doing it!\n"]},{"cell_type":"markdown","metadata":{"id":"jJZxT8xyTDXC","colab_type":"text"},"source":["## Setting up"]},{"cell_type":"markdown","metadata":{"id":"2bRvS7ipNsAf","colab_type":"text"},"source":["First, we need to `import` the TensorFlow framework:"]},{"cell_type":"code","metadata":{"id":"PbMmpBLrNann","colab_type":"code","colab":{}},"source":["import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"L7nCrdjhN0hv","colab_type":"text"},"source":["## Creating the model\n","\n","To represent our XOR neural network model, we need to create a `struct`, adhering to the [`Layer` Protocol](https://www.tensorflow.org/swift/api_docs/Protocols/Layer) (which is part of Swift For TensorFlow's API). 
Ours is called `XORModel`.\n","\n","Inside the model, we want three layers:\n","* an input layer, to take the input\n","* a hidden layer \n","* an output layer, to provide the output\n","\n","All three layers should be a `Dense` layer (a [densely-connected layer](https://www.tensorflow.org/swift/api_docs/Structs/Dense)) that takes an `inputSize` and an `outputSize`. \n","\n","The `inputSize` specifies that the input to the layer is of that many values. Likewise `outputSize`, for the output of the layer.\n","\n","Each layer will have an `activation` function, which determines the output of each node in the layer. There are many available activations, but [ReLU](https://www.tensorflow.org/swift/api_docs/Functions#leakyrelu_:alpha:) and [Sigmoid](https://www.tensorflow.org/swift/api_docs/Functions#sigmoid_:) are common. \n","\n","For our three layers, we'll use `sigmoid`.\n","\n","We'll also need to provide a definition of our `@differentiable` `func`, `callAsFunction()`. In this case, we want it to return the `input` sequenced through (passed through) the three layers. \n","\n","Helpfully, the `Differentiable` `protocol` that comes with Swift for TensorFlow has a method, [`sequenced()`](https://www.tensorflow.org/swift/api_docs/Protocols/Differentiable#sequencedthrough:_:), that makes this trivial.\n","\n"]},{"cell_type":"code","metadata":{"id":"2812LRBTNant","colab_type":"code","colab":{}},"source":["// Create a XORModel Struct\n","struct XORModel: Layer\n","{\n"," // define three layers, each of Dense type\n"," var inputLayer = Dense<Float>(inputSize: 2, outputSize: 2, activation: sigmoid)\n"," var hiddenLayer = Dense<Float>(inputSize: 2, outputSize: 2, activation: sigmoid)\n"," var outputLayer = Dense<Float>(inputSize: 2, outputSize: 1, activation: sigmoid)\n"," \n"," // provide the differentiable thingo\n"," @differentiable func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float>\n"," {\n"," return input.sequenced(through: inputLayer, hiddenLayer, outputLayer)\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"2ybin-B0Qy2o","colab_type":"text"},"source":["## Creating an instance of our model"]},{"cell_type":"markdown","metadata":{"id":"yKjOCu_RRBPM","colab_type":"text"},"source":["Here we need to create an instance of our XORModel Struct, which we defined above. 
This will be our model."]},{"cell_type":"code","metadata":{"colab_type":"code","id":"kZRlD4utdPuX","colab":{}},"source":["var model = XORModel()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"hxDDiGpXQ3DQ","colab_type":"text"},"source":["## Creating an optimizer"]},{"cell_type":"markdown","metadata":{"id":"blHjk_IpRLV9","colab_type":"text"},"source":["And we need an [optimiser](https://www.tensorflow.org/swift/api_docs/Protocols/Optimizer), in this case we're going to use [stochastic gradient descent (SGD) optimiser](https://www.tensorflow.org/swift/api_docs/Classes/SGD), which we can get from the Swift for TensorFlow library.\n","\n","Our optimiser is, obviously, for the model instance we defined a moment ago, and wants a learning rate of about 0.02."]},{"cell_type":"code","metadata":{"id":"op5PRWVoQ2iu","colab_type":"code","colab":{}},"source":["let optimiser = SGD(for: model, learningRate: 0.02)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"vlm3A-FKQ8MB","colab_type":"text"},"source":["## Creating and labelling training data\n","\n"]},{"cell_type":"markdown","metadata":{"id":"Gqn5RlZaRvrh","colab_type":"text"},"source":["We need an array of type [`Tensor`](https://www.tensorflow.org/swift/api_docs/Structs/Tensor) to hold our training data (`[0, 0], [0, 1], [1, 0], [1, 1]`):"]},{"cell_type":"code","metadata":{"id":"zjyQoNqnQ7t4","colab_type":"code","colab":{}},"source":["let trainingData: Tensor = [[0, 0], [0, 1], [1, 0], [1, 1]]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"7ttw6mJUSJZc","colab_type":"text"},"source":["And we need to label the training data so that we know the correct outputs:\n"]},{"cell_type":"code","metadata":{"id":"KBRwcj0MSKX5","colab_type":"code","colab":{}},"source":["let trainingLabels: Tensor = [[0], [1], [1], [0]]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nmq98WiPSQE9","colab_type":"text"},"source":["## Training the model"]},{"cell_type":"markdown","metadata":{"id":"dieIbtmdVvgB","colab_type":"text"},"source":["First, we need a hyperparameter for epochs:"]},{"cell_type":"code","metadata":{"id":"CDt6kEjKVyWs","colab_type":"code","colab":{}},"source":["let epochs = 100_000"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"8zAz5DDiWVM7","colab_type":"text"},"source":["Then we need a training loop. We train the model by iterating through our epochs, and each time update the gradient (the 𝛁 symbol, nabla, is often used to represent gradient). Our gradient is of type [`TangentVector`](https://www.tensorflow.org/swift/api_docs/Protocols/Differentiable#tangentvector), and represents a differentiable value’s derivatives.\n","\n","Each epoch, we set the predicted value to be our training data, and the expected value to be our training data, and calculate the loss using [`meanSquaredError()`](https://www.tensorflow.org/swift/api_docs/Functions#meansquarederrorpredicted:expected:).\n","\n","Every so often we also want to print out the epoch we're in, and the current loss, so we can watch the traning. 
We also need to return loss.\n","\n","Finally, we need to use our [optimizer](https://www.tensorflow.org/swift/api_docs/Protocols/Optimizer) to [update](https://www.tensorflow.org/swift/api_docs/Protocols/Optimizer#update_:along:) the differentiable variables, along the gradient.\n"]},{"cell_type":"code","metadata":{"id":"8QGopdwKNan3","colab_type":"code","outputId":"ba04d754-9e6b-49a4-a77b-0cd5c14488c2","executionInfo":{"status":"ok","timestamp":1572299228737,"user_tz":240,"elapsed":375992,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":371}},"source":["for epoch in 0.. Tensor in\n","\n"," // predicted value (the training data)\n"," let ŷ = model(trainingData)\n","\n"," // loss \n"," let loss = meanSquaredError(predicted: ŷ, expected: trainingLabels)\n","\n"," // sometimes we want to print an update\n"," if epoch % 5000 == 0\n"," {\n"," print(\"epoch: \\(epoch) loss: \\(loss)\")\n"," }\n"," return loss\n"," }\n"," // update the model\n"," optimiser.update(&model, along: 𝛁model)\n","}"],"execution_count":0,"outputs":[{"output_type":"stream","text":["epoch: 0 loss: 0.28061706\n","epoch: 5000 loss: 0.24945608\n","epoch: 10000 loss: 0.2490734\n","epoch: 15000 loss: 0.24835764\n","epoch: 20000 loss: 0.2469819\n","epoch: 25000 loss: 0.24420786\n","epoch: 30000 loss: 0.23800798\n","epoch: 35000 loss: 0.2237089\n","epoch: 40000 loss: 0.20193933\n","epoch: 45000 loss: 0.18637396\n","epoch: 50000 loss: 0.17841955\n","epoch: 55000 loss: 0.17337665\n","epoch: 60000 loss: 0.16862151\n","epoch: 65000 loss: 0.1626256\n","epoch: 70000 loss: 0.15436587\n","epoch: 75000 loss: 0.14475374\n","epoch: 80000 loss: 0.1311733\n","epoch: 85000 loss: 0.056418493\n","epoch: 90000 loss: 0.02172982\n","epoch: 95000 loss: 0.012243575\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"fwskbFpjNan7","colab_type":"text"},"source":["## Testing the model"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"uSC0vIJVv_Cj","outputId":"8f6ff5d2-f601-4a1b-cf4e-c126bc09a4c5","executionInfo":{"status":"ok","timestamp":1572299229757,"user_tz":240,"elapsed":377007,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":88}},"source":["print(round(model.inferring(from: [[0, 0], [0, 1], [1, 0], [1, 1]])))"],"execution_count":0,"outputs":[{"output_type":"stream","text":["[[0.0],\r\n"," [1.0],\r\n"," [1.0],\r\n"," [0.0]]\r\n"],"name":"stdout"}]}]} -------------------------------------------------------------------------------- /Notebooks/3. Training a Model/Meet TensorFlow! Training a Model - Starter.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"file_extension":".py","kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""},"mimetype":"text/x-python","name":"python","npconvert_exporter":"python","pygments_lexer":"ipython3","version":3,"colab":{"name":"Meet TensorFlow! 
Training a Model - Starter.ipynb","provenance":[{"file_id":"1u_Saarvb1qQVxA4dq8pv3-k3ZaouwfSh","timestamp":1572307423069}],"collapsed_sections":[]}},"cells":[{"cell_type":"markdown","metadata":{"id":"qq_n--SKNanf","colab_type":"text"},"source":["# Starter - Meet TensorFlow! Training a XOR Model"]},{"cell_type":"markdown","metadata":{"id":"u9SJrpjU5H_C","colab_type":"text"},"source":["**⚠️ This is the starter version, for you to code along with live.**"]},{"cell_type":"markdown","metadata":{"id":"16937WSdNank","colab_type":"text"},"source":["In this example, we assemble a multilayer peceptron network that can perform XOR. \n","\n","It's not very useful, but it showcases how you build up a model using layers, and how to execute training with that model. \n","\n","It's simple enough that you know whether it's correct... which is why we're doing it!\n"]},{"cell_type":"markdown","metadata":{"id":"jJZxT8xyTDXC","colab_type":"text"},"source":["## Setting up"]},{"cell_type":"markdown","metadata":{"id":"2bRvS7ipNsAf","colab_type":"text"},"source":["First, we need to `import` the TensorFlow framework:"]},{"cell_type":"code","metadata":{"id":"PbMmpBLrNann","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"L7nCrdjhN0hv","colab_type":"text"},"source":["## Creating the model\n","\n","To represent our XOR neural network model, we need to create a `struct`, adhering to the [`Layer` Protocol](https://www.tensorflow.org/swift/api_docs/Protocols/Layer) (which is part of Swift For TensorFlow's API). Ours is called `XORModel`.\n","\n","Inside the model, we want three layers:\n","* an input layer, to take the input\n","* a hidden layer \n","* an output layer, to provide the output\n","\n","All three layers should be a `Dense` layer (a [densely-connected layer](https://www.tensorflow.org/swift/api_docs/Structs/Dense)) that takes an `inputSize` and an `outputSize`. \n","\n","The `inputSize` specifies that the input to the layer is of that many values. Likewise `outputSize`, for the out of the layer.\n","\n","Each will have an activation using an `activation` function determines the output shape of each node in the layer. There are many available activations, but [ReLU](https://www.tensorflow.org/swift/api_docs/Functions#leakyrelu_:alpha:) and [Sigmoid](https://www.tensorflow.org/swift/api_docs/Functions#sigmoid_:) are common. \n","\n","For our three layers, we'll use `sigmoid`.\n","\n","We'll also need to provide a definition of our `@differentiable` `func`, `callAsFunction()`. In this case, we want it to return the `input` sequenced through (passed through) the three layers. \n","\n","Helpfully, the `Differentiable` `protocol` that comes with Swift for TensorFlow has a method, [`sequenced()`](https://www.tensorflow.org/swift/api_docs/Protocols/Differentiable#sequencedthrough:_:) that makes this trivial.\n","\n"]},{"cell_type":"code","metadata":{"id":"2812LRBTNant","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"2ybin-B0Qy2o","colab_type":"text"},"source":["## Creating an instance of our model"]},{"cell_type":"markdown","metadata":{"id":"yKjOCu_RRBPM","colab_type":"text"},"source":["Here we need to create an instance of our XORModel Struct, which we defined above. 
This will be our model."]},{"cell_type":"code","metadata":{"colab_type":"code","id":"kZRlD4utdPuX","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"hxDDiGpXQ3DQ","colab_type":"text"},"source":["## Creating an optimizer"]},{"cell_type":"markdown","metadata":{"id":"blHjk_IpRLV9","colab_type":"text"},"source":["And we need an [optimiser](https://www.tensorflow.org/swift/api_docs/Protocols/Optimizer), in this case we're going to use [stochastic gradient descent (SGD) optimiser](https://www.tensorflow.org/swift/api_docs/Classes/SGD), which we can get from the Swift for TensorFlow library.\n","\n","Our optimiser is, obviously, for the model instance we defined a moment ago, and wants a learning rate of about 0.02."]},{"cell_type":"code","metadata":{"id":"op5PRWVoQ2iu","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"vlm3A-FKQ8MB","colab_type":"text"},"source":["## Creating and labelling training data\n","\n"]},{"cell_type":"markdown","metadata":{"id":"Gqn5RlZaRvrh","colab_type":"text"},"source":["We need an array of type [`Tensor`](https://www.tensorflow.org/swift/api_docs/Structs/Tensor) to hold our training data (`[0, 0], [0, 1], [1, 0], [1, 1]`):"]},{"cell_type":"code","metadata":{"id":"zjyQoNqnQ7t4","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"7ttw6mJUSJZc","colab_type":"text"},"source":["And we need a similar one to mark/label the training data so that we know the correct outputs:\n"]},{"cell_type":"code","metadata":{"id":"KBRwcj0MSKX5","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nmq98WiPSQE9","colab_type":"text"},"source":["## Training the model"]},{"cell_type":"markdown","metadata":{"id":"dieIbtmdVvgB","colab_type":"text"},"source":["First, we need a hyperparameter for epochs (100,000 is about right here):"]},{"cell_type":"code","metadata":{"id":"CDt6kEjKVyWs","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"8zAz5DDiWVM7","colab_type":"text"},"source":["Then we need a training loop. We train the model by iterating through our epochs, and each time update the gradient (the 𝛁 symbol, nabla, is often used to represent gradient). Our gradient is of type [`TangentVector`](https://www.tensorflow.org/swift/api_docs/Protocols/Differentiable#tangentvector), and represents a differentiable value’s derivatives.\n","\n","Each epoch, we set the predicted value to be our training data, and the expected value to be our training data, and calculate the loss using [`meanSquaredError()`](https://www.tensorflow.org/swift/api_docs/Functions#meansquarederrorpredicted:expected:).\n","\n","Every so often we also want to print out the epoch we're in, and the current loss, so we can watch the traning. 
We also need to return loss.\n","\n","Finally, we need to use our [optimizer](https://www.tensorflow.org/swift/api_docs/Protocols/Optimizer) to [update](https://www.tensorflow.org/swift/api_docs/Protocols/Optimizer#update_:along:) the differentiable variables, along the gradient.\n"]},{"cell_type":"code","metadata":{"id":"8QGopdwKNan3","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"fwskbFpjNan7","colab_type":"text"},"source":["## Testing the model"]},{"cell_type":"markdown","metadata":{"id":"uLwxZwm64-rG","colab_type":"text"},"source":["Uncomment the following to test the model:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"uSC0vIJVv_Cj","colab":{}},"source":["//print(round(model.inferring(from: [[0, 0], [0, 1], [1, 0], [1, 1]])))"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/4. Swift and Python/Complete - Swift and Python.json: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Complete - Swift and Python","provenance":[],"collapsed_sections":[]},"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""}},"cells":[{"cell_type":"markdown","metadata":{"id":"axkiMfMdXoNv","colab_type":"text"},"source":["# Complete - Swift and Python"]},{"cell_type":"markdown","metadata":{"id":"E0JG1WqkYHps","colab_type":"text"},"source":["In this example, we're going to add some Python magic to the the multilayer peceptron XOR network that we made in the previous activity.\n"]},{"cell_type":"markdown","metadata":{"id":"ogFqL7EKYNZJ","colab_type":"text"},"source":["## Setting up"]},{"cell_type":"markdown","metadata":{"id":"cqP0u8hYYP7I","colab_type":"text"},"source":["First, we need to `import` the TensorFlow framework:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"kZRlD4utdPuX","colab":{}},"source":["import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"colab_type":"text","id":"Tup2LjvzLz0D"},"source":["Next, we need to `import Python`, and hook it into the notebook environment:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"IFDhw6oILtTM","outputId":"7945301c-7bec-402c-f44c-9f95a113c94d","executionInfo":{"status":"ok","timestamp":1572304489018,"user_tz":240,"elapsed":11392,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["import Python\n","%include \"EnableIPythonDisplay.swift\"\n","IPythonDisplay.shell.enable_matplotlib(\"inline\")"],"execution_count":0,"outputs":[{"output_type":"execute_result","data":{"text/plain":["('inline', 'module://ipykernel.pylab.backend_inline')\n"]},"metadata":{"tags":[]},"execution_count":3}]},{"cell_type":"markdown","metadata":{"id":"D_m1yBsQYXnp","colab_type":"text"},"source":["## Creating the model"]},{"cell_type":"markdown","metadata":{"id":"LGcGWpubYZFG","colab_type":"text"},"source":["As before, we'll create our model:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"z93l8JV5Khv5","colab":{}},"source":["// Create a XORModel Struct\n","struct XORModel: Layer\n","{\n"," // define three layers, each of Dense type\n"," var 
inputLayer = Dense(inputSize: 2, outputSize: 2, activation: sigmoid)\n"," var hiddenLayer = Dense(inputSize: 2, outputSize: 2, activation: sigmoid)\n"," var outputLayer = Dense(inputSize: 2, outputSize: 1, activation: sigmoid)\n"," \n"," // procide the differentiable thingo\n"," @differentiable func callAsFunction(_ input: Tensor) -> Tensor\n"," {\n"," return input.sequenced(through: inputLayer, hiddenLayer, outputLayer)\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"jiCDB-4pYeCb","colab_type":"text"},"source":["## Preparing to train the model"]},{"cell_type":"markdown","metadata":{"id":"d2O1YCf9YgwT","colab_type":"text"},"source":["Likewise, as before, we'll create an instance of the model, an optimiser, and some data."]},{"cell_type":"code","metadata":{"colab_type":"code","id":"SSR6Mi0MKiWg","colab":{}},"source":["// create an instance of our XORModel Struct (defined above)\n","var model = XORModel()\n","\n","// create an optimizer (standard gradient descent)\n","let optimizer = SGD(for: model, learningRate: 0.02)\n","\n","// create some training data\n","let trainingData: Tensor = [[0, 0], [0, 1], [1, 0], [1, 1]]\n","\n","// label the training data (so we know the correct outputs)\n","let trainingLabels: Tensor = [[0], [1], [1], [0]]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"L5Rqvsj-Yl_W","colab_type":"text"},"source":["But we'll also create an array to store our loss in, so we can keep track of it:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"NSA-emMQe3fU","colab":{}},"source":["var losses: [Float] = []"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"fYJAOc3NYqPy","colab_type":"text"},"source":["Then we'll train:"]},{"cell_type":"code","metadata":{"id":"2M2Jin-9YtQs","colab_type":"code","colab":{}},"source":["let epochs = 100_000"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"colab_type":"code","id":"14HmPb1xKmG2","outputId":"97091c22-c71f-468e-905a-7eb89bb821db","colab":{"base_uri":"https://localhost:8080/","height":357}},"source":["for epoch in 0 ..< epochs\n","{\n"," // do the ting\n"," let 𝛁model = model.gradient { model -> Tensor in\n"," let ŷ = model(trainingData)\n"," let loss = meanSquaredError(predicted: ŷ, expected: trainingLabels)\n"," if epoch % 5000 == 0\n"," {\n"," print(\"epoch: \\(epoch) loss: \\(loss)\")\n"," }\n"," losses.append(loss.scalarized())\n"," return loss\n"," }\n"," optimizer.update(&model, along: 𝛁model)\n","}"],"execution_count":0,"outputs":[{"output_type":"stream","text":["epoch: 0 loss: 0.25536832\n","epoch: 5000 loss: 0.2501201\n","epoch: 10000 loss: 0.25008714\n","epoch: 15000 loss: 0.25006086\n","epoch: 20000 loss: 0.25003833\n","epoch: 25000 loss: 0.25001723\n","epoch: 30000 loss: 0.24999528\n","epoch: 35000 loss: 0.24997\n","epoch: 40000 loss: 0.24993798\n","epoch: 45000 loss: 0.24989387\n","epoch: 50000 loss: 0.24982883\n","epoch: 55000 loss: 0.24972737\n","epoch: 60000 loss: 0.24956138\n","epoch: 65000 loss: 0.24927837\n","epoch: 70000 loss: 0.24877715\n","epoch: 75000 loss: 0.2478496\n","epoch: 80000 loss: 0.24600479\n","epoch: 85000 loss: 0.24184218\n","epoch: 90000 loss: 0.23113453\n","epoch: 95000 loss: 0.20894165\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"9PaQffxoYzpx","colab_type":"text"},"source":["## Visualising with Python"]},{"cell_type":"markdown","metadata":{"id":"inRy6KMEY2ea","colab_type":"text"},"source":["We're going to use the ever-useful 
[Matplotlib](https://matplotlib.org/) to visualise our losses. Matplotlib is a Python library, not a Swift library."]},{"cell_type":"markdown","metadata":{"id":"WPcFFt_kZJGA","colab_type":"text"},"source":["First, we need a handle on [`matplotlib.pyplot'](https://matplotlib.org/api/_as_gen/matplotlib.pyplot.html#module-matplotlib.pyplot), which is the MATLAB style Python API that Matplotlib has, providing for simple programmatic plot"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"YMGdEacnfJqN","outputId":"ad6335f1-10c8-4400-ee10-dbb64b250922","scrolled":true,"colab":{"base_uri":"https://localhost:8080/","height":517}},"source":["let plot = Python.import(\"matplotlib.pyplot\")\n","plot.figure(figsize: [12, 8])\n","\n","plot.ylabel(\"Loss\")\n","plot.xlabel(\"epoch\")\n","plot.plot(losses)\n","\n","plot.show()"],"execution_count":0,"outputs":[{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAtcAAAHgCAYAAABuGUHVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXwd9X3/+/fnnKN9lyxvkmx5BYzx\nAsJsYUniJpD2B0mTJpDQbLQ0v5Tml3LbXvrgd9PHj/z6uE3obdPcpi3cLA3ZCJCNUAghJCEhYLCN\nF7CNsbGxJe+7LcvWdj73jzOyj2TZlq0zmrO8no/HPDTzne/M+Yhh4H1G35kxdxcAAACA0YtFXQAA\nAACQLwjXAAAAQIYQrgEAAIAMIVwDAAAAGUK4BgAAADKEcA0AAABkSCLqAjJl3Lhx3traGnUZAAAA\nyHPLly/f6+6Nw63Lm3Dd2tqqZcuWRV0GAAAA8pyZbTndOoaFAAAAABlCuAYAAAAyhHANAAAAZAjh\nGgAAAMgQwjUAAACQIYRrAAAAIEMI1wAAAECGEK4BAACADCFcAwAAABlCuAYAAAAyhHANAAAAZAjh\nGgAAAMgQwjUAAACQIYRrAAAAIEMI1wAAAECGEK4BAACADCFcj1Jnd586u/uiLgMAAABZgHA9Su/+\n59/ocz95LeoyAAAAkAUI16NUWhRTd28y6jIAAACQBQjXo1RaFNfx3v6oywAAAEAWIFyPUmlRXMf7\nCNcAAAAgXI9aaVFMxxkWAgAAABGuR60kwbAQAAAApBCuR6m0KKbuPq5cAwAAgHA9aqVcuQYAAECA\ncD1KJUVxxlwDAABAEuF61FLPuebKNQAAAAjXo8aj+AAAADCAcD1KpYm4evtd/UmPuhQAAABEjHA9\nSqVFqX+E3NQIAAAAwvUolRbFJYnH8QEAAIBwPVplQbju6umLuBIAAABEjXA9SlWlCUnSkeOEawAA\ngEJHuB6l6rIiSdLhY70RVwIAAICoEa5Hqbo0Fa65cg0AAADC9SgNDAs5fJwr1wAAAIWOcD1KDAsB\nAADAAML1KJ28cs2wEAAAgEJHuB6lonhMZUVxrlwDAACAcJ0J9RXF2ne0J+oyAAAAEDHCdQZMqinV\nzkPHoy4DAAAAESNcZ8DEmlLtPEy4BgAAKHSE6wyYVFOqHYeOyd2jLgUAAAARIlxnQFNtmY73JrXn\nSHfUpQAAACBChOsMmDO5RpK0ZvvhiCsBAABAlAjXGTBncrXMpBXtB6MuBQAAABEKNVyb2Y1mtt7M\nNprZPcOsv9vM1prZajN71sympq3rN7OVwfR4mHWOVmVJQpdPrdd/rd6uZJJx1wAAAIUqtHBtZnFJ\nX5F0k6Q5km4zszlDuq2Q1Obu8yQ9JumLaeuOufuCYLo5rDoz5cNXTNGbe47q759cp/b9XTre20/Q\nBgAAKDCJEPe9SNJGd98kSWb2sKRbJK0d6ODuv0rrv0TS7SHWE6pbFkzWkk379LXnN+trz28+0Z6I\nmRJxU9xMMTOZSfHYwLwpZlLMTPFYal0sre10fc00aDlmkilt+1jqpzR0f0HfIfsxnVw+0a6R9U3/\njPQ+dmI/aftQaiF9HwPrLe1zU/1PzuuU/qnP05Df/cS+lFoY9DlD9neyzsG/qzT4c07Wd3K/sSHb\nDvTXKbUMrit937Ehv6+G7m+Y32u42gb2c2L7tNpO/jM/ta6Bf2468Vln2Odw2w50AAAAg4QZrpsk\ntactd0i64gz975D0VNpyqZktk9Qn6R/c/ceZLzFzzEz/8P55+tjVrVqx9aAOHutRT19Svf1J9fa7\nkklX0qWke9okubv609Z58LM/eXJ+oG8y6XIN7pfex13qT3rq8wb2r5Ofkb6N6+RnKug3dL9+Yh8D\ntabahvaVS/1p+/W0z02f50J+/hnuy4KGfDlKD/nDfkka5kvHiS9usZPbx2zwl5tTvvTFTn5RGvji\ndWrfgS+Dg79MxtK+NAzUHosNLA/+Yjv0C2s8duo+h35Zjp/4cpvqb0Gf9C/XiZgpHospER+YNyVi\nMcVjpqL44OWBPunL8ZipKBZTfND2pkQ8NmiZL0YAEK4ww/WImdntktokXZ/WPNXdt5nZdEm/NLNX\n3f3NIdvdKelOSZoyZcqY1XsmF02q1kWTqqMuI6t5eoiXBofytPn09Rq0fDL0D90mPdRL6V8mTn4x\nSK1Lbw++QCRP7kdD6xq0/ZBth/weGrLfodtr0Oedva707YfWlfRT95u+ffLEOj+l3oFth/tdB/65\naZjfd7h9Dn8sh99f+he8gd9BJ+ZP3ebk8T75ZW7QF8Fh/p1K+pC2gS+ASalfybQvuoP7JU983jDb\nDvncgS/FA/1P7POUL9KDf/+oxUynBO54LJYW4EcW7OOxYB9pQT99ObVtbMg+Tw37xYmYiuIxFSdi\nKk77WVIUU3E8rpKimEoSMZUk4ipODMzHlIhzPz6A7BRmuN4mqSVtuTloG8TMFku6V9L17n7iQdHu\nvi34ucnMfi1poaRB4drdH5T0oCS1tbVlyf+6cDYDwyVi4goaCsfAl4RUKD8Z2vu
","text/plain":["
"]},"metadata":{"tags":[]}},{"output_type":"execute_result","data":{"text/plain":["None\n"]},"metadata":{"tags":[]},"execution_count":7}]},{"cell_type":"code","metadata":{"colab_type":"code","id":"YkNFr_D2Kn-f","outputId":"dbe90c10-22b8-4e96-b674-49b67b8392be","colab":{"base_uri":"https://localhost:8080/","height":85}},"source":["print(round(model.inferring(from: [[0, 0], [0, 1], [1, 0], [1, 1]])))"],"execution_count":0,"outputs":[{"output_type":"stream","text":["[[0.0],\r\n"," [1.0],\r\n"," [1.0],\r\n"," [1.0]]\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"R3OcEtkgfA98","colab_type":"text"},"source":["## Python Interopability in Depth\n","\n","This section is an up-to-date derivative of some of the TensorFlow documentation."]},{"cell_type":"markdown","metadata":{"id":"DVnib3G0fEDG","colab_type":"text"},"source":["You can ask for the Python version:"]},{"cell_type":"code","metadata":{"id":"cNcL-sEMfF1e","colab_type":"code","colab":{}},"source":["print(Python.version)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"eM4GJnWDfKJX","colab_type":"text"},"source":["You can also _set_ a specific Python version, if you need. \n","\n","Note: you should run `PythonLibrary`.useVersion right after import Python, before calling any Python code. It cannot be used to dynamically switch Python versions. The Swift class [`PythonLibrary`](https://www.tensorflow.org/swift/api_docs/Structs/PythonLibrary) represents... a Python library!"]},{"cell_type":"code","metadata":{"id":"7y-EZBzhfRzi","colab_type":"code","colab":{}},"source":["// PythonLibrary.useVersion(2)\n","// PythonLibrary.useVersion(3, 7)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"UasavsMfft0k","colab_type":"text"},"source":["Using Swift, you can represent an object from Python using the Swift class [`PythonObject`](https://www.tensorflow.org/swift/api_docs/Structs/PythonObject). Everything Python will return a Swift `PythonObject`."]},{"cell_type":"markdown","metadata":{"id":"0YxX4onBhy8J","colab_type":"text"},"source":["All of Swift's basic types can be converted to a `PythonObject`. 
Some happen implicitly, and some need to be cast from a Swift value to a `PythonObject` using a `PythonObject` initialiser:\n","\n","\n"]},{"cell_type":"code","metadata":{"id":"uGJcsblhfqNm","colab_type":"code","colab":{}},"source":["let pythonInt: PythonObject = 1\n","let pythonFloat: PythonObject = 3.0\n","let pythonString: PythonObject = \"Hello Python!\"\n","let pythonRange: PythonObject = PythonObject(5..<10)\n","let pythonArray: PythonObject = [1, 2, 3, 4]\n","let pythonDict: PythonObject = [\"foo\": [0], \"bar\": [1, 2, 3]]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Au-ypfRViBHN","colab_type":"text"},"source":["To make it easier to use in Swift, `PythonObject` defines most standard operations, including numeric operations, indexing, and iteration:"]},{"cell_type":"code","metadata":{"id":"GW3zEmfiiE1O","colab_type":"code","colab":{}},"source":["print(pythonInt + pythonFloat)\n","print(pythonString[0..<6])\n","print(pythonRange)\n","print(pythonArray[2])\n","print(pythonDict[\"bar\"])"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"2hnyr2Oxigqn","colab_type":"text"},"source":["You can also convert a `PythonObject` back to Swift types:"]},{"cell_type":"code","metadata":{"id":"h1QodWq8ieVK","colab_type":"code","colab":{}},"source":["let int = Int(pythonInt)!\n","let float = Float(pythonFloat)!\n","let string = String(pythonString)!\n","let range = Range(pythonRange)!\n","let array: [Int] = Array(pythonArray)!\n","let dict: [String: [Int]] = Dictionary(pythonDict)!"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"XEzzGpTRipuL","colab_type":"text"},"source":["And you can, of course, perform all the operations that you'd expect. The outputs are, also of course, the same as from Python:"]},{"cell_type":"code","metadata":{"id":"prw2ZxEzioEc","colab_type":"code","colab":{}},"source":["print(Float(int) + float)\n","print(string.prefix(6))\n","print(range)\n","print(array[2])\n","print(dict[\"bar\"]!)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"pLhpxWaspuYV","colab_type":"text"},"source":["You need to be careful when you're using Python and Swift together, as the compiler can't figure out things about Python objects. For example, if you have a `PythonObject` that holds a Python String:"]},{"cell_type":"code","metadata":{"id":"ErpQKgYOpURL","colab_type":"code","outputId":"be7850d8-2eb0-4e90-a75d-6fd4956f02bf","executionInfo":{"status":"ok","timestamp":1572304498530,"user_tz":240,"elapsed":282,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["var myPythonString: PythonObject = \"I am a Python String!\"\n","print(myPythonString)"],"execution_count":0,"outputs":[{"output_type":"stream","text":["I am a Python String!\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"JoUjQ6rgp27q","colab_type":"text"},"source":["And then, for example, try and add 5 to it. 
The compiler won't complain, but it will crash (which could be 10 hours into a complex training run) on runtime:"]},{"cell_type":"code","metadata":{"id":"Mwp7xKbCpdG9","colab_type":"code","colab":{}},"source":["// myPythonString = myPythonString + 5 // this will crash on runtime, but compile just fine\n","print(myPythonString)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ldIYoNcEi0oy","colab_type":"text"},"source":["`PythonObject` defines conformances to many useful, standard Swift protocols: \n","\n","* [Equatable](https://developer.apple.com/documentation/swift/equatable) \n","* [Comparable](https://developer.apple.com/documentation/swift/comparable) \n","* [Hashable](https://developer.apple.com/documentation/swift/hashable) \n","* [SignedNumeric](https://developer.apple.com/documentation/swift/numeric) \n","* [Strideable](https://developer.apple.com/documentation/swift/strideable) \n","* [MutableCollection](https://developer.apple.com/documentation/swift/mutablecollection) \n","* The [ExpressibleBy_Literal](https://developer.apple.com/documentation/swift/swift_standard_library/initialization_with_literals) protocols\n","\n","You can learn more about this in [the documentation](https://www.tensorflow.org/swift/api_docs/Structs/PythonObject). None of the conformances type-safe, and crashes will occur if you attempt to use protocol functionality from an incompatible PythonObject instance."]},{"cell_type":"code","metadata":{"id":"PvcQUC2QmCGd","colab_type":"code","colab":{}},"source":["let one: PythonObject = 1\n","print(one == one)\n","print(one < one)\n","print(one + one)\n","\n","let array: PythonObject = [1, 2, 3]\n","for (i, x) in array.enumerated() {\n"," print(i, x)\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"QodBVesnmVUx","colab_type":"text"},"source":["When you convert a tuple from Python to Swift, you have to statically know the arity of the tuple and call one of the instance methods `PythonObject.tuple2`, `PythonObject.tuple3`, or `PythonObject.tuple4`."]},{"cell_type":"code","metadata":{"id":"lgdRKuL8mU5F","colab_type":"code","colab":{}},"source":["let pythonTuple = Python.tuple([1, 2, 3])\n","print(pythonTuple, Python.len(pythonTuple))\n","\n","// Convert to Swift.\n","let tuple = pythonTuple.tuple3\n","print(tuple)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"5hv0cCDamvxp","colab_type":"text"},"source":["You an also use `Python.builtins` to access all the Python builtins:"]},{"cell_type":"code","metadata":{"id":"zJi3Oei3mvEX","colab_type":"code","colab":{}},"source":["_ = Python.builtins\n","\n","print(Python.type(1))\n","print(Python.len([1, 2, 3]))\n","print(Python.sum([1, 2, 3]))"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Ocn5edp4nAHt","colab_type":"text"},"source":["And, as we showed above with Matplotlib, you can access and import Python modules:"]},{"cell_type":"code","metadata":{"id":"WIMppNRLm_sY","colab_type":"code","colab":{}},"source":["let np = Python.import(\"numpy\")\n","print(np)\n","let zeros = np.ones([2, 3])\n","print(zeros)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"bJ54f2OVnJb9","colab_type":"text"},"source":["There's even support for checking that the Python import is safe:"]},{"cell_type":"code","metadata":{"id":"E8_L9UxOnHmW","colab_type":"code","colab":{}},"source":["let maybeModule = try? 
Python.attemptImport(\"nonexistent_module\")\n","print(maybeModule)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"IwoDsURVnOcq","colab_type":"text"},"source":["There's also explicit support for converting the Swift types `Array`, `ShapedArray`, and `Tensor` to and from Python's `numpy.ndarray`.\n","\n","Note that the conversion will only succeed if the `dtype` of the `numpy.ndarray` is compatible with the `Element` or `Scalar` generic parameter type.\n","\n","For `Array`, conversion from numpy succeeds only if the `numpy.ndarray` is one dimension (1-D)."]},{"cell_type":"code","metadata":{"id":"5zCkIAASnnHF","colab_type":"code","colab":{}},"source":["let numpyArray = np.ones([4], dtype: np.float32)\n","print(\"Swift type:\", type(of: numpyArray))\n","print(\"Python type:\", Python.type(numpyArray))\n","print(numpyArray.shape)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"QLHtvs3wnwBz","colab_type":"text"},"source":["Examples of converting `numpy.ndarray` to Swift types:"]},{"cell_type":"code","metadata":{"id":"2PGYgVRwnvrI","colab_type":"code","colab":{}},"source":["let array: [Float] = Array(numpy: numpyArray)!\n","let shapedArray = ShapedArray(numpy: numpyArray)!\n","let tensor = Tensor(numpy: numpyArray)!"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nWJjJMvWn-LD","colab_type":"text"},"source":["Examples of converting Swift types to `numpy.ndarray`:"]},{"cell_type":"code","metadata":{"id":"SuHZzrV4nqTd","colab_type":"code","colab":{}},"source":["print(array.makeNumpyArray())\n","print(shapedArray.makeNumpyArray())\n","print(tensor.makeNumpyArray())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"umKxn053n1Ri","colab_type":"text"},"source":["Examples with different `dtype`:"]},{"cell_type":"code","metadata":{"id":"x0wCrXHInznj","colab_type":"code","colab":{}},"source":["let doubleArray: [Double] = Array(numpy: np.ones([3], dtype: np.float))!\n","let intTensor = Tensor(numpy: np.ones([2, 3], dtype: np.int32))!"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/4. 
Swift and Python/Starter - Swift and Python.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Starter - Swift and Python.ipynb","provenance":[],"collapsed_sections":[]},"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""}},"cells":[{"cell_type":"markdown","metadata":{"id":"axkiMfMdXoNv","colab_type":"text"},"source":["# Starter - Swift and Python"]},{"cell_type":"markdown","metadata":{"id":"tfHArQyA6hpW","colab_type":"text"},"source":["**⚠️ This is the starter version, for you to code along with live.**"]},{"cell_type":"markdown","metadata":{"id":"E0JG1WqkYHps","colab_type":"text"},"source":["In this example, we're going to add some Python magic to the the multilayer peceptron XOR network that we made in the previous activity.\n"]},{"cell_type":"markdown","metadata":{"id":"ogFqL7EKYNZJ","colab_type":"text"},"source":["## Setting up"]},{"cell_type":"markdown","metadata":{"id":"cqP0u8hYYP7I","colab_type":"text"},"source":["First, we need to `import` the TensorFlow framework:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"kZRlD4utdPuX","colab":{}},"source":["import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"colab_type":"text","id":"Tup2LjvzLz0D"},"source":["Next, we need to `import Python`, and hook it into the notebook environment:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"IFDhw6oILtTM","outputId":"7945301c-7bec-402c-f44c-9f95a113c94d","executionInfo":{"status":"ok","timestamp":1572304489018,"user_tz":240,"elapsed":11392,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["// code goes here"],"execution_count":0,"outputs":[{"output_type":"execute_result","data":{"text/plain":["('inline', 'module://ipykernel.pylab.backend_inline')\n"]},"metadata":{"tags":[]},"execution_count":3}]},{"cell_type":"markdown","metadata":{"id":"D_m1yBsQYXnp","colab_type":"text"},"source":["## Creating the model"]},{"cell_type":"markdown","metadata":{"id":"LGcGWpubYZFG","colab_type":"text"},"source":["As before, we'll create our model:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"z93l8JV5Khv5","colab":{}},"source":["// Create a XORModel Struct\n","struct XORModel: Layer\n","{\n"," // define three layers, each of Dense type\n"," var inputLayer = Dense(inputSize: 2, outputSize: 2, activation: sigmoid)\n"," var hiddenLayer = Dense(inputSize: 2, outputSize: 2, activation: sigmoid)\n"," var outputLayer = Dense(inputSize: 2, outputSize: 1, activation: sigmoid)\n"," \n"," // procide the differentiable thingo\n"," @differentiable func callAsFunction(_ input: Tensor) -> Tensor\n"," {\n"," return input.sequenced(through: inputLayer, hiddenLayer, outputLayer)\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"jiCDB-4pYeCb","colab_type":"text"},"source":["## Preparing to train the model"]},{"cell_type":"markdown","metadata":{"id":"d2O1YCf9YgwT","colab_type":"text"},"source":["Likewise, as before, we'll create an instance of the model, an optimiser, and some data."]},{"cell_type":"code","metadata":{"colab_type":"code","id":"SSR6Mi0MKiWg","colab":{}},"source":["// create an instance of our XORModel 
Struct (defined above)\n","var model = XORModel()\n","\n","// create an optimizer (standard gradient descent)\n","let optimizer = SGD(for: model, learningRate: 0.02)\n","\n","// create some training data\n","let trainingData: Tensor = [[0, 0], [0, 1], [1, 0], [1, 1]]\n","\n","// label the training data (so we know the correct outputs)\n","let trainingLabels: Tensor = [[0], [1], [1], [0]]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"L5Rqvsj-Yl_W","colab_type":"text"},"source":["But we'll also create an array to store our loss in, so we can keep track of it:"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"NSA-emMQe3fU","colab":{}},"source":["var losses: [Float] = []"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"fYJAOc3NYqPy","colab_type":"text"},"source":["Then we'll train:"]},{"cell_type":"code","metadata":{"id":"2M2Jin-9YtQs","colab_type":"code","colab":{}},"source":["let epochs = 100_000"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"colab_type":"code","id":"14HmPb1xKmG2","outputId":"97091c22-c71f-468e-905a-7eb89bb821db","colab":{"base_uri":"https://localhost:8080/","height":357}},"source":["for epoch in 0 ..< epochs\n","{\n"," // do the ting\n"," let 𝛁model = model.gradient { model -> Tensor in\n"," let ŷ = model(trainingData)\n"," let loss = meanSquaredError(predicted: ŷ, expected: trainingLabels)\n"," if epoch % 5000 == 0\n"," {\n"," print(\"epoch: \\(epoch) loss: \\(loss)\")\n"," }\n"," losses.append(loss.scalarized())\n"," return loss\n"," }\n"," optimizer.update(&model, along: 𝛁model)\n","}"],"execution_count":0,"outputs":[{"output_type":"stream","text":["epoch: 0 loss: 0.25536832\n","epoch: 5000 loss: 0.2501201\n","epoch: 10000 loss: 0.25008714\n","epoch: 15000 loss: 0.25006086\n","epoch: 20000 loss: 0.25003833\n","epoch: 25000 loss: 0.25001723\n","epoch: 30000 loss: 0.24999528\n","epoch: 35000 loss: 0.24997\n","epoch: 40000 loss: 0.24993798\n","epoch: 45000 loss: 0.24989387\n","epoch: 50000 loss: 0.24982883\n","epoch: 55000 loss: 0.24972737\n","epoch: 60000 loss: 0.24956138\n","epoch: 65000 loss: 0.24927837\n","epoch: 70000 loss: 0.24877715\n","epoch: 75000 loss: 0.2478496\n","epoch: 80000 loss: 0.24600479\n","epoch: 85000 loss: 0.24184218\n","epoch: 90000 loss: 0.23113453\n","epoch: 95000 loss: 0.20894165\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"9PaQffxoYzpx","colab_type":"text"},"source":["## Visualising with Python"]},{"cell_type":"markdown","metadata":{"id":"inRy6KMEY2ea","colab_type":"text"},"source":["We're going to use the ever-useful [Matplotlib](https://matplotlib.org/) to visualise our losses. 
Matplotlib is a Python library, not a Swift library."]},{"cell_type":"markdown","metadata":{"id":"WPcFFt_kZJGA","colab_type":"text"},"source":["First, we need a handle on [`matplotlib.pyplot'](https://matplotlib.org/api/_as_gen/matplotlib.pyplot.html#module-matplotlib.pyplot), which is the MATLAB style Python API that Matplotlib has, providing for simple programmatic plot"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"YMGdEacnfJqN","scrolled":true,"colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"S49LjmqU6oU8","colab_type":"text"},"source":["## Testing the model"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"YkNFr_D2Kn-f","outputId":"dbe90c10-22b8-4e96-b674-49b67b8392be","colab":{"base_uri":"https://localhost:8080/","height":85}},"source":["print(round(model.inferring(from: [[0, 0], [0, 1], [1, 0], [1, 1]])))"],"execution_count":0,"outputs":[{"output_type":"stream","text":["[[0.0],\r\n"," [1.0],\r\n"," [1.0],\r\n"," [1.0]]\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"R3OcEtkgfA98","colab_type":"text"},"source":["## Python Interopability in Depth\n","\n","This section is an up-to-date derivative of some of the TensorFlow documentation."]},{"cell_type":"markdown","metadata":{"id":"DVnib3G0fEDG","colab_type":"text"},"source":["You can ask for the Python version:"]},{"cell_type":"code","metadata":{"id":"cNcL-sEMfF1e","colab_type":"code","colab":{}},"source":["print(Python.version)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"eM4GJnWDfKJX","colab_type":"text"},"source":["You can also _set_ a specific Python version, if you need. \n","\n","Note: you should run `PythonLibrary`.useVersion right after import Python, before calling any Python code. It cannot be used to dynamically switch Python versions. The Swift class [`PythonLibrary`](https://www.tensorflow.org/swift/api_docs/Structs/PythonLibrary) represents... a Python library!"]},{"cell_type":"code","metadata":{"id":"7y-EZBzhfRzi","colab_type":"code","colab":{}},"source":["// PythonLibrary.useVersion(2)\n","// PythonLibrary.useVersion(3, 7)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"UasavsMfft0k","colab_type":"text"},"source":["Using Swift, you can represent an object from Python using the Swift class [`PythonObject`](https://www.tensorflow.org/swift/api_docs/Structs/PythonObject). Everything Python will return a Swift `PythonObject`."]},{"cell_type":"markdown","metadata":{"id":"0YxX4onBhy8J","colab_type":"text"},"source":["All of Swift's basic types can be converted to a `PythonObject`. 
Some happen implicitly, and some need to be cast from a Swift value to a `PythonObject` using a `PythonObject` initialiser:\n","\n","\n"]},{"cell_type":"code","metadata":{"id":"uGJcsblhfqNm","colab_type":"code","colab":{}},"source":["let pythonInt: PythonObject = 1\n","let pythonFloat: PythonObject = 3.0\n","let pythonString: PythonObject = \"Hello Python!\"\n","let pythonRange: PythonObject = PythonObject(5..<10)\n","let pythonArray: PythonObject = [1, 2, 3, 4]\n","let pythonDict: PythonObject = [\"foo\": [0], \"bar\": [1, 2, 3]]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Au-ypfRViBHN","colab_type":"text"},"source":["To make it easier to use in Swift, `PythonObject` defines most standard operations, including numeric operations, indexing, and iteration:"]},{"cell_type":"code","metadata":{"id":"GW3zEmfiiE1O","colab_type":"code","colab":{}},"source":["print(pythonInt + pythonFloat)\n","print(pythonString[0..<6])\n","print(pythonRange)\n","print(pythonArray[2])\n","print(pythonDict[\"bar\"])"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"2hnyr2Oxigqn","colab_type":"text"},"source":["You can also convert a `PythonObject` back to Swift types:"]},{"cell_type":"code","metadata":{"id":"h1QodWq8ieVK","colab_type":"code","colab":{}},"source":["let int = Int(pythonInt)!\n","let float = Float(pythonFloat)!\n","let string = String(pythonString)!\n","let range = Range(pythonRange)!\n","let array: [Int] = Array(pythonArray)!\n","let dict: [String: [Int]] = Dictionary(pythonDict)!"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"XEzzGpTRipuL","colab_type":"text"},"source":["And you can, of course, perform all the operations that you'd expect. The outputs are, also of course, the same as from Python:"]},{"cell_type":"code","metadata":{"id":"prw2ZxEzioEc","colab_type":"code","colab":{}},"source":["print(Float(int) + float)\n","print(string.prefix(6))\n","print(range)\n","print(array[2])\n","print(dict[\"bar\"]!)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"pLhpxWaspuYV","colab_type":"text"},"source":["You need to be careful when you're using Python and Swift together, as the compiler can't figure out things about Python objects. For example, if you have a `PythonObject` that holds a Python String:"]},{"cell_type":"code","metadata":{"id":"ErpQKgYOpURL","colab_type":"code","outputId":"be7850d8-2eb0-4e90-a75d-6fd4956f02bf","executionInfo":{"status":"ok","timestamp":1572304498530,"user_tz":240,"elapsed":282,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["var myPythonString: PythonObject = \"I am a Python String!\"\n","print(myPythonString)"],"execution_count":0,"outputs":[{"output_type":"stream","text":["I am a Python String!\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"JoUjQ6rgp27q","colab_type":"text"},"source":["And then, for example, try and add 5 to it. 
The compiler won't complain, but it will crash (which could be 10 hours into a complex training run) on runtime:"]},{"cell_type":"code","metadata":{"id":"Mwp7xKbCpdG9","colab_type":"code","colab":{}},"source":["// myPythonString = myPythonString + 5 // this will crash on runtime, but compile just fine\n","print(myPythonString)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ldIYoNcEi0oy","colab_type":"text"},"source":["`PythonObject` defines conformances to many useful, standard Swift protocols: \n","\n","* [Equatable](https://developer.apple.com/documentation/swift/equatable) \n","* [Comparable](https://developer.apple.com/documentation/swift/comparable) \n","* [Hashable](https://developer.apple.com/documentation/swift/hashable) \n","* [SignedNumeric](https://developer.apple.com/documentation/swift/numeric) \n","* [Strideable](https://developer.apple.com/documentation/swift/strideable) \n","* [MutableCollection](https://developer.apple.com/documentation/swift/mutablecollection) \n","* The [ExpressibleBy_Literal](https://developer.apple.com/documentation/swift/swift_standard_library/initialization_with_literals) protocols\n","\n","You can learn more about this in [the documentation](https://www.tensorflow.org/swift/api_docs/Structs/PythonObject). None of the conformances type-safe, and crashes will occur if you attempt to use protocol functionality from an incompatible PythonObject instance."]},{"cell_type":"code","metadata":{"id":"PvcQUC2QmCGd","colab_type":"code","colab":{}},"source":["let one: PythonObject = 1\n","print(one == one)\n","print(one < one)\n","print(one + one)\n","\n","let array: PythonObject = [1, 2, 3]\n","for (i, x) in array.enumerated() {\n"," print(i, x)\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"QodBVesnmVUx","colab_type":"text"},"source":["When you convert a tuple from Python to Swift, you have to statically know the arity of the tuple and call one of the instance methods `PythonObject.tuple2`, `PythonObject.tuple3`, or `PythonObject.tuple4`."]},{"cell_type":"code","metadata":{"id":"lgdRKuL8mU5F","colab_type":"code","colab":{}},"source":["let pythonTuple = Python.tuple([1, 2, 3])\n","print(pythonTuple, Python.len(pythonTuple))\n","\n","// Convert to Swift.\n","let tuple = pythonTuple.tuple3\n","print(tuple)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"5hv0cCDamvxp","colab_type":"text"},"source":["You an also use `Python.builtins` to access all the Python builtins:"]},{"cell_type":"code","metadata":{"id":"zJi3Oei3mvEX","colab_type":"code","colab":{}},"source":["_ = Python.builtins\n","\n","print(Python.type(1))\n","print(Python.len([1, 2, 3]))\n","print(Python.sum([1, 2, 3]))"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Ocn5edp4nAHt","colab_type":"text"},"source":["And, as we showed above with Matplotlib, you can access and import Python modules:"]},{"cell_type":"code","metadata":{"id":"WIMppNRLm_sY","colab_type":"code","colab":{}},"source":["let np = Python.import(\"numpy\")\n","print(np)\n","let zeros = np.ones([2, 3])\n","print(zeros)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"bJ54f2OVnJb9","colab_type":"text"},"source":["There's even support for checking that the Python import is safe:"]},{"cell_type":"code","metadata":{"id":"E8_L9UxOnHmW","colab_type":"code","colab":{}},"source":["let maybeModule = try? 
Python.attemptImport(\"nonexistent_module\")\n","print(maybeModule)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"IwoDsURVnOcq","colab_type":"text"},"source":["There's also explicit support for converting the Swift types `Array`, `ShapedArray`, and `Tensor` to and from Python's `numpy.ndarray`.\n","\n","Note that the conversion will only succeed if the `dtype` of the `numpy.ndarray` is compatible with the `Element` or `Scalar` generic parameter type.\n","\n","For `Array`, conversion from numpy succeeds only if the `numpy.ndarray` is one dimension (1-D)."]},{"cell_type":"code","metadata":{"id":"5zCkIAASnnHF","colab_type":"code","colab":{}},"source":["let numpyArray = np.ones([4], dtype: np.float32)\n","print(\"Swift type:\", type(of: numpyArray))\n","print(\"Python type:\", Python.type(numpyArray))\n","print(numpyArray.shape)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"QLHtvs3wnwBz","colab_type":"text"},"source":["Examples of converting `numpy.ndarray` to Swift types:"]},{"cell_type":"code","metadata":{"id":"2PGYgVRwnvrI","colab_type":"code","colab":{}},"source":["let array: [Float] = Array(numpy: numpyArray)!\n","let shapedArray = ShapedArray(numpy: numpyArray)!\n","let tensor = Tensor(numpy: numpyArray)!"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nWJjJMvWn-LD","colab_type":"text"},"source":["Examples of converting Swift types to `numpy.ndarray`:"]},{"cell_type":"code","metadata":{"id":"SuHZzrV4nqTd","colab_type":"code","colab":{}},"source":["print(array.makeNumpyArray())\n","print(shapedArray.makeNumpyArray())\n","print(tensor.makeNumpyArray())"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"umKxn053n1Ri","colab_type":"text"},"source":["Examples with different `dtype`:"]},{"cell_type":"code","metadata":{"id":"x0wCrXHInznj","colab_type":"code","colab":{}},"source":["let doubleArray: [Double] = Array(numpy: np.ones([3], dtype: np.float))!\n","let intTensor = Tensor(numpy: np.ones([2, 3], dtype: np.int32))!"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/5. Building a GAN/Complete - Building a GAN.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""},"colab":{"name":"Complete - Building a GAN.ipynb","provenance":[],"collapsed_sections":[]},"accelerator":"GPU"},"cells":[{"cell_type":"markdown","metadata":{"id":"M4Q0R87raV1i","colab_type":"text"},"source":["# Complete - Building a GAN"]},{"cell_type":"markdown","metadata":{"id":"JVIJyE2IaV1n","colab_type":"text"},"source":["We're not here to teach the fundamentals of neural networks or ML, but we think GANs are a pretty neat demo. GANs (Generative Adversarial Networks) have two entirely separate networks (models) that work together/compete against each other to generate something.\n","\n","Their overarching goal is to generate new data that is somewhat similar to some of the data they were trained with.\n"," \n","Basically, the **generator** generates fake images that are then used by the **discriminator** to see if they're real. 
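In rough code terms, each training step alternates between the two networks. The sketch below is a minimal, hypothetical illustration of that alternation, written against the same Swift for TensorFlow API used in the XOR example earlier (`Layer`, `Dense`, `SGD`, `gradient`, `optimizer.update`); the `TinyGenerator`/`TinyDiscriminator` types, their sizes, and the use of `meanSquaredError` are placeholder assumptions for illustration only, not the models or loss this notebook actually builds.

```swift
import TensorFlow

// Hypothetical sketch only -- not this notebook's GAN. Two tiny dense models
// stand in for the real Generator and Discriminator defined further down.
struct TinyGenerator: Layer {
    var dense = Dense<Float>(inputSize: 8, outputSize: 4, activation: tanh)
    @differentiable
    func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
        return dense(input)
    }
}

struct TinyDiscriminator: Layer {
    var dense = Dense<Float>(inputSize: 4, outputSize: 1, activation: sigmoid)
    @differentiable
    func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
        return dense(input)
    }
}

var generator = TinyGenerator()
var discriminator = TinyDiscriminator()
let generatorOptimizer = SGD(for: generator, learningRate: 0.01)
let discriminatorOptimizer = SGD(for: discriminator, learningRate: 0.01)

// Stand-ins for a batch of real data and a batch of random noise.
let realSamples = Tensor<Float>(randomNormal: [32, 4])
let noise = Tensor<Float>(randomNormal: [32, 8])

// Step 1: update the discriminator so it scores real samples near 1
// and the generator's fakes near 0.
let fakeSamples = generator(noise)
let 𝛁discriminator = discriminator.gradient { discriminator -> Tensor<Float> in
    let realLoss = meanSquaredError(
        predicted: discriminator(realSamples), expected: Tensor<Float>(ones: [32, 1]))
    let fakeLoss = meanSquaredError(
        predicted: discriminator(fakeSamples), expected: Tensor<Float>(zeros: [32, 1]))
    return realLoss + fakeLoss
}
discriminatorOptimizer.update(&discriminator, along: 𝛁discriminator)

// Step 2: update the generator so the discriminator scores its fakes near 1.
let 𝛁generator = generator.gradient { generator -> Tensor<Float> in
    meanSquaredError(
        predicted: discriminator(generator(noise)), expected: Tensor<Float>(ones: [32, 1]))
}
generatorOptimizer.update(&generator, along: 𝛁generator)
```

The full-size version in this notebook follows the same two-step pattern, just with real MNIST batches and the Generator and Discriminator models it defines. 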
Working together, they both get cleverer and cleverer, until the discriminator cannot distinguish the difference between generator-generated images, and the real thing."]},{"cell_type":"markdown","metadata":{"id":"kQxYVlrFaV1q","colab_type":"text"},"source":["## Imports"]},{"cell_type":"markdown","metadata":{"id":"q9zGryN4aV1s","colab_type":"text"},"source":["We need `Foundation` so we can use the Swift types, `FoundationNetworking` so we can download stuff, `TensorFlow`, so we can use the machine learning bits and pieces, \n","\n","NOTE: If you're running this on your own local install then you might also need to import `Datasets` and `ModelSupport`, which helps you work with existing datasets and files. "]},{"cell_type":"code","metadata":{"id":"TyOmg8j5aV1u","colab_type":"code","colab":{}},"source":["import Foundation\n","import FoundationNetworking\n","import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"jV7fZ8w0bCds","colab_type":"text"},"source":["### Some support code\n","\n","This is a collection of convenience methods and helpers to write/read files, and such. It's quite long, so leave this section collapsed. The code here is a little beyond the scope of the session. Ask us, and if we have time we can go through it with you."]},{"cell_type":"markdown","metadata":{"id":"eR1-Vo_4bEYk","colab_type":"text"},"source":["We need to bring in some support Swift code that allows us to manipulate local files, download files, and get the MNIST dataset. You can expand this and read it if you want, but it's beyond the scope of this session."]},{"cell_type":"code","metadata":{"id":"iZQD1e5ya-7L","colab_type":"code","colab":{}},"source":["// This code comes from the Swift-Models repo, from the TF team.\n","\n","// Copyright 2019 The TensorFlow Authors. 
All Rights Reserved.\n","//\n","// Licensed under the Apache License, Version 2.0 (the \"License\");\n","// you may not use this file except in compliance with the License.\n","// You may obtain a copy of the License at\n","//\n","// http://www.apache.org/licenses/LICENSE-2.0\n","//\n","// Unless required by applicable law or agreed to in writing, software\n","// distributed under the License is distributed on an \"AS IS\" BASIS,\n","// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","// See the License for the specific language governing permissions and\n","// limitations under the License.\n","\n","public struct DatasetUtilities {\n"," public static let curentWorkingDirectoryURL = URL(\n"," fileURLWithPath: FileManager.default.currentDirectoryPath)\n","\n"," public static func fetchResource(\n"," filename: String,\n"," remoteRoot: URL,\n"," localStorageDirectory: URL = curentWorkingDirectoryURL\n"," ) -> Data {\n"," print(\"Loading resource: \\(filename)\")\n","\n"," let resource = ResourceDefinition(\n"," filename: filename,\n"," remoteRoot: remoteRoot,\n"," localStorageDirectory: localStorageDirectory)\n","\n"," let localURL = resource.localURL\n","\n"," if !FileManager.default.fileExists(atPath: localURL.path) {\n"," print(\n"," \"File does not exist locally at expected path: \\(localURL.path) and must be fetched\"\n"," )\n"," fetchFromRemoteAndSave(resource)\n"," }\n","\n"," do {\n"," print(\"Loading local data at: \\(localURL.path)\")\n"," let data = try Data(contentsOf: localURL)\n"," print(\"Succesfully loaded resource: \\(filename)\")\n"," return data\n"," } catch {\n"," fatalError(\"Failed to contents of resource: \\(localURL)\")\n"," }\n"," }\n","\n"," struct ResourceDefinition {\n"," let filename: String\n"," let remoteRoot: URL\n"," let localStorageDirectory: URL\n","\n"," var localURL: URL {\n"," localStorageDirectory.appendingPathComponent(filename)\n"," }\n","\n"," var remoteURL: URL {\n"," remoteRoot.appendingPathComponent(filename).appendingPathExtension(\"gz\")\n"," }\n","\n"," var archiveURL: URL {\n"," localURL.appendingPathExtension(\"gz\")\n"," }\n"," }\n","\n"," static func fetchFromRemoteAndSave(_ resource: ResourceDefinition) {\n"," let remoteLocation = resource.remoteURL\n"," let archiveLocation = resource.archiveURL\n","\n"," do {\n"," print(\"Fetching URL: \\(remoteLocation)...\")\n"," let archiveData = try Data(contentsOf: remoteLocation)\n"," print(\"Writing fetched archive to: \\(archiveLocation.path)\")\n"," try archiveData.write(to: archiveLocation)\n"," } catch {\n"," fatalError(\"Failed to fetch and save resource with error: \\(error)\")\n"," }\n"," print(\"Archive saved to: \\(archiveLocation.path)\")\n","\n"," extractArchive(for: resource)\n"," }\n","\n"," static func extractArchive(for resource: ResourceDefinition) {\n"," print(\"Extracting archive...\")\n","\n"," let archivePath = resource.archiveURL.path\n","\n"," #if os(macOS)\n"," let gunzipLocation = \"/usr/bin/gunzip\"\n"," #else\n"," let gunzipLocation = \"/bin/gunzip\"\n"," #endif\n","\n"," let task = Process()\n"," task.executableURL = URL(fileURLWithPath: gunzipLocation)\n"," task.arguments = [archivePath]\n"," do {\n"," try task.run()\n"," task.waitUntilExit()\n"," } catch {\n"," fatalError(\"Failed to extract \\(archivePath) with error: \\(error)\")\n"," }\n"," }\n","}\n","\n","\n","public struct MNIST {\n"," public let trainingImages: Tensor\n"," public let trainingLabels: Tensor\n"," public let testImages: Tensor\n"," public let testLabels: Tensor\n","\n"," 
public let trainingSize: Int\n"," public let testSize: Int\n","\n"," public let batchSize: Int\n","\n"," public init(\n"," batchSize: Int, flattening: Bool = false, normalizing: Bool = false,\n"," localStorageDirectory: URL = DatasetUtilities.curentWorkingDirectoryURL\n"," ) {\n"," self.batchSize = batchSize\n","\n"," let (trainingImages, trainingLabels) = fetchDataset(\n"," localStorageDirectory: localStorageDirectory,\n"," imagesFilename: \"train-images-idx3-ubyte\",\n"," labelsFilename: \"train-labels-idx1-ubyte\",\n"," flattening: flattening,\n"," normalizing: normalizing)\n","\n"," self.trainingImages = trainingImages\n"," self.trainingLabels = trainingLabels\n"," self.trainingSize = Int(trainingLabels.shape[0])\n","\n"," let (testImages, testLabels) = fetchDataset(\n"," localStorageDirectory: localStorageDirectory,\n"," imagesFilename: \"t10k-images-idx3-ubyte\",\n"," labelsFilename: \"t10k-labels-idx1-ubyte\",\n"," flattening: flattening,\n"," normalizing: normalizing)\n"," self.testImages = testImages\n"," self.testLabels = testLabels\n"," self.testSize = Int(testLabels.shape[0])\n"," }\n","}\n","\n","extension Tensor {\n"," public func minibatch(at index: Int, batchSize: Int) -> Tensor {\n"," let start = index * batchSize\n"," return self[start.. (images: Tensor, labels: Tensor) {\n"," guard let remoteRoot: URL = URL(string: \"http://yann.lecun.com/exdb/mnist\") else {\n"," fatalError(\"Failed to create MNST root url: http://yann.lecun.com/exdb/mnist\")\n"," }\n","\n"," let imagesData = DatasetUtilities.fetchResource(\n"," filename: imagesFilename,\n"," remoteRoot: remoteRoot,\n"," localStorageDirectory: localStorageDirectory)\n"," let labelsData = DatasetUtilities.fetchResource(\n"," filename: labelsFilename,\n"," remoteRoot: remoteRoot,\n"," localStorageDirectory: localStorageDirectory)\n","\n"," let images = [UInt8](imagesData).dropFirst(16).map(Float.init)\n"," let labels = [UInt8](labelsData).dropFirst(8).map(Int32.init)\n","\n"," let rowCount = labels.count\n"," let (imageWidth, imageHeight) = (28, 28)\n","\n"," if flattening {\n"," var flattenedImages = Tensor(shape: [rowCount, imageHeight * imageWidth], scalars: images)\n"," / 255.0\n"," if normalizing {\n"," flattenedImages = flattenedImages * 2.0 - 1.0\n"," }\n"," return (images: flattenedImages, labels: Tensor(labels))\n"," } else {\n"," return (\n"," images:\n"," Tensor(shape: [rowCount, 1, imageHeight, imageWidth], scalars: images)\n"," .transposed(withPermutations: [0, 2, 3, 1]) / 255, // NHWC\n"," labels: Tensor(labels)\n"," )\n"," }\n","}\n","\n","public func createDirectoryIfMissing(at path: String) throws {\n"," guard !FileManager.default.fileExists(atPath: path) else { return }\n"," try FileManager.default.createDirectory(\n"," atPath: path,\n"," withIntermediateDirectories: false,\n"," attributes: nil)\n","}\n","\n","\n","public struct Image {\n"," public enum ByteOrdering {\n"," case bgr\n"," case rgb\n"," }\n","\n"," enum ImageTensor {\n"," case float(data: Tensor)\n"," case uint8(data: Tensor)\n"," }\n","\n"," let imageData: ImageTensor\n","\n"," public init(tensor: Tensor) {\n"," self.imageData = .uint8(data: tensor)\n"," }\n","\n"," public init(tensor: Tensor) {\n"," self.imageData = .float(data: tensor)\n"," }\n","\n"," public init(jpeg url: URL, byteOrdering: ByteOrdering = .rgb) {\n"," let loadedFile = Raw.readFile(filename: StringTensor(url.absoluteString))\n"," let loadedJpeg = Raw.decodeJpeg(contents: loadedFile, channels: 3, dctMethod: \"\")\n"," if byteOrdering == .bgr {\n"," self.imageData = 
.uint8(\n"," data: Raw.reverse(loadedJpeg, dims: Tensor([false, false, false, true])))\n"," } else {\n"," self.imageData = .uint8(data: loadedJpeg)\n"," }\n"," }\n","\n"," public func save(to url: URL, quality: Int64 = 95) {\n"," // This currently only saves in grayscale.\n"," let outputImageData: Tensor\n"," switch self.imageData {\n"," case let .uint8(data): outputImageData = data\n"," case let .float(data):\n"," let lowerBound = data.min(alongAxes: [0, 1])\n"," let upperBound = data.max(alongAxes: [0, 1])\n"," let adjustedData = (data - lowerBound) * (255.0 / (upperBound - lowerBound))\n"," outputImageData = Tensor(adjustedData)\n"," }\n","\n"," let encodedJpeg = Raw.encodeJpeg(\n"," image: outputImageData, format: .grayscale, quality: quality, xmpMetadata: \"\")\n"," Raw.writeFile(filename: StringTensor(url.absoluteString), contents: encodedJpeg)\n"," }\n","\n"," public func resized(to size: (Int, Int)) -> Image {\n"," switch self.imageData {\n"," case let .uint8(data):\n"," return Image(\n"," tensor: Raw.resizeBilinear(\n"," images: Tensor([data]),\n"," size: Tensor([Int32(size.0), Int32(size.1)])))\n"," case let .float(data):\n"," return Image(\n"," tensor: Raw.resizeBilinear(\n"," images: Tensor([data]),\n"," size: Tensor([Int32(size.0), Int32(size.1)])))\n"," }\n","\n"," }\n","}\n","\n","public func saveImage(_ tensor: Tensor, size: (Int, Int), directory: String, name: String) throws {\n"," try createDirectoryIfMissing(at: directory)\n"," let reshapedTensor = tensor.reshaped(to: [size.0, size.1, 1])\n"," let image = Image(tensor: reshapedTensor)\n"," let outputURL = URL(fileURLWithPath:\"\\(directory)\\(name).jpg\")\n"," image.save(to: outputURL)\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"-s10JC3icx_z","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"1DhL7o5AaV11","colab_type":"text"},"source":["## Parameters"]},{"cell_type":"markdown","metadata":{"id":"DNBBx3praV12","colab_type":"text"},"source":["Our parameters are as follows:\n","\n","* `epochCount` is how many epochs it should train for. 
10 is a good number to get a reasonable GAN in this case.\n","* `batchSize` is the size of a batch that we're going to ask the MNIST dataset for.\n","* `outputFolder` defines the output folder where we'll be writing things on the file system.\n","* `imageHeight` and `imageWidth`, together with `imageSize` define the output image size that the Generator will make, as well as (naturally) the input image size the Discriminator will take.\n","* `latentSize` defines the latent representation size used by the Generator to generate.\n","* `testImageGridSize` defines the size of the grid of images that we'll generate to look at the result of the GAN."]},{"cell_type":"code","metadata":{"id":"mwmHgWNSaV13","colab_type":"code","colab":{}},"source":["let epochCount = 10\n","let batchSize = 32\n","let outputFolder = \"./MNIST_GAN_Output/\"\n","let imageHeight = 28\n","let imageWidth = 28\n","let imageSize = imageHeight * imageWidth\n","let latentSize = 64\n","let testImageGridSize = 4"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"7nVbPqOITi5T","colab_type":"text"},"source":["## Convenience helper to save an image grid"]},{"cell_type":"code","metadata":{"id":"FtGlyeViTlEa","colab_type":"code","colab":{}},"source":["func saveImageGrid(_ testImage: Tensor, name: String) throws {\n"," var gridImage = testImage.reshaped(\n"," to: [\n"," testImageGridSize, testImageGridSize,\n"," imageHeight, imageWidth,\n"," ])\n","\n"," // Add padding.\n"," gridImage = gridImage.padded(forSizes: [(0, 0), (0, 0), (1, 1), (1, 1)], with: 1)\n","\n"," // Transpose to create single image.\n"," gridImage = gridImage.transposed(withPermutations: [0, 2, 1, 3])\n"," gridImage = gridImage.reshaped(\n"," to: [\n"," (imageHeight + 2) * testImageGridSize,\n"," (imageWidth + 2) * testImageGridSize,\n"," ])\n"," \n"," // Convert [-1, 1] range to [0, 1] range.\n"," gridImage = (gridImage + 1) / 2\n","\n"," try saveImage(\n"," gridImage, size: (gridImage.shape[0], gridImage.shape[1]), directory: outputFolder,\n"," name: name)\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"bccfwHeLaV18","colab_type":"text"},"source":["# Generator Model"]},{"cell_type":"markdown","metadata":{"id":"3M52cWDbaV19","colab_type":"text"},"source":["Our `Generator` is a `Struct` adhering to the [`Layer` Protocol](https://www.tensorflow.org/swift/api_docs/Protocols/Layer) (which is part of Swift For TensorFlow's API). The Generator has the following layers:\n","\n","* `dense1`, a `Dense` layer (a [densely-connected layer](https://www.tensorflow.org/swift/api_docs/Structs/Dense)) that takes an `inputSize` of `latentSize` (defined earlier), and an `outputSize` of `latentSize*2`. The `activation` function determines the output of each node in the layer. There are many available activations, but [ReLU](https://www.tensorflow.org/swift/api_docs/Functions#leakyrelu_:alpha:) is common for hidden layers.\n","\n","* `dense2` is likewise, but with an `inputSize` of `latentSize*2` (taking the output of the previous layer), and an `outputSize` of `latentSize*4`.\n","\n","* `dense3` is likewise, taking the previous output as input, and outputting it larger.\n","\n","* `dense4` is, again, the same, but has an `outputSize` of `imageSize` instead (our final desired image size). 
It uses [tanh](https://www.tensorflow.org/swift/api_docs/Functions#tanh_:) as its activation, tanh (hyperbolic tangent) is sigmoidal (s-shaped) and outputs values that range from -1 to 1.\n","\n","* three [`BatchNorm`]() layers, `batchnorm1`, `batchnorm2`, `batchnorm3`, that normalise the activations of the previous layer at each batch by applying transformations that maintain the mean activation close to 0 and the activation standard deviation close to 1. `featureCount` is the number of features.\n"," \n","Finally, we have our `callAsFunction()` method, which sequences through the `Dense` layers, using the `BatchNorm` layers to normalise, before finally returning the output of the fourth and final `Dense` layer.\n","\n","\n","\n"," "]},{"cell_type":"code","metadata":{"id":"vrqazoRDaV1-","colab_type":"code","colab":{}},"source":["struct Generator: Layer {\n"," var dense1 = Dense(\n"," inputSize: latentSize, outputSize: latentSize * 2,\n"," activation: { leakyRelu($0) })\n","\n"," var dense2 = Dense(\n"," inputSize: latentSize * 2, outputSize: latentSize * 4,\n"," activation: { leakyRelu($0) })\n","\n"," var dense3 = Dense(\n"," inputSize: latentSize * 4, outputSize: latentSize * 8,\n"," activation: { leakyRelu($0) })\n","\n"," var dense4 = Dense(\n"," inputSize: latentSize * 8, outputSize: imageSize,\n"," activation: tanh)\n","\n"," var batchnorm1 = BatchNorm(featureCount: latentSize * 2)\n"," var batchnorm2 = BatchNorm(featureCount: latentSize * 4)\n"," var batchnorm3 = BatchNorm(featureCount: latentSize * 8)\n","\n"," @differentiable\n"," func callAsFunction(_ input: Tensor) -> Tensor {\n"," let x1 = batchnorm1(dense1(input))\n"," let x2 = batchnorm2(dense2(x1))\n"," let x3 = batchnorm3(dense3(x2))\n"," return dense4(x3)\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"sHI61VGCaV2D","colab_type":"text"},"source":["## Discriminator Model"]},{"cell_type":"markdown","metadata":{"id":"vzRnmzTBaV2E","colab_type":"text"},"source":["Our `Discriminator` is a `Struct` adhering to the `Layer` Protocol. The `Discriminator` has the following layers:\n","\n","* `dense1`, a `Dense` layer, taking an `inputSize` of `imageSize`, outputting an `outputSize` of 256. 
It also uses ReLU for activation.\n","\n","* `dense2` and `dense3`, which take an `inputSize` and `outputSize` of 256 and 64, and 64 and 16, respectively, also using ReLU.\n","\n","* `dense4`, which takes the `inputSize` of 16, and has an `outputSize` of 1, and using `identity` as the activation (just linear).\n","\n","Finally, we have our `callAsFunction()` method, which just sequences the input through the four (`Dense`) layers."]},{"cell_type":"code","metadata":{"id":"QYpSCYd1aV2F","colab_type":"code","colab":{}},"source":["struct Discriminator: Layer {\n"," var dense1 = Dense(\n"," inputSize: imageSize, outputSize: 256,\n"," activation: { leakyRelu($0) })\n","\n"," var dense2 = Dense(\n"," inputSize: 256, outputSize: 64,\n"," activation: { leakyRelu($0) })\n","\n"," var dense3 = Dense(\n"," inputSize: 64, outputSize: 16,\n"," activation: { leakyRelu($0) })\n","\n"," var dense4 = Dense(\n"," inputSize: 16, outputSize: 1,\n"," activation: identity)\n","\n"," @differentiable\n"," func callAsFunction(_ input: Tensor) -> Tensor {\n"," input.sequenced(through: dense1, dense2, dense3, dense4)\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"VD1eoCIkaV2J","colab_type":"text"},"source":["## Loss functions"]},{"cell_type":"markdown","metadata":{"id":"j9J4gWIFaV2K","colab_type":"text"},"source":["### Discriminator Loss Function"]},{"cell_type":"markdown","metadata":{"id":"8cWfh4Y8aV2L","colab_type":"text"},"source":["Our `discriminatorLoss()` function, which takes both the real and fake [logits](https://datascience.stackexchange.com/a/31045), and returns the `realLoss` and `fakeLoss`, via the `sigmoidCrossEntropy()` function. That's it!"]},{"cell_type":"code","metadata":{"id":"zwjx0LbSaV2M","colab_type":"code","colab":{}},"source":["@differentiable\n","func discriminatorLoss(realLogits: Tensor, fakeLogits: Tensor) -> Tensor {\n"," let realLoss = sigmoidCrossEntropy(\n"," logits: realLogits,\n"," labels: Tensor(ones: realLogits.shape))\n"," let fakeLoss = sigmoidCrossEntropy(\n"," logits: fakeLogits,\n"," labels: Tensor(zeros: fakeLogits.shape))\n"," return realLoss + fakeLoss\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"CHzdj9kbaV2Q","colab_type":"text"},"source":["### Generator Loss Function"]},{"cell_type":"markdown","metadata":{"id":"QFn5eBEwaV2R","colab_type":"text"},"source":["Our `generatorLoss()` function takes the fake logits, and calculates the `sigmoidCrossEntropy()`."]},{"cell_type":"code","metadata":{"id":"h9uAagmqaV2S","colab_type":"code","colab":{}},"source":["@differentiable\n","func generatorLoss(fakeLogits: Tensor) -> Tensor {\n"," sigmoidCrossEntropy(\n"," logits: fakeLogits,\n"," labels: Tensor(ones: fakeLogits.shape))\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"4Hai3lkCaV2W","colab_type":"text"},"source":["### Random Samples"]},{"cell_type":"markdown","metadata":{"id":"wz9iK6u-aV2X","colab_type":"text"},"source":["Our `sampleVector()` function returns random stuff, that we use for both the Discriminator and Generator later on."]},{"cell_type":"code","metadata":{"id":"DnIT9U4qaV2Y","colab_type":"code","colab":{}},"source":["/// Returns `size` samples of noise vector.\n","func sampleVector(size: Int) -> Tensor {\n"," Tensor(randomNormal: [size, latentSize])\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"tpkVE3qZaV2a","colab_type":"text"},"source":["## Setting up to 
train"]},{"cell_type":"markdown","metadata":{"id":"QdDB8KtZaV2b","colab_type":"text"},"source":["### Getting a dataset"]},{"cell_type":"markdown","metadata":{"id":"Lkg4MrEbaV2c","colab_type":"text"},"source":["We're going to use the \"Hello, world!\" of machine learning, MNIST, as our dataset. This comes from some of the helper libraries we've provided for this session (which, in turn, are largely drawn from deep in the bowels of the TensorFlow project):"]},{"cell_type":"code","metadata":{"id":"DdzHFPKFaV2d","colab_type":"code","outputId":"1b42a039-2680-4a6d-beea-da01ccb9bee5","executionInfo":{"status":"ok","timestamp":1572298964152,"user_tz":240,"elapsed":28051,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":583}},"source":["let dataset = MNIST(batchSize: batchSize, flattening: true, normalizing: true)"],"execution_count":0,"outputs":[{"output_type":"stream","text":["Loading resource: train-images-idx3-ubyte\r\n","File does not exist locally at expected path: /content/train-images-idx3-ubyte and must be fetched\r\n","Fetching URL: http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz...\n","Writing fetched archive to: /content/train-images-idx3-ubyte.gz\n","Archive saved to: /content/train-images-idx3-ubyte.gz\n","Extracting archive...\n","Loading local data at: /content/train-images-idx3-ubyte\n","Succesfully loaded resource: train-images-idx3-ubyte\n","Loading resource: train-labels-idx1-ubyte\n","File does not exist locally at expected path: /content/train-labels-idx1-ubyte and must be fetched\n","Fetching URL: http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz...\n","Writing fetched archive to: /content/train-labels-idx1-ubyte.gz\n","Archive saved to: /content/train-labels-idx1-ubyte.gz\n","Extracting archive...\n","Loading local data at: /content/train-labels-idx1-ubyte\n","Succesfully loaded resource: train-labels-idx1-ubyte\n","Loading resource: t10k-images-idx3-ubyte\n","File does not exist locally at expected path: /content/t10k-images-idx3-ubyte and must be fetched\n","Fetching URL: http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz...\n","Writing fetched archive to: /content/t10k-images-idx3-ubyte.gz\n","Archive saved to: /content/t10k-images-idx3-ubyte.gz\n","Extracting archive...\n","Loading local data at: /content/t10k-images-idx3-ubyte\n","Succesfully loaded resource: t10k-images-idx3-ubyte\n","Loading resource: t10k-labels-idx1-ubyte\n","File does not exist locally at expected path: /content/t10k-labels-idx1-ubyte and must be fetched\n","Fetching URL: http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz...\n","Writing fetched archive to: /content/t10k-labels-idx1-ubyte.gz\n","Archive saved to: /content/t10k-labels-idx1-ubyte.gz\n","Extracting archive...\n","Loading local data at: /content/t10k-labels-idx1-ubyte\n","Succesfully loaded resource: t10k-labels-idx1-ubyte\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"7tzuMHzIaV2g","colab_type":"text"},"source":["### Creating a generator and a discriminator"]},{"cell_type":"code","metadata":{"id":"NX939dWAaV2h","colab_type":"code","colab":{}},"source":["var generator = Generator()\n","var discriminator = Discriminator()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nY07PtXvaV2l","colab_type":"text"},"source":["### Creating optimisers for the generator and the 
discriminator"]},{"cell_type":"markdown","metadata":{"id":"SyHqhL4daV2l","colab_type":"text"},"source":["We need an optimization algorithm for both the models. In each case, we'll use the [Adam](https://www.tensorflow.org/swift/api_docs/Classes/Adam) optimisation algorithm. It's a popular choice!"]},{"cell_type":"markdown","metadata":{"id":"aAloZrVscDxi","colab_type":"text"},"source":["#### Generator's optimizer"]},{"cell_type":"code","metadata":{"id":"kUEvej91aV2m","colab_type":"code","colab":{}},"source":["let optG = Adam(for: generator, learningRate: 2e-4, beta1: 0.5)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"qRmW0oJJcHx5","colab_type":"text"},"source":["#### Discriminator's optimizer"]},{"cell_type":"code","metadata":{"id":"Xp54ZtpOcKIT","colab_type":"code","colab":{}},"source":["let optD = Adam(for: discriminator, learningRate: 2e-4, beta1: 0.5)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"5VjtOMYlaV2t","colab_type":"text"},"source":["## Training and Inference"]},{"cell_type":"markdown","metadata":{"id":"FHy5C76Zc8nV","colab_type":"text"},"source":["First, we'll print out a message to say we're starting training:"]},{"cell_type":"code","metadata":{"id":"BM099DZAc-UE","colab_type":"code","outputId":"314d0bd9-e339-49cd-9ae9-11190864d0b7","executionInfo":{"status":"ok","timestamp":1572298965076,"user_tz":240,"elapsed":28924,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["print(\"GAN: Training Begins\")"],"execution_count":0,"outputs":[{"output_type":"stream","text":["GAN: Training Begins\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"8trm-rcJc_MR","colab_type":"text"},"source":["To train, we iterate through to our desired `epochCount`, runs training using both the Generator and the Discriminator, and then runs an inference to generate a grid of images and print out the current epoch, and the generator's loss:\n","\n","Specifically, in each epoch, we:\n","* set the [`Context`](https://www.tensorflow.org/swift/api_docs/Structs/Context) to `.training` so that, for example, `BatchNorm` layers (like we're using in our Generator) will compute mean and variance when applied to inputs\n","* iterate through the training data batch and:\n"," * create a random sample using the `sampleVector()` function we wrote earlier\n"," * for the generator's gradient (𝛁), use the random sample and the output of the discriminator using that random sample to calculate a loss using the `generatorLoss()` function we wrote earlier\n"," * update the generator model, along the generator gradient, using the generator's optimizer\n"," * get a batch of of real images from the training data, as well as another random sample using `sampleVector()`, and use the generator to generate some generated (aka fake) images using the random sample data\n"," * for the discriminator's gradient (𝛁), calculate and return the loss between the generator running on the real images and on the fake images\n"," * update the discriminator model, along the discriminator gradient, using the discriminator's optimizer\n","* after iterating through the training data batch, we set the [`Context`](https://www.tensorflow.org/swift/api_docs/Structs/Context) to `.inferece`\n","* then (after training for that epoch) we generate a test image, using the generator and random sample 
of the size our parameters dictate for the test image grid\n"," * and attempt to save that test image, using one of our convenience functions, `saveImageGrid()`\n","* we then check the loss on the generator for the test image, with our `generatorLoss()` function\n","* and print out the current epoch and generator loss"]},{"cell_type":"code","metadata":{"id":"ArTjDq3RaV2v","colab_type":"code","outputId":"20dd4423-b705-4d87-c5de-d6e65366761d","executionInfo":{"status":"ok","timestamp":1572300017891,"user_tz":240,"elapsed":1081728,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":194}},"source":["for epoch in 1...epochCount {\n","\n"," Context.local.learningPhase = .training\n","\n"," for i in 0 ..< dataset.trainingSize / batchSize {\n"," // Perform alternative update.\n"," // Update generator.\n"," let vec1 = sampleVector(size: batchSize)\n","\n"," let 𝛁generator = generator.gradient { generator -> Tensor in\n"," let fakeImages = generator(vec1)\n"," let fakeLogits = discriminator(fakeImages)\n"," let loss = generatorLoss(fakeLogits: fakeLogits)\n"," return loss\n"," }\n"," optG.update(&generator, along: 𝛁generator)\n","\n"," // Update discriminator.\n"," let realImages = dataset.trainingImages.minibatch(at: i, batchSize: batchSize)\n"," let vec2 = sampleVector(size: batchSize)\n"," let fakeImages = generator(vec2)\n","\n"," let 𝛁discriminator = discriminator.gradient { discriminator -> Tensor in\n"," let realLogits = discriminator(realImages)\n"," let fakeLogits = discriminator(fakeImages)\n"," let loss = discriminatorLoss(realLogits: realLogits, fakeLogits: fakeLogits)\n"," return loss\n"," }\n"," optD.update(&discriminator, along: 𝛁discriminator)\n"," }\n","\n"," // Start inference phase.\n"," Context.local.learningPhase = .inference\n"," let testImage = generator(sampleVector(size: testImageGridSize * testImageGridSize))\n","\n"," do {\n"," try saveImageGrid(testImage, name: \"epoch-\\(epoch)-output\")\n"," } catch {\n"," print(\"Could not save image grid with error: \\(error)\")\n"," }\n","\n"," let lossG = generatorLoss(fakeLogits: testImage)\n"," print(\"Current Epoch: \\(epoch) | Generator Loss: \\(lossG)\")\n","}"],"execution_count":0,"outputs":[{"output_type":"stream","text":["Current Epoch: 1 | Generator Loss: 1.1418108\n","Current Epoch: 2 | Generator Loss: 1.1449372\n","Current Epoch: 3 | Generator Loss: 1.1592628\n","Current Epoch: 4 | Generator Loss: 1.1639445\n","Current Epoch: 5 | Generator Loss: 1.1449314\n","Current Epoch: 6 | Generator Loss: 1.1559143\n","Current Epoch: 7 | Generator Loss: 1.1588637\n","Current Epoch: 8 | Generator Loss: 1.1727781\n","Current Epoch: 9 | Generator Loss: 1.1668153\n","Current Epoch: 10 | Generator Loss: 1.1905712\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"BvC2vj3-jxHs","colab_type":"text"},"source":["## Extra Credit"]},{"cell_type":"markdown","metadata":{"id":"RCIu9uc4jy7Z","colab_type":"text"},"source":["Our suggestions for what to do next:\n","\n","\n","1. use a Python library to visualise some of this in the notebook, either via graphs, or via displaying images inline in the notebook\n","2. modify the GAN to be able to generate one image of a digit at a time, upon request (e.g. make a function that lets you request a generated 5, or a generated 6)\n","3. 
modify the GAN to generate something other than MNIST digits \n","\n"]}]} -------------------------------------------------------------------------------- /Notebooks/5. Building a GAN/Starter - Building a GAN.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""},"colab":{"name":"Starter - Building a GAN.ipynb","provenance":[],"collapsed_sections":["jV7fZ8w0bCds"]},"accelerator":"GPU"},"cells":[{"cell_type":"markdown","metadata":{"id":"M4Q0R87raV1i","colab_type":"text"},"source":["# Starter - Building a GAN"]},{"cell_type":"markdown","metadata":{"id":"hWXTwKJ97B2T","colab_type":"text"},"source":["**⚠️ This is the starter version, for you to code along with live.** "]},{"cell_type":"markdown","metadata":{"id":"JVIJyE2IaV1n","colab_type":"text"},"source":["We're not here to teach the fundamentals of neural networks or ML, but we think GANs are a pretty neat demo. GANs (Generative Adversarial Networks) have two entirely separate networks (models) that work together/compete against each other to generate something.\n","\n","Their overarching goal is to generate new data that is somewhat similar to some of the data they were trained with.\n"," \n","Basically, the **generator** generates fake images that are then used by the **discriminator** to see if they're real. Working together, they both get cleverer and cleverer, until the discriminator cannot distinguish the difference between generator-generated images, and the real thing."]},{"cell_type":"markdown","metadata":{"id":"kQxYVlrFaV1q","colab_type":"text"},"source":["## Imports"]},{"cell_type":"markdown","metadata":{"id":"q9zGryN4aV1s","colab_type":"text"},"source":["We need `Foundation` so we can use the Swift types, `FoundationNetworking` so we can download stuff, `TensorFlow`, so we can use the machine learning bits and pieces, \n","\n","NOTE: If you're running this on your own local install then you might also need to import `Datasets` and `ModelSupport`, which helps you work with existing datasets and files. "]},{"cell_type":"code","metadata":{"id":"TyOmg8j5aV1u","colab_type":"code","colab":{}},"source":["import Foundation\n","import FoundationNetworking\n","import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"jV7fZ8w0bCds","colab_type":"text"},"source":["### Some support code (collapse this)\n","\n","This is a collection of convenience methods and helpers to write/read files, and such. It's quite long, so leave this section collapsed. The code here is a little beyond the scope of the session. Ask us, and if we have time we can go through it with you."]},{"cell_type":"markdown","metadata":{"id":"eR1-Vo_4bEYk","colab_type":"text"},"source":["We need to bring in some support Swift code that allows us to manipulate local files, download files, and get the MNIST dataset. You can expand this and read it if you want, but it's beyond the scope of this session."]},{"cell_type":"code","metadata":{"id":"iZQD1e5ya-7L","colab_type":"code","colab":{}},"source":["// This code comes from the Swift-Models repo, from the TF team.\n","\n","// Copyright 2019 The TensorFlow Authors. 
All Rights Reserved.\n","//\n","// Licensed under the Apache License, Version 2.0 (the \"License\");\n","// you may not use this file except in compliance with the License.\n","// You may obtain a copy of the License at\n","//\n","// http://www.apache.org/licenses/LICENSE-2.0\n","//\n","// Unless required by applicable law or agreed to in writing, software\n","// distributed under the License is distributed on an \"AS IS\" BASIS,\n","// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","// See the License for the specific language governing permissions and\n","// limitations under the License.\n","\n","public struct DatasetUtilities {\n"," public static let curentWorkingDirectoryURL = URL(\n"," fileURLWithPath: FileManager.default.currentDirectoryPath)\n","\n"," public static func fetchResource(\n"," filename: String,\n"," remoteRoot: URL,\n"," localStorageDirectory: URL = curentWorkingDirectoryURL\n"," ) -> Data {\n"," print(\"Loading resource: \\(filename)\")\n","\n"," let resource = ResourceDefinition(\n"," filename: filename,\n"," remoteRoot: remoteRoot,\n"," localStorageDirectory: localStorageDirectory)\n","\n"," let localURL = resource.localURL\n","\n"," if !FileManager.default.fileExists(atPath: localURL.path) {\n"," print(\n"," \"File does not exist locally at expected path: \\(localURL.path) and must be fetched\"\n"," )\n"," fetchFromRemoteAndSave(resource)\n"," }\n","\n"," do {\n"," print(\"Loading local data at: \\(localURL.path)\")\n"," let data = try Data(contentsOf: localURL)\n"," print(\"Succesfully loaded resource: \\(filename)\")\n"," return data\n"," } catch {\n"," fatalError(\"Failed to contents of resource: \\(localURL)\")\n"," }\n"," }\n","\n"," struct ResourceDefinition {\n"," let filename: String\n"," let remoteRoot: URL\n"," let localStorageDirectory: URL\n","\n"," var localURL: URL {\n"," localStorageDirectory.appendingPathComponent(filename)\n"," }\n","\n"," var remoteURL: URL {\n"," remoteRoot.appendingPathComponent(filename).appendingPathExtension(\"gz\")\n"," }\n","\n"," var archiveURL: URL {\n"," localURL.appendingPathExtension(\"gz\")\n"," }\n"," }\n","\n"," static func fetchFromRemoteAndSave(_ resource: ResourceDefinition) {\n"," let remoteLocation = resource.remoteURL\n"," let archiveLocation = resource.archiveURL\n","\n"," do {\n"," print(\"Fetching URL: \\(remoteLocation)...\")\n"," let archiveData = try Data(contentsOf: remoteLocation)\n"," print(\"Writing fetched archive to: \\(archiveLocation.path)\")\n"," try archiveData.write(to: archiveLocation)\n"," } catch {\n"," fatalError(\"Failed to fetch and save resource with error: \\(error)\")\n"," }\n"," print(\"Archive saved to: \\(archiveLocation.path)\")\n","\n"," extractArchive(for: resource)\n"," }\n","\n"," static func extractArchive(for resource: ResourceDefinition) {\n"," print(\"Extracting archive...\")\n","\n"," let archivePath = resource.archiveURL.path\n","\n"," #if os(macOS)\n"," let gunzipLocation = \"/usr/bin/gunzip\"\n"," #else\n"," let gunzipLocation = \"/bin/gunzip\"\n"," #endif\n","\n"," let task = Process()\n"," task.executableURL = URL(fileURLWithPath: gunzipLocation)\n"," task.arguments = [archivePath]\n"," do {\n"," try task.run()\n"," task.waitUntilExit()\n"," } catch {\n"," fatalError(\"Failed to extract \\(archivePath) with error: \\(error)\")\n"," }\n"," }\n","}\n","\n","\n","public struct MNIST {\n"," public let trainingImages: Tensor\n"," public let trainingLabels: Tensor\n"," public let testImages: Tensor\n"," public let testLabels: Tensor\n","\n"," 
public let trainingSize: Int\n"," public let testSize: Int\n","\n"," public let batchSize: Int\n","\n"," public init(\n"," batchSize: Int, flattening: Bool = false, normalizing: Bool = false,\n"," localStorageDirectory: URL = DatasetUtilities.curentWorkingDirectoryURL\n"," ) {\n"," self.batchSize = batchSize\n","\n"," let (trainingImages, trainingLabels) = fetchDataset(\n"," localStorageDirectory: localStorageDirectory,\n"," imagesFilename: \"train-images-idx3-ubyte\",\n"," labelsFilename: \"train-labels-idx1-ubyte\",\n"," flattening: flattening,\n"," normalizing: normalizing)\n","\n"," self.trainingImages = trainingImages\n"," self.trainingLabels = trainingLabels\n"," self.trainingSize = Int(trainingLabels.shape[0])\n","\n"," let (testImages, testLabels) = fetchDataset(\n"," localStorageDirectory: localStorageDirectory,\n"," imagesFilename: \"t10k-images-idx3-ubyte\",\n"," labelsFilename: \"t10k-labels-idx1-ubyte\",\n"," flattening: flattening,\n"," normalizing: normalizing)\n"," self.testImages = testImages\n"," self.testLabels = testLabels\n"," self.testSize = Int(testLabels.shape[0])\n"," }\n","}\n","\n","extension Tensor {\n"," public func minibatch(at index: Int, batchSize: Int) -> Tensor {\n"," let start = index * batchSize\n"," return self[start.. (images: Tensor, labels: Tensor) {\n"," guard let remoteRoot: URL = URL(string: \"http://yann.lecun.com/exdb/mnist\") else {\n"," fatalError(\"Failed to create MNST root url: http://yann.lecun.com/exdb/mnist\")\n"," }\n","\n"," let imagesData = DatasetUtilities.fetchResource(\n"," filename: imagesFilename,\n"," remoteRoot: remoteRoot,\n"," localStorageDirectory: localStorageDirectory)\n"," let labelsData = DatasetUtilities.fetchResource(\n"," filename: labelsFilename,\n"," remoteRoot: remoteRoot,\n"," localStorageDirectory: localStorageDirectory)\n","\n"," let images = [UInt8](imagesData).dropFirst(16).map(Float.init)\n"," let labels = [UInt8](labelsData).dropFirst(8).map(Int32.init)\n","\n"," let rowCount = labels.count\n"," let (imageWidth, imageHeight) = (28, 28)\n","\n"," if flattening {\n"," var flattenedImages = Tensor(shape: [rowCount, imageHeight * imageWidth], scalars: images)\n"," / 255.0\n"," if normalizing {\n"," flattenedImages = flattenedImages * 2.0 - 1.0\n"," }\n"," return (images: flattenedImages, labels: Tensor(labels))\n"," } else {\n"," return (\n"," images:\n"," Tensor(shape: [rowCount, 1, imageHeight, imageWidth], scalars: images)\n"," .transposed(withPermutations: [0, 2, 3, 1]) / 255, // NHWC\n"," labels: Tensor(labels)\n"," )\n"," }\n","}\n","\n","public func createDirectoryIfMissing(at path: String) throws {\n"," guard !FileManager.default.fileExists(atPath: path) else { return }\n"," try FileManager.default.createDirectory(\n"," atPath: path,\n"," withIntermediateDirectories: false,\n"," attributes: nil)\n","}\n","\n","\n","public struct Image {\n"," public enum ByteOrdering {\n"," case bgr\n"," case rgb\n"," }\n","\n"," enum ImageTensor {\n"," case float(data: Tensor)\n"," case uint8(data: Tensor)\n"," }\n","\n"," let imageData: ImageTensor\n","\n"," public init(tensor: Tensor) {\n"," self.imageData = .uint8(data: tensor)\n"," }\n","\n"," public init(tensor: Tensor) {\n"," self.imageData = .float(data: tensor)\n"," }\n","\n"," public init(jpeg url: URL, byteOrdering: ByteOrdering = .rgb) {\n"," let loadedFile = Raw.readFile(filename: StringTensor(url.absoluteString))\n"," let loadedJpeg = Raw.decodeJpeg(contents: loadedFile, channels: 3, dctMethod: \"\")\n"," if byteOrdering == .bgr {\n"," self.imageData = 
.uint8(\n"," data: Raw.reverse(loadedJpeg, dims: Tensor([false, false, false, true])))\n"," } else {\n"," self.imageData = .uint8(data: loadedJpeg)\n"," }\n"," }\n","\n"," public func save(to url: URL, quality: Int64 = 95) {\n"," // This currently only saves in grayscale.\n"," let outputImageData: Tensor\n"," switch self.imageData {\n"," case let .uint8(data): outputImageData = data\n"," case let .float(data):\n"," let lowerBound = data.min(alongAxes: [0, 1])\n"," let upperBound = data.max(alongAxes: [0, 1])\n"," let adjustedData = (data - lowerBound) * (255.0 / (upperBound - lowerBound))\n"," outputImageData = Tensor(adjustedData)\n"," }\n","\n"," let encodedJpeg = Raw.encodeJpeg(\n"," image: outputImageData, format: .grayscale, quality: quality, xmpMetadata: \"\")\n"," Raw.writeFile(filename: StringTensor(url.absoluteString), contents: encodedJpeg)\n"," }\n","\n"," public func resized(to size: (Int, Int)) -> Image {\n"," switch self.imageData {\n"," case let .uint8(data):\n"," return Image(\n"," tensor: Raw.resizeBilinear(\n"," images: Tensor([data]),\n"," size: Tensor([Int32(size.0), Int32(size.1)])))\n"," case let .float(data):\n"," return Image(\n"," tensor: Raw.resizeBilinear(\n"," images: Tensor([data]),\n"," size: Tensor([Int32(size.0), Int32(size.1)])))\n"," }\n","\n"," }\n","}\n","\n","public func saveImage(_ tensor: Tensor, size: (Int, Int), directory: String, name: String) throws {\n"," try createDirectoryIfMissing(at: directory)\n"," let reshapedTensor = tensor.reshaped(to: [size.0, size.1, 1])\n"," let image = Image(tensor: reshapedTensor)\n"," let outputURL = URL(fileURLWithPath:\"\\(directory)\\(name).jpg\")\n"," image.save(to: outputURL)\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"-s10JC3icx_z","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"1DhL7o5AaV11","colab_type":"text"},"source":["## Parameters"]},{"cell_type":"markdown","metadata":{"id":"DNBBx3praV12","colab_type":"text"},"source":["Our parameters are as follows:\n","\n","* `epochCount` is how many epochs it should train for. 
10 is a good number to get a reasonable GAN in this case.\n","* `batchSize` is the size of a batch that we're going to ask the MNIST dataset for.\n","* `outputFolder` defines the output folder where we'll be writing things on the file system.\n","* `imageHeight` and `imageWidth`, together with `imageSize` define the output image size that the Generator will make, as well as (naturally) the input image size the Discriminator will take.\n","* `latentSize` defines the latent representation size used by the Generator to generate.\n","* `testImageGridSize` defines the size of the grid of images that we'll generate to look at the result of the GAN."]},{"cell_type":"code","metadata":{"id":"mwmHgWNSaV13","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"7nVbPqOITi5T","colab_type":"text"},"source":["## Convenience helper to save an image grid"]},{"cell_type":"code","metadata":{"id":"FtGlyeViTlEa","colab_type":"code","colab":{}},"source":["func saveImageGrid(_ testImage: Tensor, name: String) throws {\n"," var gridImage = testImage.reshaped(\n"," to: [\n"," testImageGridSize, testImageGridSize,\n"," imageHeight, imageWidth,\n"," ])\n","\n"," // Add padding.\n"," gridImage = gridImage.padded(forSizes: [(0, 0), (0, 0), (1, 1), (1, 1)], with: 1)\n","\n"," // Transpose to create single image.\n"," gridImage = gridImage.transposed(withPermutations: [0, 2, 1, 3])\n"," gridImage = gridImage.reshaped(\n"," to: [\n"," (imageHeight + 2) * testImageGridSize,\n"," (imageWidth + 2) * testImageGridSize,\n"," ])\n"," \n"," // Convert [-1, 1] range to [0, 1] range.\n"," gridImage = (gridImage + 1) / 2\n","\n"," try saveImage(\n"," gridImage, size: (gridImage.shape[0], gridImage.shape[1]), directory: outputFolder,\n"," name: name)\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"bccfwHeLaV18","colab_type":"text"},"source":["# Generator Model"]},{"cell_type":"markdown","metadata":{"id":"3M52cWDbaV19","colab_type":"text"},"source":["Our `Generator` is a `Struct` adhering to the [`Layer` Protocol](https://www.tensorflow.org/swift/api_docs/Protocols/Layer) (which is part of Swift For TensorFlow's API). The Generator has the following layers:\n","\n","* `dense1`, a `Dense` layer (a [densely-connected layer](https://www.tensorflow.org/swift/api_docs/Structs/Dense)) that takes an `inputSize` of `latentSize` (defined earlier), and an `outputSize` of `latentSize*2`. The `activation` function determines the output of each node in the layer. There are many available activations, but [ReLU](https://www.tensorflow.org/swift/api_docs/Functions#leakyrelu_:alpha:) is common for hidden layers.\n","\n","* `dense2` is likewise, but with an `inputSize` of `latentSize*2` (taking the output of the previous layer), and an `outputSize` of `latentSize*4`.\n","\n","* `dense3` is likewise, taking the previous output as input, and outputting it larger.\n","\n","* `dense4` is, again, the same, but has an `outputSize` of `imageSize` instead (our final desired image size). 
It uses [tanh](https://www.tensorflow.org/swift/api_docs/Functions#tanh_:) as its activation; tanh (hyperbolic tangent) is sigmoidal (s-shaped) and outputs values that range from -1 to 1.\n","\n","* three `BatchNorm` layers, `batchnorm1`, `batchnorm2`, `batchnorm3`, that normalise the activations of the previous layer at each batch by applying transformations that maintain the mean activation close to 0 and the activation standard deviation close to 1. `featureCount` is the number of features.\n"," \n","Finally, we have our `callAsFunction()` method, which sequences through the `Dense` layers, using the `BatchNorm` layers to normalise, before finally returning the output of the fourth and final `Dense` layer.\n","\n","\n","\n"," "]},{"cell_type":"code","metadata":{"id":"vrqazoRDaV1-","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"sHI61VGCaV2D","colab_type":"text"},"source":["## Discriminator Model"]},{"cell_type":"markdown","metadata":{"id":"vzRnmzTBaV2E","colab_type":"text"},"source":["Our `Discriminator` is a `Struct` adhering to the `Layer` Protocol. The `Discriminator` has the following layers:\n","\n","* `dense1`, a `Dense` layer, taking an `inputSize` of `imageSize`, outputting an `outputSize` of 256. It also uses ReLU for activation.\n","\n","* `dense2` and `dense3`, which take an `inputSize` and `outputSize` of 256 and 64, and 64 and 16, respectively, also using ReLU.\n","\n","* `dense4`, which takes an `inputSize` of 16, and has an `outputSize` of 1, and uses `identity` as the activation (just linear).\n","\n","Finally, we have our `callAsFunction()` method, which just sequences the input through the four (`Dense`) layers."]},{"cell_type":"code","metadata":{"id":"QYpSCYd1aV2F","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"VD1eoCIkaV2J","colab_type":"text"},"source":["## Loss functions"]},{"cell_type":"markdown","metadata":{"id":"j9J4gWIFaV2K","colab_type":"text"},"source":["### Discriminator Loss Function"]},{"cell_type":"markdown","metadata":{"id":"8cWfh4Y8aV2L","colab_type":"text"},"source":["Our `discriminatorLoss()` function takes both the real and fake [logits](https://datascience.stackexchange.com/a/31045), and returns the sum of the `realLoss` and `fakeLoss`, each computed via the `sigmoidCrossEntropy()` function. 
That's it!"]},{"cell_type":"code","metadata":{"id":"zwjx0LbSaV2M","colab_type":"code","colab":{}},"source":["@differentiable\n","func discriminatorLoss(realLogits: Tensor, fakeLogits: Tensor) -> Tensor {\n"," // code goes here\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"CHzdj9kbaV2Q","colab_type":"text"},"source":["### Generator Loss Function"]},{"cell_type":"markdown","metadata":{"id":"QFn5eBEwaV2R","colab_type":"text"},"source":["Our `generatorLoss()` function takes the fake logits, and calculates the `sigmoidCrossEntropy()`."]},{"cell_type":"code","metadata":{"id":"h9uAagmqaV2S","colab_type":"code","colab":{}},"source":["@differentiable\n","func generatorLoss(fakeLogits: Tensor) -> Tensor {\n"," // code goes here\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"4Hai3lkCaV2W","colab_type":"text"},"source":["### Random Samples"]},{"cell_type":"markdown","metadata":{"id":"wz9iK6u-aV2X","colab_type":"text"},"source":["Our `sampleVector()` function returns random stuff, that we use for both the Discriminator and Generator later on."]},{"cell_type":"code","metadata":{"id":"DnIT9U4qaV2Y","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"tpkVE3qZaV2a","colab_type":"text"},"source":["## Setting up to train"]},{"cell_type":"markdown","metadata":{"id":"QdDB8KtZaV2b","colab_type":"text"},"source":["### Getting a dataset"]},{"cell_type":"markdown","metadata":{"id":"Lkg4MrEbaV2c","colab_type":"text"},"source":["We're going to use the \"Hello, world!\" of machine learning, MNIST, as our dataset. This comes from some of the helper libraries we've provided for this session (which, in turn, are largely drawn from deep in the bowels of the TensorFlow project):"]},{"cell_type":"code","metadata":{"id":"DdzHFPKFaV2d","colab_type":"code","colab":{}},"source":["let dataset = MNIST(batchSize: batchSize, flattening: true, normalizing: true)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"7tzuMHzIaV2g","colab_type":"text"},"source":["### Creating a generator and a discriminator"]},{"cell_type":"code","metadata":{"id":"NX939dWAaV2h","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"nY07PtXvaV2l","colab_type":"text"},"source":["### Creating optimisers for the generator and the discriminator"]},{"cell_type":"markdown","metadata":{"id":"SyHqhL4daV2l","colab_type":"text"},"source":["We need an optimization algorithm for both the models. In each case, we'll use the [Adam](https://www.tensorflow.org/swift/api_docs/Classes/Adam) optimisation algorithm, with a learning rate of `2e-4`, and a beta 1 of `0.5`. 
Adam is a popular choice!"]},{"cell_type":"markdown","metadata":{"id":"aAloZrVscDxi","colab_type":"text"},"source":["#### Generator's optimizer"]},{"cell_type":"code","metadata":{"id":"kUEvej91aV2m","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"qRmW0oJJcHx5","colab_type":"text"},"source":["#### Discriminator's optimizer"]},{"cell_type":"code","metadata":{"id":"Xp54ZtpOcKIT","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"5VjtOMYlaV2t","colab_type":"text"},"source":["## Training and Inference"]},{"cell_type":"markdown","metadata":{"id":"FHy5C76Zc8nV","colab_type":"text"},"source":["First, we'll print out a message to say we're starting training:"]},{"cell_type":"code","metadata":{"id":"BM099DZAc-UE","colab_type":"code","outputId":"314d0bd9-e339-49cd-9ae9-11190864d0b7","executionInfo":{"status":"ok","timestamp":1572298965076,"user_tz":240,"elapsed":28924,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["print(\"GAN: Training Begins\")"],"execution_count":0,"outputs":[{"output_type":"stream","text":["GAN: Training Begins\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"8trm-rcJc_MR","colab_type":"text"},"source":["To train, we iterate through to our desired `epochCount`, runs training using both the Generator and the Discriminator, and then runs an inference to generate a grid of images and print out the current epoch, and the generator's loss:\n","\n","Specifically, in each epoch, we:\n","* set the [`Context`](https://www.tensorflow.org/swift/api_docs/Structs/Context) to `.training` so that, for example, `BatchNorm` layers (like we're using in our Generator) will compute mean and variance when applied to inputs\n","* iterate through the training data batch and:\n"," * create a random sample using the `sampleVector()` function we wrote earlier\n"," * for the generator's gradient (𝛁), use the random sample and the output of the discriminator using that random sample to calculate a loss using the `generatorLoss()` function we wrote earlier\n"," * update the generator model, along the generator gradient, using the generator's optimizer\n"," * get a batch of of real images from the training data, as well as another random sample using `sampleVector()`, and use the generator to generate some generated (aka fake) images using the random sample data\n"," * for the discriminator's gradient (𝛁), calculate and return the loss between the generator running on the real images and on the fake images\n"," * update the discriminator model, along the discriminator gradient, using the discriminator's optimizer\n","* after iterating through the training data batch, we set the [`Context`](https://www.tensorflow.org/swift/api_docs/Structs/Context) to `.inferece`\n","* then (after training for that epoch) we generate a test image, using the generator and random sample of the size our parameters dictate for the test image grid\n"," * and attempt to save that test image, using one of our convenience functions, `saveImageGrid()`\n","* we then check the loss on the generator for the test image, with our `generatorLoss()` function\n","* and print out the current epoch and generator 
loss"]},{"cell_type":"code","metadata":{"id":"ArTjDq3RaV2v","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"BvC2vj3-jxHs","colab_type":"text"},"source":["## Extra Credit"]},{"cell_type":"markdown","metadata":{"id":"RCIu9uc4jy7Z","colab_type":"text"},"source":["Our suggestions for what to do next:\n","\n","\n","1. use a Python library to visualise some of this in the notebook, either via graphs, or via displaying images inline in the notebook\n","2. modify the GAN to be able to generate one image of a digit at a time, upon request (e.g. make a function that lets you request a generated 5, or a generated 6)\n","3. modify the GAN to generate something other than MNIST digits \n","\n"]}]} -------------------------------------------------------------------------------- /Notebooks/6. Bigger Example/Complete - Linear Regression.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Complete - Linear Regression.ipynb","provenance":[],"collapsed_sections":[]},"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""}},"cells":[{"cell_type":"markdown","metadata":{"colab_type":"text","id":"bChC2l3EPmoi"},"source":["# Complete - Linear Regression\n","\n","Simple linear regression (Width, Height, Sex) with multi-variable and categories.\n","\n","Dataset with Height, Weight, Sex statistics from: \n","\n","https://raw.githubusercontent.com/Dataweekends/zero_to_deep_learning_video/master/data/weight-height.csv\n","\n","**Swift with SciKit Learn MinMax normalization**\n","\n","Use Python/Pandas to import the dataset Use SciKit Learn to normalize values with MinMax scaler\n","Based on https://github.com/JacopoMangiavacchi/Swift-TensorFlow-Sample-Notebooks"]},{"cell_type":"markdown","metadata":{"id":"Gbr0BXbblDqP","colab_type":"text"},"source":["## Imports"]},{"cell_type":"code","metadata":{"id":"1e0Bg-rxlE39","colab_type":"code","colab":{}},"source":["import Python\n","import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"r6xabtdolOgL","colab_type":"text"},"source":["## Setting up"]},{"cell_type":"code","metadata":{"id":"FXcEiwjqlSGY","colab_type":"code","colab":{}},"source":["let numpy = Python.import(\"numpy\")\n","let pandas = Python.import(\"pandas\")\n","let io = Python.import(\"io\")\n","let requests = Python.import(\"requests\")\n","let preprocessing = Python.import(\"sklearn.preprocessing\")"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"D7A9A826lIjs","colab_type":"text"},"source":["## Getting a dataset"]},{"cell_type":"markdown","metadata":{"id":"JkOqEdmLlamV","colab_type":"text"},"source":["We've got a helper function to get a Numpy normalised dataset. It uses the Python requests and pandas library to download and read the CSV file for the data, as well as SKLearn's Prepocessing library and numpy arrays. 
Lots of Python!"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"TgiX5mzQ5vTd","colab":{}},"source":["func getNumpyNormalizedDataset() -> (PythonObject, PythonObject) \n","{\n"," let url=\"https://raw.githubusercontent.com/Dataweekends/zero_to_deep_learning_video/master/data/weight-height.csv\"\n"," let s = requests.get(url).content\n"," let df = pandas.read_csv(io.StringIO(s.decode(\"utf-8\")))\n","\n"," let dummies = pandas.get_dummies(df[[\"Gender\"]])\n"," let transformed = pandas.concat([df[[\"Height\", \"Weight\"]], dummies], 1)\n"," print(transformed)\n","\n"," let X = transformed[[\"Height\",\"Gender_Female\",\"Gender_Male\"]].values\n"," let Y = transformed[[\"Weight\"]].values\n","\n"," let scaler = preprocessing.MinMaxScaler()\n"," let xNP = numpy.array(scaler.fit_transform(X))\n"," let yNP = numpy.array(scaler.fit_transform(Y)) \n"," \n"," return (xNP, yNP)\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"seUJ8reJmGbc","colab_type":"text"},"source":["## Creating the model"]},{"cell_type":"markdown","metadata":{"id":"35TtsfUOmNIF","colab_type":"text"},"source":["As usual, we need to create a `struct` to represent our model, adhering to the [`Layer` Protocol](https://www.tensorflow.org/swift/api_docs/Protocols/Layer).\n","\n","Since this is a bit of a contrived example, we actually only need layer (a [`Dense` layer](https://www.tensorflow.org/swift/api_docs/Structs/Dense)) that takes an `inputSize` and an `outputSize`, and is activated with [`identity`](https://www.tensorflow.org/swift/api_docs/Functions.html#identity_:). We use `identity` because we just want it to output a linear function of input.\n","\n","We create an initialiser, because we need to be able to take a variable number of variables. The default is 1. Inside the intitialiser, we define the layer.\n","\n","We'll also need to provide a definition of our `@differentiable` `func`, `callAsFunction()`. In this case, we want it to return the `input` passed through the single layer.\n","\n","\n"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"_e1hzXeb8J5d","colab":{}},"source":["struct LinearRegression: Layer \n","{\n"," var layer: Dense\n"," init(variables: Int = 1) \n"," {\n"," layer = Dense(inputSize: variables, outputSize: 1, activation: identity)\n"," }\n","\n"," @differentiable func callAsFunction(_ input: Tensor) -> Tensor\n"," {\n"," return layer(input)\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"EQlyIlsOnv74","colab_type":"text"},"source":["## Load our dataset "]},{"cell_type":"markdown","metadata":{"id":"G3ikd3xKnzJ5","colab_type":"text"},"source":["We need to get some x and y data, each in the form of a `PythonObject`, using the helper function we sefined `getNumpyNormalizedDataset()`.\n"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"JdGxV6K2VZ2X","colab":{}},"source":["let (xNP, yNP) = getNumpyNormalizedDataset()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"v9VaRPo5oHnQ","colab_type":"text"},"source":["We also need to create arrays for each:"]},{"cell_type":"code","metadata":{"id":"u0vsXtNzoH_1","colab_type":"code","colab":{}},"source":["let xArray = xNP.tolist().flatMap{ $0.map{ Float($0)! }}\n","let yArray = yNP.tolist().flatMap{ $0.map{ Float($0)! 
}}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"C4O-ErJBoLIn","colab_type":"text"},"source":["And then a native Swift for TensorFlow `Tensor`, for each of them:"]},{"cell_type":"code","metadata":{"id":"yJRdCEzkoLm7","colab_type":"code","colab":{}},"source":["let x = Tensor(shape: [10000, 3], scalars: xArray)\n","let y = Tensor(shape: [10000, 1], scalars: yArray)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"oSlMsTC1oZ_T","colab_type":"text"},"source":["## Creating an instance of our model"]},{"cell_type":"markdown","metadata":{"id":"x6xb5CzGocxX","colab_type":"text"},"source":["We want a 3 variable instance of our model:"]},{"cell_type":"code","metadata":{"id":"RLAbtnUPobyF","colab_type":"code","colab":{}},"source":["var model = LinearRegression(variables: 3)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"AtTNdGIWoSZZ","colab_type":"text"},"source":["## Creating an optimizer"]},{"cell_type":"markdown","metadata":{"id":"YibQSyouoTy9","colab_type":"text"},"source":["We'll need an optimizer. SGD will do fine here:\n","\n"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"OkEgM40GQGKG","colab":{}},"source":["let optimizer = SGD(for: model, learningRate: 0.03)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"d4YsZpaqoiad","colab_type":"text"},"source":["## Training the model"]},{"cell_type":"markdown","metadata":{"id":"OcYHb9u9ok5b","colab_type":"text"},"source":["First, we need a hyperparameter for epochs:"]},{"cell_type":"code","metadata":{"id":"asNWI2ZxonLz","colab_type":"code","colab":{}},"source":["let epochs = 2000"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"T0L0gkT8ooQE","colab_type":"text"},"source":["Then we need a training loop. \n","\n","For each epoch that we train, we:\n","\n","* calculate the cost and the gradient, and return the error using `meanSquaredError()` between the predicted and the expected\n","* update the model's optimizer along the gradient 𝛁\n","* occasionally print out the current epoch and cost"]},{"cell_type":"code","metadata":{"id":"zNk-AKTBoiCm","colab_type":"code","colab":{}},"source":["for epoch in 1...epochs {\n"," let (cost, 𝛁model) = model.valueWithGradient { m -> Tensor in\n"," let ŷ = m(x)\n"," return meanSquaredError(predicted: ŷ, expected: y)\n"," }\n"," optimizer.update(&model, along: 𝛁model)\n"," \n"," if epoch % 100 == 0 {\n"," print(\"Epoch: \\(epoch) Cost: \\(cost)\")\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"cVVHjSlspmYY","colab_type":"text"},"source":["## Testing the model"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"LOmonwRSVqui","outputId":"61317e3d-a26d-4612-ce5d-d1986ce7db64","executionInfo":{"status":"ok","timestamp":1572287088017,"user_tz":240,"elapsed":1878,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}},"colab":{"base_uri":"https://localhost:8080/","height":35}},"source":["print(model.inferring(from:[[0.7, 0, 1]])) //Height, Female, Male\n","// [[0.66004163]]"],"execution_count":0,"outputs":[{"output_type":"stream","text":["[[0.66004163]]\r\n"],"name":"stdout"}]}]} -------------------------------------------------------------------------------- /Notebooks/6. 
Bigger Example/Starter - Linear Regression.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Starter - Linear Regression.ipynb","provenance":[],"collapsed_sections":[]},"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""}},"cells":[{"cell_type":"markdown","metadata":{"colab_type":"text","id":"bChC2l3EPmoi"},"source":["# Starter - Linear Regression\n","\n","**⚠️ This is the starter version, for you to code along with live.**\n","\n","Simple linear regression (Width, Height, Sex) with multi-variable and categories.\n","\n","Dataset with Height, Weight, Sex statistics from: \n","\n","https://raw.githubusercontent.com/Dataweekends/zero_to_deep_learning_video/master/data/weight-height.csv\n","\n","**Swift with SciKit Learn MinMax normalization**\n","\n","Use Python/Pandas to import the dataset Use SciKit Learn to normalize values with MinMax scaler\n","Based on https://github.com/JacopoMangiavacchi/Swift-TensorFlow-Sample-Notebooks"]},{"cell_type":"markdown","metadata":{"id":"Gbr0BXbblDqP","colab_type":"text"},"source":["## Imports"]},{"cell_type":"code","metadata":{"id":"1e0Bg-rxlE39","colab_type":"code","colab":{}},"source":["import Python\n","import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"r6xabtdolOgL","colab_type":"text"},"source":["## Setting up"]},{"cell_type":"markdown","metadata":{"id":"12Jl9t5e8KsN","colab_type":"text"},"source":["We need to bring in numpy, pandas, io, requests, and sklearn.preprocessing from Python:"]},{"cell_type":"code","metadata":{"id":"FXcEiwjqlSGY","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"D7A9A826lIjs","colab_type":"text"},"source":["## Getting a dataset"]},{"cell_type":"markdown","metadata":{"id":"JkOqEdmLlamV","colab_type":"text"},"source":["We've got a helper function to get a Numpy normalised dataset. It uses the Python requests and pandas library to download and read the CSV file for the data, as well as SKLearn's Prepocessing library and numpy arrays. Lots of Python!"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"TgiX5mzQ5vTd","colab":{}},"source":["func getNumpyNormalizedDataset() -> (PythonObject, PythonObject) \n","{\n"," let url=\"https://raw.githubusercontent.com/Dataweekends/zero_to_deep_learning_video/master/data/weight-height.csv\"\n","\n"," // more code goes here\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"seUJ8reJmGbc","colab_type":"text"},"source":["## Creating the model"]},{"cell_type":"markdown","metadata":{"id":"35TtsfUOmNIF","colab_type":"text"},"source":["As usual, we need to create a `struct` to represent our model, adhering to the [`Layer` Protocol](https://www.tensorflow.org/swift/api_docs/Protocols/Layer).\n","\n","Since this is a bit of a contrived example, we actually only need layer (a [`Dense` layer](https://www.tensorflow.org/swift/api_docs/Structs/Dense)) that takes an `inputSize` and an `outputSize`, and is activated with [`identity`](https://www.tensorflow.org/swift/api_docs/Functions.html#identity_:). We use `identity` because we just want it to output a linear function of input.\n","\n","We create an initialiser, because we need to be able to take a variable number of variables. The default is 1. 
Inside the intitialiser, we define the layer.\n","\n","We'll also need to provide a definition of our `@differentiable` `func`, `callAsFunction()`. In this case, we want it to return the `input` passed through the single layer.\n","\n","\n"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"_e1hzXeb8J5d","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"EQlyIlsOnv74","colab_type":"text"},"source":["## Load our dataset "]},{"cell_type":"markdown","metadata":{"id":"G3ikd3xKnzJ5","colab_type":"text"},"source":["We need to get some x and y data, each in the form of a `PythonObject`, using the helper function we defined `getNumpyNormalizedDataset()`.\n"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"JdGxV6K2VZ2X","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"v9VaRPo5oHnQ","colab_type":"text"},"source":["We also need to create arrays for each:"]},{"cell_type":"code","metadata":{"id":"u0vsXtNzoH_1","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"C4O-ErJBoLIn","colab_type":"text"},"source":["And then a native Swift for TensorFlow `Tensor`, for each of them:"]},{"cell_type":"code","metadata":{"id":"yJRdCEzkoLm7","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"oSlMsTC1oZ_T","colab_type":"text"},"source":["## Creating an instance of our model"]},{"cell_type":"markdown","metadata":{"id":"x6xb5CzGocxX","colab_type":"text"},"source":["We want a 3 variable instance of our model:"]},{"cell_type":"code","metadata":{"id":"RLAbtnUPobyF","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"AtTNdGIWoSZZ","colab_type":"text"},"source":["## Creating an optimizer"]},{"cell_type":"markdown","metadata":{"id":"YibQSyouoTy9","colab_type":"text"},"source":["We'll need an optimizer. SGD will do fine here:\n","\n"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"OkEgM40GQGKG","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"d4YsZpaqoiad","colab_type":"text"},"source":["## Training the model"]},{"cell_type":"markdown","metadata":{"id":"OcYHb9u9ok5b","colab_type":"text"},"source":["First, we need a hyperparameter for epochs:"]},{"cell_type":"code","metadata":{"id":"asNWI2ZxonLz","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"T0L0gkT8ooQE","colab_type":"text"},"source":["Then we need a training loop. 
\n","\n","For each epoch that we train, we:\n","\n","* calculate the cost and the gradient, and return the error using `meanSquaredError()` between the predicted and the expected\n","* update the model's optimizer along the gradient 𝛁\n","* occasionally print out the current epoch and cost"]},{"cell_type":"code","metadata":{"id":"zNk-AKTBoiCm","colab_type":"code","colab":{}},"source":["// code goes here"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"cVVHjSlspmYY","colab_type":"text"},"source":["## Testing the model"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"LOmonwRSVqui","colab":{}},"source":["//print(model.inferring(from:[[0.7, 0, 1]])) //Height, Female, Male\n","// [[0.66004163]]"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /Notebooks/Extras/Extra 1 - Temperature.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Extra 1 - Temperature.ipynb","provenance":[],"collapsed_sections":[]},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""},"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"accelerator":"GPU"},"cells":[{"cell_type":"markdown","metadata":{"id":"KHwRZTejqLRd","colab_type":"text"},"source":["# Extra - Temperature Model"]},{"cell_type":"markdown","metadata":{"id":"3_dbeWADqT5I","colab_type":"text"},"source":["This extra is based on the [Swift for TensorFlow team's Celsius to Fahreinheit notebook](https://github.com/Ayush517/S4TF-Tutorials). It is updated for the latest.\n","\n","It looks at training a model to convert Celsius temperatures to Fahrenheit temperatures. 
This is a contrived example, but illustrates the facets of building a model quite well."]},{"cell_type":"markdown","metadata":{"id":"h9veBnCaqoqA","colab_type":"text"},"source":["## Imports"]},{"cell_type":"code","metadata":{"id":"KaGgOzPQbGj3","colab_type":"code","colab":{}},"source":["import TensorFlow\n","import Python"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"rgQAY7Isq4Lf","colab_type":"text"},"source":["## Setting up"]},{"cell_type":"markdown","metadata":{"id":"V_-_SNLKrFur","colab_type":"text"},"source":["Then we need to hook Python into the notebook environment:"]},{"cell_type":"code","metadata":{"id":"ph47ZvOArEsZ","colab_type":"code","colab":{}},"source":["%include \"EnableIPythonDisplay.swift\"\n","IPythonDisplay.shell.enable_matplotlib(\"inline\")"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"LfSxYZLMrM4O","colab_type":"text"},"source":["## Python Imports"]},{"cell_type":"markdown","metadata":{"id":"SGm_WkPZrOgX","colab_type":"text"},"source":["And import the Python libraries that we'd like to use:"]},{"cell_type":"code","metadata":{"id":"qroHF5kxrRk1","colab_type":"code","colab":{}},"source":["let plt = Python.import(\"matplotlib.pyplot\")\n","let np = Python.import(\"numpy\") // Make numpy available using np."],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"oyZC0RiJrYdZ","colab_type":"text"},"source":["## Creating training data"]},{"cell_type":"markdown","metadata":{"id":"aBXSv3QOraGu","colab_type":"text"},"source":["We need to create some celcius temperatures:\n"]},{"cell_type":"code","metadata":{"id":"dQkyrB3krgTY","colab_type":"code","colab":{}},"source":["let celsiusTemps: [Float] = [-40, -10, 0, 8, 15, 22, 38]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"jqVwWs-vrd0-","colab_type":"text"},"source":["And some fahrenheit temperatures:"]},{"cell_type":"code","metadata":{"id":"JQF3kKQIrkpy","colab_type":"code","colab":{}},"source":["let fahrenheitTemps: [Float] = [-40, 14, 32, 46, 59, 72, 100]"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"jpUyXCuVroOW","colab_type":"text"},"source":["And define them as inputs and outputs, respectively:"]},{"cell_type":"code","metadata":{"id":"QEwzuIE1rqr8","colab_type":"code","colab":{}},"source":["let input = Tensor(shape: [7, 1], scalars: celsiusTemps)\n","let output = Tensor(shape: [7, 1], scalars: fahrenheitTemps)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"G9uYLGxTrt6e","colab_type":"text"},"source":["## Displaying our data"]},{"cell_type":"markdown","metadata":{"id":"m0BNfeRtrw7m","colab_type":"text"},"source":["We can also loop through our data, to show the relationship:"]},{"cell_type":"code","metadata":{"id":"oExkFggVru03","colab_type":"code","colab":{}},"source":["for (index, celsiusTemp) in celsiusTemps.enumerated() {\n"," print(\"\\(celsiusTemp) degrees Celsius is equal to \\(fahrenheitTemps[index]) degrees Fahrenheit\")\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"tQT8MtU1sFdg","colab_type":"text"},"source":["## Creating the model"]},{"cell_type":"markdown","metadata":{"id":"tJCdvSI1sI0a","colab_type":"text"},"source":["Our model is very simple! 
It's a single `Dense` layer, and our `callAsFunction()` just returns the input, passed through that layer."]},{"cell_type":"code","metadata":{"id":"zQGY7qRBsGQg","colab_type":"code","colab":{}},"source":["struct CelsiusToFahrenheit: Layer {\n"," var layer = Dense(inputSize: 1, outputSize: 1)\n"," \n"," @differentiable\n"," func callAsFunction(_ input: Tensor) -> Tensor {\n"," return layer(input)\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"l_T1t76qssAy","colab_type":"text"},"source":["## Creating an instance of our model"]},{"cell_type":"code","metadata":{"id":"VNF9rry4biBh","colab_type":"code","colab":{}},"source":["var model = CelsiusToFahrenheit()"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ZF-cBrL0su79","colab_type":"text"},"source":["## Creating an optimizer"]},{"cell_type":"markdown","metadata":{"id":"XZ0JwdSns4XK","colab_type":"text"},"source":["We'll create an optimizer, using Adam, for our model:"]},{"cell_type":"code","metadata":{"id":"wcQ0ZisSvpQL","colab_type":"code","colab":{}},"source":["let optimizer = Adam(for: model, learningRate: 0.1)"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"8vOUCHzvs8qu","colab_type":"text"},"source":["## Defining some parameters"]},{"cell_type":"markdown","metadata":{"id":"Bc27YOd9s-I-","colab_type":"text"},"source":["We want our context to be `.training`, and we need an epoch count, as well as somewhere to store our losses."]},{"cell_type":"code","metadata":{"id":"-LDM-Xh5vqbz","colab_type":"code","colab":{}},"source":["Context.local.learningPhase = .training"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"01cGnbrRvqlM","colab_type":"code","colab":{}},"source":["let epochCount = 500\n","var losses: [Float] = []"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Adkqpn5ptFyK","colab_type":"text"},"source":["## Training the model"]},{"cell_type":"code","metadata":{"id":"C32YpsAevqtR","colab_type":"code","colab":{}},"source":["for epoch in 1...epochCount {\n"," var epochLoss: Float = 0\n"," let (loss, grad) = model.valueWithGradient { model -> Tensor in\n"," let pred = model(input)\n"," return meanSquaredError(predicted: pred, expected: output)\n"," }\n"," optimizer.update(&model.allDifferentiableVariables, along: grad)\n"," epochLoss = loss.scalarized()\n"," trainLossResults.append(epochLoss)\n"," if epoch.isMultiple(of: 20) {\n"," print(\"Epoch \\(epoch): Loss: \\(loss)\")\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"KuY2waFWtX89","colab_type":"text"},"source":["## Visualising with Python"]},{"cell_type":"code","metadata":{"id":"LNUOZb-qxptI","colab_type":"code","outputId":"17558474-66a7-410b-f388-4d9e9ecd7c50","colab":{"base_uri":"https://localhost:8080/","height":540}},"source":["plt.figure(figsize: [12, 
8])\n","\n","plt.plot(trainLossResults)\n","plt.xlabel(\"Epoch\")\n","plt.ylabel(\"Loss\")\n","\n","plt.show()"],"execution_count":0,"outputs":[{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAt0AAAHjCAYAAAD/r6OyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3XmUXGd95//Pt/auruq9W2q19sWL\n8CLZwjbGdoTBxhAHO5CAGRbDQExYAplJJiE5J0N+meF3yEwCgeAAdjBLMIsJBgRxDMbYGNvYlrzI\nkuVF+y61Wt3qfanlmT/qdqslS6It1a3bXff9OqdO3fvcW6Vvn3vc/ujR9z7XnHMCAAAA4J9I0AUA\nAAAA1Y7QDQAAAPiM0A0AAAD4jNANAAAA+IzQDQAAAPiM0A0AAAD4jNANAAAA+IzQDQAAAPiM0A0A\nAAD4LBZ0AX5oaWlxCxcuDLoMAAAAVLknn3yyyznX+tvOq8rQvXDhQq1bty7oMgAAAFDlzGznVM6j\nvQQAAADwGaEbAAAA8BmhGwAAAPAZoRsAAADwGaEbAAAA8BmhGwAAAPAZoRsAAADwGaEbAAAA8Bmh\nGwAAAPAZoRsAAADwGaEbAAAA8BmhGwAAAPAZoRsAAADwGaEbAAAA8BmhGwAAAPAZoRsAAADwGaG7\njAZH8xoYzQddBgAAAKYZQncZXfu5h/Q/f7wx6DIAAAAwzRC6yyidiGp4rBB0GQAAAJhmCN1llE5E\nNUToBgAAwHEI3WVUw0w3AAAAToDQXUbpRExDOW6kBAAAwLEI3WVUQ3sJAAAAToDQXUa1tJcAAADg\nBAjdZZROxJjpBgAAwMsQusuIGykBAABwIoTuMkrHoxorFJUrFIMuBQAAANMIobuMahJRSaLFBAAA\nAMcgdJdROhGTJFpMAAAAcAxCdxmlJ2a6WasbAAAARxG6y4j2EgAAAJwIobuMxme6h3OEbgAAABxF\n6C6j8Z5uZroBAAAwGaG7jCZmuunpBgAAwCSE7jJK09MNAACAEyB0lxE3UgIAAOBECN1ldLSnm/YS\nAAAAHEXoLqOaODPdAAAAeDlCdxlFI6ZkLMITKQEAAHAMQneZpRNRZroBAABwDEJ3maUTMUI3AAAA\njkHoLrOaRFTDOW6kBAAAwFGE7jKrpb0EAAAAxyF0l1kNoRsAAADHIXSXWToRY/USAAAAHIPQXWY1\niagGeTgOAAAAJiF0l1k6HmWmGwAAAMcgdJcZ63QDAADgeITuMquhpxsAAADHIXSXWToR1VihqHyh\nGHQpAAAAmCYI3WWWTkQlSUM5ZrsBAABQQugus3QiJkm0mAAAAGCCb6HbzOaZ2QNmtsnMnjOzT3jj\nf2tme83sGe/15kmf+Ssz22JmL5rZGyeNX+eNbTGzT/pVczlMzHQTugEAAOCJ+fjdeUl/5px7ysyy\nkp40s/u8Y59zzv3D5JPNbLmkmyS9StIcSb8ws7O8w7dKukbSHklrzWyNc26Tj7WftpqJ0M1a3QAA\nACjxLXQ75/ZL2u9t95vZ85I6TvGRGyR91zk3Kmm7mW2RdIl3bItzbpskmdl3vXOnZegen+mmvQQA\nAADjKtLTbWYLJa2U9Lg39DEze9bM7jCzRm+sQ9LuSR/b442dbHxaGg/dg4RuAAAAeHwP3WaWkfQD\nSX/qnOuT9CVJSyStUGkm/B/L9OfcYmbrzGzdoUOHyvGVp6UmPn4jJe0lAAAAKPE1dJtZXKXAfadz\n7m5Jcs4ddM4VnHNFSbfraAvJXknzJn18rjd2svFjOOduc86tcs6tam1tLf8PM0XcSAkAAIDj+bl6\niUn6qqTnnXOfnTTePum035e00dteI+kmM0ua2SJJyyQ9IWmtpGVmtsjMEirdbLnGr7rPFKEbAAAA\nx/Nz9ZLXSnqPpA1m9ow39teS3mlmKyQ5STskfUiSnHPPmdldKt0gmZf0UedcQZLM7GOSfiYpKukO\n59xzPtZ9Rmq4kRIAAADH8XP1kocl2QkO3XOKz3xa0qdPMH7PqT43nYw/HIeZbgAAAIzjiZRlFo2Y\nkrGIhnLcSAkAAIASQrcP0oko7SUAAACYQOj2QToRo70EAAAAEwjdPqhJRHkMPAAAACYQun2QTkSZ\n6QYAAMAEQrcPauKEbgAAABxF6PYBN1ICAABgMkK3D0o3UtLTDQAAgBJCtw9qmOkGAADAJIRuH9Qm\nohrKEboBAABQQuj2QQ3rdAMAAGASQrcP0omoxvJF5QvFoEsBAADANEDo9kE6EZUkWkwAAAAgidDt\ni3QiJkncTAkAAABJhG5f1CZLM90DoywbCAAAAEK3L2q9me5BQjcAAABE6PZFbXI8dNNeAgAAAEK3\nL8bbS5jpBgAAgETo9sXETDePggcAAIAI3b7I0F4CAACASQjdPhhfp5v2EgAAAEiEbl+Mr17CkoEA\nAACQCN2+iERM6URUQ/R0AwAAQIRu36QTMQ3Q0w0AAAARun2TSUbp6QYAAIAkQrdv0okY7SUAAACQ\nROj2TSYZ40ZKAAAASCJ0+6Y2GWWdbgAAAEgidPsmnYzxREoAAABIInT7JpOIcSMlAAAAJBG6fVOb\njGmI9hIAAACI0O2b2mRUg2N5OeeCLgUAAAABI3T7pDYZU9FJwzlmuwEAAMKO0O2T2kRUkljBBAAA\nAIRuv9QmY5LEzZQAAAAgdPtlPHTzgBwAAAAQun1SmyiF7qEx2ksAAADCjtDtk9rkeE83M90AAABh\nR+j2SYb2EgAAAHgI3T5JJ8fbSwjdAAAAYUfo9kkmMT7TTU83AABA2BG6fZKmpxsAAAAeQrdP4tGI\nErGIBmkvAQAACD1Ct48yyRgz3QAAACB0+ymdiPIYeAAAABC6/cRMNwAAACRCt69qkzF6ugEAAEDo\n9lM6EWXJQAAAABC6/ZRJxjREewkAAEDoEbp9VEtPNwAAAETo9lVtIqrBMdpLAAAAwo7Q7aPxmW7n\nXNClAAAAIECEbh/VJmPKF51G88WgSwEAAECACN0+qk1EJUlDtJgAAACEGqHbR7XJmCRxMyUAAEDI\nEbp9NB66BwjdAAAAoUbo9tF46B7iqZQAAAChRuj2USZZ6unmqZQAAADhRuj2UTpBTzcAAAAI3b7K\ncCMlAAAAROj2VdpbMpDQDQAAEG6Ebh9NLBnIOt0AAAChRuj2UTIWUSxiLBkIAAAQcr6FbjObZ2YP\nmNkmM3vOzD7hjTeZ2X1mttl7b/TGzcy+YGZbzOxZM7to0nfd7J2/2cxu9qvmcjMzZVMxDYwQugEA\nAMLMz5
nuvKQ/c84tl3SZpI+a2XJJn5R0v3NumaT7vX1JepOkZd7rFklfkkohXdKnJF0q6RJJnxoP\n6jNBJhVT/0gu6DIAAAAQIN9Ct3Nuv3PuKW+7X9Lzkjok3SDpG95p35B0o7d9g6RvupLHJDWYWbuk\nN0q6zznX7ZzrkXSfpOv8qrvcssm4+pnpBgAACLWK9HSb2UJJKyU9LmmWc26/d+iApFnedoek3ZM+\ntscbO9n48X/GLWa2zszWHTp0qKz1n4lsKkboBgAACDnfQ7eZZST9QNKfOuf6Jh9zzjlJrhx/jnPu\nNufcKufcqtbW1nJ8ZVlkUzH1cyMlAABAqPkaus0srlLgvtM5d7c3fNBrG5H33umN75U0b9LH53pj\nJxufEbKpOD3dAAAAIefn6iUm6auSnnfOfXbSoTWSxlcguVnSjyeNv9dbxeQySb1eG8rPJF1rZo3e\nDZTXemMzAu0lAAAAiPn43a+V9B5JG8zsGW/sryV9RtJdZvYBSTslvd07do+kN0vaImlI0vslyTnX\nbWb/S9Ja77y/c851+1h3WWVTMQ2M5uWcU+nvIQAAAAgb30K3c+5hSSdLma8/wflO0kdP8l13SLqj\nfNVVTiYZV6HoNJwrKJ3w8+84AAAAmK54IqXPsqlS0KbFBAAAILwI3T4jdAMAAIDQ7bO6VFySWMEE\nAAAgxAjdPssw0w0AABB6hG6f0V4CAAAAQrfPsl57ycAo7SUAAABhRej2WSbJTDcAAEDYEbp9Nh66\n+wjdAAAAoUXo9lk0YsokYxogdAMAAIQWobsCsqkYSwYCAACEGKG7AjLJGD3dAAAAIUboroBsKqaB\nUUI3AABAWBG6KyCbitNeAgAAEGKE7grIpGgvAQAACDNCdwXUpWIsGQgAABBihO4KyKbiPJESAAAg\nxAjdFZBNxjSSKypXKAZdCgAAAAJA6K6ATIpHwQMAAIQZobsCsqm4JPFUSgAAgJAidFdA1pvp7mPZ\nQAAAgFAidFdAlvYSAACAUCN0V0A2WWov4QE5AAAA4UToroDxmW4eBQ8AABBOhO4KoL0EAAAg3Ajd\nFXB0yUDaSwAAAMKI0F0ByVhUiVhE/bSXAAAAhBKhu0LqUjHaSwAAAEKK0F0h2VSc0A0AABBShO4K\nySRjGqCnGwAAIJQI3RWSpb0EAAAgtAjdFULoBgAACC9Cd4VkknGWDAQAAAgpQneFMNMNAAAQXoTu\nCqmriat/NK9C0QVdCgAAACqM0F0h9TVxSdIAs90AAAChQ+iukPHQ3TtMXzcAAEDYELorhNANAAAQ\nXoTuCqlLxSQRugEAAMKI0F0h9WlmugEAAMKK0F0htJcAAACEF6G7QsZDdx8PyAEAAAgdQneF1MSj\nikWMmW4AAIAQInRXiJmpviZO6AYAAAghQncFEboBAADCidBdQXU1cfURugEAAEKH0F1Bdcx0AwAA\nhBKhu4LqmekGAAAIJUJ3BdXXxJjpBgAACCFCdwXV18TVN5KXcy7oUgAAAFBBhO4KqkvFVSg6DYzm\ngy4FAAAAFUToriAeBQ8AABBOhO4KInQDAACEE6G7ghrSCUlS7xChGwAAIEwI3RXUkC7NdPcQugEA\nAEKF0F1Bjd5M95HhsYArAQAAQCURuitofKb7CDPdAAAAoULorqBUPKpUPKIjQ8x0AwAAhAmhu8Ia\nahLMdAMAAIQMobvCGtJxbqQEAAAIGUJ3hTWk4+rlRkoAAIBQIXRXWGM6wUw3AABAyBC6K6whTU83\nAABA2BC6K6whHdeRoTE554IuBQAAABXiW+g2szvMrNPMNk4a+1sz22tmz3ivN0869ldmtsXMXjSz\nN04av84b22Jmn/Sr3kppTMeVLzoNjhWCLgUAAAAV4udM99clXXeC8c8551Z4r3skycyWS7pJ0qu8\nz/yLmUXNLCrpVklvkrRc0ju9c2eshprSUyl7BrmZEgAAICx8C93OuYckdU/x9Bskfdc5N+qc2y5p\ni6RLvNcW59w259yYpO96585Y40+l7B2mrxsAACAsgujp/piZPeu1nzR6Yx2Sdk86Z483drLxlzGz\nW8xsnZmtO3TokB91l0VD2pvp5qmUAAAAoVHp0P0lSUskrZC0X9I/luuLnXO3OedWOedWtba2lutr\ny67Rm+lmBRMAAIDwiFXyD3POHRzfNrPbJf3U290rad6kU+d6YzrF+IzETDcAAED4VHSm28zaJ+3+\nvqTxlU3WSLrJzJJmtkjSMklPSForaZmZLTKzhEo3W66pZM3lNt7T3c2NlAAAAKHh20y3mX1H0mpJ\nLWa2R9KnJK02sxWSnKQdkj4kSc6558zsLkmbJOUlfdQ5V/C+52OSfiYpKukO59xzftVcCfFoRPU1\ncUI3AABAiPgWup1z7zzB8FdPcf6nJX36BOP3SLqnjKUFrqk2QegGAAAIEZ5IGQBCNwAAQLgQugPQ\nmCZ0AwAAhMmUQreZLTGzpLe92sw+bmYN/pZWvZqZ6QYAAAiVqc50/0BSwcyWSrpNpWX8vu1bVVWu\nsTahnqExOeeCLgUAAAAVMNXQXXTO5VVa5u+fnXP/Q1L7b/kMTqK5NqFcwal/NB90KQAAAKiAqYbu\nnJm9U9LNOvpAm7g/JVW/xtrSA3K6B2gxAQAACIOphu73S3qNpE8757Z7D7D5N//Kqm7N46Gbp1IC\nAACEwpTW6XbObZL0cUkys0ZJWefc3/tZWDVrYqYbAAAgVKa6esmDZlZnZk2SnpJ0u5l91t/SqlcT\nM90AAAChMtX2knrnXJ+kt0r6pnPuUklv8K+s6jYRulk2EAAAIBSmGrpjZtYu6e06eiMlTlM6EVUi\nFlEPoRsAACAUphq6/07SzyRtdc6tNbPFkjb7V1Z1MzM11ybURU83AABAKEz1RsrvS/r+pP1tkt7m\nV1Fh0JJJ6vDgaNBlAAAAoAKmeiPlXDP7oZl1eq8fmNlcv4urZi2ZhA71E7oBAADCYKrtJV+TtEbS\nHO/1E28Mp6klk1TXAKEbAAAgDKYauludc19zzuW919cltfpYV9VrySZ1eGBMxaILuhQAAAD4bKqh\n+7CZvdvMot7r3ZIO+1lYtWvJJJUvOvUO54IuBQAAAD6bauj+ryotF3hA0n5JfyDpfT7VFAotmdJa\n3bSYAAAAVL8phW7n3E7n3Fucc63OuTbn3I1i9ZIz0ppNSpIOEboBAACq3lRnuk/kv5etihBqzZRC\nN2t1AwAAVL8zCd1WtipCqGU8dLNsIAAAQNU7k9DNshtnoL4mrljE6OkGAAAIgVM+kdLM+nXicG2S\nanypKCQiEVMzD8gBAAAIhVOGbudctlKFhBEPyAEAAAiHM2kvwRlqzSa5kRIAACAECN0Bas0kaS8B\nAAAIAUJ3gGbXp3RoYFQFHgUPAABQ1QjdAWqrS6lQdDpMXzcAAEBVI3QHaJb3VMqDfYRuAACAakbo\nDtCsupQk6WDfSMCVAAAAwE+E7gCNh+4DhG4AAICqRugOUEsmoYh
JnYRuAACAqkboDlAsGlFLJklP\nNwAAQJUjdAdsVl1KB/uZ6QYAAKhmhO6AzapL6kAvoRsAAKCaEboD1laXUidPpQQAAKhqhO6Aza5L\nqXtwTKP5QtClAAAAwCeE7oDNrvfW6u5lthsAAKBaEboD1tFQI0nae2Q44EoAAADgF0J3wOYQugEA\nAKoeoTtg7V57yT5CNwAAQNUidAcsFY+qJZMkdAMAAFQxQvc00NGQor0EAACgihG6p4E5DTXMdAMA\nAFQxQvc00NFQo71HhuWcC7oUAAAA+IDQPQ3MaajRSK6onqFc0KUAAADAB4TuaWB82UBaTAAAAKoT\noXsamNtYCt27u4cCrgQAAAB+IHRPA/Ob05KknYRuAACAqkTongbqUnE11Sa08zChGwAAoBoRuqeJ\n+U1p7eoeDLoMAAAA+IDQPU0saE5rRxcz3QAAANWI0D1NLGhKa3/vsMbyxaBLAQAAQJkRuqeJBc21\nKjppTw+z3QAAANWG0D1NLGAFEwAAgKpF6J4mJpYN7OJmSgAAgGpD6J4mWjNJZZIxbSd0AwAAVB1C\n9zRhZlraltHmzoGgSwEAAECZEbqnEUI3AABAdSJ0TyPL2jI61D+q3qFc0KUAAACgjAjd08iyWRlJ\n0pZD/QFXAgAAgHIidE8jy9qykqTNB2kxAQAAqCaE7mmko6FGqXiEvm4AAIAq41voNrM7zKzTzDZO\nGmsys/vMbLP33uiNm5l9wcy2mNmzZnbRpM/c7J2/2cxu9qve6SASKa1g8tJB2ksAAACqiZ8z3V+X\ndN1xY5+UdL9zbpmk+719SXqTpGXe6xZJX5JKIV3SpyRdKukSSZ8aD+rV6lXt9dq4t1fOuaBLAQAA\nQJn4Frqdcw9J6j5u+AZJ3/C2vyHpxknj33Qlj0lqMLN2SW+UdJ9zrts51yPpPr08yFeV8zrq1DOU\n077ekaBLAQAAQJlUuqd7lnNuv7d9QNIsb7tD0u5J5+3xxk42/jJmdouZrTOzdYcOHSpv1RV0Xke9\nJGnDnt6AKwEAAEC5BHYjpSv1T5Sth8I5d5tzbpVzblVra2u5vrbizm2vUzRi2riX0A0AAFAtKh26\nD3ptI/LeO73xvZLmTTpvrjd2svGqlYpHtawto437CN0AAADVotKhe42k8RVIbpb040nj7/VWMblM\nUq/XhvIzSdeaWaN3A+W13lhVO6+jXhv2cDMlAABAtfBzycDvSPqNpLPNbI+ZfUDSZyRdY2abJb3B\n25ekeyRtk7RF0u2SPiJJzrluSf9L0lrv9XfeWFW7eEGjDg+OaVvXYNClAAAAoAxifn2xc+6dJzn0\n+hOc6yR99CTfc4ekO8pY2rR36aImSdLj27q1pDUTcDUAAAA4UzyRchpa1FKr1mxSj28/HHQpAAAA\nKANC9zRkZrp0UZMe39ZNXzcAAEAVIHRPU5cubtaBvhHtODwUdCkAAAA4Q4TuaWr1WaW1xn+x6WDA\nlQAAAOBMEbqnqXlNaZ3bXqefbzoQdCkAAAA4Q4Tuaeza5bO0bmePugZGgy4FAAAAZ4DQPY1d+6pZ\nck76z43MdgMAAMxkhO5pbHl7nc5tr9Odj+1kFRMAAIAZjNA9jZmZ3nPZAr1woF9P7eoJuhwAAACc\nJkL3NHfDijnKJmP68q+2BV0KAAAAThOhe5qrTcb0x6uX6L5NB/Xo1q6gywEAAMBpIHTPAB+4YpE6\nGmr0Nz/aqP6RXNDlAAAA4BUidM8AqXhU//cPL9DOw0P66Lef1uBoPuiSAAAA8ArEgi4AU3P5khZ9\n+vfP0yfv3qDf++LD+ujqpbp0cZOSsaj6RnI6MjSmzr5RdfaPaixfVG0ypoXNaa2c36iaRDTo8gEA\nAEKN0D2DvOPV8zW/qVZ//cMN+rPvr5/SZxLRiN543mx96KrFOq+j3ucKAQAAcCJWjes/r1q1yq1b\nty7oMnzjnNNTu47oxQP9KhSLqquJq64mrrZsUm3ZlFLxiPpH8nrpYL8efPGQ7n5qj/pG8nrrRR36\nn9cvV0M6EfSPAAAAUBXM7Enn3Krfeh6hu/r1jeT0pQe36vaHtqk1m9Tt713FrDcAAEAZTDV0cyNl\nCNSl4vrL687RDz/yWpmkP/zyb/SfG/YHXRYAAEBoELpD5Py59frRx16rc9qz+vCdT+nrj2wPuiQA\nAIBQIHSHTFs2pe/80WW6dvks/e1PNulff82TLgEAAPxG6A6hVDyqW991kd503mz97/94Xrc9tDXo\nkgAAAKoaoTuk4tGI/vmdK3X9Be36/+95QXc8TKsJAACAX1inO8Ri0Yg+944VKhSd/u6nmxSPRfSe\nyxYEXRYAAEDVYaY75OLRiD5/00q94dw2/c2PNup7a3cFXRIAAEDVIXRDiVhEt77rIv3OWa365N0b\ndPdTe4IuCQAAoKoQuiFJSsai+sp7LtblS5r1599fr5+s3xd0SQAAAFWD0I0JqXhUt793lVYtbNKf\nfu8ZHqADAABQJoRuHCOdiOmO971aK+Y16GPfeVp3rd0ddEkAAAAzHqEbL5NJxvTN/3qJLl/SrL/4\nwbP64i83yzkXdFkAAAAzFqEbJ1SbjOmrN79aN66Yo3/4+Uv6s++v10iuEHRZAAAAMxLrdOOkErGI\nPvv2FVrQXKvP379ZLx7o15fffbHmNaWDLg0AAGBGYaYbpxSJmP7bNWfpqzev0q7uIb3587/WXet2\n024CAADwChC6MSWvP3eW7vn4lVo+p05/8e/P6oPfWKfd3UNBlwUAADAjELoxZfOa0vrOH12mv7l+\nuR7Z2qXXf/ZX+oefvajB0XzQpQEAAExrhG68IpGI6QNXLNIDf75abz5vtr74wBa97h8e1F3rditf\nKAZdHgAAwLRE6MZpaa+v0T/dtFJ3f+RytTfU6C/+/Vld87mHdPdTe1Qo0u8NAAAwGaEbZ+Si+Y36\n0Ucu11fec7FS8aj++13rdc1nf6UfP7OX8A0AAOAhdOOMmZne+KrZ+o8/uUJffvdFSsQi+sR3n9Eb\n/+khrVm/j/ANAABCj9CNsolETNed1657Pn6lbv0vFyli0se/87Su+6eHdO/GAywzCAAAQovQjbKL\nREy/e0G77v3EVfrnd65UwTn98bee1I3/8qge3doVdHkAAAAVR+iGbyIR0+9dOEc//9Or9PdvO1+d\nfSP6L7c/rvd89XFt2NMbdHkAAAAVY9X4T/6rVq1y69atC7oMHGckV9C//Wanbn1wi44M5fS7F7Tr\nz689W4taaoMuDQAA4LSY2ZPOuVW/9TxCNyqtbySn2x/apq8+vF1j+aLeecl8ffz1y9SaTQZdGgAA\nwCtC6CZ0T3ud/SP6/C8267trdysVi+iPrlqsP7pysWqTsaBLAwAAmBJCN6F7xth6aED/994Xde9z\nB9SSSeoTb1imm149T/EotxwAAIDpbaqhm1SDwC1pzejL77lYP/jw5VrUktbf/Gij3vi5h3Tvxv0s\nMwgAAKoCoRvTxsULGn
XXh16j29+7SpGI6Y+/9ZTe+qVH9cT27qBLAwAAOCOEbkwrZqZrls/SvZ+4\nUp956/nad2RYb//Kb/TBb6zV5oP9QZcHAABwWujpxrQ2PFbQHY9s15ce3KqhsbxuXNmhP7l6GcsM\nAgCAaYEbKQndVaV7cEy3PrBF33psp3KFom5c0aGPXb1Ui1szQZcGAABCjNBN6K5Knf0juu1X2/St\nx3dqLF/UDV74XkL4BgAAASB0E7qr2qH+Ud320Fb922Ol8P17F87Rn1y9VEvbskGXBgAAQoTQTegO\nha6BUd3+0DZ98zc7NZIv6I3LZ+sjr1uiC+Y2BF0aAAAIAUI3oTtUDg+M6muP7NA3frND/SN5Xbms\nRR9evUSvWdwsMwu6PAAAUKUI3YTuUOofyenOx3fpX3+9XV0Do1o5v0EfWb1Urz+nTZEI4RsAAJQX\noZvQHWojuYK+v263vvLQNu3pGdbZs7L68Ooluv6CdsV4vDwAACgTQjehG5JyhaJ++uw+/csDW7W5\nc0Dzmmr0oauW6A8unqtUPBp0eQAAYIYjdBO6MUmx6PSL5w/q1ge3av3uI2rNJvXBKxbpXZctUCYZ\nC7o8AAAwQxG6Cd04AeecfrP1sP7lwa16eEuX6lIxvfuyBXrf5QvVVpcKujwAADDDELoJ3fgt1u8+\noi//aqt+9twBRSOmG1d06INXLtbZs1nrGwAATA2hm9CNKdp5eFB3PLxdd63bo+FcQb9zVqtuuWqx\nLl/CcoMAAODUCN2EbrxCPYNjuvPxnfr6ozvVNTCq5e11+qOrFun6C+YozoonAADgBKZ16DazHZL6\nJRUk5Z1zq8ysSdL3JC2UtEPS251zPVaaavy8pDdLGpL0PufcU6f6fkI3zsRIrqAfP7NXt/96u7Z0\nDqi9PqX3v3ahbrpkvupS8aDRtHd2AAAZlklEQVTLAwAA08hMCN2rnHNdk8b+j6Ru59xnzOyTkhqd\nc39pZm+W9Ccqhe5LJX3eOXfpqb6f0I1yKBadHnypU7c/tF2/2XZYmWRMf3DxXN18+UItaqkNujwA\nADANzMTQ/aKk1c65/WbWLulB59zZZvYVb/s7x593su8ndKPcNuzp1b8+vE33bNivXMHpdWe36n2v\nXaQrl7bwpEsAAEJsuofu7ZJ6JDlJX3HO3WZmR5xzDd5xk9TjnGsws59K+oxz7mHv2P2S/tI5t+64\n77xF0i2SNH/+/It37txZwZ8IYdHZN6I7H9+lOx/fpa6BUS1uqdXNly/U2y6ey3rfAACE0HQP3R3O\nub1m1ibpPpXaR9aMh27vnB7nXONUQ/dkzHTDb6P5gv5zwwF97ZHtWr+nV5lkTH+4aq5ufs1CLaT1\nBACA0Jhq6A5kas45t9d77zSzH0q6RNJBM2uf1F7S6Z2+V9K8SR+f640BgUnGorpxZYduXNmhp3f1\n6OuP7tC3Htuprz+6Q6vPovUEAAAcq+LroJlZrZllx7clXStpo6Q1km72TrtZ0o+97TWS3msll0nq\nPVU/N1BpK+c36vM3rdQjf3m1Pn71Mm3Y26eb73hCr/vHB/WVX21V9+BY0CUCAICAVby9xMwWS/qh\ntxuT9G3n3KfNrFnSXZLmS9qp0pKB3V5/9xclXafSkoHvP1VriUR7CYI1mi/o3o0H9K3Hdmrtjh4l\nohG9+fzZevdlC3TxgkYeuAMAQBWZ1j3dfiN0Y7p48UC/vv34Tt391F71j+Z1zuys3nXpfN24skNZ\n1vwGAGDGI3QTujGNDI7m9ZP1+/Stx3dq494+pRNR3bCiQ+++bL5eNac+6PIAAMBpInQTujENOef0\n7J5e3fn4Tq1Zv08juaJWzGvQTa+ep+svnMOygwAAzDCEbkI3prneoZzufnqPvv34Lm3uHFA6EdX1\nF7TrHa+ep4vm0/sNAMBMQOgmdGOGcM7p6d1H9L0nduunz+7T4FhBS1pr9Y5Xz9NbL5qrlkwy6BIB\nAMBJELoJ3ZiBBkfz+o9n9+t763bryZ09ikVMrz+3Te949TxdtaxVsWjFV/kEAACnQOgmdGOG29LZ\nr++t3a27n9qrw4Njml2X0tsu7tDbLpqrxa2ZoMsDAAAidBO6UTXG8kX98oWD+u7a3XropUMqOmnl\n/Aa99aK5+r0L2tWQTgRdIgAAoUXoJnSjCh3sG9GPn9mrHzy5Vy8e7FciGtHV57TprRd1aPXZbUrE\naD8BAKCSCN2EblQx55w27e/T3U/t1Y+f2auugTE1puN6y4Vz9LaL5+r8jnpWPwEAoAII3YRuhESu\nUNTDm7v0g6f26OebDmosX9TStox+f2WH3nLhHM1rSgddIgAAVYvQTehGCPUO5/SfG/br7qf26okd\n3ZJK/d9vuXCOfveCdrVlUwFXCABAdSF0E7oRcru7h/TTZ/drzfp9en5/nyImvWZJs95y4Rxd96p2\n1afjQZcIAMCMR+gmdAMTNh/s10/W79Oa9fu04/CQ4lHT75zVpt+7sF3XLJ+ldILHzwMAcDoI3YRu\n4GWcc9qwt1c/Wb9PP1m/Xwf6RlQTj+oNy2fpLRfO0ZXLWpSKR4MuEwCAGYPQTegGTqlYdFq7o1tr\n1u/TPRv2q2cop0wypqvPadObz5+t3zmrTTUJAjgAAKdC6CZ0A1OWKxT1yJYu3bvxgH6+6aC6B8dU\nE4/qdee06rrz2nX1OW3KJGlBAQDgeIRuQjdwWvKFop7Y3q3/3HhA9z53QIf6R5WIRXTVsla96bzZ\nesPyWaqv4SZMAAAkQjehGyiDQtHpqV09umfDft278YD2944oHjVdvqRlIoC3ZJJBlwkAQGAI3YRu\noKyKRaf1e47o3o0HdM/G/drdPSwz6eL5jXrD8lm6ZvksLWnNBF0mAAAVRegmdAO+cc7puX19+sXz\nB3XfpoN6bl+fJGlxS+1EAL9ofqOiER5FDwCoboRuQjdQMfuODE8E8Me2HVau4NRUm9Drzm7TNctn\n6cplLarlRkwAQBUidBO6gUD0j+T0q5cO6RebDuqXL3SqbySvRCyi1y5p1uvPnaXVZ7dqbmM66DIB\nACiLqYZupp4AlFU2Fdf1F8zR9RfMUa5Q1Nod3frFpk7d9/wBPfDiIUnSsraMVp/dqted3aZVC5uU\niEUCrhoAAH8x0w2gIpxz2npoUA++2KkHXzykJ7Z3a6xQVG0iqtcubdHqs9u0+uxWzWmoCbpUAACm\njJluANOKmWlpW0ZL2zL64JWLNTia16NbD0+E8J9vOihJOntWVqvPadXqs9q0amGj4lFmwQEAMx8z\n3QAC55zTls4BPeAF8LU7upUrOGWSMV22uEmvXdqiK5e1aElrRmasiAIAmD6Y6QYwY5iZls3Katms\nrG65aokGRvN6ZEuXHnrpkB7Z0qVfPN8pSZpdl5oI4JcvbVZbNhVw5QAATA0z3QCmvd3dQ3p4S5ce\n3tKlR7d0qWcoJ0k6Z3ZWVyxt0RXLWnTJoialE8wjAAAqiyUDCd1AVSoWnTbt79O
vN3fp4S2HtHZH\nj8byRSWiEa2c36DXLGnWaxY3a8X8BiVj0aDLBQBUOUI3oRsIhZFcQWt3dOvhzV16ZGuXntvXJ+ek\nZCyii+Y36rLFzbpscRMhHADgC0I3oRsIpd7hnNZu79Zj2w7rse2HjwnhFy8YD+HNunBePSEcAHDG\nCN2EbgCSeodyemKHF8K3Hdam/ceG8FcvbNKqhY1aOb9RGR5VDwB4hQjdhG4AJ9A7lNPj2w/rsW2l\nIP7CgT4VnRQxafmcOq1aUArhr17YpFl1rI4CADg1QjehG8AU9I/k9PSuI1q3s0frdnTr6V1HNJwr\nSJLmNdUcE8KXtmYUibBOOADgKNbpBoApyKbiuuqsVl11VqskKVco6vn9fVq7oxTCH97SpR8+vVeS\nVF8T18ULGnXxgkatmNegC+bWK5uKB1k+AGCGYKYbAE7BOadd3UNat6NH63Z2a+2OHm3pHJAkmUlL\nWzNaMa9BK+Y3aMW8Bp09K6sYj64HgNCgvYTQDcAnvUM5rd9zRM/sPvrqHhyTJNXEozp/br1WzmuY\nCOPt9TUBVwwA8AvtJQDgk/r0sS0pzjnt7h7W07t79PSuUgj/2iM7NFYoSpJm1SW1Yl6Dzu+o13kd\n9Tq/o17NmWSQPwIAoMII3QBwhsxM85vTmt+c1g0rOiRJo/mCnt/fr6d39eiZ3Ue0fvcR/ey5gxOf\nmVOfmgjg582t13lz6tWaJYgDQLUidAOAD5KxaKm9ZF7DxFjvcE6b9vVp495ebdjbq417e/XzTUeD\n+Oy6o0H8/Ll1Oq+jXm1Zli0EgGpA6AaACqmvies1S5r1miXNE2P9Izk9d1wQv/+Fgxq/3aYlk9S5\n7Vktb6/TOe1ZndtepyWtGcW5WRMAZhRCNwAEKJuKTzyaftzAaF6b9vVpw95ePb+/T8/v7zumRzwe\nNS1ty06E8XPb63TO7Cx94gAwjRG6AWCaySRjumRRky5Z1DQxlisUte3QoF440KdN+/v0/P5+Pby5\nS3c/tXfinLZsshTAvTC+rC2rxa21SsWjQfwYAIBJCN0AMAPEoxGdPTurs2dnJ27WlKSugVG9sL+/\nNCN+oBTGH93apVyh1J8SMWlBc62WtWW0bFZGZ83KamlbRktaM4RxAKggQjcAzGAtmaSuWJbUFcta\nJsbG8kVt6xrQ5oMD2nywX5s7B/TSwX7d/0KnCsVjw/jStozOmpXRsrasls0ijAOAXwjdAFBlErGI\nzpldp3Nm1x0zPpYvanvXoDZ39uulSYH8gRc6lZ8Uxuc1pbW4pVaLWjJa3Fqrxa21WtKaUVs2KTML\n4kcCgBmP0A0AIZGIHW1RmWwsX9SOw4N66WApjG89NKBthwb1m22HNZIrTpxXm4hqUWutFk+E8YwX\nzmtVm+R/JwBwKvyWBICQS8QiOmtWVmfNOjaMF4tOB/pGtO3QoLZ1DXjvg3pqV49+8uy+iWUNpdIa\n44tba7WguVYLmtNa0FR6WNCC5lplCOQAQOgGAJxYJGKa01CjOQ01x/SMS9JIrqAdhwe13Qvi47Pj\nP3vugLoHx445t7k2UQrgTWnNb67Vgqa0FnhP8GzN0LICIBwI3QCAVywVj56wb1yS+kZy2nV4SLu6\nh7Tz8JB2dQ9q5+Ehrd3RozXr96k4aYY8nYhqflNa870gPrcxrbmNNeporFFHQ42yqXgFfyoA8A+h\nGwBQVnWpuM7rqNd5HfUvOzaWL2pPz5B2dg9pZ9egdnYPadfhIW3rGtSDLx3SWL54zPn1NXF1NNRM\nBPG5jemJ/bmNNaqviTNTDmBGIHQDAComEYuUbsBszUhnH3usWHTqGhzV3p5h7ekZ1t4jw9rTM6S9\nPcPa3jWoh7d0aWiscMxnahPRUhD3ZsbbG1Jqr09pdl1N6b0+xRKIAKYFQjcAYFqIRExt2ZTasimt\nnN/4suPOOR0ZynmBfEh7vHA+HtDX7ehW30j+ZZ9rTMc1u/5oCJ9Tnzpmv70+pXSC/x0C8Be/ZQAA\nM4KZqbE2ocbahM6f+/LWFUkaHM3rQN+IDvSOaH/viA70Dnvvpf1ndh952Y2eklSXiqm9vkaz61Nq\nyybVmk2qLZtUW13q6HY2pZoEs+YATg+hGwBQNWqTMS1pLT1Z82RGcgUd7Bs5JoxPhPO+Eb14oF+H\nBkYnnt45WSYZmwjlrV4Qb6tLqjWTVFtdab81m1Rjml5zAMcidAMAQiUVj3rridee9Jxi0al7aEyH\n+kfV2T+qzr4RHRoYVWffqA71l14b9/aqs7/zZX3mkhSPmpprk2qqTag5k1BzbULNmeTR7dqkmjIJ\ntdSWxtKJKCEdqHKEbgAAjhOJmFoySbVkkjq3/dTnDo7mXxbMO/tHdXhgVIcHx3R4cEzbuwbVPTh2\nwoAuSclYRC2ZySH9aEBvrE2oMZ1QYzquhnRcDemEGmriikUjPvzkAPxC6AYA4AzUJmNalIxpUcvJ\nZ87HDY8VdHhwVIcHxia9j6l7cExdA97+wJheOtCvrsGxly2hOFk2GVNDbVyN6YTqa+ITwbzee29M\nJ1SfPjreUJNQNhVTJMKMOhAEQjcAABVSk4hqbqL0EKDfxjmnwbGCugfGdGR4TEeGcuoZKr0f3R7T\nkeGceoZy2tU9pCNDOfUO5076nRGT6mriyqZiqkvFS68ab7vmRPuxo+fXxJVJENqB00XoBgBgGjIz\nZZIxZZIxzddvD+njCkWn3uGcjgyNqWcop97hMfUMHg3s/SM59Y3k1TecU99ITju6htQ3klPfcE6D\nJ2l/OVpTaYZ9ckDPpuLKJmOqTcaUScUmaq5NTt6OKps6OlZLeEcIEboBAKgi0YipqTahptrEK/5s\nvlBU/0jeC+H5iTB+/P7kc3YdHtLAaF6DY3kNjOSVP8GqLydSm4geDeapUhA/PrSnE1GlE1HVjL/H\njx+LHd2OR+lzx7RG6AYAAJKkWDQysRb66XDOaTRf1OBoXoOjBfWP5jQ4WtDAaE4DowUNjpaC+cBo\n6TU4euz2np5hDUx8Jn/KnvYTSUQjRwO6956Ox14+loipJl7aT8UiSsajSsUjSsWiSsWjSsYjpfdY\n6T0VL503vh1llh6ngdANAADKwswmgmnzyZdKn7J8oajhXEHDYwUNea/hXF5DYwUNjh7dPub4WGls\naOJzeR0ZGtO+I+OfL42N5F5ZoJ8sFhn/OSNKxrzAfnxI90J80jsnGYsoEYsoEfXeYxHFve3kceOJ\naERx733ic8eNJ6IRWnRmmBkTus3sOkmflxSV9K/Ouc8EXBIAAPBRLBpRNhpRNhUv+3cXi6VZ+ZFc\nQSP5gkZype2JsVxpbDR//HZxYn/EOzZ63Gf7RnLHnDeaK2isUNRYoSg3te6bKYlH7dggPimcJ2MR\nxaIRxaOmeDSiWMQm9mORiGJRUzwSUTxW2o9HvePeeRPHoyf4nPd98VhE8YmxExz3vicaKR2LRsa3\nS+9Rs1D9xWFGhG4zi0q6VdI1kvZIWm
tma5xzm4KtDAAAzESRiKnGazmpFOec8kWnsXyx9Coc936y\nsUn7uUJRo8ef642P5YsanTSWLxaVyzsN5PPKF5xyhaLyRad8oahcwZWOj49P2q+kiGlSGI9MhPLI\npHA+EdIjpmgkcsz45PM+dNUSXbGspaL1vxIzInRLukTSFufcNkkys+9KukESoRsAAMwIZjYx81yb\nDLqaE3POqVD0/nIwHsYLReWOC+v5wtSOF73vKky8F1UoSoVi8ZjxY88rqnDMZ7z3gneuO/pd+ULp\nLzEF55Qrnn7LUCXMlNDdIWn3pP09ki6dfIKZ3SLpFkmaP39+5SoDAACoEmamWNQUi0qpeOX+FSAM\nqmZtHefcbc65Vc65Va2trUGXAwAAAEyYKaF7r6R5k/bnemMAAADAtDdTQvdaScvMbJGZJSTdJGlN\nwDUBAAAAUzIjerqdc3kz+5ikn6m0ZOAdzrnnAi4LAAAAmJIZEbolyTl3j6R7gq4DAAAAeKVmSnsJ\nAAAAMGMRugEAAACfEboBAAAAnxG6AQAAAJ8RugEAAACfEboBAAAAnxG6AQAAAJ8RugEAAACfEboB\nAAAAnxG6AQAAAJ8RugEAAACfEboBAAAAnxG6AQAAAJ+Zcy7oGsrOzA5J2hnQH98iqSugPxuVw3UO\nB65zOHCdw4HrHA5BXOcFzrnW33ZSVYbuIJnZOufcqqDrgL+4zuHAdQ4HrnM4cJ3DYTpfZ9pLAAAA\nAJ8RugEAAACfEbrL77agC0BFcJ3DgescDlzncOA6h8O0vc70dAMAAAA+Y6YbAAAA8BmhGwAAAPAZ\nobtMzOw6M3vRzLaY2SeDrgdnxszuMLNOM9s4aazJzO4zs83ee6M3bmb2Be/aP2tmFwVXOabKzOaZ\n2QNmtsnMnjOzT3jjXOcqYmYpM3vCzNZ71/n/88YXmdnj3vX8npklvPGkt7/FO74wyPrxyphZ1Mye\nNrOfevtc5ypjZjvMbIOZPWNm67yxGfF7m9BdBmYWlXSrpDdJWi7pnWa2PNiqcIa+Lum648Y+Kel+\n59wySfd7+1Lpui/zXrdI+lKFasSZyUv6M+fcckmXSfqo998t17m6jEq62jl3oaQVkq4zs8sk/b2k\nzznnlkrqkfQB7/wPSOrxxj/nnYeZ4xOSnp+0z3WuTq9zzq2YtB73jPi9Teguj0skbXHObXPOjUn6\nrqQbAq4JZ8A595Ck7uOGb5D0DW/7G5JunDT+TVfymKQGM2uvTKU4Xc65/c65p7ztfpX+R90hrnNV\n8a7XgLcb915O0tWS/t0bP/46j1//f5f0ejOzCpWLM2BmcyX9rqR/9fZNXOewmBG/twnd5dEhafek\n/T3eGKrLLOfcfm/7gKRZ3jbXf4bz/ml5paTHxXWuOl7LwTOSOiXdJ2mrpCPOubx3yuRrOXGdveO9\nkporWzFO0z9J+gtJRW+/WVznauQk/dzMnjSzW7yxGfF7OxbUHwzMZM45Z2ast1kFzCwj6QeS/tQ5\n1zd5sovrXB2ccwVJK8ysQdIPJZ0TcEkoMzO7XlKnc+5JM1sddD3w1RXOub1m1ibpPjN7YfLB6fx7\nm5nu8tgrad6k/bneGKrLwfF/lvLeO71xrv8MZWZxlQL3nc65u71hrnOVcs4dkfSApNeo9M/M4xNP\nk6/lxHX2jtdLOlzhUvHKvVbSW8xsh0otnldL+ry4zlXHObfXe+9U6S/Rl2iG/N4mdJfHWknLvLuk\nE5JukrQm4JpQfmsk3ext3yzpx5PG3+vdJX2ZpN5J/8yFacrr3/yqpOedc5+ddIjrXEXMrNWb4ZaZ\n1Ui6RqX+/Qck/YF32vHXefz6/4GkXzqeIjftOef+yjk31zm3UKX/B//SOfcucZ2ripnVmll2fFvS\ntZI2aob83uaJlGViZm9WqZ8sKukO59ynAy4JZ8DMviNptaQWSQclfUrSjyTdJWm+pJ2S3u6c6/bC\n2xdVWu1kSNL7nXPrgqgbU2dmV0j6taQNOtoD+tcq9XVznauEmV2g0o1VUZUmmu5yzv2dmS1WaUa0\nSdLTkt7tnBs1s5Skf1Opx79b0k3OuW3BVI/T4bWX/Llz7nquc3XxrucPvd2YpG875z5tZs2aAb+3\nCd0AAACAz2gvAQAAAHxG6AYAAAB8RugGAAAAfEboBgAAAHxG6AYAAAB8RugGgCpjZgUze2bS65Nl\n/O6FZraxXN8HAGHBY+ABoPoMO+dWBF0EAOAoZroBICTMbIeZ/R8z22BmT5jZUm98oZn90syeNbP7\nzWy+Nz7LzH5oZuu91+XeV0XN7HYze87Mfu496REAcAqEbgCoPjXHtZe8Y9KxXufc+So9pe2fvLF/\nlvQN59wFku6U9AVv/AuSfuWcu1DSRZKe88aXSbrVOfcqSUckvc3nnwcAZjyeSAkAVcbMBpxzmROM\n75B0tXNum5nFJR1wzjWbWZekdudczhvf75xrMbNDkuY650YnfcdCSfc555Z5+38pKe6c+9/+/2QA\nMHMx0w0A4eJOsv1KjE7aLoj7gwDgtyJ0A0C4vGPS+2+87Ucl3eRtv0vSr73t+yV9WJLMLGpm9ZUq\nEgCqDbMTAFB9aszsmUn79zrnxpcNbDSzZ1WarX6nN/Ynkr5mZv9D0iFJ7/fGPyHpNjP7gEoz2h+W\ntN/36gGgCtHTDQAh4fV0r3LOdQVdCwCEDe0lAAAAgM+Y6QYAAAB8xkw3AAAA4DNCNwAAAOAzQjcA\nAADgM0I3AAAA4DNCNwAAAOCz/weKhIJjMqfEbgAAAABJRU5ErkJggg==\n","text/plain":["
"]},"metadata":{"tags":[]}},{"output_type":"execute_result","data":{"text/plain":["None\n"]},"metadata":{"tags":[]},"execution_count":9}]},{"cell_type":"markdown","metadata":{"id":"yZi-CNWMtdbe","colab_type":"text"},"source":["## Predicting fahrenheit temperatures with the model"]},{"cell_type":"code","metadata":{"id":"Hg_hGeJaxqZX","colab_type":"code","outputId":"13a2e64c-0bf8-4268-f637-efcd908ba960","colab":{"base_uri":"https://localhost:8080/","height":204}},"source":["let exampleCelsiusData: [Float] = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]\n","let exampleCelciusDataPredictions = Tensor(shape: [10, 1], scalars: exampleCelsiusData)\n","\n","let exampleCelciusDataPredictions = model(exampleCelciusDataPredictions)\n","\n","print(\"Temperature predictions:\")\n","print(exampleCelciusDataPredictions)"],"execution_count":0,"outputs":[{"output_type":"stream","text":["prediction:\r\n","[[47.307415],\r\n"," [ 65.53156],\r\n"," [ 83.75571],\r\n"," [101.97985],\r\n"," [ 120.204],\r\n"," [138.42815],\r\n"," [ 156.6523],\r\n"," [174.87643],\r\n"," [193.10059],\r\n"," [211.32474]]\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"UU2hM02EymdW","colab_type":"text"},"source":["**The** correct answers are \n","\n","$10 \\times 1.8 + 32 = 50$\n","\n","$20 \\times 1.8 + 32 = 68$\n","\n","$30 \\times 1.8 + 32 = 86$\n","\n","$40 \\times 1.8 + 32 = 104$\n","\n","$50 \\times 1.8 + 32 = 122$\n","\n","$60 \\times 1.8 + 32 = 140$\n","\n","$70 \\times 1.8 + 32 = 158$\n","\n","$80 \\times 1.8 + 32 = 176$\n","\n","$90 \\times 1.8 + 32 = 194$\n","\n","$100 \\times 1.8 + 32 = 212$"]},{"cell_type":"markdown","metadata":{"id":"ndGNLPYjzflz","colab_type":"text"},"source":["## Looking at the layer weights\n","\n","Finally, let's print the internal variables of the Dense layer. "]},{"cell_type":"code","metadata":{"id":"e3CYb7sqxr9i","colab_type":"code","outputId":"033e6242-09fe-47b7-cd81-6158042f9afd","colab":{"base_uri":"https://localhost:8080/","height":51}},"source":["print(model.neuron.weight)\n","print(model.neuron.bias)"],"execution_count":0,"outputs":[{"output_type":"stream","text":["[[1.8224146]]\r\n","[29.08327]\r\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"YwPV0Orhzh8h","colab_type":"text"},"source":["\n","The first variable is close to ~1.8 and the second to ~32. These values (1.8 and 32) are the actual variables in the real conversion formula.\n","\n","This is really close to the values in the conversion formula. For a single neuron with a single input and a single output, the internal math looks the same as [the equation for a line](https://en.wikipedia.org/wiki/Linear_equation#Slope%E2%80%93intercept_form), $y = mx + b$, which has the same form as the conversion equation, $f = 1.8c + 32$.\n","\n","Since the form is the same, the variables should converge on the standard values of 1.8 and 32, which is exactly what happened.\n","\n","With additional neurons, additional inputs, and additional outputs, the formula becomes much more complex, but the idea is the same. \n","\n","## Extra Credit\n","\n","* make more Dense layers in the model, with different input sizes, e.g. layer 1 (input 1, output 4), layer 2 (input 4, output 4), layer 3 (input 4, output 1), and sequence the input through them\n"," * does adding more layers to the model make it more accurate as compared to single layered model? (you can print out the `.weight` and `.biast` of each layer of the model and look)\n"," * is the model more accurate? 
Do the variables (the weights) in the new layers come anywhere near the way the equation works to convert F to C? Or have they just obscured it more?\n","\n","\n"]}]} -------------------------------------------------------------------------------- /Notebooks/Extras/Extra 2 - Raw.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Extra 2 - Raw.ipynb","provenance":[],"collapsed_sections":[]},"language_info":{"file_extension":".swift","mimetype":"text/x-swift","name":"swift","version":""},"kernelspec":{"display_name":"Swift","language":"swift","name":"swift"},"accelerator":"GPU"},"cells":[{"cell_type":"markdown","metadata":{"id":"KHwRZTejqLRd","colab_type":"text"},"source":["# Extra - Raw TensorFlow Operators"]},{"cell_type":"markdown","metadata":{"id":"3_dbeWADqT5I","colab_type":"text"},"source":["This extra is based on the Swift for TensorFlow Raw TensorFlow operators notebook. It is updated."]},{"cell_type":"markdown","metadata":{"id":"h9veBnCaqoqA","colab_type":"text"},"source":["## Imports"]},{"cell_type":"code","metadata":{"id":"us4FINSpFkvf","colab_type":"code","colab":{}},"source":["import TensorFlow"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Q2krfYSmFnu8","colab_type":"text"},"source":["Use [`Raw`](https://www.tensorflow.org/swift/api_docs/Enums/Raw) to call raw operations:"]},{"cell_type":"code","metadata":{"id":"tZrsLetHFl12","colab_type":"code","colab":{}},"source":["Raw.mul(Tensor([2.0, 3.0]), Tensor([5.0, 6.0]))"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"-5-WMnHNGAuh","colab_type":"code","colab":{}},"source":["infix operator • : MultiplicationPrecedence"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"4G4YZd90HtML","colab_type":"code","colab":{}},"source":["extension Tensor where Scalar: Numeric {\n"," static func • (_ lhs: Tensor, _ rhs: Tensor) -> Tensor {\n"," return Raw.mul(lhs, rhs)\n"," }\n","}"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"YcwYpGkxHuQs","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":52},"outputId":"c1a96f3c-b401-4b22-d192-225190da087b","executionInfo":{"status":"ok","timestamp":1572312688648,"user_tz":240,"elapsed":1845,"user":{"displayName":"Paris B-A","photoUrl":"https://lh3.googleusercontent.com/a-/AAuE7mATPYgh7Oradnt-89Tt0-SyerXDc8Z985jmLS2U=s64","userId":"01326454337734597812"}}},"source":["let x: Tensor = [[1.0, 2.0], [3.0, 4.0]]\n","let y: Tensor = [[8.0, 7.0], [6.0, 5.0]]\n","x • y"],"execution_count":16,"outputs":[{"output_type":"execute_result","data":{"text/plain":["[[20.0, 17.0],\n"," [48.0, 41.0]]\n"]},"metadata":{"tags":[]},"execution_count":16}]}]} -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Swift for TensorFlow in 3 Hours 2 | 3 | ## Quick Start 4 | 5 | You won't need to go beyond this quickstart section if everything here works. 6 | 7 | ### To copy the tutorial notebooks into your Google Drive: 8 | 9 | 1. Open the shared Google Drive folder, containing the notebooks: 10 | * [https://lab.to/s4tf_tfworld_notebooks](https://lab.to/s4tf_tfworld_notebooks) 11 | 12 | 2. Add the shared `TFWorld2019` folder to _your_ Google Drive: 13 | * Click the dropdown next to the folders name, and then clicking "Add to My Drive", as shown in the image below. 14 | 15 |

16 | 17 | 3. Once the `TFWorld2019` folder is in _your_ Google Drive: 18 | * Open whichever notebook(s) you want to work with by double clicking them, and then clicking "Google Colaboratory", as shown in the image below. 19 | 20 |

21 | 22 | * If you see the notebook as raw text, click the "Open with" dropdown at the top, and click "Google Colaboratory", or if you don't see Colab as an option, click "Connect more apps": 23 | 24 |

25 | 26 | * Then add "Google Colaboratory": 27 | 28 |

29 | 30 | * Then open it with Colab: 31 | 32 |

33 | 34 | 35 | 4. Once your notebook is open, run it in Playground mode to allow cells to compile and run, as shown in the image below. 36 | 37 |

38 | 39 | 40 | ### To create a blank Swift notebook in Google Colab 41 | 42 | Click on this link: [lab.to/s4tf_blank](http://lab.to/s4tf_blank) 43 | 44 | _You'll need to save and name the notebook once you've created it!_ 45 | 46 | ## Other useful resources 47 | 48 | Why Swift for TensorFlow? 49 | 50 | * https://github.com/tensorflow/swift/blob/master/docs/WhySwiftForTensorFlow.md 51 | 52 | Swift tour and book: 53 | 54 | * Tour: https://docs.swift.org/swift-book/GuidedTour/GuidedTour.html 55 | * Book: https://docs.swift.org/swift-book/ 56 | 57 | Tutorials from the Swift for TensorFlow team: 58 | 59 | * Basic Swift: https://www.tensorflow.org/swift/tutorials/a_swift_tour 60 | * Protocol-oriented Programming and Generics: https://www.tensorflow.org/swift/tutorials/protocol_oriented_generics 61 | * Python interopability: https://www.tensorflow.org/swift/tutorials/python_interoperability 62 | * Model training: https://www.tensorflow.org/swift/tutorials/model_training_walkthrough 63 | * Custom differentiation: https://www.tensorflow.org/swift/tutorials/custom_differentiation 64 | * Raw TensorFlow operators: https://www.tensorflow.org/swift/tutorials/raw_tensorflow_operators 65 | 66 | The Swift Models repository, from the TensorFlow team: 67 | 68 | * https://github.com/tensorflow/swift-models 69 | 70 | The Swift for TensorFlow Discussion Group: 71 | 72 | * https://groups.google.com/a/tensorflow.org/forum/#!forum/swift 73 | 74 | The Swift APIs for TensorFlow: 75 | 76 | * https://github.com/tensorflow/swift-apis 77 | 78 | The Differentiable Programming Proposal: 79 | 80 | * https://docs.google.com/document/d/1bPepWLfRQa6CtXqKA8CDQ87uZHixNav-TFjLSisuKag/edit 81 | * https://forums.swift.org/t/differentiable-programming-mega-proposal/28547 82 | * https://github.com/dan-zheng/swift/blob/differentiable-programming/docs/DifferentiableProgramming.md 83 | 84 | SwiftAI's (FastAI) high-level Swift ML API (very early): 85 | 86 | * https://github.com/fastai/swiftai 87 | 88 | Why FastAI is embracing Swift for Deep Learning: 89 | 90 | * https://www.fast.ai/2019/03/06/fastai-swift/ 91 | 92 | FastAI's lessons on Swift for Deep Learning: 93 | 94 | * Basics of Swift for Deep Learning: https://course.fast.ai/videos/?lesson=13 95 | * Slides from this lesson: https://docs.google.com/presentation/d/1dc6o2o-uYGnJeCeyvgsgyk05dBMneArxdICW5vF75oU/edit#slide=id.p 96 | * C interop; Protocols; Putting it all together: https://course.fast.ai/videos/?lesson=14 97 | * A participant's notes on this lesson: https://medium.com/@lankinen/fast-ai-lesson-14-notes-part-2-v3-be4667394295 98 | 99 | The FastAI forum: 100 | 101 | * https://forums.fast.ai/c/swiftai 102 | 103 | Skip the Foreign Function Interface (FFI), a talk from the LLVM/Apple team: 104 | 105 | * Slides: http://llvm.org/devmtg/2014-10/Slides/Skip%20the%20FFI.pdf 106 | * Video: http://llvm.org/devmtg/2014-10/Videos/Skip%20The%20FFI!%20Embedding%20Clang%20for%20C-720.mov 107 | 108 | Value SEMANTICS (not value types!) 
Talk: 109 | 110 | * https://academy.realm.io/posts/swift-gallagher-value-semantics/ 111 | 112 | Facebook AI's paper on Tensor Comprehensions: 113 | 114 | * https://arxiv.org/pdf/1802.04730.pdf 115 | -------------------------------------------------------------------------------- /agenda.md: -------------------------------------------------------------------------------- 1 | # Agenda 2 | # Swift for TensorFlow in 3 Hours 3 | 4 | THIS DOCUMENT IS FOR PRESENTERS NOT ATTENDEES 5 | 6 | 1:30 PM to 3:00 PM (90 minutes) 7 | 3:00 PM to 3:30 PM (break) 8 | 3:30 PM to 5:00 PM (90 minutes) 9 | 10 | ## Activities 11 | 12 | 1. ACTIVITY 1: Make Sure Everything is Working (just a check to make sure it's working) (Paris) 13 | 2. ACTIVITY 2: SWIFT TBD (Mars) 14 | 3. ACTIVITY 3: SWIFT TBD (Mars) 15 | 4. ACTIVITY 4: Making a Basic XOR Model (Tim leads, Paris and Mars support) 16 | 5. ACTIVITY 5: Swift, meet Python (Tim leads, Paris and Mars support) 17 | 6. ACTIVITY 6: Training a bigger model (GAN) (Paris leads, Tim and Mars support) 18 | 7. ACTIVITY 7: Bringing it all together (Linear Regressor) (Paris leads, Tim and Mars support) 19 | 20 | ### Extras 21 | 22 | Just in case we need more material: 23 | 24 | 1. Fahrenheit to Celsius 25 | 2. Raw TensorFlow (very simple) 26 | 27 | # Timeline 28 | 29 | ** Paris starts** 30 | 31 | * Welcome/introductions/setup (PARIS) 32 | - who are we 33 | - what's Swift 34 | - you should have done all this setup < 35 | - here's what we're gonna do today 36 | - here's our focus/approach to teaching this 37 | 38 | * Introduction to the tools (5 minutes) (PARIS) 39 | - we're gonna use the Swift for TensorFlow toolchain, not Apple's Swift toolchain 40 | - quick overview of Jupyter 41 | - quick overview of Google Colab 42 | - ACTIVITY 1: Make sure everything is working 43 | - Ask everyone to open Colab 44 | 45 | ** switch to Mars ** 46 | 47 | * Swift Programming (~60 minutes) (MARS) 48 | 49 | * Protocols and Generics (TIM) (10-15 minutes) 50 | 51 | * Recap/summary of Swift Programming (TIM AND PARIS) 52 | 53 | * Swift for TensorFlow Basics (XOR) (TIM) 54 | - creating a simple model for XOR 55 | - giving it some data 56 | - training it 57 | - using it 58 | - ACTIVITY 4: Super basic Swift for TensorFlow Activity (XOR) (TIM) 59 | - Participants should open the "STARTER" version of Activity 4 and code in the cells that say "CODE GOES HERE". TIM to code in the "CODE GOES HERE" fields as well (in his own version of STARTER). 60 | - If Participants just want to follow along, or copy paste, they should also open the FINAL version of ACTIVITY 4. 61 | 62 | * Swift for TensorFlow + Python (10 minutes) 63 | - Python interop is powerful 64 | - what it can do 65 | - why bother? 66 | * ACTIVITY 5: BOLTING A TINY BIT OF PYTHON ONTO THE XOR (TIM) (15 minutes) 67 | - Participants should open the "STARTER" version of Activity 5 and code in the cells that say "CODE GOES HERE". TIM to code in the "CODE GOES HERE" fields as well (in his own version of STARTER). 68 | - If Participants just want to follow along, or copy paste, they should also open the FINAL version of ACTIVITY 5. 69 | - Participants and/or Tim could also just continue coding in the Activity 4 notebook. 70 | 71 | * ACTIVITY 6: Training a bigger model (GAN) (Paris leads, Tim and Mars support) 72 | - Participants should open the "STARTER" version of Activity 6 and code in the cells that say "CODE GOES HERE". TIM to code in the "CODE GOES HERE" fields as well (in his own version of STARTER). 
73 | - If Participants just want to follow along, or copy paste, they should also open the FINAL version of ACTIVITY 6. 74 | 75 | * ACTIVITY 7: Bringing it all together (Linear Regressor) (Paris leads, Tim and Mars support) 76 | - Participants should open the "STARTER" version of Activity 7 and code in the cells that say "CODE GOES HERE". TIM to code in the "CODE GOES HERE" fields as well (in his own version of STARTER). 77 | - If Participants just want to follow along, or copy paste, they should also open the FINAL version of ACTIVITY 7. -------------------------------------------------------------------------------- /presentation.md: -------------------------------------------------------------------------------- 1 | footer: @parisba / @the_mcjones / @themartianlife / #TFWorld 2 | theme: Zurich,6 3 | slidenumbers: true 4 | 5 | # Swift for Tensorflow
(in three hours) 6 | 7 | --- 8 | 9 | #[fit] 👋 Hello! 10 | 11 | ^ Hello! 12 | 13 | --- 14 | 15 | ![60%](presentation_images/team.png) 16 | 17 | --- 18 | 19 | ![50%](presentation_images/tasmania.png) 20 | 21 | --- 22 | 23 | ![100%](presentation_images/books.png) 24 | 25 | --- 26 | 27 | ![fit](presentation_images/booksold.png) 28 | 29 | 30 | --- 31 | 32 | [.build-lists: true] 33 | 34 | # **Installation** 35 | 36 | - `git clone https://github.com/google/swift-jupyter.git` 37 | - `docker build -f docker/Dockerfile -t swift-jupyter .` 38 | - `docker run -p 8888:8888 --cap-add SYS_PTRACE -v /my/host/notebooks:/notebooks swift-jupyter` 39 | 40 | ... so much Docker. 41 | 42 | --- 43 | 44 | ![inline](presentation_images/docker.jpg) 45 | 46 | ^... or use Google Colaboratory! 47 | 48 | --- 49 | 50 | # Google Colaboratory 51 | 52 | - Colab is a free, experimental data-science platform from Google 53 | - It's basically a customised version of Jupyter Notebooks 54 | - It's primarily Python 55 | - but the Swift for TensorFlow team appears to have bolted Swift into it as well! 56 | - it's a bit experimental, though 57 | 58 | ^ Originally, we weren't planning on using Colab for this session, but after spending a few weeks building the content we realised that, unless you have a truly powerful machine running Jupiter and the Swift Kernel locally, it really takes longer than is useful in a tutorial-setting to run any of the larger machine learning models. 59 | 60 | --- 61 | 62 | # [fit] Swift for TensorFlow isn't quite ready. 63 | 64 | --- 65 | 66 | # [fit] Swift for TensorFlow isn't quite ready. 67 | ## It's _almost_ ready. 68 | 69 | --- 70 | 71 | # [fit] Swift for TensorFlow isn't quite ready. 72 | ## It's _almost_ ready. 73 | ## _**almost**_ 74 | 75 | --- 76 | 77 | # [fit] Swift for TensorFlow isn't quite ready. 78 | ## It's _almost_ ready. 79 | ## _**almost**_ 80 | ## ...Swift is **very** ready though! 81 | 82 | --- 83 | 84 | # Today's Plan | **Activities** 85 | 86 | * Activity 1: Making sure everything is working 87 | * Activity 2: Getting familiar with Swift 88 | * Activity 3: Slightly more advanced Swift 89 | * Activity 4: Meet TensorFlow — Training a Model 90 | * Activity 5: Swift and Python 91 | * Activity 6: Building a GAN 92 | * Activity 7: A little more Python 93 | 94 | ^ Throughout this plan, we're gonna be doing some activities, because, well, this is a tutorial! Our activities will be.... [advances one by one]. We may, or may not, get all the way through this. 95 | 96 | --- 97 | 98 | # See the Googlers... 99 | 100 | - 11:50 AM tomorrow 101 | - Great American Ballroom J/K 102 | 103 | ![right 20%](presentation_images/Tensorflow_logo.png) 104 | 105 | --- 106 | 107 | # Why? 108 | 109 | - performance! 110 | - safety 111 | - ease of reading 112 | - fun 113 | - looks kind of like Python most of the time 114 | - performance 115 | - performance 116 | 117 | --- 118 | 119 | # Some Python... 120 | 121 | ```python 122 | def collatz(n): 123 | count = 0 124 | 125 | while n > 1: 126 | if n % 2 == 0: 127 | n /= 2 128 | else: 129 | n = 3 * n + 1 130 | 131 | count += 1 132 | return count 133 | 134 | print(f"it took {collatz(9)} steps to resolve") 135 | ``` 136 | 137 | --- 138 | 139 | # Some Swift... 
140 | 141 | ```swift 142 | func collatz(number: Int) -> Int { 143 | var count = 0 144 | var n = number 145 | 146 | while n > 1 { 147 | if n % 2 == 0 { 148 | n /= 2 149 | } else { 150 | n = 3 * n + 1 151 | } 152 | count += 1 153 | } 154 | return count 155 | } 156 | print("it took \(collatz(number: 9)) steps to resolve") 157 | ``` 158 | 159 | --- 160 | 161 | |Python|Swift| 162 | |:---:|:---:| 163 | |35.27 seconds|0.88 seconds| 164 | 165 | ^ py had a stddev of 2.6s 166 | swift had a stddev of 0.13s 167 | so a ~40x performance 168 | 169 | --- 170 | 171 | |Python|Swift|C| 172 | |:---:|:---:|:---:| 173 | |35.27 seconds|0.88 seconds|0.0044 seconds| 174 | 175 | ^ I was curious so I rewrote it in C 176 | knowing it would be faster 177 | and was then confused why C was THAT much faster 178 | 179 | --- 180 | 181 | ^ so instead of reinterpreting Swift each time I compiled it once and ran that 182 | so it is an apples to apples for Swift and C but not for poor Python 183 | Swift is winning here but I only did 100 tests no where NEAR enough and we are talking about 4 nanoseconds here 184 | worth saying this isn't a slight at Python, it was designed for writeability first 185 | 186 | |Python|Swift|C|Swift compiled| 187 | |:---:|:---:|:---:|:---:| 188 | |35.27 seconds|0.88 seconds|0.0044 seconds|0.0040 seconds| 189 | 190 | 191 | --- 192 | 193 | # **Swift (for programming)** 194 | 195 | ^ We're going to be teaching Swift, as a programming language, for the first bit of this tutorial. We're kind of just ignoring machine learning for this. It's important to learn Swift as a programming language before we move to Swift for TensorFlow. 196 | 197 | --- 198 | 199 | # Let's go! 200 | ## Setup 201 | 202 | - Go to this URL: 203 | 204 | https://lab.to/s4tf_tfworld2019 205 | 206 | 207 | 237 | 238 | 250 | -------------------------------------------------------------------------------- /presentation_images/Swift_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/Swift_logo.png -------------------------------------------------------------------------------- /presentation_images/Tensorflow_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/Tensorflow_logo.png -------------------------------------------------------------------------------- /presentation_images/add-to-drive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/add-to-drive.png -------------------------------------------------------------------------------- /presentation_images/book.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/book.jpg -------------------------------------------------------------------------------- /presentation_images/book3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/book3.png 
-------------------------------------------------------------------------------- /presentation_images/books.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/books.png -------------------------------------------------------------------------------- /presentation_images/booksold.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/booksold.png -------------------------------------------------------------------------------- /presentation_images/connect-colab.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/connect-colab.jpeg -------------------------------------------------------------------------------- /presentation_images/docker.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/docker.jpg -------------------------------------------------------------------------------- /presentation_images/hobart.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/hobart.jpg -------------------------------------------------------------------------------- /presentation_images/open-in-colab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/open-in-colab.png -------------------------------------------------------------------------------- /presentation_images/open-in-playground.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/open-in-playground.png -------------------------------------------------------------------------------- /presentation_images/open-with-installed-colab.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/open-with-installed-colab.jpeg -------------------------------------------------------------------------------- /presentation_images/open-with.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/open-with.jpeg -------------------------------------------------------------------------------- /presentation_images/tasmania.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/tasmania.png 
-------------------------------------------------------------------------------- /presentation_images/team.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AIwithSwift/TFWorld2019-SwiftIn3Hours/fa489951c26f6fb301c425e0ad9a26ccdfd1b830/presentation_images/team.png --------------------------------------------------------------------------------