├── .gitignore ├── GreatStuffWithThePencil.playground ├── Pages │ ├── Detect your Handwriting.xcplaygroundpage │ │ └── Contents.swift │ ├── Get Started.xcplaygroundpage │ │ └── Contents.swift │ └── Smooth Doodeling.xcplaygroundpage │ │ └── Contents.swift ├── Resources │ └── PencilTexture.png ├── Sources │ ├── CognitiveServices.swift │ ├── EmotionHelpers.swift │ ├── Ext.swift │ └── MyDoodleCanvas.swift └── contents.xcplayground ├── Kick start Cognitive Services.playground ├── Pages │ ├── ComputerVision.xcplaygroundpage │ │ ├── Contents.swift │ │ └── Resources │ │ │ └── justtext.txt │ ├── CoreMLVision.xcplaygroundpage │ │ ├── Contents.swift │ │ ├── Resources │ │ │ └── catmoodprediction.mlmodelc │ │ │ │ ├── coremldata.bin │ │ │ │ ├── model.espresso.net │ │ │ │ ├── model.espresso.shape │ │ │ │ ├── model.espresso.weights │ │ │ │ └── model │ │ │ │ └── coremldata.bin │ │ └── Sources │ │ │ ├── CatVisionLogic.swift │ │ │ ├── UIImage+CVPixelBuffer.swift │ │ │ └── catmoodprediction.swift │ ├── CustomVision.xcplaygroundpage │ │ └── Contents.swift │ ├── Emotions.xcplaygroundpage │ │ └── Contents.swift │ ├── Faces.xcplaygroundpage │ │ └── Contents.swift │ └── First steps.xcplaygroundpage │ │ ├── Contents.swift │ │ └── Resources │ │ ├── justtext.txt │ │ └── keepcalm.png ├── Resources │ ├── Aaron.jpg │ ├── Giugli.png │ ├── Jan.png │ ├── Les.jpg │ ├── Nazuki.png │ ├── Owen_Family.jpg │ ├── Tiffany.jpg │ ├── beach.png │ ├── cat_grumpy_1.jpg │ ├── cat_grumpy_2.jpg │ ├── cat_smiling_1.jpg │ ├── cat_smiling_2.jpg │ ├── containers.png │ ├── highway.png │ ├── nightcity.png │ ├── woman_blue.png │ └── wood.png ├── Sources │ ├── CognitiveServices.swift │ └── Ext.swift └── contents.xcplayground ├── LICENSE ├── Play with Cognitive Services.playgroundbook └── Contents │ ├── Chapters │ ├── Computer Vision.playgroundchapter │ │ ├── Manifest.plist │ │ └── Pages │ │ │ ├── First steps.playgroundpage │ │ │ ├── Contents.swift │ │ │ ├── LiveView.swift │ │ │ ├── Manifest.plist │ │ │ └── Resources │ │ │ │ └── background.png │ │ │ └── Intro.cutscenepage │ │ │ ├── Manifest.plist │ │ │ └── Resources │ │ │ ├── cutscene.html │ │ │ └── images │ │ │ └── cutscenebg.png │ ├── Emotions.playgroundchapter │ │ ├── Manifest.plist │ │ └── Pages │ │ │ ├── Intro.cutscenepage │ │ │ ├── Manifest.plist │ │ │ └── Resources │ │ │ │ ├── cutscene.html │ │ │ │ └── images │ │ │ │ └── cutscenebg.png │ │ │ └── Smile.playgroundpage │ │ │ ├── Backup │ │ │ ├── Contents.swift │ │ │ ├── LiveView.swift │ │ │ ├── Manifest.plist │ │ │ └── Resources │ │ │ └── background.png │ ├── Faces.playgroundchapter │ │ ├── Manifest.plist │ │ └── Pages │ │ │ ├── Intro.cutscenepage │ │ │ ├── Manifest.plist │ │ │ └── Resources │ │ │ │ ├── cutscene.html │ │ │ │ └── images │ │ │ │ └── cutscenebg.png │ │ │ └── WhoIsThis.playgroundpage │ │ │ ├── Contents.swift │ │ │ ├── LiveView.swift │ │ │ ├── Manifest.plist │ │ │ └── Resources │ │ │ └── background.png │ └── Get Started.playgroundchapter │ │ ├── Manifest.plist │ │ └── Pages │ │ ├── Intro.cutscenepage │ │ ├── Manifest.plist │ │ └── Resources │ │ │ ├── cutscene.html │ │ │ └── images │ │ │ └── cutscenebg.png │ │ └── The elements.playgroundpage │ │ ├── Contents.swift │ │ ├── LiveView.swift │ │ ├── Manifest.plist │ │ └── Resources │ │ └── background.png │ ├── Manifest.plist │ ├── Resources │ ├── Aaron.jpg │ ├── Jan.png │ ├── Les.jpg │ ├── Nazuki.png │ ├── Owen_Family.jpg │ ├── Tiffany.jpg │ ├── background.png │ ├── beach.png │ ├── containers.png │ ├── group.png │ ├── highway.png │ ├── nightcity.png │ ├── playground_icon.png │ ├── 
woman_blue.png │ └── wood.png │ └── Sources │ ├── CognitiveServices.swift │ ├── Ext.swift │ ├── LandmarkView.swift │ └── MyView.swift ├── PlayWithYourSmile.playground ├── Pages │ ├── Force a smile.xcplaygroundpage │ │ └── Contents.swift │ ├── Make me smile.xcplaygroundpage │ │ └── Contents.swift │ ├── My own smile.xcplaygroundpage │ │ └── Contents.swift │ ├── My warm up.xcplaygroundpage │ │ └── Contents.swift │ └── The warm up.xcplaygroundpage │ │ └── Contents.swift ├── Resources │ ├── Aaron.jpg │ ├── Giugli.png │ ├── Jan.png │ ├── Les.jpg │ ├── Photo on 26.06.17 at 09.21.jpg │ ├── grumpycat.jpg │ └── manu.jpg ├── Sources │ ├── CognitiveServices.swift │ ├── EmotionHelpers.swift │ └── Ext.swift └── contents.xcplayground ├── README.md ├── slidedeck ├── machinelearningnoobs.pptx └── playgrounds_slidedeck.pdf └── storyboard in books ├── Travel.playgroundbook └── Contents │ ├── Chapters │ └── Chapter1.playgroundchapter │ │ ├── Manifest.plist │ │ └── Pages │ │ └── Page1.playgroundpage │ │ ├── Contents.swift │ │ ├── LiveView.swift │ │ ├── Manifest.plist │ │ ├── PrivateResources │ │ └── Hints.plist │ │ ├── PublicResources │ │ └── .gitkeep │ │ └── Sources │ │ └── .gitkeep │ ├── Manifest.plist │ ├── PrivateResources │ ├── Assets.car │ ├── Icon.png │ └── Main.storyboardc │ │ ├── AqW-SP-zhf-view-OCl-26-9ar.nib │ │ ├── BYZ-38-t0r-view-8bC-Xf-vdC.nib │ │ ├── Info.plist │ │ ├── log.nib │ │ └── view.nib │ ├── PublicResources │ └── .gitkeep │ └── Sources │ ├── .gitkeep │ ├── LogController.swift │ └── ViewController.swift ├── TravelLog.xcodeproj ├── project.pbxproj └── project.xcworkspace │ └── contents.xcworkspacedata ├── TravelLog ├── AppDelegate.swift ├── Assets.xcassets │ ├── AppIcon.appiconset │ │ └── Contents.json │ ├── Contents.json │ ├── Picture1.imageset │ │ ├── Contents.json │ │ └── Picture1.png │ ├── Picture2.imageset │ │ ├── Contents.json │ │ └── Picture2.png │ ├── Picture3.imageset │ │ ├── Contents.json │ │ └── Picture3.png │ └── Picture4.imageset │ │ ├── Contents.json │ │ └── Picture4.png ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── Info.plist ├── LogController.swift └── ViewController.swift ├── TravelWorkspace.xcworkspace └── contents.xcworkspacedata └── setup.sh /.gitignore: -------------------------------------------------------------------------------- 1 | # Xcode 2 | # 3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore 4 | 5 | ## Build generated 6 | build/ 7 | DerivedData/ 8 | 9 | ## Various settings 10 | *.pbxuser 11 | !default.pbxuser 12 | *.mode1v3 13 | !default.mode1v3 14 | *.mode2v3 15 | !default.mode2v3 16 | *.perspectivev3 17 | !default.perspectivev3 18 | xcuserdata/ 19 | 20 | ## Other 21 | *.moved-aside 22 | *.xcuserstate 23 | 24 | ## Obj-C/Swift specific 25 | *.hmap 26 | *.ipa 27 | *.dSYM.zip 28 | *.dSYM 29 | 30 | ## Playgrounds 31 | timeline.xctimeline 32 | playground.xcworkspace 33 | 34 | # Swift Package Manager 35 | # 36 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies. 37 | # Packages/ 38 | .build/ 39 | 40 | # CocoaPods 41 | # 42 | # We recommend against adding the Pods directory to your .gitignore. However 43 | # you should judge for yourself, the pros and cons are mentioned at: 44 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control 45 | # 46 | # Pods/ 47 | 48 | # Carthage 49 | # 50 | # Add this line if you want to avoid checking in source code from Carthage dependencies. 
51 | # Carthage/Checkouts 52 | 53 | Carthage/Build 54 | 55 | # fastlane 56 | # 57 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the 58 | # screenshots whenever they are needed. 59 | # For more information about the recommended setup visit: 60 | # https://github.com/fastlane/fastlane/blob/master/fastlane/docs/Gitignore.md 61 | 62 | fastlane/report.xml 63 | fastlane/Preview.html 64 | fastlane/screenshots 65 | fastlane/test_output 66 | -------------------------------------------------------------------------------- /GreatStuffWithThePencil.playground/Pages/Detect your Handwriting.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | 2 | /*: 3 | # OMG the handwriting recognition :O 4 | As we learned in the previous two playgrounds how to draw with the apple pencil in a smooth way, 5 | we are now taking things a bit further. This example is rather short, but the main brain power 6 | is packed into the __MyDoodleCanvas.swift__ file. 7 | 8 | In there you'll find the magic which happens as soon as you stop writing. The piece you've 9 | just written will be packed into an image and sent to the __Cognitive Service Computer Vision__ API for 10 | __Handwriting OCR Recognition__ (yes, double the R :D). 11 | 12 | It needs two REST API calls to achieve this, because the OCR on handwriting may take a bit longer than 13 | standard OCR from printed text. So the first call asks the service to start with the recognition and the 14 | second call retrieves the result - if any text was detected from your wonderful handwriting. 15 | 16 | Just try it! And don't forget to add your **Computer Vision Key** into the __CognitiveService.swift__ 17 | file __in line 69__ :) 18 | 19 | Everything else should then work right out of the box - it's magic, right? 20 | - - - 21 | */ 22 | import Foundation 23 | import UIKit 24 | import PlaygroundSupport 25 | 26 | 27 | var myCanvas = MyDoodleCanvas() 28 | myCanvas.backgroundColor = .white 29 | myCanvas.isUserInteractionEnabled = true 30 | 31 | var canvasView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 630)) 32 | myCanvas.frame = canvasView.frame 33 | myCanvas.setup() 34 | canvasView.addSubview((myCanvas)) 35 | 36 | PlaygroundPage.current.liveView = canvasView 37 | 38 | -------------------------------------------------------------------------------- /GreatStuffWithThePencil.playground/Pages/Get Started.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | 2 | /*: 3 | # Get a grip on your Apple Pencil! 4 | Ever wanted to code for your Pencil instead of just using it in apps of others? Now this is your time! 5 | In this playground we will get a first grip of how to support basic doodling in your app with the Pencil. 6 | And guess what - it's not complicated at all. You basically use things you already know of: touches! 7 | But see yourself, and let's get things going! 8 | 9 | And thanks to __Caroline Begbie__ for getting me started with the whole topic :) 10 | Her full tutorial can be 11 | [found here on raywenderlich.com](https://www.raywenderlich.com/121834/apple-pencil-tutorial) 12 | - - - 13 | */ 14 | //: As always, we need a couple of imports to get nice API support for what we will be doing 15 | import Foundation 16 | import UIKit 17 | import PlaygroundSupport 18 | 19 | /*: 20 | We define a class for our doodling canvas. Nope, there is no out-of-the-box drawing canvas there for you. 
21 | But don't be afraid, it's pretty simple to build one on your own. 22 | */ 23 | public class BasicCanvas : UIImageView { 24 | 25 | //: First we need some constants for later calculation and a texture for our pencil. It should look like a real pencil, right? 26 | let pi = CGFloat(Double.pi) 27 | let forceSensitivity: CGFloat = 4.0 28 | var pencilTexture = UIColor(patternImage: UIImage(named: "PencilTexture")!) 29 | let defaultLineWidth : CGFloat = 6 30 | //: We will support finger touches as an eraser. Cool, eh? 31 | var eraserColor: UIColor { 32 | return backgroundColor ?? UIColor.white 33 | } 34 | 35 | /*: 36 | ### Touches 37 | As said before, touches are our most important source of information for supporting the pencil. The pencil itself 38 | generates touches just like the finger-based ones you are used to. But the pencil-generated touches carry additional 39 | information we can use to tell them apart and to style the drawing. 40 | */ 41 | override public func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) { 42 | guard let touch = touches.first else { 43 | return 44 | } 45 | 46 | UIGraphicsBeginImageContextWithOptions(bounds.size, false, 0.0) 47 | let context = UIGraphicsGetCurrentContext() 48 | 49 | // Draw previous image into context 50 | image?.draw(in: bounds) 51 | //: In here we just take the generated touches and draw them as strokes 52 | drawStroke(context: context, touch: touch) 53 | //: Right afterwards we generate an image from our drawing and assign it to our image view's image 54 | image = UIGraphicsGetImageFromCurrentImageContext() 55 | //: And don't forget to end/close the current context 56 | UIGraphicsEndImageContext() 57 | } 58 | 59 | /*: 60 | ### Core drawing 61 | This method does the real drawing work. It gets the touches and detects whether they are coming 62 | from the pencil or from a finger. Depending on the touch's source the stroke will be styled 63 | differently, as we want to draw with the pencil and erase with the finger. 64 | No matter where the touch is coming from - the drawing work is the same: just add a line to 65 | the context from the touch's start location to its end. 66 | */ 67 | func drawStroke(context: CGContext?, touch: UITouch) { 68 | let previousLocation = touch.previousLocation(in: self) 69 | let location = touch.location(in: self) 70 | 71 | // Calculate line width for drawing stroke 72 | var lineWidth : CGFloat = 1.0 73 | 74 | if touch.type == .stylus { 75 | lineWidth = lineWidthForDrawing(context: context, touch: touch) 76 | pencilTexture.setStroke() 77 | } else { 78 | lineWidth = touch.majorRadius / 2 79 | eraserColor.setStroke() 80 | } 81 | 82 | UIColor.darkGray.setStroke() 83 | 84 | context!.setLineWidth(lineWidth) 85 | context!.setLineCap(.round) 86 | 87 | context?.move(to: previousLocation) 88 | context?.addLine(to: location) 89 | 90 | // Draw the stroke 91 | context!.strokePath() 92 | } 93 | 94 | /*: 95 | ### Forceful drawing 96 | This little helper method is used for a nice feature - reflecting the current force used while drawing. 97 | The harder you press, the thicker the line gets. It gives the user a more natural feeling while drawing. 98 | */ 99 | func lineWidthForDrawing(context: CGContext?, touch: UITouch) -> CGFloat { 100 | var lineWidth = defaultLineWidth 101 | 102 | if touch.force > 0 { 103 | lineWidth = touch.force * forceSensitivity 104 | } 105 | 106 | return lineWidth 107 | } 108 | 109 | /*: 110 | We want to clear the current canvas - and this little method helps us do exactly that. 111 | With a nice fade effect in place.
112 | */ 113 | func clearCanvas(_ animated: Bool) { 114 | if animated { 115 | UIView.animate(withDuration: 0.5, animations: { 116 | self.alpha = 0 117 | }, completion: { finished in 118 | self.alpha = 1 119 | self.image = nil 120 | }) 121 | } else { 122 | image = nil 123 | } 124 | } 125 | } 126 | 127 | 128 | /*: 129 | - - - 130 | ### Using the Doodle Canvas 131 | Now we build ourselves a nice basic doodling canvas. So we want to use it, right? Just create a new BasicCanvas, 132 | set a background color and give it a frame (for the Playgrounds app on the iPad just use the width: 1024). 133 | Attach the new view to the Playground's liveView and you are good to go. Let's doodle! 134 | */ 135 | var myCanvas = BasicCanvas() 136 | myCanvas.backgroundColor = .white 137 | myCanvas.isUserInteractionEnabled = true 138 | 139 | var canvasView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 630)) 140 | 141 | //: Use the wider view on the iPad with the Playgrounds app fullscreen mode. More space to draw! 142 | //var canvasView = UIView(frame: CGRect(x: 0, y: 0, width: 1024, height: 630)) 143 | 144 | myCanvas.frame = canvasView.frame 145 | canvasView.addSubview(myCanvas) 146 | 147 | PlaygroundPage.current.liveView = canvasView 148 | 149 | 150 | 151 | 152 | -------------------------------------------------------------------------------- /GreatStuffWithThePencil.playground/Pages/Smooth Doodeling.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | 2 | /*: 3 | # Make your doodling smooth! 4 | In the previous playground we got a first feeling for how easy it is to draw with the Apple Pencil. 5 | So now we want to see our doodling a bit more smoother and not that edged, especially when drawing arcs and cicles. 6 | In this playground we will add some minor adjustment to our code to achieve this exact goal: smoooothen it! 7 | - - - 8 | */ 9 | import Foundation 10 | import UIKit 11 | import PlaygroundSupport 12 | 13 | public class SmoothCanvas : UIImageView { 14 | let pi = CGFloat(Double.pi) 15 | let forceSensitivity: CGFloat = 4.0 16 | var pencilTexture = UIColor(patternImage: UIImage(named: "PencilTexture")!) 17 | let defaultLineWidth : CGFloat = 6 18 | 19 | var eraserColor: UIColor { 20 | return backgroundColor ?? UIColor.white 21 | } 22 | 23 | /*: 24 | ### Again: Touches! 25 | Guess what - here the smoothening action will happen! 26 | The pencil generates not only touches - it generates coalescedTouches and predictedTouches as well along 27 | it's drawing way over our canvas. For smoothening the drawing line we will need the coalescedTouches. Those 28 | are - simply put - the intermediate touches which are generated on a higher scanning rate from the iPad's 29 | display. Because when the pencil touches the display the framerate will be doubled. Not for the drawing, but 30 | for detecting touches by the pencil. Crazy huh! But good for us because we use them to get more touches and 31 | therefore smoothen our drawing lines. 32 | */ 33 | override public func touchesMoved(_ touches: Set, with event: UIEvent?) { 34 | 35 | guard let touch = touches.first else { 36 | return 37 | } 38 | 39 | UIGraphicsBeginImageContextWithOptions(bounds.size, false, 0.0) 40 | let context = UIGraphicsGetCurrentContext() 41 | 42 | // Draw previous image into context 43 | image?.draw(in: bounds) 44 | 45 | var touches = [UITouch]() 46 | 47 | //: The coalesced touches come withthe current event. 
So we find out which belong to our current touch and collect them in an array. 48 | if let coalescedTouches = event?.coalescedTouches(for: touch) { 49 | touches = coalescedTouches 50 | } else { 51 | touches.append(touch) 52 | } 53 | 54 | //: After we found all coalesced touches for our touch we draw them all. So not just the one touch, but with all it's intermediate "buddies". 55 | for touch in touches { 56 | drawStroke(context: context, touch: touch) 57 | } 58 | 59 | image = UIGraphicsGetImageFromCurrentImageContext() 60 | UIGraphicsEndImageContext() 61 | } 62 | 63 | //: The rest of the code stays exactly the same as in the first playground. We draw more strokes and therefore smoothen the drawing. That's it! 64 | 65 | func drawStroke(context: CGContext?, touch: UITouch) { 66 | let previousLocation = touch.previousLocation(in: self) 67 | let location = touch.location(in: self) 68 | 69 | // Calculate line width for drawing stroke 70 | var lineWidth : CGFloat = 1.0 71 | 72 | if touch.type == .stylus { 73 | lineWidth = lineWidthForDrawing(context: context, touch: touch) 74 | pencilTexture.setStroke() 75 | } else { 76 | lineWidth = touch.majorRadius / 2 77 | eraserColor.setStroke() 78 | } 79 | 80 | UIColor.darkGray.setStroke() 81 | 82 | context!.setLineWidth(lineWidth) 83 | context!.setLineCap(.round) 84 | 85 | context?.move(to: previousLocation) 86 | context?.addLine(to: location) 87 | 88 | // Draw the stroke 89 | context!.strokePath() 90 | } 91 | 92 | func lineWidthForDrawing(context: CGContext?, touch: UITouch) -> CGFloat { 93 | var lineWidth = defaultLineWidth 94 | 95 | if touch.force > 0 { 96 | lineWidth = touch.force * forceSensitivity 97 | } 98 | 99 | return lineWidth 100 | } 101 | 102 | func clearCanvas(_ animated: Bool) { 103 | if animated { 104 | UIView.animate(withDuration: 0.5, animations: { 105 | self.alpha = 0 106 | }, completion: { finished in 107 | self.alpha = 1 108 | self.image = nil 109 | }) 110 | } else { 111 | image = nil 112 | } 113 | } 114 | } 115 | 116 | /*: 117 | - - - 118 | ### Using the Doodle Canvas 119 | Now we build ourselves a nice smoooothe doodling canvas. So we want to use it, right? Just create a new SmoothCanvas, 120 | set a background color and give it a frame (for the Playgrounds app on the iPad just use the width: 1024). 121 | Attach the new view to the Playground's liveView and you are good to go. Let's doodle! 122 | */ 123 | 124 | var myCanvas = SmoothCanvas() 125 | myCanvas.backgroundColor = .white 126 | myCanvas.isUserInteractionEnabled = true 127 | 128 | var canvasView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 630)) 129 | 130 | //: Use the wider view on the iPad with the Playgrounds app fullscreen mode. More space to draw! 
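//: *Editor's sketch - not part of the original playground:* if you are curious how much extra resolution coalescing actually buys you, a tiny subclass like the one below (the name `CountingCanvas` is made up for illustration) logs the sample counts per move event while still drawing exactly like `SmoothCanvas`.

public class CountingCanvas : SmoothCanvas {
    override public func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
        if let touch = touches.first {
            // How many intermediate samples did the display's doubled scan rate give us?
            let coalesced = event?.coalescedTouches(for: touch)?.count ?? 1
            let predicted = event?.predictedTouches(for: touch)?.count ?? 0
            print("one move event -> \(coalesced) coalesced and \(predicted) predicted touches")
        }
        // Draw exactly as before
        super.touchesMoved(touches, with: event)
    }
}

//: To try it out, create `myCanvas` as a `CountingCanvas()` instead of a `SmoothCanvas()` and watch the console while you draw.
//: (The commented-out line right below is the wider iPad canvas mentioned above.)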
131 | //var canvasView = UIView(frame: CGRect(x: 0, y: 0, width: 1024, height: 630)) 132 | 133 | myCanvas.frame = canvasView.frame 134 | canvasView.addSubview((myCanvas)) 135 | 136 | PlaygroundPage.current.liveView = canvasView 137 | 138 | 139 | 140 | 141 | -------------------------------------------------------------------------------- /GreatStuffWithThePencil.playground/Resources/PencilTexture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/GreatStuffWithThePencil.playground/Resources/PencilTexture.png -------------------------------------------------------------------------------- /GreatStuffWithThePencil.playground/Sources/EmotionHelpers.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | 4 | public class EmotionHelpers : NSObject { 5 | 6 | var preview : UIImageView! 7 | 8 | /** 9 | Method for putting an emoji with a matching emotion over each detected face in a photo. 10 | 11 | - parameters: 12 | - photo: The photo on which faces and it's emotion shall be detected 13 | - withFaceRect: If TRUE then the face rectangle is drawn into the photo 14 | - completion: UIImage as new photo with added emojis for the detected emotion over each face in fitting size and with face framing rectangles if declared. Image is the same size as the original. 15 | */ 16 | public func makeEmojiFromEmotionOnPhoto (photo : UIImageView!, withFaceRect: Bool, completion: @escaping (UIImage) -> (Void)) { 17 | 18 | let manager = CognitiveServices() 19 | 20 | manager.retrievePlausibleEmotionsForImage(photo.image!) { (result, error) -> (Void) in 21 | DispatchQueue.main.async(execute: { 22 | if let _ = error { 23 | print("omg something bad happened") 24 | } else { 25 | print("seems like all went well: \(String(describing: result))") 26 | } 27 | 28 | if (result?.count)! > 0 { 29 | print("1..2.. Emoji!\n\((result?.count)!) emotions detected") 30 | } else { 31 | print("Seems like no emotions were detected :(") 32 | } 33 | 34 | let photoWithEmojis = self.drawEmojisFor(emotions: result, withFaceRect: withFaceRect, image: photo.image!) 
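// drawEmojisFor(emotions:withFaceRect:image:) - defined further down - has now composited one
// randomly chosen emoji per detected face, scaled to fit the face rectangle (and, if requested,
// the rectangle itself). The finished image is handed back to the caller next.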
35 | completion(photoWithEmojis) 36 | }) 37 | } 38 | 39 | } 40 | 41 | public func emojisFor (emotion: CognitiveServicesEmotionResult) -> [String] { 42 | var availableEmojis = [String]() 43 | 44 | switch emotion.emotion { 45 | case .Anger: 46 | availableEmojis.append("😡") 47 | availableEmojis.append("😠") 48 | case .Contempt: 49 | availableEmojis.append("😤") 50 | case .Disgust: 51 | availableEmojis.append("😷") 52 | availableEmojis.append("🤐") 53 | case .Fear: 54 | availableEmojis.append("😱") 55 | case .Happiness: 56 | availableEmojis.append("😝") 57 | availableEmojis.append("😀") 58 | availableEmojis.append("😃") 59 | availableEmojis.append("😄") 60 | availableEmojis.append("😆") 61 | availableEmojis.append("😊") 62 | availableEmojis.append("🙂") 63 | availableEmojis.append("☺️") 64 | case .Neutral: 65 | availableEmojis.append("😶") 66 | availableEmojis.append("😐") 67 | availableEmojis.append("😑") 68 | case .Sadness: 69 | availableEmojis.append("🙁") 70 | availableEmojis.append("😞") 71 | availableEmojis.append("😟") 72 | availableEmojis.append("😔") 73 | availableEmojis.append("😢") 74 | availableEmojis.append("😭") 75 | case .Surprise: 76 | availableEmojis.append("😳") 77 | availableEmojis.append("😮") 78 | availableEmojis.append("😲") 79 | } 80 | 81 | return availableEmojis 82 | 83 | } 84 | 85 | public func drawEmojisFor (emotions: [CognitiveServicesEmotionResult]?, withFaceRect: Bool, image: UIImage) -> UIImage { 86 | 87 | var returnImage : UIImage! 88 | 89 | if let results = emotions { 90 | UIGraphicsBeginImageContext(image.size) 91 | image.draw(in: CGRect(origin: CGPoint.zero, size: image.size)) 92 | 93 | for result in results { 94 | let availableEmojis = emojisFor(emotion: result) 95 | 96 | let emoji = availableEmojis.randomElement() 97 | 98 | let maximumSize = result.frame.size 99 | let string = emoji as NSString 100 | let startingFontSize = 8192.0 101 | 102 | var actualFontSize = startingFontSize 103 | var stepping = actualFontSize 104 | 105 | repeat { 106 | stepping /= 2.0 107 | if stepping < 1.0 { 108 | break 109 | } 110 | 111 | let font = UIFont.systemFont(ofSize: CGFloat(actualFontSize)) 112 | let calculatedSize = string.size(withAttributes: [NSAttributedStringKey.font: font]) 113 | 114 | if calculatedSize.width > maximumSize.width { 115 | actualFontSize -= stepping 116 | } else { 117 | actualFontSize += stepping 118 | } 119 | 120 | } while true 121 | 122 | let font = UIFont.systemFont(ofSize: CGFloat(actualFontSize)) 123 | string.draw(in: result.frame, withAttributes: [NSAttributedStringKey.font: font]) 124 | 125 | if withFaceRect { 126 | let context = UIGraphicsGetCurrentContext() 127 | let frame = result.frame 128 | context!.setLineWidth(5) 129 | context!.addRect(frame) 130 | context!.drawPath(using: .stroke) 131 | } 132 | 133 | } 134 | 135 | returnImage = UIGraphicsGetImageFromCurrentImageContext() 136 | UIGraphicsEndImageContext() 137 | } 138 | 139 | return returnImage 140 | } 141 | 142 | } 143 | 144 | -------------------------------------------------------------------------------- /GreatStuffWithThePencil.playground/Sources/Ext.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | 4 | public extension Array { 5 | func randomElement() -> Element { 6 | let index = Int(arc4random_uniform(UInt32(self.count))) 7 | return self[index] 8 | } 9 | } 10 | 11 | public extension UIColor { 12 | convenience init(hexString:String) { 13 | 14 | let hexString = hexString.trimmingCharacters(in: NSCharacterSet.whitespacesAndNewlines) 
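// What follows: skip an optional leading "#", scan the remaining six hex digits into a
// 32-bit value, then split it into 8-bit red/green/blue components and normalize them to 0...1.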
15 | let scanner = Scanner(string: hexString) 16 | 17 | if (hexString.hasPrefix("#")) { 18 | scanner.scanLocation = 1 19 | } 20 | 21 | var color:UInt32 = 0 22 | scanner.scanHexInt32(&color) 23 | 24 | let mask = 0x000000FF 25 | let r = Int(color >> 16) & mask 26 | let g = Int(color >> 8) & mask 27 | let b = Int(color) & mask 28 | 29 | let red = CGFloat(r) / 255.0 30 | let green = CGFloat(g) / 255.0 31 | let blue = CGFloat(b) / 255.0 32 | 33 | self.init(red:red, green:green, blue:blue, alpha:1) 34 | } 35 | 36 | func toHexString() -> String { 37 | var r:CGFloat = 0 38 | var g:CGFloat = 0 39 | var b:CGFloat = 0 40 | var a:CGFloat = 0 41 | 42 | getRed(&r, green: &g, blue: &b, alpha: &a) 43 | 44 | let rgb:Int = (Int)(r*255)<<16 | (Int)(g*255)<<8 | (Int)(b*255)<<0 45 | 46 | return NSString(format:"#%06x", rgb) as String 47 | } 48 | } 49 | 50 | public extension UIImage { 51 | func crop( rect: CGRect) -> UIImage { 52 | var rect = rect 53 | rect.origin.x*=self.scale 54 | rect.origin.y*=self.scale 55 | rect.size.width*=self.scale 56 | rect.size.height*=self.scale 57 | 58 | let imageRef = self.cgImage!.cropping(to: rect) 59 | let image = UIImage(cgImage: imageRef!, scale: self.scale, orientation: self.imageOrientation) 60 | return image 61 | } 62 | } 63 | 64 | -------------------------------------------------------------------------------- /GreatStuffWithThePencil.playground/Sources/MyDoodleCanvas.swift: -------------------------------------------------------------------------------- 1 | /* 2 | * This class is based up the original implementation from Razeware 3 | * and was modified for educational reasons from 4 | * 5 | * Manuela Rink 6 | * Copyright (c) 2017 Microsoft 7 | * 8 | * This code can be used as wished and the permission notice as well 9 | * as the licencing shall not be changed in any way. 10 | * 11 | * --------------------------------------------------------------------------- 12 | * 13 | * Copyright (c) 2015 Razeware LLC 14 | * 15 | * Permission is hereby granted, free of charge, to any person obtaining a copy 16 | * of this software and associated documentation files (the "Software"), to deal 17 | * in the Software without restriction, including without limitation the rights 18 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 19 | * copies of the Software, and to permit persons to whom the Software is 20 | * furnished to do so, subject to the following conditions: 21 | * 22 | * The above copyright notice and this permission notice shall be included in 23 | * all copies or substantial portions of the Software. 24 | * 25 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 26 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 27 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 28 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 29 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 30 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 31 | * THE SOFTWARE. 32 | * 33 | * 34 | 35 | */ 36 | 37 | import Foundation 38 | import UIKit 39 | 40 | 41 | public class MyDoodleCanvas : UIImageView { 42 | 43 | let pi = CGFloat(Double.pi) 44 | 45 | let forceSensitivity: CGFloat = 4.0 46 | var pencilTexture = UIColor(patternImage: UIImage(named: "PencilTexture")!) 
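// Editor's note (not in the original): the force-unwrap above assumes "PencilTexture" exists in the
// playground's Resources folder; if the asset is missing, this line crashes at load time. A more
// defensive variant could look like:
//   var pencilTexture = UIImage(named: "PencilTexture").map(UIColor.init(patternImage:)) ?? .darkGray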
47 | let minLineWidth: CGFloat = 5 48 | 49 | // current drawing rect 50 | var minX = 0 51 | var minY = 0 52 | var maxX = 0 53 | var maxY = 0 54 | 55 | var trackTimer : Timer? 56 | var lastTouchTimestamp : TimeInterval? 57 | var ocrImageRect : CGRect? 58 | var currentTextRect : CGRect? 59 | 60 | let defaultLineWidth:CGFloat = 6 61 | var markerColor: UIColor = UIColor.green 62 | 63 | var eraserColor: UIColor { 64 | return backgroundColor ?? UIColor.white 65 | } 66 | 67 | var context : CGContext? 68 | 69 | public func setup () { 70 | 71 | resetDoodleRect() 72 | 73 | lastTouchTimestamp = 0 74 | 75 | if #available(iOS 10.0, *) { 76 | trackTimer = Timer.scheduledTimer(withTimeInterval: 0.5, repeats: true, block: { 77 | timer in 78 | 79 | let now = Date().timeIntervalSince1970 80 | 81 | if Int(self.lastTouchTimestamp!) > 0 && now - self.lastTouchTimestamp! > 1 { 82 | self.drawDoodlingRect(context: self.context) 83 | } 84 | }) 85 | } else {} 86 | 87 | } 88 | 89 | func resetDoodleRect() { 90 | minX = Int(self.frame.width) 91 | minY = Int(self.frame.height) 92 | 93 | maxX = 0 94 | maxY = 0 95 | 96 | lastTouchTimestamp = 0 97 | 98 | UIGraphicsBeginImageContextWithOptions(bounds.size, false, 0.0) 99 | context = UIGraphicsGetCurrentContext() 100 | } 101 | 102 | 103 | override public func touchesMoved(_ touches: Set, with event: UIEvent?) { 104 | 105 | guard let touch = touches.first else { 106 | return 107 | } 108 | 109 | lastTouchTimestamp = Date().timeIntervalSince1970 110 | 111 | // Draw previous image into context 112 | image?.draw(in: bounds) 113 | 114 | 115 | var touches = [UITouch]() 116 | if let coalescedTouches = event?.coalescedTouches(for: touch) { 117 | touches = coalescedTouches 118 | } else { 119 | touches.append(touch) 120 | } 121 | 122 | for touch in touches { 123 | drawStroke(context: context, touch: touch) 124 | } 125 | 126 | image = UIGraphicsGetImageFromCurrentImageContext() 127 | } 128 | 129 | func drawStroke(context: CGContext?, touch: UITouch) { 130 | let previousLocation = touch.previousLocation(in: self) 131 | 132 | // Calculate line width for drawing stroke 133 | var lineWidth : CGFloat = 1.0 134 | let tiltThreshold : CGFloat = pi/6 135 | 136 | //if touch.type == .stylus { 137 | 138 | let location = touch.location(in: self) 139 | 140 | minX = min(minX, Int(location.x)) 141 | minY = min(minY, Int(location.y)) 142 | maxX = max(maxX, Int(location.x)) 143 | maxY = max(maxY, Int(location.y)) 144 | 145 | if touch.altitudeAngle < tiltThreshold { 146 | lineWidth = lineWidthForShading(context: context, touch: touch) 147 | } else { 148 | lineWidth = lineWidthForDrawing(context: context, touch: touch) 149 | } 150 | 151 | pencilTexture.setStroke() 152 | 153 | UIColor.darkGray.setStroke() 154 | 155 | context!.setLineWidth(lineWidth) 156 | context!.setLineCap(.round) 157 | 158 | context?.move(to: previousLocation) 159 | context?.addLine(to: location) 160 | 161 | // Draw the stroke 162 | context!.strokePath() 163 | //} 164 | /*else { 165 | lineWidth = touch.majorRadius / 2 166 | eraserColor.setStroke() 167 | }*/ 168 | 169 | 170 | 171 | } 172 | 173 | func drawDoodlingRect(context: CGContext?) { 174 | let inset = 5 175 | 176 | markerColor.setStroke() 177 | context!.setLineWidth(1.0) 178 | context!.setLineCap(.round) 179 | UIColor.clear.setFill() 180 | 181 | ocrImageRect = CGRect(x: minX - inset, y: minY - inset, width: (maxX-minX) + inset*2, height: (maxY-minY) + 2*inset) 182 | context!.addRect(ocrImageRect!) 
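// ocrImageRect is the bounding box of everything written since the last reset, padded by
// `inset` points. It gets stroked in markerColor here and is later cropped out of the canvas
// image and sent off for handwriting OCR (see fetchOCRText below).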
183 | // Draw the stroke 184 | context!.strokePath() 185 | 186 | drawTextRect(context: context, rect: ocrImageRect!) 187 | 188 | image = UIGraphicsGetImageFromCurrentImageContext() 189 | UIGraphicsEndImageContext() 190 | 191 | fetchOCRText() 192 | 193 | resetDoodleRect() 194 | } 195 | 196 | func drawTextRect(context: CGContext?, rect: CGRect) { 197 | UIColor.lightGray.setStroke() 198 | currentTextRect = CGRect(x: rect.origin.x, y: rect.origin.y + rect.height, width: rect.width, height: 15) 199 | context!.addRect(currentTextRect!) 200 | context!.strokePath() 201 | } 202 | 203 | func addLabelForOCR(text: String) { 204 | DispatchQueue.main.async { 205 | let label = UILabel(frame: self.currentTextRect!) 206 | label.text = text.characters.count > 0 ? text : "Text not recognized" 207 | label.font = UIFont(name: "Helvetica Neue", size: 9) 208 | self.addSubview(label) 209 | } 210 | } 211 | 212 | func lineWidthForShading(context: CGContext?, touch: UITouch) -> CGFloat { 213 | 214 | let previousLocation = touch.previousLocation(in: self) 215 | let location = touch.location(in: self) 216 | 217 | let vector1 = touch.azimuthUnitVector(in: self) 218 | 219 | let vector2 = CGPoint(x: location.x - previousLocation.x, y: location.y - previousLocation.y) 220 | 221 | var angle = abs(atan2(vector2.y, vector2.x) - atan2(vector1.dy, vector1.dx)) 222 | 223 | if angle > pi { 224 | angle = 2 * pi - angle 225 | } 226 | if angle > pi / 2 { 227 | angle = pi - angle 228 | } 229 | 230 | let minAngle: CGFloat = 0 231 | let maxAngle = pi / 2 232 | let normalizedAngle = (angle - minAngle) / (maxAngle - minAngle) 233 | 234 | let maxLineWidth: CGFloat = 60 235 | var lineWidth = maxLineWidth * normalizedAngle 236 | 237 | let tiltThreshold : CGFloat = pi/6 238 | let minAltitudeAngle: CGFloat = 0.25 239 | let maxAltitudeAngle = tiltThreshold 240 | 241 | let altitudeAngle = touch.altitudeAngle < minAltitudeAngle ? minAltitudeAngle : touch.altitudeAngle 242 | 243 | let normalizedAltitude = 1 - ((altitudeAngle - minAltitudeAngle) / (maxAltitudeAngle - minAltitudeAngle)) 244 | 245 | lineWidth = lineWidth * normalizedAltitude + minLineWidth 246 | 247 | let minForce: CGFloat = 0.0 248 | let maxForce: CGFloat = 5 249 | 250 | let normalizedAlpha = (touch.force - minForce) / (maxForce - minForce) 251 | 252 | context!.setAlpha(normalizedAlpha) 253 | 254 | return lineWidth 255 | } 256 | 257 | func lineWidthForDrawing(context: CGContext?, touch: UITouch) -> CGFloat { 258 | var lineWidth = defaultLineWidth 259 | 260 | if touch.force > 0 { 261 | lineWidth = touch.force * forceSensitivity 262 | } 263 | 264 | return lineWidth 265 | } 266 | 267 | func clearCanvas(_ animated: Bool) { 268 | if animated { 269 | UIView.animate(withDuration: 0.5, animations: { 270 | self.alpha = 0 271 | }, completion: { finished in 272 | self.alpha = 1 273 | self.image = nil 274 | for subview in self.subviews { 275 | subview.removeFromSuperview() 276 | } 277 | }) 278 | } else { 279 | image = nil 280 | } 281 | } 282 | 283 | func fetchOCRText () { 284 | let manager = CognitiveServices() 285 | 286 | let ocrImage = image!.crop(rect: ocrImageRect!) 287 | 288 | manager.retrieveTextOnImage(ocrImage) { 289 | operationURL, error in 290 | 291 | if #available(iOS 10.0, *) { 292 | 293 | guard let _ = operationURL else { 294 | print("Seems like the network call failed - did you enter the Computer Vision Key in CognitiveServices.swift in line 69? 
:)") 295 | return 296 | } 297 | 298 | let when = DispatchTime.now() + 2 // change 2 to desired number of seconds 299 | DispatchQueue.main.asyncAfter(deadline: when) { 300 | 301 | manager.retrieveResultForOcrOperation(operationURL!, completion: { 302 | results, error -> (Void) in 303 | 304 | if let theResult = results { 305 | var ocrText = "" 306 | for result in theResult { 307 | ocrText = "\(ocrText) \(result)" 308 | } 309 | self.addLabelForOCR(text: ocrText) 310 | } else { 311 | self.addLabelForOCR(text: "No text for writing") 312 | } 313 | 314 | }) 315 | 316 | } 317 | } else { 318 | // Fallback on earlier versions 319 | } 320 | } 321 | } 322 | } 323 | -------------------------------------------------------------------------------- /GreatStuffWithThePencil.playground/contents.xcplayground: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/ComputerVision.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # Describe your picture! 3 | 4 | It's time to get life into our app! Want to get your picture described by a remote service? Yes? YES? So get ready - and get to know the * *drumroooooll* * **COGNITIVE SERVICES**! 5 | 6 | We will start with the Computer Vision API. So let's see, what the "computer" can "see" on our image. 7 | */ 8 | 9 | //#-hidden-code 10 | import PlaygroundSupport 11 | import UIKit 12 | import Foundation 13 | 14 | guard #available(iOS 9, OSX 10.11, *) else { 15 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction? I don't.") 16 | } 17 | 18 | let myView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 600)) 19 | 20 | let preview = UIImageView(frame: myView.bounds) 21 | //#-end-hidden-code 22 | /*: 23 | * experiment: 24 | Choose your preferred image right here or take a new one 25 | */ 26 | preview.image = /*#-editable-code*/#imageLiteral(resourceName: "containers.png")/*#-end-editable-code*/ 27 | //#-hidden-code 28 | preview.contentMode = .scaleAspectFit 29 | 30 | let textLabel = UILabel(frame: CGRect(x: 30, y: myView.bounds.height-200, width: 350, height: 200)) 31 | textLabel.lineBreakMode = .byWordWrapping 32 | textLabel.numberOfLines = 5 33 | textLabel.textColor = #colorLiteral(red: 1.0, green: 1.0, blue: 1.0, alpha: 1.0) 34 | 35 | let backgroundView = UIView(frame: CGRect(x: 0, y: myView.bounds.height-170, width: myView.bounds.width, height: 200)) 36 | backgroundView.backgroundColor = #colorLiteral(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0) 37 | backgroundView.alpha = 0.7 38 | 39 | myView.addSubview(preview) 40 | myView.addSubview(backgroundView) 41 | myView.addSubview(textLabel) 42 | 43 | func showTagsForImage (_ photo : UIImageView, _ confidence : Double) { 44 | let manager = CognitiveServices() 45 | textLabel.text = "... gimme a sec - getting your tags!" 46 | manager.retrievePlausibleTagsForImage(photo.image!, confidence) { (result, error) -> (Void) in 47 | DispatchQueue.main.async(execute: { 48 | if let _ = error { 49 | print("omg something bad happened: \(String(describing: error))") 50 | } else { 51 | print("seems like all went well: \(String(describing: result))") 52 | } 53 | setTagsAsDescription(result) 54 | }) 55 | } 56 | } 57 | 58 | func setTagsAsDescription (_ tags : [String]?) 
{ 59 | if (tags?.count)! > 0 { 60 | textLabel.text = "Look what I detected:\n" 61 | for tag in tags! { 62 | textLabel.text = textLabel.text! + "#" + tag + " " 63 | } 64 | } else { 65 | textLabel.text = "Uh noez! No tags could be found for this image :(" 66 | } 67 | } 68 | 69 | //#-end-hidden-code 70 | /*: 71 | * experiment: 72 | Every part of the description of the picture will be returned with a certain confidence. A good value is 0.85 for nice fitting results. But go a head and play around with this value and see, with what funky descriptions the "computer" may come along 73 | */ 74 | showTagsForImage(preview, /*#-editable-code*/0.1/*#-end-editable-code*/) 75 | //#-hidden-code 76 | PlaygroundPage.current.liveView = myView 77 | //#-end-hidden-code 78 | 79 | /*: 80 | * callout(What did we learn?): 81 | Wonderful! So you just called your first API from the Cognitive Services Suite. The Computer Vision API. If you want to have a detailed look at the documentation - where you can find further examples - visit the dedicated [Computer Vision documentation](https://www.microsoft.com/cognitive-services/en-us/computer-vision-api). 82 | */ 83 | 84 | //: Enough of just describing photos. Let's catch a face and let the API know! Let's rock on and continue by [using the FACE API](@next)! 85 | 86 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/ComputerVision.xcplaygroundpage/Resources/justtext.txt: -------------------------------------------------------------------------------- 1 | Consider the epic shit as already done :) 2 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # Ask CoreML! 3 | ## or "How to offline detect grumpy cats" 4 | 5 | In the last page we had a look at how we can use a self trained model via a REST API for predictions. 6 | 7 | Now we imported the trained model and want to use the Vision Framework if iOS to do this offline. This is way faster and we don't have to rely on an online connection to do predictions. 8 | 9 | In this example we choose an image from the album (or take a new one with the camera). Then we transform it to fit the needs of the model and show the result in a text label below the picture. But no worries - the part of where the model prediction is requested is hidden in this example. We just want to concentrate on the way the model gives it's predictions for images. 
10 | */ 11 | //#-hidden-code 12 | import Foundation 13 | import UIKit 14 | import Vision 15 | import PlaygroundSupport 16 | 17 | let myView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 600)) 18 | 19 | let preview = UIImageView(frame: myView.bounds) 20 | //#-end-hidden-code 21 | 22 | /*: 23 | * experiment: 24 | Choose your cat image right here or take a new one 25 | */ 26 | preview.image = /*#-editable-code*/#imageLiteral(resourceName: "cat_grumpy_1.jpg")/*#-end-editable-code*/ 27 | //#-hidden-code 28 | preview.contentMode = .scaleAspectFit 29 | 30 | let textLabel = UILabel(frame: CGRect(x: 30, y: myView.bounds.height-100, width: 350, height: 100)) 31 | textLabel.lineBreakMode = .byWordWrapping 32 | textLabel.numberOfLines = 5 33 | //#-end-hidden-code 34 | textLabel.textColor = #colorLiteral(red: 0.9254902005, green: 0.2352941185, blue: 0.1019607857, alpha: 1) 35 | textLabel.text = "Wanna find out if your cat is happy or grumpy?" 36 | //#-hidden-code 37 | let backgroundView = UIView(frame: CGRect(x: 0, y: myView.bounds.height-170, width: myView.bounds.width, height: 200)) 38 | backgroundView.backgroundColor = #colorLiteral(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0) 39 | backgroundView.alpha = 0.7 40 | 41 | myView.addSubview(preview) 42 | myView.addSubview(backgroundView) 43 | myView.addSubview(textLabel) 44 | //#-end-hidden-code 45 | 46 | /*: 47 | * experiment: 48 | So let's get the cat analysis started. As you will see in a moment we get the prediction value for each available tag back. We then take the one with the higher value for granted and show the cat's mood in the UI. Groundbreaking right? :D 49 | */ 50 | let catMood = CatVisionLogic() 51 | catMood.initRequest(label: textLabel) 52 | catMood.doClassification(image: preview.image!) 53 | 54 | //#-hidden-code 55 | PlaygroundPage.current.liveView = myView 56 | //#-end-hidden-code 57 | /*: 58 | * callout(What did we learn?): 59 | Yes, we just accomplished to use CoreML and the Vision framework to get prediction from a self trained model! 60 | 61 | If you want to have a detailed look at the documentation - where you can find further examples - visit the dedicated [CUSTOM VISION documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/custom-vision-service/home) and the [CUSTOM VISION prediction definition](https://southcentralus.dev.cognitive.microsoft.com/docs/services/450e4ba4d72542e889d93fd7b8e960de/operations/5a6264bc40d86a0ef8b2c290). 62 | 63 | In case you want to dive deeper into [CoreML](https://developer.apple.com/documentation/coreml) and the [Vision framework](https://developer.apple.com/documentation/vision), just follow the white rabb... 
ehm links :) 64 | */ 65 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Resources/catmoodprediction.mlmodelc/coremldata.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Resources/catmoodprediction.mlmodelc/coremldata.bin -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Resources/catmoodprediction.mlmodelc/model.espresso.shape: -------------------------------------------------------------------------------- 1 | { 2 | "layer_shapes" : { 3 | "fire8\/squeeze1x1" : { 4 | "k" : 64, 5 | "w" : 14, 6 | "n" : 1, 7 | "h" : 14 8 | }, 9 | "data_bn" : { 10 | "k" : 3, 11 | "w" : 227, 12 | "n" : 1, 13 | "h" : 227 14 | }, 15 | "fire4\/expand1x1" : { 16 | "k" : 128, 17 | "w" : 28, 18 | "n" : 1, 19 | "h" : 28 20 | }, 21 | "fire9\/squeeze1x1" : { 22 | "k" : 64, 23 | "w" : 14, 24 | "n" : 1, 25 | "h" : 14 26 | }, 27 | "fire9\/concat" : { 28 | "k" : 512, 29 | "w" : 14, 30 | "n" : 1, 31 | "h" : 14 32 | }, 33 | "fire4\/concat" : { 34 | "k" : 256, 35 | "w" : 28, 36 | "n" : 1, 37 | "h" : 28 38 | }, 39 | "fire8\/expand3x3" : { 40 | "k" : 256, 41 | "w" : 14, 42 | "n" : 1, 43 | "h" : 14 44 | }, 45 | "fire2\/expand3x3" : { 46 | "k" : 64, 47 | "w" : 56, 48 | "n" : 1, 49 | "h" : 56 50 | }, 51 | "fire8\/concat" : { 52 | "k" : 512, 53 | "w" : 14, 54 | "n" : 1, 55 | "h" : 14 56 | }, 57 | "fire9\/expand3x3" : { 58 | "k" : 256, 59 | "w" : 14, 60 | "n" : 1, 61 | "h" : 14 62 | }, 63 | "conv1" : { 64 | "k" : 64, 65 | "w" : 113, 66 | "n" : 1, 67 | "h" : 113 68 | }, 69 | "fire5\/expand1x1" : { 70 | "k" : 128, 71 | "w" : 28, 72 | "n" : 1, 73 | "h" : 28 74 | }, 75 | "fire6\/expand1x1" : { 76 | "k" : 192, 77 | "w" : 14, 78 | "n" : 1, 79 | "h" : 14 80 | }, 81 | "pool10" : { 82 | "k" : 512, 83 | "w" : 1, 84 | "n" : 1, 85 | "h" : 1 86 | }, 87 | "pool10_flattened" : { 88 | "k" : 512, 89 | "w" : 1, 90 | "n" : 1, 91 | "h" : 1 92 | }, 93 | "fire3\/expand3x3" : { 94 | "k" : 64, 95 | "w" : 56, 96 | "n" : 1, 97 | "h" : 56 98 | }, 99 | "fire3\/concat" : { 100 | "k" : 128, 101 | "w" : 56, 102 | "n" : 1, 103 | "h" : 56 104 | }, 105 | "fire4\/expand3x3" : { 106 | "k" : 128, 107 | "w" : 28, 108 | "n" : 1, 109 | "h" : 28 110 | }, 111 | "loss" : { 112 | "k" : 2, 113 | "w" : 1, 114 | "n" : 1, 115 | "h" : 1 116 | }, 117 | "fire7\/concat" : { 118 | "k" : 384, 119 | "w" : 14, 120 | "n" : 1, 121 | "h" : 14 122 | }, 123 | "fire7\/expand1x1" : { 124 | "k" : 192, 125 | "w" : 14, 126 | "n" : 1, 127 | "h" : 14 128 | }, 129 | "fire2\/concat" : { 130 | "k" : 128, 131 | "w" : 56, 132 | "n" : 1, 133 | "h" : 56 134 | }, 135 | "data" : { 136 | "k" : 3, 137 | "w" : 227, 138 | "n" : 1, 139 | "h" : 227 140 | }, 141 | "fire5\/expand3x3" : { 142 | "k" : 128, 143 | "w" : 28, 144 | "n" : 1, 145 | "h" : 28 146 | }, 147 | "pool5" : { 148 | "k" : 256, 149 | "w" : 14, 150 | "n" : 1, 151 | "h" : 14 152 | }, 153 | "fire6\/concat" : { 154 | "k" : 384, 155 | "w" : 14, 156 | "n" : 1, 157 | "h" : 14 158 | }, 159 | "fc" : { 160 | "k" : 2, 161 | "w" : 1, 162 | "n" : 1, 163 | "h" : 1 164 | }, 165 | "fire6\/expand3x3" : { 166 | "k" : 192, 167 | "w" : 14, 168 | "n" : 1, 169 | "h" : 14 170 | }, 171 | "pool3" : { 172 | "k" : 128, 173 | "w" : 28, 174 | "n" : 1, 
175 | "h" : 28 176 | }, 177 | "fire2\/expand1x1" : { 178 | "k" : 64, 179 | "w" : 56, 180 | "n" : 1, 181 | "h" : 56 182 | }, 183 | "fire8\/expand1x1" : { 184 | "k" : 256, 185 | "w" : 14, 186 | "n" : 1, 187 | "h" : 14 188 | }, 189 | "pool1" : { 190 | "k" : 64, 191 | "w" : 56, 192 | "n" : 1, 193 | "h" : 56 194 | }, 195 | "fire9\/expand1x1" : { 196 | "k" : 256, 197 | "w" : 14, 198 | "n" : 1, 199 | "h" : 14 200 | }, 201 | "fire5\/concat" : { 202 | "k" : 256, 203 | "w" : 28, 204 | "n" : 1, 205 | "h" : 28 206 | }, 207 | "fire7\/squeeze1x1" : { 208 | "k" : 48, 209 | "w" : 14, 210 | "n" : 1, 211 | "h" : 14 212 | }, 213 | "fire6\/squeeze1x1" : { 214 | "k" : 48, 215 | "w" : 14, 216 | "n" : 1, 217 | "h" : 14 218 | }, 219 | "fire2\/squeeze1x1" : { 220 | "k" : 16, 221 | "w" : 56, 222 | "n" : 1, 223 | "h" : 56 224 | }, 225 | "fire5\/squeeze1x1" : { 226 | "k" : 32, 227 | "w" : 28, 228 | "n" : 1, 229 | "h" : 28 230 | }, 231 | "fire7\/expand3x3" : { 232 | "k" : 192, 233 | "w" : 14, 234 | "n" : 1, 235 | "h" : 14 236 | }, 237 | "fire3\/expand1x1" : { 238 | "k" : 64, 239 | "w" : 56, 240 | "n" : 1, 241 | "h" : 56 242 | }, 243 | "fire4\/squeeze1x1" : { 244 | "k" : 32, 245 | "w" : 28, 246 | "n" : 1, 247 | "h" : 28 248 | }, 249 | "fire3\/squeeze1x1" : { 250 | "k" : 16, 251 | "w" : 56, 252 | "n" : 1, 253 | "h" : 56 254 | } 255 | } 256 | } -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Resources/catmoodprediction.mlmodelc/model.espresso.weights: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Resources/catmoodprediction.mlmodelc/model.espresso.weights -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Resources/catmoodprediction.mlmodelc/model/coremldata.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Resources/catmoodprediction.mlmodelc/model/coremldata.bin -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Sources/CatVisionLogic.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import Vision 3 | import UIKit 4 | 5 | public class CatVisionLogic : NSObject { 6 | 7 | var classificationRequest: [VNRequest]? = nil 8 | var textLabel : UILabel? = nil 9 | 10 | public func initRequest(label: UILabel?) { 11 | do { 12 | textLabel = label 13 | let model = try VNCoreMLModel(for: catmoodprediction().model) 14 | let request = VNCoreMLRequest(model: model, completionHandler: self.handleClassification) 15 | classificationRequest = [ request ] 16 | } catch { 17 | fatalError("Can't load Vision ML model: \(error)") 18 | } 19 | } 20 | 21 | public func doClassification (image: UIImage) { 22 | do { 23 | let pixelBuffer = image.pixelBuffer(width: 227, height:227) 24 | let classifierRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer!, options: [:]) 25 | try classifierRequestHandler.perform(classificationRequest!) 
26 | } catch { 27 | print("something went terribly wrong during classification") 28 | } 29 | } 30 | 31 | public func handleClassification(request: VNRequest, error: Error?) { 32 | guard let observations = request.results as? [VNClassificationObservation] 33 | else { fatalError("unexpected result type from VNCoreMLRequest") } 34 | 35 | guard let best = observations.first else { 36 | fatalError("classification didn't return any results") 37 | } 38 | 39 | DispatchQueue.main.async { 40 | if let classifierLabel = self.textLabel { 41 | if best.identifier.starts(with: "Unknown") || best.confidence < 0.50 { 42 | classifierLabel.text = "Mhm, no cat or absolutely not sure about it's mood" 43 | } 44 | else { 45 | classifierLabel.text = "This cat seem to be in a \(best.identifier) mood (\(best.confidence) sure)" 46 | } 47 | } 48 | } 49 | } 50 | 51 | 52 | } 53 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Sources/UIImage+CVPixelBuffer.swift: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2017 M.I. Hollemans 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to 5 | deal in the Software without restriction, including without limitation the 6 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 7 | sell copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | The above copyright notice and this permission notice shall be included in 10 | all copies or substantial portions of the Software. 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 12 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 13 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 14 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 15 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 16 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 17 | IN THE SOFTWARE. 18 | */ 19 | 20 | import UIKit 21 | 22 | extension UIImage { 23 | /** 24 | Resizes the image to width x height and converts it to an RGB CVPixelBuffer. 25 | */ 26 | public func pixelBuffer(width: Int, height: Int) -> CVPixelBuffer? { 27 | return pixelBuffer(width: width, height: height, 28 | pixelFormatType: kCVPixelFormatType_32ARGB, 29 | colorSpace: CGColorSpaceCreateDeviceRGB(), 30 | alphaInfo: .noneSkipFirst) 31 | } 32 | 33 | /** 34 | Resizes the image to width x height and converts it to a grayscale CVPixelBuffer. 35 | */ 36 | public func pixelBufferGray(width: Int, height: Int) -> CVPixelBuffer? { 37 | return pixelBuffer(width: width, height: height, 38 | pixelFormatType: kCVPixelFormatType_OneComponent8, 39 | colorSpace: CGColorSpaceCreateDeviceGray(), 40 | alphaInfo: .none) 41 | } 42 | 43 | func pixelBuffer(width: Int, height: Int, pixelFormatType: OSType, 44 | colorSpace: CGColorSpace, alphaInfo: CGImageAlphaInfo) -> CVPixelBuffer? { 45 | var maybePixelBuffer: CVPixelBuffer? 
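// Next: create an empty CVPixelBuffer of the requested size and format, lock it, wrap its
// memory in a CGContext and draw the (resized, vertically flipped) image into it.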
46 | let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, 47 | kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] 48 | let status = CVPixelBufferCreate(kCFAllocatorDefault, 49 | width, 50 | height, 51 | pixelFormatType, 52 | attrs as CFDictionary, 53 | &maybePixelBuffer) 54 | 55 | guard status == kCVReturnSuccess, let pixelBuffer = maybePixelBuffer else { 56 | return nil 57 | } 58 | 59 | CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) 60 | let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer) 61 | 62 | guard let context = CGContext(data: pixelData, 63 | width: width, 64 | height: height, 65 | bitsPerComponent: 8, 66 | bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), 67 | space: colorSpace, 68 | bitmapInfo: alphaInfo.rawValue) 69 | else { 70 | return nil 71 | } 72 | 73 | UIGraphicsPushContext(context) 74 | context.translateBy(x: 0, y: CGFloat(height)) 75 | context.scaleBy(x: 1, y: -1) 76 | self.draw(in: CGRect(x: 0, y: 0, width: width, height: height)) 77 | UIGraphicsPopContext() 78 | 79 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) 80 | return pixelBuffer 81 | } 82 | } 83 | 84 | extension UIImage { 85 | /** 86 | Creates a new UIImage from an array of RGBA bytes. 87 | */ 88 | @nonobjc public class func fromByteArrayRGBA(_ bytes: [UInt8], 89 | width: Int, 90 | height: Int, 91 | scale: CGFloat = 0, 92 | orientation: UIImage.Orientation = .up) -> UIImage? { 93 | return fromByteArray(bytes, width: width, height: height, 94 | scale: scale, orientation: orientation, 95 | bytesPerRow: width * 4, 96 | colorSpace: CGColorSpaceCreateDeviceRGB(), 97 | alphaInfo: .premultipliedLast) 98 | } 99 | 100 | /** 101 | Creates a new UIImage from an array of grayscale bytes. 102 | */ 103 | @nonobjc public class func fromByteArrayGray(_ bytes: [UInt8], 104 | width: Int, 105 | height: Int, 106 | scale: CGFloat = 0, 107 | orientation: UIImage.Orientation = .up) -> UIImage? { 108 | return fromByteArray(bytes, width: width, height: height, 109 | scale: scale, orientation: orientation, 110 | bytesPerRow: width, 111 | colorSpace: CGColorSpaceCreateDeviceGray(), 112 | alphaInfo: .none) 113 | } 114 | 115 | @nonobjc class func fromByteArray(_ bytes: [UInt8], 116 | width: Int, 117 | height: Int, 118 | scale: CGFloat, 119 | orientation: UIImage.Orientation, 120 | bytesPerRow: Int, 121 | colorSpace: CGColorSpace, 122 | alphaInfo: CGImageAlphaInfo) -> UIImage? { 123 | var image: UIImage? 124 | bytes.withUnsafeBytes { ptr in 125 | if let context = CGContext(data: UnsafeMutableRawPointer(mutating: ptr.baseAddress!), 126 | width: width, 127 | height: height, 128 | bitsPerComponent: 8, 129 | bytesPerRow: bytesPerRow, 130 | space: colorSpace, 131 | bitmapInfo: alphaInfo.rawValue), 132 | let cgImage = context.makeImage() { 133 | image = UIImage(cgImage: cgImage, scale: scale, orientation: orientation) 134 | } 135 | } 136 | return image 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CoreMLVision.xcplaygroundpage/Sources/catmoodprediction.swift: -------------------------------------------------------------------------------- 1 | // 2 | // catmoodprediction.swift 3 | // 4 | // This file was automatically generated and should not be edited. 
5 | // 6 | 7 | import CoreML 8 | 9 | 10 | /// Model Prediction Input Type 11 | @available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) 12 | public class catmoodpredictionInput : MLFeatureProvider { 13 | 14 | /// data as color (kCVPixelFormatType_32BGRA) image buffer, 227 pixels wide by 227 pixels high 15 | var data: CVPixelBuffer 16 | 17 | public var featureNames: Set { 18 | get { 19 | return ["data"] 20 | } 21 | } 22 | 23 | public func featureValue(for featureName: String) -> MLFeatureValue? { 24 | if (featureName == "data") { 25 | return MLFeatureValue(pixelBuffer: data) 26 | } 27 | return nil 28 | } 29 | 30 | init(data: CVPixelBuffer) { 31 | self.data = data 32 | } 33 | } 34 | 35 | 36 | /// Model Prediction Output Type 37 | @available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) 38 | public class catmoodpredictionOutput : MLFeatureProvider { 39 | 40 | /// loss as dictionary of strings to doubles 41 | let loss: [String : Double] 42 | 43 | /// classLabel as string value 44 | let classLabel: String 45 | 46 | public var featureNames: Set { 47 | get { 48 | return ["loss", "classLabel"] 49 | } 50 | } 51 | 52 | public func featureValue(for featureName: String) -> MLFeatureValue? { 53 | if (featureName == "loss") { 54 | return try! MLFeatureValue(dictionary: loss as [NSObject : NSNumber]) 55 | } 56 | if (featureName == "classLabel") { 57 | return MLFeatureValue(string: classLabel) 58 | } 59 | return nil 60 | } 61 | 62 | init(loss: [String : Double], classLabel: String) { 63 | self.loss = loss 64 | self.classLabel = classLabel 65 | } 66 | } 67 | 68 | 69 | /// Class for model loading and prediction 70 | @available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) 71 | public class catmoodprediction { 72 | var model: MLModel 73 | 74 | /** 75 | Construct a model with explicit path to mlmodel file 76 | - parameters: 77 | - url: the file url of the model 78 | - throws: an NSError object that describes the problem 79 | */ 80 | init(contentsOf url: URL) throws { 81 | self.model = try MLModel(contentsOf: url) 82 | } 83 | 84 | /// Construct a model that automatically loads the model from the app's bundle 85 | convenience init() { 86 | let bundle = Bundle(for: catmoodprediction.self) 87 | let assetPath = bundle.url(forResource: "catmoodprediction", withExtension:"mlmodelc") 88 | try! self.init(contentsOf: assetPath!) 89 | } 90 | 91 | /** 92 | Make a prediction using the structured interface 93 | - parameters: 94 | - input: the input to the prediction as catmoodpredictionInput 95 | - throws: an NSError object that describes the problem 96 | - returns: the result of the prediction as catmoodpredictionOutput 97 | */ 98 | func prediction(input: catmoodpredictionInput) throws -> catmoodpredictionOutput { 99 | let outFeatures = try model.prediction(from: input) 100 | let result = catmoodpredictionOutput(loss: outFeatures.featureValue(for: "loss")!.dictionaryValue as! 
[String : Double], classLabel: outFeatures.featureValue(for: "classLabel")!.stringValue) 101 | return result 102 | } 103 | 104 | /** 105 | Make a prediction using the convenience interface 106 | - parameters: 107 | - data as color (kCVPixelFormatType_32BGRA) image buffer, 227 pixels wide by 227 pixels high 108 | - throws: an NSError object that describes the problem 109 | - returns: the result of the prediction as catmoodpredictionOutput 110 | */ 111 | func prediction(data: CVPixelBuffer) throws -> catmoodpredictionOutput { 112 | let input_ = catmoodpredictionInput(data: data) 113 | return try self.prediction(input: input_) 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/CustomVision.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # Custom Vision API 3 | ## or "How grumpy is my cat?" 4 | 5 | As soon as we want to ask questions that might not fit into a globally trained model, we need to get active ourselves. 6 | In this very case we want to know if a given cat is happy or grumpy. And let's prove that the famous GRUMPY CAT is really grumpy :D 7 | 8 | How can we do that? By training our own model! And no worries, this is not complicated at all - and the best thing is: it's already done for you. 9 | In this example we are using the remotely offered model from the Custom Vision API and ask via REST calls whether the cat on the image is a happy one or a real grumpy one. 10 | */ 11 | //#-hidden-code 12 | import Foundation 13 | import UIKit 14 | import PlaygroundSupport 15 | 16 | 17 | let myView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 600)) 18 | 19 | let preview = UIImageView(frame: myView.bounds) 20 | //#-end-hidden-code 21 | 22 | /*: 23 | * experiment: 24 | Choose your cat image right here or take a new one 25 | */ 26 | preview.image = /*#-editable-code*/#imageLiteral(resourceName: "cat_grumpy_1.jpg")/*#-end-editable-code*/ 27 | //#-hidden-code 28 | preview.contentMode = .scaleAspectFit 29 | 30 | let textLabel = UILabel(frame: CGRect(x: 30, y: myView.bounds.height-100, width: 350, height: 100)) 31 | textLabel.lineBreakMode = .byWordWrapping 32 | textLabel.numberOfLines = 5 33 | //#-end-hidden-code 34 | textLabel.textColor = #colorLiteral(red: 0.9254902005, green: 0.2352941185, blue: 0.1019607857, alpha: 1) 35 | textLabel.text = "Wanna find out if your cat is happy or grumpy?" 36 | //#-hidden-code 37 | let backgroundView = UIView(frame: CGRect(x: 0, y: myView.bounds.height-170, width: myView.bounds.width, height: 200)) 38 | backgroundView.backgroundColor = #colorLiteral(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0) 39 | backgroundView.alpha = 0.7 40 | 41 | myView.addSubview(preview) 42 | myView.addSubview(backgroundView) 43 | myView.addSubview(textLabel) 44 | 45 | func isMyCatHappyOrGrumpy(_ imageView : UIImageView) { 46 | let manager = CognitiveServices() 47 | manager.retrieveCatPredictionForImage(imageView.image!) { (result, error) -> (Void) in 48 | DispatchQueue.main.async(execute: { 49 | if let _ = error { 50 | print("omg something bad happened") 51 | } else { 52 | print("seems like all went well: \(String(describing: result))") 53 | } 54 | 55 | if let catEmotion = result { 56 | textLabel.text = "The cat on the image seems \n\(catEmotion)!"
57 | } else { 58 | textLabel.text = "Seems like no cat or its emotion was detected :(" 59 | } 60 | 61 | }) 62 | } 63 | } 64 | //#-end-hidden-code 65 | 66 | /*: 67 | * experiment: 68 | So let's get the cat analysis started. As you will see in a moment we get the prediction value for each available tag back. We then go with the one that has the higher value and show the cat's mood in the UI. Groundbreaking, right? :D 69 | */ 70 | isMyCatHappyOrGrumpy(preview) 71 | //#-hidden-code 72 | PlaygroundPage.current.liveView = myView 73 | //#-end-hidden-code 74 | 75 | /*: 76 | * callout(What did we learn?): 77 | Wonderful! So you just called a Custom Vision endpoint from the Cognitive Services. 78 | 79 | If you want to have a detailed look at the documentation - where you can find further examples - visit the dedicated [CUSTOM VISION documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/custom-vision-service/home) and the [CUSTOM VISION prediction definition](https://southcentralus.dev.cognitive.microsoft.com/docs/services/450e4ba4d72542e889d93fd7b8e960de/operations/5a6264bc40d86a0ef8b2c290). 80 | 81 | There you will learn how to train your model and then use it via REST API calls in your own app as well.*/ 82 | 83 | 84 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/Emotions.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # Am I smiling - YES I AM! 3 | 4 | After successfully managing the FACE API, we will dive a little bit further into it. 5 | Once upon a time there was a dedicated EMOTION API in place, which could detect... emotions in images and videos. As human emotions are very much related to faces, the two APIs got merged together. And that's why we are having a look into the emotion data for detected faces now! Awesome, right? 6 | */ 7 | 8 | //#-hidden-code 9 | import PlaygroundSupport 10 | import UIKit 11 | import Foundation 12 | 13 | guard #available(iOS 9, OSX 10.11, *) else { 14 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction? I don't.") 15 | } 16 | 17 | let myView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 600)) 18 | 19 | let preview = UIImageView(frame: myView.bounds) 20 | //#-end-hidden-code 21 | /*: 22 | * experiment: 23 | Choose your preferred image right here or take a new one 24 | */ 25 | preview.image = /*#-editable-code*/#imageLiteral(resourceName: "Tiffany.jpg")/*#-end-editable-code*/ 26 | //#-hidden-code 27 | preview.contentMode = .scaleAspectFit 28 | 29 | 30 | 31 | let textLabel = UILabel(frame: CGRect(x: 30, y: myView.bounds.height-150, width: 350, height: 150)) 32 | textLabel.lineBreakMode = .byWordWrapping 33 | textLabel.numberOfLines = 5 34 | textLabel.textColor = #colorLiteral(red: 0.9254902005, green: 0.2352941185, blue: 0.1019607857, alpha: 1) 35 | textLabel.text = "Wow, is there something here?" 36 | 37 | 38 | let backgroundView = UIView(frame: CGRect(x: 0, y: myView.bounds.height-170, width: myView.bounds.width, height: 200)) 39 | backgroundView.backgroundColor = #colorLiteral(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0) 40 | backgroundView.alpha = 0.7 41 | 42 | myView.addSubview(preview) 43 | myView.addSubview(backgroundView) 44 | myView.addSubview(textLabel) 45 | myView.bringSubviewToFront(textLabel) 46 | 47 | var emojis: [CognitiveServicesFacesResult]?
= nil { 48 | didSet { 49 | if preview.image == nil { 50 | return 51 | } 52 | 53 | if let results = emojis { 54 | UIGraphicsBeginImageContext(preview.image!.size) 55 | preview.image?.draw(in: CGRect(origin: CGPoint.zero, size: preview.image!.size)) 56 | 57 | for result in results { 58 | var availableEmojis = [String]() 59 | 60 | if let emotion = result.emotion { 61 | switch emotion { 62 | case .Anger: 63 | availableEmojis.append("😡") 64 | availableEmojis.append("😠") 65 | case .Contempt: 66 | availableEmojis.append("😤") 67 | case .Disgust: 68 | availableEmojis.append("😷") 69 | availableEmojis.append("🤐") 70 | case .Fear: 71 | availableEmojis.append("😱") 72 | case .Happiness: 73 | availableEmojis.append("😝") 74 | availableEmojis.append("😀") 75 | availableEmojis.append("😃") 76 | availableEmojis.append("😄") 77 | availableEmojis.append("😆") 78 | availableEmojis.append("😊") 79 | availableEmojis.append("🙂") 80 | availableEmojis.append("☺️") 81 | case .Neutral: 82 | availableEmojis.append("😶") 83 | availableEmojis.append("😐") 84 | availableEmojis.append("😑") 85 | case .Sadness: 86 | availableEmojis.append("🙁") 87 | availableEmojis.append("😞") 88 | availableEmojis.append("😟") 89 | availableEmojis.append("😔") 90 | availableEmojis.append("😢") 91 | availableEmojis.append("😭") 92 | case .Surprise: 93 | availableEmojis.append("😳") 94 | availableEmojis.append("😮") 95 | availableEmojis.append("😲") 96 | } 97 | } 98 | 99 | let emoji = availableEmojis.randomElement() 100 | 101 | let maximumSize = result.frame.size 102 | let string = emoji as NSString 103 | let startingFontSize = 8192.0 104 | 105 | var actualFontSize = startingFontSize 106 | var stepping = actualFontSize 107 | repeat { 108 | stepping /= 2.0 109 | if stepping < 1.0 { 110 | break 111 | } 112 | 113 | let font = UIFont.systemFont(ofSize: CGFloat(actualFontSize)) 114 | let calculatedSize = string.size(withAttributes: [NSAttributedString.Key.font: font]) 115 | 116 | if calculatedSize.width > maximumSize.width { 117 | actualFontSize -= stepping 118 | } else { 119 | actualFontSize += stepping 120 | } 121 | } while true 122 | 123 | let font = UIFont.systemFont(ofSize: CGFloat(actualFontSize)) 124 | string.draw(in: result.frame, withAttributes: [NSAttributedString.Key.font: font]) 125 | } 126 | 127 | preview.image = UIGraphicsGetImageFromCurrentImageContext() 128 | UIGraphicsEndImageContext() 129 | } 130 | } 131 | } 132 | 133 | 134 | 135 | func makeEmojiFromEmotionOnPhoto (_ photo : UIImageView) { 136 | let manager = CognitiveServices() 137 | textLabel.text = "... gimme a sec - watching out for smiles!" 138 | manager.retrievePlausibleEmotionsForImage(photo.image!) { (result, error) -> (Void) in 139 | DispatchQueue.main.async(execute: { 140 | if let _ = error { 141 | print("omg something bad happened") 142 | } else { 143 | print("seems like all went well: \(String(describing: result!))") 144 | } 145 | 146 | emojis = result 147 | 148 | if (result?.count)! > 0 { 149 | textLabel.text = "1..2.. Emoji!\n\((result?.count)!) emotion(s) detected" 150 | } else { 151 | textLabel.text = "Seems like no emotions were detected :(" 152 | } 153 | 154 | }) 155 | } 156 | } 157 | 158 | //#-end-hidden-code 159 | /*: 160 | * experiment: 161 | What the API gets from us is really just the image. In return we will retrieve an array of emotion results, which contains the certainties of the eight possible emotions (neutral, happiness, sadness, anger, ...). What we will do right now is to get the emotion with the highest score and map it to a suitable emoji... 
to pin it on the person's face :) 162 | */ 163 | makeEmojiFromEmotionOnPhoto(preview) 164 | //#-hidden-code 165 | PlaygroundPage.current.liveView = myView 166 | //#-end-hidden-code 167 | 168 | /*: 169 | * callout(What did we learn?): 170 | Wonderful! So you just called the emotion endpoint from the Cognitive Services FACE API. If you want to have a detailed look at the documentation - where you can find further examples - visit the dedicated [FACE API documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/face/overview) and the [FACE API definition](https://westus.dev.cognitive.microsoft.com/docs/services/563879b61984550e40cbbe8d/operations/563879b61984550f30395236) */ 171 | 172 | //: Enough of just dealing with smiles! Let's see if we can detect the smiles of cats. Is it possible to distinguish a smiling cat from a grumpy one? Let's get going by [using the Custom Vision API](@next)! 173 | 174 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/Faces.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # Who's on that picture? 3 | 4 | As we already saw in the Emotions demo we are capable of getting some facial features from the API, like the face rectangle or the emotion. But is there more? Like getting *coordinates* of e.g. the eyes and the nose. The answer is **YES**! We can do this. 5 | 6 | The Cognitive Services provide an API called **Face API**. With this API we can analyse the features of a human face. We can determine where the eyes are, where the pupil is currently located, how "big" or "small" a nose is and if the face is currently smiling because its mouth and lip coordinates indicate it :) 7 | 8 | But let's dive in and see what the **Face API** sees for us. 9 | */ 10 | 11 | //#-hidden-code 12 | import PlaygroundSupport 13 | import UIKit 14 | import Foundation 15 | 16 | guard #available(iOS 9, OSX 10.11, *) else { 17 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction? I don't.") 18 | } 19 | 20 | extension CGContext { 21 | func addRect(rect:CGRect, fillColor:UIColor, strokeColor:UIColor, width:CGFloat) { 22 | self.addRect(rect) 23 | self.fillAndStroke(fillColor: fillColor, strokeColor: strokeColor, width: width) 24 | } 25 | func fillAndStroke(fillColor:UIColor, strokeColor:UIColor, width:CGFloat) { 26 | self.setFillColor(fillColor.cgColor) 27 | self.setStrokeColor(strokeColor.cgColor) 28 | self.setLineWidth(width) 29 | self.drawPath(using: CGPathDrawingMode.fillStroke) 30 | } 31 | } 32 | 33 | class MyLandmarkView : UIView { 34 | 35 | var face : CognitiveServicesFacesResult? = nil 36 | var scaledImageRatio : CGFloat = 1 37 | var xOffset : CGFloat = 0.0 38 | var yOffset : CGFloat = 0.0 39 | 40 | override func draw(_ rect: CGRect) { 41 | if let context = UIGraphicsGetCurrentContext(), 42 | let myFace = face { 43 | 44 | //draw all found landmarks for the face 45 | for landmark in myFace.landmarks!
{ 46 | context.addRect(rect: CGRect(x: (landmark.x / scaledImageRatio) + xOffset, y: (landmark.y / scaledImageRatio) + yOffset , width: 2, height: 2), fillColor: .red, strokeColor: .red, width: 1) 47 | } 48 | 49 | //draw the facerect 50 | var faceFrame = myFace.frame 51 | faceFrame.origin.x = (faceFrame.origin.x / scaledImageRatio) + xOffset 52 | faceFrame.origin.y = (faceFrame.origin.y / scaledImageRatio) + yOffset 53 | faceFrame.size.width /= scaledImageRatio 54 | faceFrame.size.height /= scaledImageRatio 55 | 56 | context.addRect(rect: faceFrame, fillColor: .clear, strokeColor: .red, width: 2) 57 | } 58 | 59 | } 60 | } 61 | 62 | let myView = UIView(frame: CGRect(x: 10, y: 10, width: 450, height: 600)) 63 | 64 | let preview = UIImageView(frame: myView.bounds) 65 | let landmarkView = MyLandmarkView(frame: myView.bounds) 66 | landmarkView.backgroundColor = .clear 67 | 68 | //#-end-hidden-code 69 | /*: 70 | * experiment: 71 | Choose your preferred image right here or take a new one. We tell the API that we'd like to know about different features of the face like age, gender, facialHair and glasses. Moreover we ask for a unique face identifier and facial landmarks. The face identifier can be used to later identify the person. The facial landmarks tell us things like where the eyes, the pupil, the nose and the mouth is and let us know about their dimensions. 72 | */ 73 | preview.image = /*#-editable-code*/#imageLiteral(resourceName: "Aaron.jpg")/*#-end-editable-code*/ 74 | 75 | //#-hidden-code 76 | preview.contentMode = .scaleAspectFit 77 | 78 | let textLabel = UILabel(frame: CGRect(x: 30, y: 0, width: 768, height: 120)) 79 | textLabel.lineBreakMode = .byWordWrapping 80 | textLabel.numberOfLines = 5 81 | textLabel.textColor = #colorLiteral(red: 1.0, green: 1.0, blue: 1.0, alpha: 1.0) 82 | 83 | let backgroundView = UIView(frame: CGRect(x: 0, y: myView.bounds.height-120, width: myView.bounds.width, height: 100)) 84 | backgroundView.backgroundColor = #colorLiteral(red: 0.0, green: 0.0, blue: 0.0, alpha: 1.0) 85 | backgroundView.alpha = 0.7 86 | 87 | myView.addSubview(preview) 88 | myView.addSubview(backgroundView) 89 | backgroundView.addSubview(textLabel) 90 | myView.addSubview(landmarkView) 91 | myView.bringSubviewToFront(landmarkView) 92 | 93 | 94 | func detectFaces (_ photo : UIImageView) { 95 | let manager = CognitiveServices() 96 | textLabel.text = "... gimme a sec - wheeere are the faces!" 97 | 98 | manager.retrieveFacesForImage(photo.image!) { (result, error) -> (Void) in 99 | DispatchQueue.main.async(execute: { 100 | if let _ = error { 101 | print("omg something bad happened") 102 | } else { 103 | print("seems like all went well: \(String(describing: result))") 104 | } 105 | 106 | if (result?.count)! > 0 { 107 | let face = result?[0] 108 | textLabel.text = "Gender: \((face?.gender)!)\nAge: \((face?.age)!)\nGlasses: \((face?.glasses)!)\nFacial hair: \((face?.facialHair)!)" 109 | landmarkView.scaledImageRatio = scaledImageRatio() 110 | drawLandmarks(face!) 
111 | 112 | } else { 113 | textLabel.text = "Seems like no faces were detected :(" 114 | } 115 | 116 | 117 | }) 118 | } 119 | } 120 | 121 | func scaledImageRatio () -> CGFloat { 122 | let imageViewHeight = preview.bounds.height 123 | let imageViewWidth = preview.bounds.width 124 | let imageSize = preview.image!.size 125 | let scaledImageHeight = min(imageSize.height * (imageViewWidth / imageSize.width), imageViewHeight) 126 | let scaledImageWidth = min(imageSize.width * (imageViewHeight / imageSize.height), imageViewWidth) 127 | 128 | landmarkView.yOffset = (myView.frame.height - scaledImageHeight) / CGFloat(2.0) 129 | landmarkView.xOffset = (myView.frame.width - scaledImageWidth) / CGFloat(2.0) 130 | 131 | let ratio : CGFloat = imageSize.height / scaledImageHeight 132 | return ratio 133 | } 134 | 135 | func drawLandmarks (_ face: CognitiveServicesFacesResult) { 136 | landmarkView.face = face 137 | landmarkView.setNeedsDisplay() 138 | } 139 | 140 | //#-end-hidden-code 141 | /*: 142 | * experiment: 143 | So let's get the face analysis started. As you will see in a moment we get the facial landmarks (dots in the image), the face rectangle and general infos like age, gender, glasses and facial hair for the selected picture. Cool huh? 144 | */ 145 | detectFaces(preview) 146 | //#-hidden-code 147 | PlaygroundPage.current.liveView = myView 148 | //#-end-hidden-code 149 | 150 | /*: 151 | * callout(What did we learn?): 152 | The wonderful thing about this **Face API** is especially the retrieval of the landmarks of the face. We can do fun things with it, like pinning things onto the face :D And as soon as we have added this face to a PersonGroup, we can identify it on other images as well. So we don't have to analyse the image itself and compare it to other faces to "find" persons on images. We can let the Face API do the work for us. Just have a look at the [Faces documentation](https://azure.microsoft.com/en-us/services/cognitive-services/face/) and how to use [Persons and PersonGroups](https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/howtoidentifyfacesinimage) */ 153 | 154 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/First steps.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # First steps with Playgrounds 3 | 4 | The first thing we want to try is to get hands-on with what we know already - dealing with our beloved UIKit. You can use all elements of UIKit as you are used to. In this example we want to build our environment for our further examples. Let's warm up, put your hands on the playground and get started! 5 | 6 | * callout(What to do): 7 | Just choose a picture you like, then enter a text you think fits the image. To make it nice looking, choose a color for your text and its background. 8 | */ 9 | 10 | //#-hidden-code 11 | import PlaygroundSupport 12 | import UIKit 13 | import Foundation 14 | 15 | guard #available(iOS 9, OSX 10.11, *) else { 16 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction?
I don't.") 17 | } 18 | 19 | //#-end-hidden-code 20 | 21 | //#-hidden-code 22 | let myView = UIView(frame: CGRect(x: 0, y: 0, width: 450, height: 600)) 23 | 24 | let preview = UIImageView(frame: myView.bounds) 25 | //#-end-hidden-code 26 | preview.image = /*#-editable-code*/#imageLiteral(resourceName: "beach.png")/*#-end-editable-code*/ 27 | 28 | //#-hidden-code 29 | preview.contentMode = .scaleAspectFit 30 | 31 | let textFileRef = #fileLiteral(resourceName: "justtext.txt") 32 | let stringFromFile = try String(contentsOf: textFileRef) 33 | 34 | let textLabel = UILabel(frame: CGRect(x: 30, y: myView.bounds.height-200, width: 350, height: 200)) 35 | //#-end-hidden-code 36 | textLabel.text = /*#-editable-code*/"My picture is looking good!"/*#-end-editable-code*/ 37 | //#-hidden-code 38 | textLabel.lineBreakMode = .byWordWrapping 39 | textLabel.numberOfLines = 5 40 | //#-end-hidden-code 41 | textLabel.textColor = /*#-editable-code*/ #colorLiteral(red: 1, green: 1, blue: 1, alpha: 1) /*#-end-editable-code*/ 42 | //#-hidden-code 43 | let backgroundView = UIView(frame: CGRect(x: 0, y: myView.bounds.height-170, width: myView.bounds.width, height: 200)) 44 | //#-end-hidden-code 45 | backgroundView.backgroundColor = /*#-editable-code*/ #colorLiteral(red: 0, green: 0, blue: 0, alpha: 1) /*#-end-editable-code*/ 46 | backgroundView.alpha = /*#-editable-code*/ 0.7 /*#-end-editable-code*/ 47 | //#-hidden-code 48 | myView.addSubview(preview) 49 | myView.addSubview(backgroundView) 50 | myView.addSubview(textLabel) 51 | 52 | 53 | PlaygroundPage.current.liveView = myView 54 | //#-end-hidden-code 55 | 56 | 57 | /*: 58 | * callout(What did we learn?): 59 | So we are done with the basics. We created a `UIImageView` with an embedded `UIImage`. Our description area consists of a `UIView` whose background color and alpha are adjustable. And above this background view we added a `UILabel`, which shows our nice descriptive text for the picture. 60 | */ 61 | 62 | //: Hooray! Let's get to our [next adventure](@next)!
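/*:
 * note:
 A small, purely illustrative aside: the hidden code above already loads the bundled `justtext.txt` resource into `stringFromFile`, but never uses it. If you would rather caption your picture with that text, one extra line is enough - this is just a sketch, not part of the page's original flow.
 */
// Illustrative only: reuse the text read from the bundled resource as the caption.
textLabel.text = stringFromFile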
63 | 64 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/First steps.xcplaygroundpage/Resources/justtext.txt: -------------------------------------------------------------------------------- 1 | Consider the epic shit as already done :) 2 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Pages/First steps.xcplaygroundpage/Resources/keepcalm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Pages/First steps.xcplaygroundpage/Resources/keepcalm.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/Aaron.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/Aaron.jpg -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/Giugli.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/Giugli.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/Jan.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/Jan.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/Les.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/Les.jpg -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/Nazuki.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/Nazuki.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/Owen_Family.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/Owen_Family.jpg -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/Tiffany.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/Tiffany.jpg 
-------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/beach.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/beach.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/cat_grumpy_1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/cat_grumpy_1.jpg -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/cat_grumpy_2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/cat_grumpy_2.jpg -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/cat_smiling_1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/cat_smiling_1.jpg -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/cat_smiling_2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/cat_smiling_2.jpg -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/containers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/containers.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/highway.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/highway.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/nightcity.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/nightcity.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/woman_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive 
Services.playground/Resources/woman_blue.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Resources/wood.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Kick start Cognitive Services.playground/Resources/wood.png -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/Sources/Ext.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | 4 | public extension Array { 5 | func randomElement() -> Element { 6 | let index = Int(arc4random_uniform(UInt32(self.count))) 7 | return self[index] 8 | } 9 | } 10 | 11 | extension UIColor { 12 | convenience init(hexString:String) { 13 | 14 | let hexString = hexString.trimmingCharacters(in: NSCharacterSet.whitespacesAndNewlines) 15 | let scanner = Scanner(string: hexString) 16 | 17 | if (hexString.hasPrefix("#")) { 18 | scanner.scanLocation = 1 19 | } 20 | 21 | var color:UInt32 = 0 22 | scanner.scanHexInt32(&color) 23 | 24 | let mask = 0x000000FF 25 | let r = Int(color >> 16) & mask 26 | let g = Int(color >> 8) & mask 27 | let b = Int(color) & mask 28 | 29 | let red = CGFloat(r) / 255.0 30 | let green = CGFloat(g) / 255.0 31 | let blue = CGFloat(b) / 255.0 32 | 33 | self.init(red:red, green:green, blue:blue, alpha:1) 34 | } 35 | 36 | func toHexString() -> String { 37 | var r:CGFloat = 0 38 | var g:CGFloat = 0 39 | var b:CGFloat = 0 40 | var a:CGFloat = 0 41 | 42 | getRed(&r, green: &g, blue: &b, alpha: &a) 43 | 44 | let rgb:Int = (Int)(r*255)<<16 | (Int)(g*255)<<8 | (Int)(b*255)<<0 45 | 46 | return NSString(format:"#%06x", rgb) as String 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /Kick start Cognitive Services.playground/contents.xcplayground: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 manu rink 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | Computer Vision 9 | Pages 10 | 11 | Intro.cutscenepage 12 | First steps.playgroundpage 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/First steps.playgroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | //#-hidden-code 2 | import PlaygroundSupport 3 | import UIKit 4 | import Foundation 5 | 6 | guard #available(iOS 9, OSX 10.11, *) else { 7 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction? I don't.") 8 | } 9 | 10 | func setConfidenceForComputerVision(_ value: Double) { 11 | let page = PlaygroundPage.current 12 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 13 | proxy.send(.floatingPoint(value)) 14 | } 15 | } 16 | 17 | func retrieveTags() { 18 | let page = PlaygroundPage.current 19 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 20 | proxy.send(.string("retrieveTags")) 21 | } 22 | } 23 | 24 | func chooseImage (_ imageData: Data) { 25 | let page = PlaygroundPage.current 26 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 27 | proxy.send(.data(imageData)) 28 | } 29 | } 30 | 31 | //#-end-hidden-code 32 | /*: 33 | # Describe your picture! 34 | 35 | It's time to get life into our app! Want to get your picture described by a remote service? Yes? YES? So get ready - and get to know the * *drumroooooll* * **COGNITIVE SERVICES**! 36 | 37 | We will start with the Computer Vision API. So let's see what the "computer" can "see" on our image. 38 | */ 39 | /*: 40 | * experiment: 41 | Every part of the description of the picture will be returned with a certain confidence. A good value is 0.85 for nicely fitting results. But go ahead and play around with this value and see what funky descriptions the "computer" may come up with. 42 | */ 43 | let image = /*#-editable-code*/#imageLiteral(resourceName: "beach.png")/*#-end-editable-code*/ 44 | let dataImage = UIImagePNGRepresentation(image) 45 | chooseImage(dataImage!) 46 | setConfidenceForComputerVision(/*#-editable-code*/0.2/*#-end-editable-code*/) 47 | retrieveTags() 48 | /*: 49 | * callout(What did we learn?): 50 | Wonderful! So you just called your first API from the Cognitive Services Suite. The Computer Vision API. If you want to have a detailed look at the documentation - where you can find further examples - visit the dedicated [Computer Vision documentation](https://www.microsoft.com/cognitive-services/en-us/computer-vision-api). 51 | */ 52 | 53 | //: Enough of just describing photos. Let's catch a smile and let the API know! Let's rock on and continue by [using the Emotion API](@next)!
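/*:
 * note:
 Curious what roughly happens behind `retrieveTags()`? The real request is made by the hidden `CognitiveServices` helper in the book's Sources, but it essentially boils down to a single REST call. The sketch below is illustrative only - the region, API version and subscription key are placeholders you would replace with your own values, and the actual helper may differ in its details.
 */
func sketchedTagRequest(for imageData: Data, completion: @escaping ([String]) -> Void) {
    // Placeholder endpoint and key - adjust both to your own Cognitive Services resource.
    var request = URLRequest(url: URL(string: "https://westus.api.cognitive.microsoft.com/vision/v1.0/tag")!)
    request.httpMethod = "POST"
    request.addValue("application/octet-stream", forHTTPHeaderField: "Content-Type")
    request.addValue("YOUR-COMPUTER-VISION-KEY", forHTTPHeaderField: "Ocp-Apim-Subscription-Key")
    request.httpBody = imageData

    URLSession.shared.dataTask(with: request) { data, _, _ in
        // The service answers with JSON along the lines of
        // {"tags": [{"name": "beach", "confidence": 0.99}, ...]}.
        guard let data = data,
            let json = (try? JSONSerialization.jsonObject(with: data)) as? [String: Any],
            let tags = json["tags"] as? [[String: Any]] else {
                completion([])
                return
        }
        // Keep only the tag names - this is also where a confidence threshold would be applied.
        completion(tags.compactMap { $0["name"] as? String })
    }.resume()
}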
54 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/First steps.playgroundpage/LiveView.swift: -------------------------------------------------------------------------------- 1 | import PlaygroundSupport 2 | let page = PlaygroundPage.current 3 | page.needsIndefiniteExecution = true 4 | page.liveView = MyView() 5 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/First steps.playgroundpage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | Describe your picture 9 | LiveViewMode 10 | VisibleByDefault 11 | PosterReference 12 | background.png 13 | 14 | 15 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/First steps.playgroundpage/Resources/background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/First steps.playgroundpage/Resources/background.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/Intro.cutscenepage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Name 6 | Introduction 7 | Version 8 | 1.0 9 | CutsceneReference 10 | cutscene.html 11 | 12 | 13 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/Intro.cutscenepage/Resources/cutscene.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | simpleCommands 5 | 48 | 49 | 50 | 51 |
52 |
53 |
Well done! So let's dive into the Cognitive Services.
The first API we will have a look at is the magical.... *drumroll*
54 |
Computer Vision API
55 |
56 |
We will close our eyes and let the machine describe what is on a picture. Curious? You should be!
Just select one of your favourite images, upload it to the service and wait for the description "tags".
57 |
58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/Intro.cutscenepage/Resources/images/cutscenebg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Chapters/Computer Vision.playgroundchapter/Pages/Intro.cutscenepage/Resources/images/cutscenebg.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | Emotions 9 | Pages 10 | 11 | Intro.cutscenepage 12 | Smile.playgroundpage 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Intro.cutscenepage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Name 6 | Introduction 7 | Version 8 | 1.0 9 | CutsceneReference 10 | cutscene.html 11 | 12 | 13 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Intro.cutscenepage/Resources/cutscene.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | simpleCommands 5 | 48 | 49 | 50 | 51 |
52 |
53 |
Wow! So machines can describe pictures.
Wanna see a bit more of the Cognitive Services fairy dust?
54 |
Emotion API
55 |
is able to find the faces in an image and say what mood the people are in. Really? Really!
56 |
57 |
By uploading a picture with one or more persons on it - face to the camera of course ;) - we can detect the emotion and guess what! We will put an emoji of this mood right where the person's face is.
UN-BE-LIEVABLE!
58 |
59 | 60 | 61 | 62 | 63 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Intro.cutscenepage/Resources/images/cutscenebg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Intro.cutscenepage/Resources/images/cutscenebg.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Smile.playgroundpage/Backup: -------------------------------------------------------------------------------- 1 | /*: 2 | # Am I smiling - YES I AM! 3 | 4 | After successfully managing the ComputerVision API, we will dive a little bit further into the **COGNITIVE SERVICES**. 5 | With the Emotion API we can detect - yes - emotions on human faces. What the API returns is not just the emotion, but additionally the rectangle where this face is located in the picture. Awesome, right? 6 | */ 7 | 8 | //#-hidden-code 9 | import PlaygroundSupport 10 | import UIKit 11 | import Foundation 12 | 13 | guard #available(iOS 9, OSX 10.11, *) else { 14 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction? I don't.") 15 | } 16 | 17 | func placeEmojisForEmotions() { 18 | let page = PlaygroundPage.current 19 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 20 | proxy.send(.string("placeEmotions")) 21 | } 22 | } 23 | //#-end-hidden-code 24 | 25 | /*: 26 | * experiment: 27 | What the API gets from us is really just the image. In return we will get an array of emotion results, which contain a rectangle of the face position and size in the picture and certainties of the different emotions - there are eight of them (neutral, happy, sad, angry, ...). The emotion with the highest certainty wins and will be mapped to an emoji by our app. 28 | */ 29 | 30 | 31 | 32 | placeEmojisForEmotions() 33 | 34 | /*: 35 | * callout(What did we learn?): 36 | Wonderful! So you just called your second API from the Cognitive Services Suite. The Emotion API. If you want to have a detailed look at the documentation - where you can find further examples - visit the dedicated [Emotion documentation](https://www.microsoft.com/cognitive-services/en-us/emotion-api/documentation) and the [Emotion API definition](https://dev.projectoxford.ai/docs/services/5639d931ca73072154c1ce89/operations/563b31ea778daf121cc3a5fa) */ 37 | 38 | //: Enough of just dealing with smiles now! Let's see what faces we are able to detect - and what we can say about age and gender :) Let's get going with [using the Face API](@next)! 39 | 40 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Smile.playgroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | //#-hidden-code 2 | import PlaygroundSupport 3 | import UIKit 4 | import Foundation 5 | 6 | guard #available(iOS 9, OSX 10.11, *) else { 7 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'.
Call that job satisfaction? I don't.") 8 | } 9 | 10 | func placeEmojiForEmotion() { 11 | let page = PlaygroundPage.current 12 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 13 | proxy.send(.string("placeEmotions")) 14 | } 15 | } 16 | 17 | func chooseImage (_ imageData: Data) { 18 | let page = PlaygroundPage.current 19 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 20 | proxy.send(.data(imageData)) 21 | } 22 | } 23 | 24 | //#-end-hidden-code 25 | /*: 26 | # Am I smiling - YES I AM! 27 | 28 | After successfully managing the ComputerVision API, we will dive a little bit further into the **COGNITIVE SERVICES**. 29 | With the Emotion API we can detect - yes - emotions on human faces. What the API returns is not just the emotion, but additionally the rectangle where this face is located in the picture. Awesome, right? 30 | */ 31 | /*: 32 | * experiment: 33 | What the API gets from us is really just the image. In return we will get an array of emotion results, which contain a rectangle of the face position and size in the picture and certainties of the different emotions - there are eight of them (neutral, happy, sad, angry, ...). The emotion with the highest certainty wins and will be mapped to an emoji by our app. 34 | */ 35 | let image = /*#-editable-code*/#imageLiteral(resourceName: "beach.png")/*#-end-editable-code*/ 36 | let dataImage = UIImagePNGRepresentation(image) 37 | chooseImage(dataImage!) 38 | placeEmojiForEmotion() 39 | 40 | /*: 41 | * callout(What did we learn?): 42 | Wonderful! So you just called your second API from the Cognitive Services Suite. The Emotion API. If you want to have a detailed look at the documentation - where you can find further examples - visit the dedicated [Emotion documentation](https://www.microsoft.com/cognitive-services/en-us/emotion-api/documentation) and the [Emotion API definition](https://dev.projectoxford.ai/docs/services/5639d931ca73072154c1ce89/operations/563b31ea778daf121cc3a5fa) */ 43 | 44 | //: Enough of just dealing with smiles now! Let's see what faces we are able to detect - and what we can say about age and gender :) Let's get going with [using the Face API](@next)! 45 | 46 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Smile.playgroundpage/LiveView.swift: -------------------------------------------------------------------------------- 1 | import PlaygroundSupport 2 | let page = PlaygroundPage.current 3 | page.needsIndefiniteExecution = true 4 | page.liveView = MyView() 5 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Smile.playgroundpage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | Gimme a smile!
9 | LiveViewMode 10 | VisibleByDefault 11 | PosterReference 12 | background.png 13 | 14 | 15 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Smile.playgroundpage/Resources/background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Chapters/Emotions.playgroundchapter/Pages/Smile.playgroundpage/Resources/background.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | Faces 9 | Pages 10 | 11 | Intro.cutscenepage 12 | WhoIsThis.playgroundpage 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/Intro.cutscenepage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Name 6 | Introduction 7 | Version 8 | 1.0 9 | CutsceneReference 10 | cutscene.html 11 | 12 | 13 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/Intro.cutscenepage/Resources/cutscene.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | simpleCommands 5 | 48 | 49 | 50 | 51 |
52 |
53 |
Pretty impressive so far, right?
As you are already guessing - there is even more magic in the API hat.
54 |
Applause for our last guest for today, the
55 |
Faces API
56 |
It's about faces, but this time in a veeeery sophisticated way. We can detect the features of the face with their exact coordinates in the picture. Furthermore we can say what age and gender a person is - and even whether they are wearing glasses or a beard. Wow!
57 |
58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/Intro.cutscenepage/Resources/images/cutscenebg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/Intro.cutscenepage/Resources/images/cutscenebg.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/WhoIsThis.playgroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | //#-hidden-code 2 | import PlaygroundSupport 3 | import UIKit 4 | import Foundation 5 | 6 | guard #available(iOS 9, OSX 10.11, *) else { 7 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction? I don't.") 8 | } 9 | 10 | func detectFaces () { 11 | let page = PlaygroundPage.current 12 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 13 | proxy.send(.string("showFaceLandmarks")) 14 | proxy.send(.string("detectFace")) 15 | } 16 | } 17 | 18 | func chooseImage (_ imageData: Data) { 19 | let page = PlaygroundPage.current 20 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 21 | proxy.send(.data(imageData)) 22 | } 23 | } 24 | //#-end-hidden-code 25 | /*: 26 | # Who's on that picture? 27 | 28 | As we already saw in the Emotions demo we are capable of getting some facial features from the API, like the face rectangle or the emotion. But is there more? Like getting *coordinates* of e.g. the eyes and the nose. The answer is **YES**! We can do this. 29 | 30 | The Cognitive Services provide an API called **Face API**. With this API we can analyse the features of a human face. We can determine where the eyes are, where the pupil is currently located, how "big" or "small" a nose is and if the face is currently smiling because it's mouth and lip coordinates indicates it :) 31 | 32 | But let's dive in and see, what the **Face API** sees for us. 33 | */ 34 | /*: 35 | * experiment: 36 | So let's get the face analysis started. As you will see in a moment we get the facial landmarks (dots in the image), the face rectangle and general infos like age, gender, glasses and facial hair for the selected picture. Cool huh? 37 | */ 38 | 39 | let image = /*#-editable-code*/#imageLiteral(resourceName: "beach.png")/*#-end-editable-code*/ 40 | let dataImage = UIImagePNGRepresentation(image) 41 | chooseImage(dataImage!) 42 | detectFaces() 43 | /*: 44 | * callout(What did we learn?): 45 | The wonderful thing about this **Face API** is especially the retrieval of the landmarks of the face. We can do fun things with it, like pinning things into the face :D But we can identify this face, as soon as we added it to a PersonGroup, on other images. So we don't have to analyse the image itself and compare it to other faces to "find" persons on images. We can let the Face API do the work for us. 
Just have a look at the [Faces documentation](https://www.microsoft.com/cognitive-services/en-us/face-api/documentation/overview) and the way how to use [Persons and PersonGroups](https://www.microsoft.com/cognitive-services/en-us/face-api/documentation/face-api-how-to-topics/howtoidentifyfacesinimage) */ 46 | 47 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/WhoIsThis.playgroundpage/LiveView.swift: -------------------------------------------------------------------------------- 1 | import PlaygroundSupport 2 | let page = PlaygroundPage.current 3 | page.needsIndefiniteExecution = true 4 | page.liveView = MyView() 5 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/WhoIsThis.playgroundpage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | Detect faces 9 | LiveViewMode 10 | VisibleByDefault 11 | PosterReference 12 | background.png 13 | 14 | 15 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/WhoIsThis.playgroundpage/Resources/background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Chapters/Faces.playgroundchapter/Pages/WhoIsThis.playgroundpage/Resources/background.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | Get Started 9 | Pages 10 | 11 | Intro.cutscenepage 12 | The elements.playgroundpage 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/Intro.cutscenepage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Name 6 | Introduction 7 | Version 8 | 1.0 9 | CutsceneReference 10 | cutscene.html 11 | 12 | 13 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/Intro.cutscenepage/Resources/cutscene.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | simpleCommands 5 | 48 | 49 | 50 | 51 |
52 |
53 |
Get back to the playground and
54 |
PLAY!
55 |
with Swift and the Microsoft Cognitive Services.
56 |
57 |
This playground book will guide you through the most important concepts and API use cases of the MS Cognitive Services Vision APIs.
58 |
We will start with a very short intro to the basic concepts of playgrounds and UIKit, just to get your fingers warmed up. Yay!
59 |
60 | 61 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/Intro.cutscenepage/Resources/images/cutscenebg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/Intro.cutscenepage/Resources/images/cutscenebg.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/The elements.playgroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # First steps with Playgrounds 3 | 4 | The first thing we want to try is to get hands on with what we know already - dealing with out beloved UIKit. You can use all elements of UIKit as you are used to. In this example we want to build our environment for our further examples. Let's warm up, put your hands on the playground and get started! 5 | 6 | * callout(What to do): 7 | Just choose a picture you like, then enter a text you think fits to the image. To make it nice looking, choose a color for your text. 8 | */ 9 | //#-hidden-code 10 | import PlaygroundSupport 11 | import UIKit 12 | import Foundation 13 | 14 | guard #available(iOS 9, OSX 10.11, *) else { 15 | fatalError("Life? Don't talk to me about life. Here I am, brain the size of a planet, and they tell me to run a 'playground'. Call that job satisfaction? I don't.") 16 | } 17 | 18 | func setDescription(_ message: String) { 19 | let page = PlaygroundPage.current 20 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 21 | proxy.send(.string(message)) 22 | } 23 | } 24 | 25 | func setMyTextColor(_ color: UIColor) { 26 | let page = PlaygroundPage.current 27 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 28 | let message : String = color.toHexString() 29 | proxy.send(.string(message)) 30 | } 31 | } 32 | 33 | func chooseImage (_ imageData: Data) { 34 | let page = PlaygroundPage.current 35 | if let proxy = page.liveView as? PlaygroundRemoteLiveViewProxy { 36 | proxy.send(.data(imageData)) 37 | } 38 | } 39 | //#-end-hidden-code 40 | let image = /*#-editable-code*/#imageLiteral(resourceName: "beach.png")/*#-end-editable-code*/ 41 | let dataImage = UIImagePNGRepresentation(image) 42 | chooseImage(dataImage!) 43 | setDescription(/*#-editable-code */"Description goes here!"/*#-end-editable-code*/) 44 | setMyTextColor(/*#-editable-code */#colorLiteral(red: 1.0, green: 1.0, blue: 1.0, alpha: 1.0)/*#-end-editable-code*/) 45 | /*: 46 | * callout(What did we learn?): 47 | So we are done with the basics. We created a `UIImageView` with an embedded `UIImage`. Our description area consists of an `UIView`, which background color and alpha is adjustable. And above this background view we added a `UILabel`, which shows our nice descriptive text for the picture. 48 | */ 49 | 50 | //: Horray! Let's get to our [next adventure](@next)! 
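//: * callout(Bonus):
//: Purely as a recap, here is a minimal sketch of the layout the live view builds for us: an image view, a semi-transparent strip and a label on top. It relies on the UIKit import from the hidden code above; the names and frame values are illustrative only, and the real implementation lives in Sources/MyView.swift.
let sketchCanvas = UIView(frame: CGRect(x: 0, y: 0, width: 520, height: 768))

let sketchPreview = UIImageView(frame: sketchCanvas.bounds)
sketchPreview.contentMode = .scaleAspectFit   // keep the photo's aspect ratio

let sketchBackground = UIView(frame: CGRect(x: 0, y: sketchCanvas.bounds.height - 210, width: sketchCanvas.bounds.width, height: 210))
sketchBackground.backgroundColor = .black     // dark strip behind the description
sketchBackground.alpha = 0.5                  // semi-transparent, so the photo stays visible

let sketchLabel = UILabel(frame: CGRect(x: 30, y: sketchCanvas.bounds.height - 200, width: 350, height: 110))
sketchLabel.numberOfLines = 5
sketchLabel.lineBreakMode = .byWordWrapping
sketchLabel.textColor = .white
sketchLabel.text = "Description goes here!"

sketchCanvas.addSubview(sketchPreview)
sketchCanvas.addSubview(sketchBackground)
sketchCanvas.addSubview(sketchLabel)
// This page's live view is already provided by LiveView.swift, so we leave PlaygroundPage.current.liveView untouched.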
51 | 52 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/The elements.playgroundpage/LiveView.swift: -------------------------------------------------------------------------------- 1 | import PlaygroundSupport 2 | let page = PlaygroundPage.current 3 | page.needsIndefiniteExecution = true 4 | page.liveView = MyView() 5 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/The elements.playgroundpage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | The Elements 9 | LiveViewMode 10 | VisibleByDefault 11 | LiveViewEdgeToEdge 12 | YES 13 | PosterReference 14 | background.png 15 | 16 | 17 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/The elements.playgroundpage/Resources/background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Chapters/Get Started.playgroundchapter/Pages/The elements.playgroundpage/Resources/background.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | ContentVersion 8 | 1.0 9 | Name 10 | Play with Cognitive Services 11 | ContentIdentifier 12 | com.ms.demo.CSPlayground 13 | DeploymentTarget 14 | ios10.0 15 | ImageReference 16 | playground_icon.png 17 | Chapters 18 | 19 | Get Started.playgroundchapter 20 | Computer Vision.playgroundchapter 21 | Emotions.playgroundchapter 22 | Faces.playgroundchapter 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/Aaron.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/Aaron.jpg -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/Jan.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/Jan.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/Les.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/Les.jpg -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/Nazuki.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/Nazuki.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/Owen_Family.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/Owen_Family.jpg -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/Tiffany.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/Tiffany.jpg -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/background.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/beach.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/beach.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/containers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/containers.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/group.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/group.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/highway.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/highway.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/nightcity.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive 
Services.playgroundbook/Contents/Resources/nightcity.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/playground_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/playground_icon.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/woman_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/woman_blue.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Resources/wood.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/Play with Cognitive Services.playgroundbook/Contents/Resources/wood.png -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Sources/Ext.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | 4 | public extension Array { 5 | func randomElement() -> Element { 6 | let index = Int(arc4random_uniform(UInt32(self.count))) 7 | return self[index] 8 | } 9 | } 10 | 11 | public extension UIColor { 12 | public convenience init(hexString:String) { 13 | 14 | let hexString = hexString.trimmingCharacters(in: NSCharacterSet.whitespacesAndNewlines) 15 | let scanner = Scanner(string: hexString) 16 | 17 | if (hexString.hasPrefix("#")) { 18 | scanner.scanLocation = 1 19 | } 20 | 21 | var color:UInt32 = 0 22 | scanner.scanHexInt32(&color) 23 | 24 | let mask = 0x000000FF 25 | let r = Int(color >> 16) & mask 26 | let g = Int(color >> 8) & mask 27 | let b = Int(color) & mask 28 | 29 | let red = CGFloat(r) / 255.0 30 | let green = CGFloat(g) / 255.0 31 | let blue = CGFloat(b) / 255.0 32 | 33 | self.init(red:red, green:green, blue:blue, alpha:1) 34 | } 35 | 36 | public func toHexString() -> String { 37 | var r:CGFloat = 0 38 | var g:CGFloat = 0 39 | var b:CGFloat = 0 40 | var a:CGFloat = 0 41 | 42 | getRed(&r, green: &g, blue: &b, alpha: &a) 43 | 44 | let rgb:Int = (Int)(r*255)<<16 | (Int)(g*255)<<8 | (Int)(b*255)<<0 45 | 46 | return NSString(format:"#%06x", rgb) as String 47 | } 48 | } 49 | 50 | extension CGContext { 51 | func addRect(rect:CGRect, fillColor:UIColor, strokeColor:UIColor, width:CGFloat) { 52 | self.addRect(rect) 53 | self.fillAndStroke(fillColor: fillColor, strokeColor: strokeColor, width: width) 54 | } 55 | func fillAndStroke(fillColor:UIColor, strokeColor:UIColor, width:CGFloat) { 56 | self.setFillColor(fillColor.cgColor) 57 | self.setStrokeColor(strokeColor.cgColor) 58 | self.setLineWidth(width) 59 | self.drawPath(using: CGPathDrawingMode.fillStroke) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Sources/LandmarkView.swift: -------------------------------------------------------------------------------- 1 | import 
PlaygroundSupport 2 | import UIKit 3 | import Foundation 4 | 5 | 6 | class MyLandmarkView : UIView { 7 | 8 | var face : CognitiveServicesFacesResult? = nil 9 | var scaledImageRatio : CGFloat = 1 10 | var xOffset : CGFloat = 0.0 11 | var yOffset : CGFloat = 0.0 12 | 13 | override func draw(_ rect: CGRect) { 14 | if let context = UIGraphicsGetCurrentContext(), 15 | let myFace = face { 16 | 17 | //draw all found landmarks for the face 18 | for landmark in myFace.landmarks! { 19 | context.addRect(rect: CGRect(x: (landmark.x / scaledImageRatio) + xOffset, y: (landmark.y / scaledImageRatio) + yOffset, width: 2, height: 2), fillColor: .red, strokeColor: .red, width: 1) 20 | } 21 | 22 | //draw the facerect 23 | var faceFrame = myFace.frame 24 | faceFrame.origin.x = (faceFrame.origin.x / scaledImageRatio) + xOffset 25 | faceFrame.origin.y = (faceFrame.origin.y / scaledImageRatio) + yOffset 26 | faceFrame.size.width /= scaledImageRatio 27 | faceFrame.size.height /= scaledImageRatio 28 | 29 | context.addRect(rect: faceFrame, fillColor: .clear, strokeColor: .red, width: 2) 30 | } 31 | 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /Play with Cognitive Services.playgroundbook/Contents/Sources/MyView.swift: -------------------------------------------------------------------------------- 1 | import PlaygroundSupport 2 | import UIKit 3 | import Foundation 4 | 5 | 6 | 7 | 8 | public class MyView : UIViewController { 9 | 10 | let preview = UIImageView() 11 | let textLabel = UILabel() 12 | let backgroundView = UIView() 13 | let landmarkView = MyLandmarkView() 14 | 15 | var confidence = 0.85 16 | 17 | public override func viewDidLoad() { 18 | super.viewDidLoad() 19 | 20 | view.frame = CGRect(x: 0, y: 0, width: 520, height: 768) 21 | let imageBGView = UIImageView(image: UIImage(named:"background.png")!) 22 | view.addSubview(imageBGView) 23 | 24 | preview.frame = view.bounds 25 | preview.contentMode = .scaleAspectFit 26 | 27 | textLabel.frame = CGRect(x: 30, y: view.bounds.height-200, width: 350, height: 110) 28 | textLabel.lineBreakMode = .byWordWrapping 29 | textLabel.numberOfLines = 5 30 | textLabel.textColor = .white 31 | textLabel.text = "This label makes place for your description :)" 32 | 33 | backgroundView.frame = CGRect(x: 0, y: view.bounds.height-210, width: view.bounds.width, height: 210) 34 | backgroundView.backgroundColor = .black 35 | backgroundView.alpha = 0.5 36 | 37 | landmarkView.frame = view.bounds 38 | landmarkView.backgroundColor = .clear 39 | 40 | view.addSubview(preview) 41 | view.addSubview(backgroundView) 42 | view.addSubview(textLabel) 43 | view.addSubview(landmarkView) 44 | view.bringSubview(toFront: landmarkView) 45 | 46 | makeLandmarkViewVisible(false) 47 | } 48 | 49 | public func setTheDescription(_ message: String) { 50 | textLabel.text = message 51 | } 52 | 53 | public func setTheTextColor(_ color: UIColor) { 54 | textLabel.textColor = color 55 | } 56 | 57 | public func setTheImage(_ image: UIImage) { 58 | preview.image = image 59 | } 60 | 61 | public func reply(_ message: String) { 62 | textLabel.text = message 63 | } 64 | 65 | public func makeLandmarkViewVisible(_ visible: Bool) { 66 | landmarkView.alpha = visible ? 1.0 : 0.0 67 | } 68 | 69 | public func updateImage ( _ image: UIImage) { 70 | preview.image = image 71 | view.setNeedsDisplay() 72 | textLabel.text = "upated image ... 
wohooo \(image)" 73 | } 74 | 75 | /** 76 | cognitive services functions 77 | called from the LiveViewMessageHandler 78 | */ 79 | 80 | //MARK: - Computer Vision - 81 | func showTagsForImage () { 82 | let manager = CognitiveServices() 83 | textLabel.text = "... gimme a sec - getting your tags!" 84 | 85 | 86 | manager.retrievePlausibleTagsForImage(preview.image!, confidence) { (result, error) -> (Void) in 87 | DispatchQueue.main.async(execute: { 88 | if let _ = error { 89 | print("omg something bad happened") 90 | } else { 91 | print("seems like all went well: \(result)") 92 | } 93 | self.setTagsAsDescription(result) 94 | }) 95 | } 96 | } 97 | 98 | private func setTagsAsDescription (_ tags : [String]?) { 99 | if (tags?.count)! > 0 { 100 | textLabel.text = "" 101 | for tag in tags! { 102 | textLabel.text = textLabel.text! + "#" + tag + " " 103 | } 104 | } else { 105 | textLabel.text = "Uh noez! No tags could be found for this image :(" 106 | } 107 | } 108 | 109 | func adjustConfidence (_ value : Double) { 110 | confidence = value 111 | } 112 | 113 | //MARK: - Emotion API - 114 | 115 | private var emojis: [CognitiveServicesEmotionResult]? = nil { 116 | didSet { 117 | if preview.image == nil { 118 | return 119 | } 120 | 121 | if let results = emojis { 122 | UIGraphicsBeginImageContext(preview.image!.size) 123 | preview.image?.draw(in: CGRect(origin: CGPoint.zero, size: preview.image!.size)) 124 | 125 | for result in results { 126 | var availableEmojis = [String]() 127 | switch result.emotion { 128 | case .Anger: 129 | availableEmojis.append("😡") 130 | availableEmojis.append("😠") 131 | case .Contempt: 132 | availableEmojis.append("😤") 133 | case .Disgust: 134 | availableEmojis.append("😷") 135 | availableEmojis.append("🤐") 136 | case .Fear: 137 | availableEmojis.append("😱") 138 | case .Happiness: 139 | availableEmojis.append("😝") 140 | availableEmojis.append("😀") 141 | availableEmojis.append("😃") 142 | availableEmojis.append("😄") 143 | availableEmojis.append("😆") 144 | availableEmojis.append("😊") 145 | availableEmojis.append("🙂") 146 | availableEmojis.append("☺️") 147 | case .Neutral: 148 | availableEmojis.append("😶") 149 | availableEmojis.append("😐") 150 | availableEmojis.append("😑") 151 | case .Sadness: 152 | availableEmojis.append("🙁") 153 | availableEmojis.append("😞") 154 | availableEmojis.append("😟") 155 | availableEmojis.append("😔") 156 | availableEmojis.append("😢") 157 | availableEmojis.append("😭") 158 | case .Surprise: 159 | availableEmojis.append("😳") 160 | availableEmojis.append("😮") 161 | availableEmojis.append("😲") 162 | } 163 | 164 | let emoji = availableEmojis.randomElement() 165 | 166 | let maximumSize = result.frame.size 167 | let string = emoji as NSString 168 | let startingFontSize = 8192.0 169 | 170 | var actualFontSize = startingFontSize 171 | var stepping = actualFontSize 172 | repeat { 173 | stepping /= 2.0 174 | if stepping < 1.0 { 175 | break 176 | } 177 | 178 | let font = UIFont.systemFont(ofSize: CGFloat(actualFontSize)) 179 | let calculatedSize = string.size(attributes: [NSFontAttributeName: font]) 180 | 181 | if calculatedSize.width > maximumSize.width { 182 | actualFontSize -= stepping 183 | } else { 184 | actualFontSize += stepping 185 | } 186 | } while true 187 | 188 | let font = UIFont.systemFont(ofSize: CGFloat(actualFontSize)) 189 | string.draw(in: result.frame, withAttributes: [NSFontAttributeName: font]) 190 | } 191 | 192 | preview.image = UIGraphicsGetImageFromCurrentImageContext() 193 | UIGraphicsEndImageContext() 194 | } 195 | } 196 | } 197 | 198 | 199 | 200 
| func makeEmojiFromEmotionOnImage () { 201 | let manager = CognitiveServices() 202 | 203 | textLabel.text = "... gimme a sec - looking for smiles!" 204 | manager.retrievePlausibleEmotionsForImage(preview.image!) { (result, error) -> (Void) in 205 | DispatchQueue.main.async(execute: { 206 | if let _ = error { 207 | print("omg something bad happened") 208 | } else { 209 | print("seems like all went well: \(result)") 210 | } 211 | 212 | if (result?.count)! > 0 { 213 | self.textLabel.text = "1..2.. Emoji!\n\((result?.count)!) emotions detected" 214 | } else { 215 | self.textLabel.text = "Seems like no emotions were detected :(" 216 | } 217 | 218 | self.emojis = result 219 | }) 220 | } 221 | } 222 | 223 | //MARK: - Faces API - 224 | 225 | func detectFaces () { 226 | let manager = CognitiveServices() 227 | 228 | textLabel.text = "... gimme a sec - watching out for faces!" 229 | 230 | manager.retrieveFacesForImage(preview.image!) { (result, error) -> (Void) in 231 | DispatchQueue.main.async(execute: { 232 | if let _ = error { 233 | print("omg something bad happened") 234 | } else { 235 | print("seems like all went well: \(result)") 236 | } 237 | 238 | if (result?.count)! > 0 { 239 | let face = result?[0] 240 | self.textLabel.text = "Gender: \((face?.gender)!)\nAge: \((face?.age)!)\nGlasses: \((face?.glasses)!)\nFacial hair: \((face?.facialHair)!)" 241 | self.landmarkView.scaledImageRatio = self.scaledImageRatio() 242 | self.drawLandmarks(face!) 243 | 244 | } else { 245 | self.textLabel.text = "Seems like no emotions were detected :(" 246 | } 247 | }) 248 | } 249 | } 250 | 251 | private func scaledImageRatio () -> CGFloat { 252 | let imageViewHeight = preview.bounds.height 253 | let imageViewWidth = preview.bounds.width 254 | let imageSize = preview.image!.size 255 | let scaledImageHeight = min(imageSize.height * (imageViewWidth / imageSize.width), imageViewHeight) 256 | let scaledImageWidth = min(imageSize.width * (imageViewHeight / imageSize.height), imageViewWidth) 257 | 258 | landmarkView.yOffset = (view.frame.height - scaledImageHeight) / CGFloat(2.0) 259 | landmarkView.xOffset = (view.frame.width - scaledImageWidth) / CGFloat(2.0) 260 | 261 | let ratio : CGFloat = imageSize.height / scaledImageHeight 262 | return ratio 263 | } 264 | 265 | private func drawLandmarks (_ face: CognitiveServicesFacesResult) { 266 | landmarkView.face = face 267 | landmarkView.setNeedsDisplay() 268 | } 269 | 270 | 271 | } 272 | 273 | //MARK: - LiveView communication extension - 274 | 275 | extension MyView : PlaygroundLiveViewMessageHandler { 276 | public func liveViewMessageConnectionOpened() { 277 | // We don't need to do anything in particular when the connection opens. 278 | } 279 | 280 | public func liveViewMessageConnectionClosed() { 281 | // We don't need to do anything in particular when the connection closes. 282 | } 283 | 284 | public func receive(_ message: PlaygroundValue) { 285 | switch message { 286 | case let .string(text): 287 | if text.contains(".") { 288 | setTheImage(UIImage(named:text)!) 
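// Convention between the playground pages and this live view: a string containing "." names
// an image resource, a leading "#" means a hex colour string, the keywords "retrieveTags",
// "placeEmotions", "detectFace", "showFaceLandmarks" and "hideFaceLandmarks" trigger the
// Cognitive Services calls, and any other string is shown as the description text.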
289 | } else if text.contains("#") { 290 | setTheTextColor(UIColor(hexString: text)) 291 | } else if text == "retrieveTags" { 292 | showTagsForImage() 293 | } else if text == "placeEmotions" { 294 | makeEmojiFromEmotionOnImage() 295 | } else if text == "detectFace" { 296 | detectFaces() 297 | } else if text == "showFaceLandmarks" { 298 | makeLandmarkViewVisible(true) 299 | } else if text == "hideFaceLandmarks" { 300 | makeLandmarkViewVisible(false) 301 | } else { 302 | setTheDescription(text) 303 | } 304 | case let .integer(number): 305 | reply("You sent me the number \(number)!") 306 | case let .boolean(boolean): 307 | reply("You sent me the value \(boolean)!") 308 | case let .floatingPoint(number): 309 | adjustConfidence(number) 310 | reply("You sent me the number \(number)!") 311 | case let .date(date): 312 | reply("You sent me the date \(date)") 313 | case let .data(mydata): 314 | let theImage = UIImage(data: mydata) 315 | updateImage(theImage!) 316 | case .array: 317 | reply("Hmm. I don't know what to do with an array.") 318 | case let .dictionary(_): 319 | reply("Hmm. I don't know what to do with an array.") 320 | } 321 | } 322 | } 323 | -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Pages/Force a smile.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | import PlaygroundSupport 4 | 5 | 6 | 7 | let helpers = EmotionHelpers() 8 | 9 | 10 | 11 | func makeEmojiFromEmotionOnPhoto (photo : UIImageView!, withFaceRect: Bool, completion: @escaping (UIImage) -> (Void)) { 12 | 13 | let manager = CognitiveServices() 14 | 15 | manager.retrievePlausibleEmotionsForImage(photo.image!) { (result, error) -> (Void) in 16 | DispatchQueue.main.async(execute: { 17 | if let _ = error { 18 | print("omg something bad happened") 19 | } else { 20 | print("seems like all went well: \(String(describing: result))") 21 | } 22 | 23 | if (result?.count)! > 0 { 24 | print("1..2.. Emoji!\n\((result?.count)!) emotions detected") 25 | } else { 26 | print("Seems like no emotions were detected :(") 27 | } 28 | 29 | var isHappy = true 30 | for res in result! { 31 | if res.emotion != CognitiveServicesEmotion.Happiness { 32 | isHappy = false 33 | print("OMG detected unhappy face - NO!") 34 | break 35 | } 36 | } 37 | 38 | if isHappy { 39 | let photoWithEmojis = helpers.drawEmojisFor(emotions: result, withFaceRect: withFaceRect, image: photo.image!) 40 | completion(photoWithEmojis) 41 | } 42 | else { 43 | completion(#imageLiteral(resourceName: "grumpycat.jpg")) 44 | } 45 | }) 46 | } 47 | } 48 | 49 | 50 | 51 | let frame = CGRect(x: 0, y: 0, width: 450, height: 600) 52 | let preview = UIImageView(frame: frame) 53 | preview.contentMode = .scaleAspectFit 54 | preview.backgroundColor = #colorLiteral(red: 1, green: 1, blue: 1, alpha: 1) 55 | preview.image = #imageLiteral(resourceName: "Giugli.png") 56 | makeEmojiFromEmotionOnPhoto(photo: preview, withFaceRect: true) { emojiImage in 57 | preview.image = emojiImage 58 | } 59 | 60 | 61 | 62 | PlaygroundPage.current.liveView = preview 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Pages/Make me smile.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # Make me smile! 
:D 3 | 4 | After successfully managing the ComputerVision API, we will dive a little bit further into the **COGNITIVE SERVICES**. 5 | With the Emotion API we can detect - yes - emotions on human faces. What the API returns is not just the motion, but additionally the rectangle where this face is located at the picuture. Awesome, right? 6 | */ 7 | /*: 8 | ## A bit of initial setup work 9 | 10 | Let's do some initial work and create a place where we want to display our photo. 11 | - First we create a frame, in which our photo will be displayed. Think about it as the dimension of your graphic context. 12 | - Then we create a component called UIImageView, which will be able to display our photo, as soon was we choose one 13 | - We need to define the way we want to fit the image into the defined space 14 | - And for the matter of beauty we set our favorite color as the background color for our photo 15 | */ 16 | 17 | 18 | // This makes all the iOS known controls available like buttons, labels, navigation views and so on 19 | import UIKit 20 | 21 | // This import gives us nice support for playground specific features like using the live view 22 | import PlaygroundSupport 23 | 24 | //create the rectangle in which the resulting photo shall be shown 25 | let frame = CGRect(x: 0, y: 0, width: 450, height: 600) 26 | //create an container for your photo and give it your defined size 27 | let preview = UIImageView(frame: frame) 28 | //set the way the photo will be fit into the defined rectangle 29 | preview.contentMode = .scaleAspectFit 30 | //set your favorite background color to the view 31 | preview.backgroundColor = #colorLiteral(red: 1, green: 1, blue: 1, alpha: 1) 32 | 33 | 34 | 35 | /*: 36 | ## Choose me! ME! 37 | 38 | It's time now to finally get the emotion detection going right? So here we just choose a photo with the image picker and tadaaa, it is displayed in the previous defined preview image view. 39 | */ 40 | 41 | preview.image = #imageLiteral(resourceName: "Giugli.png") 42 | 43 | 44 | /*: 45 | ## Do some networking ;) 46 | 47 | We have everything in place. So now we want to send our chosen photo the the Cognitive Services emotion detection endpoint and show this emotion as an emoji over our face on the image. Sounds like fun right? So let's get going! 48 | - We create an "instance" of a class called EmotionHelpers 49 | - This instance knows a handy method called "makeEmojisFromEmotionOnPhoto". This is exactly what we need. So we go ahead and call this method 50 | - The method needs two "parameters" and one "callback" 51 | - The first parameter is the photo, because otherwise the helper can't call the emotion endpoint for us 52 | - The second parameter tells the helper to draw a rectangle around the detected faces in the picture 53 | - And the code in curly braces handles what shall be done with the result we get back from the helper method. As we get back an image, we just want to see it within our preview image view. And so we just assign the newly craeted emojiimage to our preview view. 
54 | */ 55 | 56 | let helpers = EmotionHelpers() 57 | 58 | helpers.makeEmojiFromEmotionOnPhoto(photo: preview, withFaceRect: true) { emojiImage in 59 | preview.image = emojiImage 60 | } 61 | 62 | //set our preview image view 63 | PlaygroundPage.current.liveView = preview 64 | -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Pages/My own smile.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | 2 | 3 | import Foundation 4 | import PlaygroundSupport 5 | import UIKit 6 | 7 | 8 | //cgrect - create the rectangle in which the resulting photo shall be shown 9 | 10 | 11 | //uiimageview - create an container for your photo and give it your defined size 12 | let preview = UIImageView() 13 | 14 | //contentMode - set the way the photo will be fit into the defined rectangle 15 | 16 | 17 | //background color - set your favorite background color to the view 18 | 19 | 20 | //image literal - use image picker and assign to image of uiimageview 21 | 22 | 23 | //EmotionHelpers - create an "instance" of a class called EmotionHelpers 24 | 25 | 26 | //makeEmojisFromEmotionOnPhoto - use the method makeEmojisFromEmotionOnPhoto to recognize the emotion of the face in the image. Use the image of the preview as the photo for the first parameter. With the second parameter you can choose, if you want to see a rectangle around the detected face. The completion block is there for you to assign the new image with emojis to the preview 27 | 28 | 29 | PlaygroundPage.current.liveView = preview -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Pages/My warm up.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | 2 | import UIKit 3 | import PlaygroundSupport 4 | import Foundation 5 | import GameplayKit 6 | 7 | var str = "Hello, playground" 8 | 9 | 10 | -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Pages/The warm up.xcplaygroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | 2 | /*: 3 | # OMG it's Swift! 4 | 5 | It's time to get started. So we will create a little program which generates an output on the screen everytime we press a button. Not hard right? So let's go! 6 | - - - 7 | */ 8 | //: As always, we need a couple of imports to get nice API support for what we will be doing 9 | import UIKit 10 | import PlaygroundSupport 11 | import Foundation 12 | import GameplayKit 13 | 14 | //: Then we define an **array** of emoji strings, which we will display in our output 15 | let emojis = ["😘", "😽", "👊", "🤘", "👩‍💻"] 16 | //: As we need to display our output somewhere, we create a UIView to satisfy this need. You can freely play around with the size if you wish. 17 | let view = UIView(frame: CGRect(x: 0, y: 0, width: 320, height: 500)) 18 | //: To get a nice and friendly UI, we set the background color to white. The default value is just black. 19 | view.backgroundColor = UIColor.white 20 | //: As we want to trigger our output with a button click, we need a button. So we create a button! 
21 | let button = UIButton(type: .roundedRect) 22 | //: Then the button gets a nice and describing title 23 | button.setTitle("Press me", for: .normal) 24 | //: And a place to be displayed 25 | button.frame = CGRect(x: 85, y: 400, width: 150, height: 30) 26 | //: The button setup is done, so we just add it to the view as a subview 27 | view.addSubview(button) 28 | 29 | /*: 30 | ### Something very playground specific 31 | - - - 32 | Now we have to do something very playground specific. In iOS it's super easy to add logic to touch events of controls. As the playground is not a full blown iOS app, we have to do a little workaround to get to our goal. So just notice that this class with it's only function helps the button to know what to do when it gets tapped. 33 | */ 34 | class ButtonLogic { 35 | var yPos = 20 36 | 37 | /*: 38 | Great! Now we have to define what the button should do everythime it getts tapped, or clicked :D 39 | - We craete a label to show our output and give it a place to the displayed 40 | - Then we create a random number between 0 and the length of our emoji-array 41 | - We set the text into the label right next to an emoji from our array from a random position within the array 42 | - And then add the label to the view for being able to see it on the 43 | */ 44 | @objc func clicked() { 45 | let label = UILabel(frame: CGRect(x: 40, y: yPos, width: 320, height: 30)) 46 | 47 | let random = GKRandomDistribution(lowestValue: 0, highestValue: emojis.count-1) 48 | let index = random.nextInt() 49 | let emoji = emojis[index] 50 | 51 | label.text = "Coding \(emoji)!" 52 | view.addSubview(label) 53 | 54 | yPos += 30 55 | } 56 | } 57 | 58 | //: As described above we have to create a new class instance for triggering the button behaviour. Then we add a target to the button which is triggered as soon as it gets touched. 59 | let receiver = ButtonLogic() 60 | button.addTarget(receiver, action: #selector(ButtonLogic.clicked), for: .touchUpInside) 61 | 62 | //We assign our own created view to the playground so that it will be nicely rendered and displayed for us! 
63 | PlaygroundPage.current.liveView = view 64 | 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Resources/Aaron.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/PlayWithYourSmile.playground/Resources/Aaron.jpg -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Resources/Giugli.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/PlayWithYourSmile.playground/Resources/Giugli.png -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Resources/Jan.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/PlayWithYourSmile.playground/Resources/Jan.png -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Resources/Les.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/PlayWithYourSmile.playground/Resources/Les.jpg -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Resources/Photo on 26.06.17 at 09.21.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/PlayWithYourSmile.playground/Resources/Photo on 26.06.17 at 09.21.jpg -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Resources/grumpycat.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/PlayWithYourSmile.playground/Resources/grumpycat.jpg -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Resources/manu.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/PlayWithYourSmile.playground/Resources/manu.jpg -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Sources/EmotionHelpers.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | 4 | public class EmotionHelpers : NSObject { 5 | 6 | var preview : UIImageView! 7 | 8 | /** 9 | Method for putting an emoji with a matching emotion over each detected face in a photo. 10 | 11 | - parameters: 12 | - photo: The photo on which faces and it's emotion shall be detected 13 | - withFaceRect: If TRUE then the face rectangle is drawn into the photo 14 | - completion: UIImage as new photo with added emojis for the detected emotion over each face in fitting size and with face framing rectangles if declared. Image is the same size as the original. 
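- note: The completion closure is called on the main queue (the helper wraps its work in DispatchQueue.main.async), so the returned image can be assigned straight to an image view from within it.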
15 | */ 16 | public func makeEmojiFromEmotionOnPhoto (photo : UIImageView!, withFaceRect: Bool, completion: @escaping (UIImage) -> (Void)) { 17 | 18 | let manager = CognitiveServices() 19 | 20 | manager.retrievePlausibleEmotionsForImage(photo.image!) { (result, error) -> (Void) in 21 | DispatchQueue.main.async(execute: { 22 | if let _ = error { 23 | print("omg something bad happened") 24 | } else { 25 | print("seems like all went well: \(String(describing: result))") 26 | } 27 | 28 | if (result?.count)! > 0 { 29 | print("1..2.. Emoji!\n\((result?.count)!) emotions detected") 30 | } else { 31 | print("Seems like no emotions were detected :(") 32 | } 33 | 34 | let photoWithEmojis = self.drawEmojisFor(emotions: result, withFaceRect: withFaceRect, image: photo.image!) 35 | completion(photoWithEmojis) 36 | }) 37 | } 38 | 39 | } 40 | 41 | public func emojisFor (emotion: CognitiveServicesEmotionResult) -> [String] { 42 | var availableEmojis = [String]() 43 | 44 | switch emotion.emotion { 45 | case .Anger: 46 | availableEmojis.append("😡") 47 | availableEmojis.append("😠") 48 | case .Contempt: 49 | availableEmojis.append("😤") 50 | case .Disgust: 51 | availableEmojis.append("😷") 52 | availableEmojis.append("🤐") 53 | case .Fear: 54 | availableEmojis.append("😱") 55 | case .Happiness: 56 | availableEmojis.append("😝") 57 | availableEmojis.append("😀") 58 | availableEmojis.append("😃") 59 | availableEmojis.append("😄") 60 | availableEmojis.append("😆") 61 | availableEmojis.append("😊") 62 | availableEmojis.append("🙂") 63 | availableEmojis.append("☺️") 64 | case .Neutral: 65 | availableEmojis.append("😶") 66 | availableEmojis.append("😐") 67 | availableEmojis.append("😑") 68 | case .Sadness: 69 | availableEmojis.append("🙁") 70 | availableEmojis.append("😞") 71 | availableEmojis.append("😟") 72 | availableEmojis.append("😔") 73 | availableEmojis.append("😢") 74 | availableEmojis.append("😭") 75 | case .Surprise: 76 | availableEmojis.append("😳") 77 | availableEmojis.append("😮") 78 | availableEmojis.append("😲") 79 | } 80 | 81 | return availableEmojis 82 | 83 | } 84 | 85 | public func drawEmojisFor (emotions: [CognitiveServicesEmotionResult]?, withFaceRect: Bool, image: UIImage) -> UIImage { 86 | 87 | var returnImage : UIImage! 
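// What happens below: the photo is redrawn into an image context of the same size and,
// for every detected emotion, a random matching emoji is rendered over the face.
// The emoji's font size is found by halving a step value (starting from a huge size)
// and growing or shrinking the size until the glyph just fits the width of the
// detected face rectangle. If withFaceRect is true, the face frame is stroked as well.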
88 | 89 | if let results = emotions { 90 | UIGraphicsBeginImageContext(image.size) 91 | image.draw(in: CGRect(origin: CGPoint.zero, size: image.size)) 92 | 93 | for result in results { 94 | let availableEmojis = emojisFor(emotion: result) 95 | 96 | let emoji = availableEmojis.randomElement() 97 | 98 | let maximumSize = result.frame.size 99 | let string = emoji as NSString 100 | let startingFontSize = 8192.0 101 | 102 | var actualFontSize = startingFontSize 103 | var stepping = actualFontSize 104 | 105 | repeat { 106 | stepping /= 2.0 107 | if stepping < 1.0 { 108 | break 109 | } 110 | 111 | let font = UIFont.systemFont(ofSize: CGFloat(actualFontSize)) 112 | let calculatedSize = string.size(attributes: [NSFontAttributeName: font]) 113 | 114 | if calculatedSize.width > maximumSize.width { 115 | actualFontSize -= stepping 116 | } else { 117 | actualFontSize += stepping 118 | } 119 | 120 | } while true 121 | 122 | let font = UIFont.systemFont(ofSize: CGFloat(actualFontSize)) 123 | string.draw(in: result.frame, withAttributes: [NSFontAttributeName: font]) 124 | 125 | if withFaceRect { 126 | let context = UIGraphicsGetCurrentContext() 127 | let frame = result.frame 128 | context!.setLineWidth(5) 129 | context!.addRect(frame) 130 | context!.drawPath(using: .stroke) 131 | } 132 | 133 | } 134 | 135 | returnImage = UIGraphicsGetImageFromCurrentImageContext() 136 | UIGraphicsEndImageContext() 137 | } 138 | 139 | return returnImage 140 | } 141 | 142 | } 143 | 144 | -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/Sources/Ext.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | 4 | public extension Array { 5 | func randomElement() -> Element { 6 | let index = Int(arc4random_uniform(UInt32(self.count))) 7 | return self[index] 8 | } 9 | } 10 | 11 | extension UIColor { 12 | convenience init(hexString:String) { 13 | 14 | let hexString = hexString.trimmingCharacters(in: NSCharacterSet.whitespacesAndNewlines) 15 | let scanner = Scanner(string: hexString) 16 | 17 | if (hexString.hasPrefix("#")) { 18 | scanner.scanLocation = 1 19 | } 20 | 21 | var color:UInt32 = 0 22 | scanner.scanHexInt32(&color) 23 | 24 | let mask = 0x000000FF 25 | let r = Int(color >> 16) & mask 26 | let g = Int(color >> 8) & mask 27 | let b = Int(color) & mask 28 | 29 | let red = CGFloat(r) / 255.0 30 | let green = CGFloat(g) / 255.0 31 | let blue = CGFloat(b) / 255.0 32 | 33 | self.init(red:red, green:green, blue:blue, alpha:1) 34 | } 35 | 36 | func toHexString() -> String { 37 | var r:CGFloat = 0 38 | var g:CGFloat = 0 39 | var b:CGFloat = 0 40 | var a:CGFloat = 0 41 | 42 | getRed(&r, green: &g, blue: &b, alpha: &a) 43 | 44 | let rgb:Int = (Int)(r*255)<<16 | (Int)(g*255)<<8 | (Int)(b*255)<<0 45 | 46 | return NSString(format:"#%06x", rgb) as String 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /PlayWithYourSmile.playground/contents.xcplayground: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![license](https://img.shields.io/github/license/mashape/apistatus.svg?maxAge=2592000)]() Platform iOS Swift 3 compatible 2 | 3 | # Hands on Swift Playgrounds for iPad 4 | 5 | This playgrounds will show you how 
to code :) Not from the very beginning, but with super fun features. The first features in this playground will show you how to use the Microsoft Cognitive Services APIs within fun and easy-to-learn scenarios. 6 | 7 |

8 | 9 |

10 | 11 | ## What specific APIs will I learn? 12 | The first APIs out of this huge suite will be the 13 | - Computer Vision API 14 | - Emotion API 15 | - Face API 16 | - Custom Vision Service with API usage 17 | - CoreML predictions with own trained model ([an easy guide for getting started @ medium](https://medium.com/@codeprincess/your-cat-is-not-grumpy-3e5353b62a67) ) 18 | - Handwritten text recognition ( [step by step tutorial @ medium](https://medium.com/@codeprincess/the-doodling-workshop-2-9c763c21c92b) ) 19 | - use storyboards in Playground Books 20 | 21 | To get going with those technologies the examples in the playground will get you kick started into the topic :) 22 | 23 | ## Details on the playground 24 | The Playground is compatible with the Swift Playground App, currently iOS 10, and Xcode 8. You will be able to execute the playground itself in Xcode and in Swift Playgrounds without any changes to the code. But be aware that you can't execute playground books in Xcode. They just RUN on the iPad. 25 | 26 | ## How the heck can I get this playground onto my iPad 27 | With Airdrop - Just that easy :D But make sure that you are using Mac OS 10.11.6. I encountered connectivity troubles between the iPad and MacBook Pro when running under Mac OS 10.11.4. And you are surely aware of it, but I will mention it anyways: turn on Bluetooth - otherwise Airdrop won't work ;) Might save you a minute or two :D And same Wifi/network is a good idea too. Yes, Caption Obvious! 28 | 29 | ## How does all of this work? 30 | The playground and playground book are full of hints for handling the Cognitive Service APIs. All you have to do is fire up Xcode, open the files and just **add your own keys** for Computer Vision, Emotion and Face to the **CognitiveServices.swift** file. You'll get your keys as soon as you get your (free) trial on https://azure.microsoft.com/en-us/try/cognitive-services/. So now just Airdrop your playgrounds over to your iPad and you should be ready to go! 31 | 32 | ## Support? 33 | I will try to get back to you if you have any trouble using the playgrounds. But as this is kind of a side project by now, please be patient - just ping me on Twitter via [@codeprincess](https://analytics.twitter.com/user/codePrincess). 
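To make the key setup from "How does all of this work?" a bit more concrete, the snippet below shows the general idea only. The property names in the actual **CognitiveServices.swift** may differ, so use whatever you find in that file.

```swift
// Illustrative sketch only - open CognitiveServices.swift and use the real property names you find there.
let computerVisionApiKey = "<your Computer Vision key>"
let emotionApiKey        = "<your Emotion API key>"
let faceApiKey           = "<your Face API key>"
```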
34 | -------------------------------------------------------------------------------- /slidedeck/machinelearningnoobs.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/slidedeck/machinelearningnoobs.pptx -------------------------------------------------------------------------------- /slidedeck/playgrounds_slidedeck.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/slidedeck/playgrounds_slidedeck.pdf -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Version 6 | 1.0 7 | Name 8 | Chapter 1 9 | Pages 10 | 11 | Page1.playgroundpage 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Pages/Page1.playgroundpage/Contents.swift: -------------------------------------------------------------------------------- 1 | /*: 2 | # It's a me, tha storyboard! 3 | Playground Books can do a lot. You might know this already. But have you ever thought of using your already 4 | existing storyboards within your playgrounds and books? If not, now is the time :) 5 | 6 | * callout(Storyboards!): 7 | Build your view controller as you are used to and then just use them within your gorgeous Swift Playground Book. 8 | */ 9 | 10 | -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Pages/Page1.playgroundpage/LiveView.swift: -------------------------------------------------------------------------------- 1 | import PlaygroundSupport 2 | import UIKit 3 | 4 | let storyboard = UIStoryboard.init(name: "Main", bundle: Bundle.main) 5 | let ctrl = storyboard.instantiateViewController(withIdentifier: "view") 6 | 7 | PlaygroundPage.current.liveView = ctrl 8 | -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Pages/Page1.playgroundpage/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Name 6 | Page 1 7 | LiveViewMode 8 | VisibleByDefault 9 | LiveViewEdgeToEdge 10 | 11 | PlaygroundLoggingMode 12 | Normal 13 | 14 | 15 | -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Pages/Page1.playgroundpage/PrivateResources/Hints.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Hints 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Pages/Page1.playgroundpage/PublicResources/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in 
books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Pages/Page1.playgroundpage/PublicResources/.gitkeep -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Pages/Page1.playgroundpage/Sources/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/Chapters/Chapter1.playgroundchapter/Pages/Page1.playgroundpage/Sources/.gitkeep -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Manifest.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SwiftVersion 6 | 3.0 7 | ImageReference 8 | Icon.png 9 | Version 10 | 3.0 11 | ContentVersion 12 | 1.0 13 | Name 14 | Empty 15 | ContentIdentifier 16 | com.ms.demo.TravelLog 17 | DeploymentTarget 18 | ios10.0 19 | DevelopmentRegion 20 | en 21 | Chapters 22 | 23 | Chapter1.playgroundchapter 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Assets.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Assets.car -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Icon.png -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/AqW-SP-zhf-view-OCl-26-9ar.nib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/AqW-SP-zhf-view-OCl-26-9ar.nib -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/BYZ-38-t0r-view-8bC-Xf-vdC.nib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/BYZ-38-t0r-view-8bC-Xf-vdC.nib -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/Info.plist: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/Info.plist 
-------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/log.nib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/log.nib -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/view.nib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/PrivateResources/Main.storyboardc/view.nib -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/PublicResources/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/PublicResources/.gitkeep -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Sources/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/Travel.playgroundbook/Contents/Sources/.gitkeep -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Sources/LogController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // LogController.swift 3 | // TravelLog 4 | // 5 | // Created by Manu Rink on 30.08.17. 6 | // Copyright © 2017 microsoft. All rights reserved. 7 | // 8 | 9 | import Foundation 10 | import UIKit 11 | 12 | @objc(LogController) 13 | class LogController : UIViewController { 14 | 15 | @IBOutlet weak var greetingsLabel: UILabel! 16 | var greeting : String? 17 | 18 | override func viewDidLoad() { 19 | super.viewDidLoad() 20 | 21 | if let greetText = greeting { 22 | greetingsLabel.text = "Hola \(greetText) :)" 23 | } else { 24 | greetingsLabel.text = "Hola you :)" 25 | } 26 | } 27 | 28 | override func didReceiveMemoryWarning() { 29 | super.didReceiveMemoryWarning() 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /storyboard in books/Travel.playgroundbook/Contents/Sources/ViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // TravelLog 4 | // 5 | // Created by Manu Rink on 30.08.17. 6 | // Copyright © 2017 microsoft. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @objc(ViewController) 12 | public class ViewController: UIViewController { 13 | 14 | @IBOutlet weak var usernameTextField: UITextField! 15 | 16 | override public func viewDidLoad() { 17 | super.viewDidLoad() 18 | // Do any additional setup after loading the view, typically from a nib. 19 | } 20 | 21 | override public func didReceiveMemoryWarning() { 22 | super.didReceiveMemoryWarning() 23 | // Dispose of any resources that can be recreated. 
24 | } 25 | 26 | override public func prepare(for segue: UIStoryboardSegue, sender: Any?) { 27 | let logCtrl = segue.destination as! LogController 28 | logCtrl.greeting = usernameTextField.text 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /storyboard in books/TravelLog.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 200C1A701F56E81900B39793 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 200C1A6F1F56E81900B39793 /* AppDelegate.swift */; }; 11 | 200C1A721F56E81900B39793 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 200C1A711F56E81900B39793 /* ViewController.swift */; }; 12 | 200C1A751F56E81900B39793 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 200C1A731F56E81900B39793 /* Main.storyboard */; }; 13 | 200C1A771F56E81900B39793 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 200C1A761F56E81900B39793 /* Assets.xcassets */; }; 14 | 200C1A7A1F56E81900B39793 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 200C1A781F56E81900B39793 /* LaunchScreen.storyboard */; }; 15 | 200C1A821F5704BE00B39793 /* LogController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 200C1A811F5704BE00B39793 /* LogController.swift */; }; 16 | /* End PBXBuildFile section */ 17 | 18 | /* Begin PBXFileReference section */ 19 | 200C1A6C1F56E81900B39793 /* TravelLog.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = TravelLog.app; sourceTree = BUILT_PRODUCTS_DIR; }; 20 | 200C1A6F1F56E81900B39793 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 21 | 200C1A711F56E81900B39793 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 22 | 200C1A741F56E81900B39793 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 23 | 200C1A761F56E81900B39793 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 24 | 200C1A791F56E81900B39793 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 25 | 200C1A7B1F56E81900B39793 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 26 | 200C1A811F5704BE00B39793 /* LogController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LogController.swift; sourceTree = ""; }; 27 | /* End PBXFileReference section */ 28 | 29 | /* Begin PBXFrameworksBuildPhase section */ 30 | 200C1A691F56E81900B39793 /* Frameworks */ = { 31 | isa = PBXFrameworksBuildPhase; 32 | buildActionMask = 2147483647; 33 | files = ( 34 | ); 35 | runOnlyForDeploymentPostprocessing = 0; 36 | }; 37 | /* End PBXFrameworksBuildPhase section */ 38 | 39 | /* Begin PBXGroup section */ 40 | 200C1A631F56E81900B39793 = { 41 | isa = PBXGroup; 42 | children = ( 43 | 200C1A6E1F56E81900B39793 /* TravelLog */, 44 | 200C1A6D1F56E81900B39793 /* Products */, 45 | ); 46 | sourceTree = 
""; 47 | }; 48 | 200C1A6D1F56E81900B39793 /* Products */ = { 49 | isa = PBXGroup; 50 | children = ( 51 | 200C1A6C1F56E81900B39793 /* TravelLog.app */, 52 | ); 53 | name = Products; 54 | sourceTree = ""; 55 | }; 56 | 200C1A6E1F56E81900B39793 /* TravelLog */ = { 57 | isa = PBXGroup; 58 | children = ( 59 | 200C1A6F1F56E81900B39793 /* AppDelegate.swift */, 60 | 200C1A711F56E81900B39793 /* ViewController.swift */, 61 | 200C1A731F56E81900B39793 /* Main.storyboard */, 62 | 200C1A761F56E81900B39793 /* Assets.xcassets */, 63 | 200C1A781F56E81900B39793 /* LaunchScreen.storyboard */, 64 | 200C1A7B1F56E81900B39793 /* Info.plist */, 65 | 200C1A811F5704BE00B39793 /* LogController.swift */, 66 | ); 67 | path = TravelLog; 68 | sourceTree = ""; 69 | }; 70 | /* End PBXGroup section */ 71 | 72 | /* Begin PBXNativeTarget section */ 73 | 200C1A6B1F56E81900B39793 /* TravelLog */ = { 74 | isa = PBXNativeTarget; 75 | buildConfigurationList = 200C1A7E1F56E81900B39793 /* Build configuration list for PBXNativeTarget "TravelLog" */; 76 | buildPhases = ( 77 | 200C1A681F56E81900B39793 /* Sources */, 78 | 200C1A691F56E81900B39793 /* Frameworks */, 79 | 200C1A6A1F56E81900B39793 /* Resources */, 80 | 206F7DD51F570B0F007D7156 /* ShellScript */, 81 | ); 82 | buildRules = ( 83 | ); 84 | dependencies = ( 85 | ); 86 | name = TravelLog; 87 | productName = TravelLog; 88 | productReference = 200C1A6C1F56E81900B39793 /* TravelLog.app */; 89 | productType = "com.apple.product-type.application"; 90 | }; 91 | /* End PBXNativeTarget section */ 92 | 93 | /* Begin PBXProject section */ 94 | 200C1A641F56E81900B39793 /* Project object */ = { 95 | isa = PBXProject; 96 | attributes = { 97 | LastSwiftUpdateCheck = 0830; 98 | LastUpgradeCheck = 0830; 99 | ORGANIZATIONNAME = microsoft; 100 | TargetAttributes = { 101 | 200C1A6B1F56E81900B39793 = { 102 | CreatedOnToolsVersion = 8.3.2; 103 | DevelopmentTeam = 8W62GL8FE7; 104 | ProvisioningStyle = Automatic; 105 | }; 106 | }; 107 | }; 108 | buildConfigurationList = 200C1A671F56E81900B39793 /* Build configuration list for PBXProject "TravelLog" */; 109 | compatibilityVersion = "Xcode 3.2"; 110 | developmentRegion = English; 111 | hasScannedForEncodings = 0; 112 | knownRegions = ( 113 | en, 114 | Base, 115 | ); 116 | mainGroup = 200C1A631F56E81900B39793; 117 | productRefGroup = 200C1A6D1F56E81900B39793 /* Products */; 118 | projectDirPath = ""; 119 | projectRoot = ""; 120 | targets = ( 121 | 200C1A6B1F56E81900B39793 /* TravelLog */, 122 | ); 123 | }; 124 | /* End PBXProject section */ 125 | 126 | /* Begin PBXResourcesBuildPhase section */ 127 | 200C1A6A1F56E81900B39793 /* Resources */ = { 128 | isa = PBXResourcesBuildPhase; 129 | buildActionMask = 2147483647; 130 | files = ( 131 | 200C1A7A1F56E81900B39793 /* LaunchScreen.storyboard in Resources */, 132 | 200C1A771F56E81900B39793 /* Assets.xcassets in Resources */, 133 | 200C1A751F56E81900B39793 /* Main.storyboard in Resources */, 134 | ); 135 | runOnlyForDeploymentPostprocessing = 0; 136 | }; 137 | /* End PBXResourcesBuildPhase section */ 138 | 139 | /* Begin PBXShellScriptBuildPhase section */ 140 | 206F7DD51F570B0F007D7156 /* ShellScript */ = { 141 | isa = PBXShellScriptBuildPhase; 142 | buildActionMask = 2147483647; 143 | files = ( 144 | ); 145 | inputPaths = ( 146 | ); 147 | outputPaths = ( 148 | ); 149 | runOnlyForDeploymentPostprocessing = 0; 150 | shellPath = /bin/sh; 151 | shellScript = "chmod u+x setup.sh\n./setup.sh"; 152 | }; 153 | /* End PBXShellScriptBuildPhase section */ 154 | 155 | /* Begin PBXSourcesBuildPhase section */ 156 | 
200C1A681F56E81900B39793 /* Sources */ = { 157 | isa = PBXSourcesBuildPhase; 158 | buildActionMask = 2147483647; 159 | files = ( 160 | 200C1A821F5704BE00B39793 /* LogController.swift in Sources */, 161 | 200C1A721F56E81900B39793 /* ViewController.swift in Sources */, 162 | 200C1A701F56E81900B39793 /* AppDelegate.swift in Sources */, 163 | ); 164 | runOnlyForDeploymentPostprocessing = 0; 165 | }; 166 | /* End PBXSourcesBuildPhase section */ 167 | 168 | /* Begin PBXVariantGroup section */ 169 | 200C1A731F56E81900B39793 /* Main.storyboard */ = { 170 | isa = PBXVariantGroup; 171 | children = ( 172 | 200C1A741F56E81900B39793 /* Base */, 173 | ); 174 | name = Main.storyboard; 175 | sourceTree = ""; 176 | }; 177 | 200C1A781F56E81900B39793 /* LaunchScreen.storyboard */ = { 178 | isa = PBXVariantGroup; 179 | children = ( 180 | 200C1A791F56E81900B39793 /* Base */, 181 | ); 182 | name = LaunchScreen.storyboard; 183 | sourceTree = ""; 184 | }; 185 | /* End PBXVariantGroup section */ 186 | 187 | /* Begin XCBuildConfiguration section */ 188 | 200C1A7C1F56E81900B39793 /* Debug */ = { 189 | isa = XCBuildConfiguration; 190 | buildSettings = { 191 | ALWAYS_SEARCH_USER_PATHS = NO; 192 | CLANG_ANALYZER_NONNULL = YES; 193 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 194 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 195 | CLANG_CXX_LIBRARY = "libc++"; 196 | CLANG_ENABLE_MODULES = YES; 197 | CLANG_ENABLE_OBJC_ARC = YES; 198 | CLANG_WARN_BOOL_CONVERSION = YES; 199 | CLANG_WARN_CONSTANT_CONVERSION = YES; 200 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 201 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 202 | CLANG_WARN_EMPTY_BODY = YES; 203 | CLANG_WARN_ENUM_CONVERSION = YES; 204 | CLANG_WARN_INFINITE_RECURSION = YES; 205 | CLANG_WARN_INT_CONVERSION = YES; 206 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 207 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 208 | CLANG_WARN_UNREACHABLE_CODE = YES; 209 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 210 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 211 | COPY_PHASE_STRIP = NO; 212 | DEBUG_INFORMATION_FORMAT = dwarf; 213 | ENABLE_STRICT_OBJC_MSGSEND = YES; 214 | ENABLE_TESTABILITY = YES; 215 | GCC_C_LANGUAGE_STANDARD = gnu99; 216 | GCC_DYNAMIC_NO_PIC = NO; 217 | GCC_NO_COMMON_BLOCKS = YES; 218 | GCC_OPTIMIZATION_LEVEL = 0; 219 | GCC_PREPROCESSOR_DEFINITIONS = ( 220 | "DEBUG=1", 221 | "$(inherited)", 222 | ); 223 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 224 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 225 | GCC_WARN_UNDECLARED_SELECTOR = YES; 226 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 227 | GCC_WARN_UNUSED_FUNCTION = YES; 228 | GCC_WARN_UNUSED_VARIABLE = YES; 229 | IPHONEOS_DEPLOYMENT_TARGET = 10.3; 230 | MTL_ENABLE_DEBUG_INFO = YES; 231 | ONLY_ACTIVE_ARCH = YES; 232 | SDKROOT = iphoneos; 233 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 234 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 235 | }; 236 | name = Debug; 237 | }; 238 | 200C1A7D1F56E81900B39793 /* Release */ = { 239 | isa = XCBuildConfiguration; 240 | buildSettings = { 241 | ALWAYS_SEARCH_USER_PATHS = NO; 242 | CLANG_ANALYZER_NONNULL = YES; 243 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 244 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 245 | CLANG_CXX_LIBRARY = "libc++"; 246 | CLANG_ENABLE_MODULES = YES; 247 | CLANG_ENABLE_OBJC_ARC = YES; 248 | CLANG_WARN_BOOL_CONVERSION = YES; 249 | CLANG_WARN_CONSTANT_CONVERSION = YES; 250 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 251 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 252 | CLANG_WARN_EMPTY_BODY = YES; 253 | CLANG_WARN_ENUM_CONVERSION = 
YES; 254 | CLANG_WARN_INFINITE_RECURSION = YES; 255 | CLANG_WARN_INT_CONVERSION = YES; 256 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 257 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 258 | CLANG_WARN_UNREACHABLE_CODE = YES; 259 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 260 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 261 | COPY_PHASE_STRIP = NO; 262 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 263 | ENABLE_NS_ASSERTIONS = NO; 264 | ENABLE_STRICT_OBJC_MSGSEND = YES; 265 | GCC_C_LANGUAGE_STANDARD = gnu99; 266 | GCC_NO_COMMON_BLOCKS = YES; 267 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 268 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 269 | GCC_WARN_UNDECLARED_SELECTOR = YES; 270 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 271 | GCC_WARN_UNUSED_FUNCTION = YES; 272 | GCC_WARN_UNUSED_VARIABLE = YES; 273 | IPHONEOS_DEPLOYMENT_TARGET = 10.3; 274 | MTL_ENABLE_DEBUG_INFO = NO; 275 | SDKROOT = iphoneos; 276 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 277 | VALIDATE_PRODUCT = YES; 278 | }; 279 | name = Release; 280 | }; 281 | 200C1A7F1F56E81900B39793 /* Debug */ = { 282 | isa = XCBuildConfiguration; 283 | buildSettings = { 284 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 285 | DEVELOPMENT_TEAM = 8W62GL8FE7; 286 | INFOPLIST_FILE = TravelLog/Info.plist; 287 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 288 | PRODUCT_BUNDLE_IDENTIFIER = com.ms.demo.TravelLog; 289 | PRODUCT_MODULE_NAME = TravelLog; 290 | PRODUCT_NAME = "$(TARGET_NAME)"; 291 | SWIFT_VERSION = 3.0; 292 | }; 293 | name = Debug; 294 | }; 295 | 200C1A801F56E81900B39793 /* Release */ = { 296 | isa = XCBuildConfiguration; 297 | buildSettings = { 298 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 299 | DEVELOPMENT_TEAM = 8W62GL8FE7; 300 | INFOPLIST_FILE = TravelLog/Info.plist; 301 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 302 | PRODUCT_BUNDLE_IDENTIFIER = com.ms.demo.TravelLog; 303 | PRODUCT_MODULE_NAME = TravelLog; 304 | PRODUCT_NAME = "$(TARGET_NAME)"; 305 | SWIFT_VERSION = 3.0; 306 | }; 307 | name = Release; 308 | }; 309 | /* End XCBuildConfiguration section */ 310 | 311 | /* Begin XCConfigurationList section */ 312 | 200C1A671F56E81900B39793 /* Build configuration list for PBXProject "TravelLog" */ = { 313 | isa = XCConfigurationList; 314 | buildConfigurations = ( 315 | 200C1A7C1F56E81900B39793 /* Debug */, 316 | 200C1A7D1F56E81900B39793 /* Release */, 317 | ); 318 | defaultConfigurationIsVisible = 0; 319 | defaultConfigurationName = Release; 320 | }; 321 | 200C1A7E1F56E81900B39793 /* Build configuration list for PBXNativeTarget "TravelLog" */ = { 322 | isa = XCConfigurationList; 323 | buildConfigurations = ( 324 | 200C1A7F1F56E81900B39793 /* Debug */, 325 | 200C1A801F56E81900B39793 /* Release */, 326 | ); 327 | defaultConfigurationIsVisible = 0; 328 | defaultConfigurationName = Release; 329 | }; 330 | /* End XCConfigurationList section */ 331 | }; 332 | rootObject = 200C1A641F56E81900B39793 /* Project object */; 333 | } 334 | -------------------------------------------------------------------------------- /storyboard in books/TravelLog.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /storyboard in books/TravelLog/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // TravelLog 4 | // 5 | // 
Created by Manu Rink on 30.08.17. 6 | // Copyright © 2017 microsoft. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @UIApplicationMain 12 | class AppDelegate: UIResponder, UIApplicationDelegate { 13 | 14 | var window: UIWindow? 15 | 16 | 17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { 18 | // Override point for customization after application launch. 19 | return true 20 | } 21 | 22 | func applicationWillResignActive(_ application: UIApplication) { 23 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 24 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 25 | } 26 | 27 | func applicationDidEnterBackground(_ application: UIApplication) { 28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 30 | } 31 | 32 | func applicationWillEnterForeground(_ application: UIApplication) { 33 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 34 | } 35 | 36 | func applicationDidBecomeActive(_ application: UIApplication) { 37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 38 | } 39 | 40 | func applicationWillTerminate(_ application: UIApplication) { 41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
42 | } 43 | 44 | 45 | } 46 | 47 | -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | } 43 | ], 44 | "info" : { 45 | "version" : 1, 46 | "author" : "xcode" 47 | } 48 | } -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Picture1.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "scale" : "1x" 6 | }, 7 | { 8 | "idiom" : "universal", 9 | "filename" : "Picture1.png", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Picture1.imageset/Picture1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/TravelLog/Assets.xcassets/Picture1.imageset/Picture1.png -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Picture2.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "scale" : "1x" 6 | }, 7 | { 8 | "idiom" : "universal", 9 | "filename" : "Picture2.png", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Picture2.imageset/Picture2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/TravelLog/Assets.xcassets/Picture2.imageset/Picture2.png -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Picture3.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"images" : [ 3 | { 4 | "idiom" : "universal", 5 | "scale" : "1x" 6 | }, 7 | { 8 | "idiom" : "universal", 9 | "filename" : "Picture3.png", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Picture3.imageset/Picture3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/TravelLog/Assets.xcassets/Picture3.imageset/Picture3.png -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Picture4.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "scale" : "1x" 6 | }, 7 | { 8 | "idiom" : "universal", 9 | "filename" : "Picture4.png", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Assets.xcassets/Picture4.imageset/Picture4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codePrincess/playgrounds/4f4f5bbcf3e6725bde00c960d834d8a716e5c67b/storyboard in books/TravelLog/Assets.xcassets/Picture4.imageset/Picture4.png -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /storyboard in books/TravelLog/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UILaunchStoryboardName 24 | LaunchScreen 25 | UIMainStoryboardFile 26 | Main 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | UIInterfaceOrientationLandscapeLeft 35 | UIInterfaceOrientationLandscapeRight 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /storyboard in books/TravelLog/LogController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // LogController.swift 3 | // TravelLog 4 | // 5 | // Created by Manu Rink on 30.08.17. 6 | // Copyright © 2017 microsoft. All rights reserved. 
7 | // 8 | 9 | import Foundation 10 | import UIKit 11 | 12 | @objc(LogController) 13 | class LogController : UIViewController { 14 | 15 | @IBOutlet weak var greetingsLabel: UILabel! 16 | var greeting : String? 17 | 18 | override func viewDidLoad() { 19 | super.viewDidLoad() 20 | 21 | if let greetText = greeting { 22 | greetingsLabel.text = "Hola \(greetText) :)" 23 | } else { 24 | greetingsLabel.text = "Hola you :)" 25 | } 26 | } 27 | 28 | override func didReceiveMemoryWarning() { 29 | super.didReceiveMemoryWarning() 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /storyboard in books/TravelLog/ViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // TravelLog 4 | // 5 | // Created by Manu Rink on 30.08.17. 6 | // Copyright © 2017 microsoft. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @objc(ViewController) 12 | public class ViewController: UIViewController { 13 | 14 | @IBOutlet weak var usernameTextField: UITextField! 15 | 16 | override public func viewDidLoad() { 17 | super.viewDidLoad() 18 | // Do any additional setup after loading the view, typically from a nib. 19 | } 20 | 21 | override public func didReceiveMemoryWarning() { 22 | super.didReceiveMemoryWarning() 23 | // Dispose of any resources that can be recreated. 24 | } 25 | 26 | override public func prepare(for segue: UIStoryboardSegue, sender: Any?) { 27 | let logCtrl = segue.destination as! LogController 28 | logCtrl.greeting = usernameTextField.text 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /storyboard in books/TravelWorkspace.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /storyboard in books/setup.sh: -------------------------------------------------------------------------------- 1 | OUTPUT="Travel.playgroundbook" 2 | 3 | cp "$SRCROOT/TravelLog/ViewController.swift" "$OUTPUT/Contents/Sources" 4 | cp "$SRCROOT/TravelLog/LogController.swift" "$OUTPUT/Contents/Sources" 5 | 6 | cp "$CODESIGNING_FOLDER_PATH/Assets.car" "$OUTPUT/Contents/PrivateResources" 7 | cp -r "$CODESIGNING_FOLDER_PATH/Base.lproj/Main.storyboardc" "$OUTPUT/Contents/PrivateResources" 8 | --------------------------------------------------------------------------------
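The setup.sh script above closes the loop for the "storyboard in books" workflow: the ShellScript build phase in project.pbxproj (shellScript = "chmod u+x setup.sh\n./setup.sh") runs it on every build of the TravelLog app, copying ViewController.swift and LogController.swift from $SRCROOT into Travel.playgroundbook/Contents/Sources and the freshly compiled Assets.car and Main.storyboardc from $CODESIGNING_FOLDER_PATH into Contents/PrivateResources, so the book always ships the latest code and compiled storyboard. One detail worth flagging in the copied sources: prepare(for:sender:) force-casts the segue destination with "as! LogController", which will crash the live view the moment a second segue is added to the storyboard. A hedged, drop-in alternative for that one method (same behaviour for the existing segue; just a sketch, not the repository's code) could look like this inside ViewController:

override public func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    // Configure the destination only when it really is the log screen;
    // any other segue simply falls through instead of crashing.
    guard let logCtrl = segue.destination as? LogController else { return }
    logCtrl.greeting = usernameTextField.text
}

The force cast is harmless while the storyboard contains exactly one segue; the guarded version simply degrades gracefully if that assumption ever stops holding.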