├── .DS_Store
├── .gitignore
├── HelloCoreML.xcodeproj
├── project.pbxproj
└── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ └── IDEWorkspaceChecks.plist
├── HelloCoreML
├── .DS_Store
├── AppDelegate.swift
├── Assets.xcassets
│ ├── AppIcon.appiconset
│ │ └── Contents.json
│ ├── Contents.json
│ ├── Feathers.imageset
│ │ ├── Contents.json
│ │ └── Feathers.png
│ ├── Udanie.imageset
│ │ ├── Contents.json
│ │ └── Udanie.png
│ ├── candy.imageset
│ │ ├── Contents.json
│ │ └── candy.png
│ ├── cellphone-wallpaper.imageset
│ │ ├── Contents.json
│ │ └── cellphone-wallpaper.png
│ ├── girl.imageset
│ │ ├── Contents.json
│ │ └── girl.jpg
│ ├── mosaicImg.imageset
│ │ ├── Contents.json
│ │ └── mosaicImg.png
│ ├── museImg.imageset
│ │ ├── Contents.json
│ │ └── museImg.png
│ └── screamImg.imageset
│ │ ├── Contents.json
│ │ └── screamImg.png
├── Base.lproj
│ └── LaunchScreen.storyboard
├── Home
│ ├── CategoryRow.swift
│ └── Home.swift
├── Info.plist
├── MLModels
│ ├── AnsweringText_2x.png
│ ├── ArtStyle.jpg
│ ├── DepthPrediction_2x.png
│ ├── DrawingClassification1_2x.png
│ ├── ImageClassification1_2x.png
│ ├── ImageClassification2_2x.png
│ ├── ImageClassification3_2x.png
│ ├── ImageClassification4_2x.png
│ ├── ObjectDetectionRealTime1_2x.png
│ ├── ObjectDetectionRealTime2_2x.png
│ ├── Segmentation_2x.png
│ └── mlmodels.json
├── MachineLearning
│ ├── .DS_Store
│ ├── ArtStyles
│ │ ├── ArtStyleView.swift
│ │ ├── ArtStylesMeta.swift
│ │ ├── ArtStylesModel.swift
│ │ ├── CoreMLHelpers
│ │ │ ├── Array.swift
│ │ │ ├── CVPixelBuffer+Helpers.swift
│ │ │ ├── MLMultiArray+Image.swift
│ │ │ ├── Math.swift
│ │ │ ├── MultiArray.swift
│ │ │ ├── NonMaxSuppression.swift
│ │ │ ├── Predictions.swift
│ │ │ └── UIImage+CVPixelBuffer.swift
│ │ ├── StyleInput.swift
│ │ └── styles.json
│ ├── FCRN-DepthPrediction
│ │ ├── FCRNDepthPredictionView.swift
│ │ ├── HeatmapView.swift
│ │ ├── HeatmapViewPistProcessor.swift
│ │ └── LiveImageViewController.swift
│ ├── ImageClassifier
│ │ ├── ImageClassificationModel.swift
│ │ ├── ImageClassifierView.swift
│ │ └── ImagePicker.swift
│ ├── MNISTClassifier
│ │ ├── DrawView.swift
│ │ ├── MNISTClassificationModel.swift
│ │ └── MNISTClassifierView.swift
│ └── Object Detection
│ │ ├── ObjectDetectionView.swift
│ │ ├── ViewController.swift
│ │ └── VisionObjectRecognitionViewController.swift
├── Models
│ ├── Data.swift
│ └── MLMetaModel.swift
├── Preview Content
│ └── Preview Assets.xcassets
│ │ └── Contents.json
├── Resources
│ └── charleyrivers_feature.jpg
├── SceneDelegate.swift
└── Supporting Views
│ ├── CircleImage.swift
│ └── VideoCapture.swift
├── LICENSE
├── README.md
└── apple-machine-learning-models.png

/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 | 
5 | ## Build generated
6 | build/
7 | DerivedData/
8 | 
9 | ## Various settings
10 | *.pbxuser
11 | !default.pbxuser
12 | *.mode1v3
13 | !default.mode1v3
14 | *.mode2v3
15 | !default.mode2v3
16 | *.perspectivev3
17 | !default.perspectivev3
18 | xcuserdata/
19 | 
20 | ## Other
21 | *.moved-aside
22 | *.xccheckout
23 | *.xcscmblueprint
24 | *.mlmodel
25 | 
*/.DS_Store 26 | 27 | ## Obj-C/Swift specific 28 | *.hmap 29 | *.ipa 30 | *.dSYM.zip 31 | *.dSYM 32 | 33 | ## Playgrounds 34 | timeline.xctimeline 35 | playground.xcworkspace 36 | 37 | # Swift Package Manager 38 | # 39 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies. 40 | # Packages/ 41 | # Package.pins 42 | # Package.resolved 43 | .build/ 44 | 45 | # CocoaPods 46 | # 47 | # We recommend against adding the Pods directory to your .gitignore. However 48 | # you should judge for yourself, the pros and cons are mentioned at: 49 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control 50 | # 51 | # Pods/ 52 | 53 | # Carthage 54 | # 55 | # Add this line if you want to avoid checking in source code from Carthage dependencies. 56 | # Carthage/Checkouts 57 | 58 | Carthage/Build 59 | 60 | # fastlane 61 | # 62 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the 63 | # screenshots whenever they are needed. 64 | # For more information about the recommended setup visit: 65 | # https://docs.fastlane.tools/best-practices/source-control/#source-control 66 | 67 | fastlane/report.xml 68 | fastlane/Preview.html 69 | fastlane/screenshots/**/*.png 70 | fastlane/test_output 71 | -------------------------------------------------------------------------------- /HelloCoreML.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 50; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 003A68BA238EB24F0045A550 /* ArtStyle.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 003A68B9238EB24F0045A550 /* ArtStyle.jpg */; }; 11 | 0098547A237DA73D0020E959 /* DepthPrediction_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 00985479237DA73D0020E959 /* DepthPrediction_2x.png */; }; 12 | 00985484237DA9470020E959 /* ImageClassification3_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 0098547B237DA9470020E959 /* ImageClassification3_2x.png */; }; 13 | 00985485237DA9470020E959 /* ImageClassification1_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 0098547C237DA9470020E959 /* ImageClassification1_2x.png */; }; 14 | 00985486237DA9470020E959 /* ObjectDetectionRealTime1_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 0098547D237DA9470020E959 /* ObjectDetectionRealTime1_2x.png */; }; 15 | 00985487237DA9470020E959 /* ObjectDetectionRealTime2_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 0098547E237DA9470020E959 /* ObjectDetectionRealTime2_2x.png */; }; 16 | 00985488237DA9470020E959 /* ImageClassification4_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 0098547F237DA9470020E959 /* ImageClassification4_2x.png */; }; 17 | 00985489237DA9470020E959 /* ImageClassification2_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 00985480237DA9470020E959 /* ImageClassification2_2x.png */; }; 18 | 0098548A237DA9480020E959 /* AnsweringText_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 00985481237DA9470020E959 /* AnsweringText_2x.png */; }; 19 | 0098548B237DA9480020E959 /* DrawingClassification1_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 00985482237DA9470020E959 /* DrawingClassification1_2x.png */; }; 20 | 0098548C237DA9480020E959 /* Segmentation_2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 00985483237DA9470020E959 /* Segmentation_2x.png */; }; 21 
| 00985491237DB9570020E959 /* MNISTClassifier.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 0098548E237DB9570020E959 /* MNISTClassifier.mlmodel */; }; 22 | 00CDE9F5237E4D45003A4FA2 /* MobileNetV2.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00CDE9F4237E4D45003A4FA2 /* MobileNetV2.mlmodel */; }; 23 | 00CDE9F7237E4DF7003A4FA2 /* Resnet50.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00CDE9F6237E4DF7003A4FA2 /* Resnet50.mlmodel */; }; 24 | 00CDE9FA237E4E04003A4FA2 /* FCRN.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00CDE9F9237E4E04003A4FA2 /* FCRN.mlmodel */; }; 25 | 00CDE9FC237E4E42003A4FA2 /* SqueezeNet.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00CDE9FB237E4E42003A4FA2 /* SqueezeNet.mlmodel */; }; 26 | 00CDEA04237E4FD0003A4FA2 /* YOLOv3.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA03237E4FD0003A4FA2 /* YOLOv3.mlmodel */; }; 27 | 00CDEA4E237E8284003A4FA2 /* VideoCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA4D237E8284003A4FA2 /* VideoCapture.swift */; }; 28 | 00CDEA50237E8A50003A4FA2 /* LiveImageViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA4F237E8A50003A4FA2 /* LiveImageViewController.swift */; }; 29 | 00CDEA52237E9995003A4FA2 /* FCRNDepthPredictionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA51237E9995003A4FA2 /* FCRNDepthPredictionView.swift */; }; 30 | 00CDEA54237EE0C8003A4FA2 /* HeatmapView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA53237EE0C8003A4FA2 /* HeatmapView.swift */; }; 31 | 00CDEA56237EEC88003A4FA2 /* HeatmapViewPistProcessor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA55237EEC88003A4FA2 /* HeatmapViewPistProcessor.swift */; }; 32 | 00CDEA58237EFEB1003A4FA2 /* DrawView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA57237EFEB1003A4FA2 /* DrawView.swift */; }; 33 | 00CDEA5A237F0549003A4FA2 /* MNISTClassifierView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA59237F0549003A4FA2 /* MNISTClassifierView.swift */; }; 34 | 00CDEA5C237FED05003A4FA2 /* MNISTClassificationModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA5B237FED05003A4FA2 /* MNISTClassificationModel.swift */; }; 35 | 00CDEA5E23800C7F003A4FA2 /* ImageClassificationModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA5D23800C7F003A4FA2 /* ImageClassificationModel.swift */; }; 36 | 00CDEA602380103A003A4FA2 /* ImagePicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA5F2380103A003A4FA2 /* ImagePicker.swift */; }; 37 | 00CDEA6223801316003A4FA2 /* ImageClassifierView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA6123801316003A4FA2 /* ImageClassifierView.swift */; }; 38 | 00CDEA6F23821C5E003A4FA2 /* VisionObjectRecognitionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA6D23821C5E003A4FA2 /* VisionObjectRecognitionViewController.swift */; }; 39 | 00CDEA7023821C5E003A4FA2 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA6E23821C5E003A4FA2 /* ViewController.swift */; }; 40 | 00CDEA7223821CEA003A4FA2 /* ObjectDetectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA7123821CEA003A4FA2 /* ObjectDetectionView.swift */; }; 41 | 00CDEA7423822105003A4FA2 /* Home.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00CDEA7323822105003A4FA2 /* Home.swift */; }; 42 | 00F804B7237D7AB7007C27C6 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00F804B6237D7AB7007C27C6 /* AppDelegate.swift */; }; 43 | 
00F804B9237D7AB7007C27C6 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00F804B8237D7AB7007C27C6 /* SceneDelegate.swift */; }; 44 | 00F804BD237D7ABA007C27C6 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 00F804BC237D7ABA007C27C6 /* Assets.xcassets */; }; 45 | 00F804C0237D7ABA007C27C6 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 00F804BF237D7ABA007C27C6 /* Preview Assets.xcassets */; }; 46 | 00F804C3237D7ABA007C27C6 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 00F804C1237D7ABA007C27C6 /* LaunchScreen.storyboard */; }; 47 | 00F804E5237D7AE0007C27C6 /* charleyrivers_feature.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 00F804D4237D7AE0007C27C6 /* charleyrivers_feature.jpg */; }; 48 | 00F804EF237D7B0F007C27C6 /* Data.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00F804EE237D7B0F007C27C6 /* Data.swift */; }; 49 | 00F804F6237D7D85007C27C6 /* CategoryRow.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00F804F5237D7D85007C27C6 /* CategoryRow.swift */; }; 50 | 00F80521237D8196007C27C6 /* CircleImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00F8051A237D8196007C27C6 /* CircleImage.swift */; }; 51 | 00F8052E237D956A007C27C6 /* mlmodels.json in Resources */ = {isa = PBXBuildFile; fileRef = 00F8052D237D956A007C27C6 /* mlmodels.json */; }; 52 | 00F80531237D987F007C27C6 /* MLMetaModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00F80530237D987F007C27C6 /* MLMetaModel.swift */; }; 53 | 00FC24B5238E45AA007B4E7A /* FNS-La-Muse.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24AF238E45AA007B4E7A /* FNS-La-Muse.mlmodel */; }; 54 | 00FC24B6238E45AA007B4E7A /* FNS-Candy.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24B0238E45AA007B4E7A /* FNS-Candy.mlmodel */; }; 55 | 00FC24B7238E45AA007B4E7A /* FNS-Udnie.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24B1238E45AA007B4E7A /* FNS-Udnie.mlmodel */; }; 56 | 00FC24B8238E45AA007B4E7A /* FNS-Feathers.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24B2238E45AA007B4E7A /* FNS-Feathers.mlmodel */; }; 57 | 00FC24B9238E45AA007B4E7A /* FNS-Mosaic.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24B3238E45AA007B4E7A /* FNS-Mosaic.mlmodel */; }; 58 | 00FC24BA238E45AA007B4E7A /* FNS-The-Scream.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24B4238E45AA007B4E7A /* FNS-The-Scream.mlmodel */; }; 59 | 00FC24BC238E45D8007B4E7A /* ArtStylesModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24BB238E45D8007B4E7A /* ArtStylesModel.swift */; }; 60 | 00FC24C6238E4CA6007B4E7A /* Math.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24BE238E4CA6007B4E7A /* Math.swift */; }; 61 | 00FC24C7238E4CA6007B4E7A /* UIImage+CVPixelBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24BF238E4CA6007B4E7A /* UIImage+CVPixelBuffer.swift */; }; 62 | 00FC24C8238E4CA6007B4E7A /* Array.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24C0238E4CA6007B4E7A /* Array.swift */; }; 63 | 00FC24C9238E4CA6007B4E7A /* Predictions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24C1238E4CA6007B4E7A /* Predictions.swift */; }; 64 | 00FC24CA238E4CA6007B4E7A /* MultiArray.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24C2238E4CA6007B4E7A /* MultiArray.swift */; }; 65 | 00FC24CB238E4CA6007B4E7A /* CVPixelBuffer+Helpers.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24C3238E4CA6007B4E7A /* CVPixelBuffer+Helpers.swift */; }; 66 | 
00FC24CC238E4CA6007B4E7A /* NonMaxSuppression.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24C4238E4CA6007B4E7A /* NonMaxSuppression.swift */; }; 67 | 00FC24CD238E4CA6007B4E7A /* MLMultiArray+Image.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24C5238E4CA6007B4E7A /* MLMultiArray+Image.swift */; }; 68 | 00FC24CF238E4CC8007B4E7A /* StyleInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24CE238E4CC8007B4E7A /* StyleInput.swift */; }; 69 | 00FC24D1238E4EFC007B4E7A /* ArtStyleView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24D0238E4EFC007B4E7A /* ArtStyleView.swift */; }; 70 | 00FC24D3238E742D007B4E7A /* ArtStylesMeta.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00FC24D2238E742D007B4E7A /* ArtStylesMeta.swift */; }; 71 | 00FC24D5238E749D007B4E7A /* styles.json in Resources */ = {isa = PBXBuildFile; fileRef = 00FC24D4238E749D007B4E7A /* styles.json */; }; 72 | /* End PBXBuildFile section */ 73 | 74 | /* Begin PBXFileReference section */ 75 | 003A68B9238EB24F0045A550 /* ArtStyle.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = ArtStyle.jpg; sourceTree = ""; }; 76 | 00985479237DA73D0020E959 /* DepthPrediction_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = DepthPrediction_2x.png; sourceTree = ""; }; 77 | 0098547B237DA9470020E959 /* ImageClassification3_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ImageClassification3_2x.png; sourceTree = ""; }; 78 | 0098547C237DA9470020E959 /* ImageClassification1_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ImageClassification1_2x.png; sourceTree = ""; }; 79 | 0098547D237DA9470020E959 /* ObjectDetectionRealTime1_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ObjectDetectionRealTime1_2x.png; sourceTree = ""; }; 80 | 0098547E237DA9470020E959 /* ObjectDetectionRealTime2_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ObjectDetectionRealTime2_2x.png; sourceTree = ""; }; 81 | 0098547F237DA9470020E959 /* ImageClassification4_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ImageClassification4_2x.png; sourceTree = ""; }; 82 | 00985480237DA9470020E959 /* ImageClassification2_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ImageClassification2_2x.png; sourceTree = ""; }; 83 | 00985481237DA9470020E959 /* AnsweringText_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = AnsweringText_2x.png; sourceTree = ""; }; 84 | 00985482237DA9470020E959 /* DrawingClassification1_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = DrawingClassification1_2x.png; sourceTree = ""; }; 85 | 00985483237DA9470020E959 /* Segmentation_2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = Segmentation_2x.png; sourceTree = ""; }; 86 | 0098548E237DB9570020E959 /* MNISTClassifier.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = MNISTClassifier.mlmodel; sourceTree = ""; }; 87 | 00CDE9F4237E4D45003A4FA2 /* MobileNetV2.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = MobileNetV2.mlmodel; sourceTree = ""; }; 88 | 00CDE9F6237E4DF7003A4FA2 /* Resnet50.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = Resnet50.mlmodel; sourceTree = ""; }; 89 | 00CDE9F9237E4E04003A4FA2 /* FCRN.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = FCRN.mlmodel; sourceTree = 
""; }; 90 | 00CDE9FB237E4E42003A4FA2 /* SqueezeNet.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = SqueezeNet.mlmodel; sourceTree = ""; }; 91 | 00CDEA03237E4FD0003A4FA2 /* YOLOv3.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = YOLOv3.mlmodel; sourceTree = ""; }; 92 | 00CDEA4D237E8284003A4FA2 /* VideoCapture.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoCapture.swift; sourceTree = ""; }; 93 | 00CDEA4F237E8A50003A4FA2 /* LiveImageViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LiveImageViewController.swift; sourceTree = ""; }; 94 | 00CDEA51237E9995003A4FA2 /* FCRNDepthPredictionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FCRNDepthPredictionView.swift; sourceTree = ""; }; 95 | 00CDEA53237EE0C8003A4FA2 /* HeatmapView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HeatmapView.swift; sourceTree = ""; }; 96 | 00CDEA55237EEC88003A4FA2 /* HeatmapViewPistProcessor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HeatmapViewPistProcessor.swift; sourceTree = ""; }; 97 | 00CDEA57237EFEB1003A4FA2 /* DrawView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DrawView.swift; sourceTree = ""; }; 98 | 00CDEA59237F0549003A4FA2 /* MNISTClassifierView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MNISTClassifierView.swift; sourceTree = ""; }; 99 | 00CDEA5B237FED05003A4FA2 /* MNISTClassificationModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MNISTClassificationModel.swift; sourceTree = ""; }; 100 | 00CDEA5D23800C7F003A4FA2 /* ImageClassificationModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageClassificationModel.swift; sourceTree = ""; }; 101 | 00CDEA5F2380103A003A4FA2 /* ImagePicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImagePicker.swift; sourceTree = ""; }; 102 | 00CDEA6123801316003A4FA2 /* ImageClassifierView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageClassifierView.swift; sourceTree = ""; }; 103 | 00CDEA6D23821C5E003A4FA2 /* VisionObjectRecognitionViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VisionObjectRecognitionViewController.swift; sourceTree = ""; }; 104 | 00CDEA6E23821C5E003A4FA2 /* ViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 105 | 00CDEA7123821CEA003A4FA2 /* ObjectDetectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ObjectDetectionView.swift; sourceTree = ""; }; 106 | 00CDEA7323822105003A4FA2 /* Home.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Home.swift; sourceTree = ""; }; 107 | 00F804B3237D7AB7007C27C6 /* HelloCoreML.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = HelloCoreML.app; sourceTree = BUILT_PRODUCTS_DIR; }; 108 | 00F804B6237D7AB7007C27C6 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 109 | 00F804B8237D7AB7007C27C6 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; 110 | 00F804BC237D7ABA007C27C6 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 111 | 00F804BF237D7ABA007C27C6 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 112 | 00F804C2237D7ABA007C27C6 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 113 | 00F804C4237D7ABA007C27C6 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 114 | 00F804D4237D7AE0007C27C6 /* charleyrivers_feature.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = charleyrivers_feature.jpg; sourceTree = ""; }; 115 | 00F804EE237D7B0F007C27C6 /* Data.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Data.swift; sourceTree = ""; }; 116 | 00F804F5237D7D85007C27C6 /* CategoryRow.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CategoryRow.swift; sourceTree = ""; }; 117 | 00F8051A237D8196007C27C6 /* CircleImage.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CircleImage.swift; sourceTree = ""; }; 118 | 00F8052D237D956A007C27C6 /* mlmodels.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = mlmodels.json; sourceTree = ""; }; 119 | 00F80530237D987F007C27C6 /* MLMetaModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MLMetaModel.swift; sourceTree = ""; }; 120 | 00FC24AF238E45AA007B4E7A /* FNS-La-Muse.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = "FNS-La-Muse.mlmodel"; sourceTree = ""; }; 121 | 00FC24B0238E45AA007B4E7A /* FNS-Candy.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = "FNS-Candy.mlmodel"; sourceTree = ""; }; 122 | 00FC24B1238E45AA007B4E7A /* FNS-Udnie.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = "FNS-Udnie.mlmodel"; sourceTree = ""; }; 123 | 00FC24B2238E45AA007B4E7A /* FNS-Feathers.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = "FNS-Feathers.mlmodel"; sourceTree = ""; }; 124 | 00FC24B3238E45AA007B4E7A /* FNS-Mosaic.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = "FNS-Mosaic.mlmodel"; sourceTree = ""; }; 125 | 00FC24B4238E45AA007B4E7A /* FNS-The-Scream.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = "FNS-The-Scream.mlmodel"; sourceTree = ""; }; 126 | 00FC24BB238E45D8007B4E7A /* ArtStylesModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ArtStylesModel.swift; sourceTree = ""; }; 127 | 00FC24BE238E4CA6007B4E7A /* Math.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Math.swift; sourceTree = ""; }; 128 | 00FC24BF238E4CA6007B4E7A /* UIImage+CVPixelBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "UIImage+CVPixelBuffer.swift"; sourceTree = ""; }; 129 | 00FC24C0238E4CA6007B4E7A /* Array.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Array.swift; sourceTree = ""; }; 130 | 00FC24C1238E4CA6007B4E7A /* Predictions.swift */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Predictions.swift; sourceTree = ""; }; 131 | 00FC24C2238E4CA6007B4E7A /* MultiArray.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MultiArray.swift; sourceTree = ""; }; 132 | 00FC24C3238E4CA6007B4E7A /* CVPixelBuffer+Helpers.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CVPixelBuffer+Helpers.swift"; sourceTree = ""; }; 133 | 00FC24C4238E4CA6007B4E7A /* NonMaxSuppression.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NonMaxSuppression.swift; sourceTree = ""; }; 134 | 00FC24C5238E4CA6007B4E7A /* MLMultiArray+Image.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "MLMultiArray+Image.swift"; sourceTree = ""; }; 135 | 00FC24CE238E4CC8007B4E7A /* StyleInput.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StyleInput.swift; sourceTree = ""; }; 136 | 00FC24D0238E4EFC007B4E7A /* ArtStyleView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ArtStyleView.swift; sourceTree = ""; }; 137 | 00FC24D2238E742D007B4E7A /* ArtStylesMeta.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ArtStylesMeta.swift; sourceTree = ""; }; 138 | 00FC24D4238E749D007B4E7A /* styles.json */ = {isa = PBXFileReference; lastKnownFileType = text.json; path = styles.json; sourceTree = ""; }; 139 | /* End PBXFileReference section */ 140 | 141 | /* Begin PBXFrameworksBuildPhase section */ 142 | 00F804B0237D7AB7007C27C6 /* Frameworks */ = { 143 | isa = PBXFrameworksBuildPhase; 144 | buildActionMask = 2147483647; 145 | files = ( 146 | ); 147 | runOnlyForDeploymentPostprocessing = 0; 148 | }; 149 | /* End PBXFrameworksBuildPhase section */ 150 | 151 | /* Begin PBXGroup section */ 152 | 00985493237DB9DE0020E959 /* MachineLearning */ = { 153 | isa = PBXGroup; 154 | children = ( 155 | 00FC24AE238E455C007B4E7A /* ArtStyles */, 156 | 00CDEA00237E4E70003A4FA2 /* Object Detection */, 157 | 00CDE9F8237E4DFB003A4FA2 /* FCRN-DepthPrediction */, 158 | 00985496237DBC570020E959 /* ImageClassifier */, 159 | 00985494237DB9ED0020E959 /* MNISTClassifier */, 160 | ); 161 | path = MachineLearning; 162 | sourceTree = ""; 163 | }; 164 | 00985494237DB9ED0020E959 /* MNISTClassifier */ = { 165 | isa = PBXGroup; 166 | children = ( 167 | 0098548E237DB9570020E959 /* MNISTClassifier.mlmodel */, 168 | 00CDEA57237EFEB1003A4FA2 /* DrawView.swift */, 169 | 00CDEA59237F0549003A4FA2 /* MNISTClassifierView.swift */, 170 | 00CDEA5B237FED05003A4FA2 /* MNISTClassificationModel.swift */, 171 | ); 172 | path = MNISTClassifier; 173 | sourceTree = ""; 174 | }; 175 | 00985496237DBC570020E959 /* ImageClassifier */ = { 176 | isa = PBXGroup; 177 | children = ( 178 | 00CDE9FB237E4E42003A4FA2 /* SqueezeNet.mlmodel */, 179 | 00CDE9F4237E4D45003A4FA2 /* MobileNetV2.mlmodel */, 180 | 00CDE9F6237E4DF7003A4FA2 /* Resnet50.mlmodel */, 181 | 00CDEA5D23800C7F003A4FA2 /* ImageClassificationModel.swift */, 182 | 00CDEA5F2380103A003A4FA2 /* ImagePicker.swift */, 183 | 00CDEA6123801316003A4FA2 /* ImageClassifierView.swift */, 184 | ); 185 | path = ImageClassifier; 186 | sourceTree = ""; 187 | }; 188 | 00CDE9F8237E4DFB003A4FA2 /* FCRN-DepthPrediction */ = { 189 | isa = PBXGroup; 190 | children = ( 191 | 00CDE9F9237E4E04003A4FA2 /* FCRN.mlmodel */, 192 | 00CDEA4F237E8A50003A4FA2 /* LiveImageViewController.swift */, 193 
| 00CDEA51237E9995003A4FA2 /* FCRNDepthPredictionView.swift */, 194 | 00CDEA53237EE0C8003A4FA2 /* HeatmapView.swift */, 195 | 00CDEA55237EEC88003A4FA2 /* HeatmapViewPistProcessor.swift */, 196 | ); 197 | path = "FCRN-DepthPrediction "; 198 | sourceTree = ""; 199 | }; 200 | 00CDEA00237E4E70003A4FA2 /* Object Detection */ = { 201 | isa = PBXGroup; 202 | children = ( 203 | 00CDEA03237E4FD0003A4FA2 /* YOLOv3.mlmodel */, 204 | 00CDEA6E23821C5E003A4FA2 /* ViewController.swift */, 205 | 00CDEA6D23821C5E003A4FA2 /* VisionObjectRecognitionViewController.swift */, 206 | 00CDEA7123821CEA003A4FA2 /* ObjectDetectionView.swift */, 207 | ); 208 | path = "Object Detection"; 209 | sourceTree = ""; 210 | }; 211 | 00F804AA237D7AB7007C27C6 = { 212 | isa = PBXGroup; 213 | children = ( 214 | 00F804B5237D7AB7007C27C6 /* HelloCoreML */, 215 | 00F804B4237D7AB7007C27C6 /* Products */, 216 | ); 217 | sourceTree = ""; 218 | }; 219 | 00F804B4237D7AB7007C27C6 /* Products */ = { 220 | isa = PBXGroup; 221 | children = ( 222 | 00F804B3237D7AB7007C27C6 /* HelloCoreML.app */, 223 | ); 224 | name = Products; 225 | sourceTree = ""; 226 | }; 227 | 00F804B5237D7AB7007C27C6 /* HelloCoreML */ = { 228 | isa = PBXGroup; 229 | children = ( 230 | 00985493237DB9DE0020E959 /* MachineLearning */, 231 | 00F8052F237D97D2007C27C6 /* MLModels */, 232 | 00F80516237D8196007C27C6 /* Supporting Views */, 233 | 00F804F4237D7D6C007C27C6 /* Home */, 234 | 00F804ED237D7AFB007C27C6 /* Models */, 235 | 00F804B6237D7AB7007C27C6 /* AppDelegate.swift */, 236 | 00F804B8237D7AB7007C27C6 /* SceneDelegate.swift */, 237 | 00F804BC237D7ABA007C27C6 /* Assets.xcassets */, 238 | 00F804C1237D7ABA007C27C6 /* LaunchScreen.storyboard */, 239 | 00F804C4237D7ABA007C27C6 /* Info.plist */, 240 | 00F804CA237D7AE0007C27C6 /* Resources */, 241 | 00F804BE237D7ABA007C27C6 /* Preview Content */, 242 | ); 243 | path = HelloCoreML; 244 | sourceTree = ""; 245 | }; 246 | 00F804BE237D7ABA007C27C6 /* Preview Content */ = { 247 | isa = PBXGroup; 248 | children = ( 249 | 00F804BF237D7ABA007C27C6 /* Preview Assets.xcassets */, 250 | ); 251 | path = "Preview Content"; 252 | sourceTree = ""; 253 | }; 254 | 00F804CA237D7AE0007C27C6 /* Resources */ = { 255 | isa = PBXGroup; 256 | children = ( 257 | 00F804D4237D7AE0007C27C6 /* charleyrivers_feature.jpg */, 258 | ); 259 | path = Resources; 260 | sourceTree = ""; 261 | }; 262 | 00F804ED237D7AFB007C27C6 /* Models */ = { 263 | isa = PBXGroup; 264 | children = ( 265 | 00F804EE237D7B0F007C27C6 /* Data.swift */, 266 | 00F80530237D987F007C27C6 /* MLMetaModel.swift */, 267 | ); 268 | path = Models; 269 | sourceTree = ""; 270 | }; 271 | 00F804F4237D7D6C007C27C6 /* Home */ = { 272 | isa = PBXGroup; 273 | children = ( 274 | 00F804F5237D7D85007C27C6 /* CategoryRow.swift */, 275 | 00CDEA7323822105003A4FA2 /* Home.swift */, 276 | ); 277 | path = Home; 278 | sourceTree = ""; 279 | }; 280 | 00F80516237D8196007C27C6 /* Supporting Views */ = { 281 | isa = PBXGroup; 282 | children = ( 283 | 00F8051A237D8196007C27C6 /* CircleImage.swift */, 284 | 00CDEA4D237E8284003A4FA2 /* VideoCapture.swift */, 285 | ); 286 | path = "Supporting Views"; 287 | sourceTree = ""; 288 | }; 289 | 00F8052F237D97D2007C27C6 /* MLModels */ = { 290 | isa = PBXGroup; 291 | children = ( 292 | 00F8052D237D956A007C27C6 /* mlmodels.json */, 293 | 00985479237DA73D0020E959 /* DepthPrediction_2x.png */, 294 | 00985481237DA9470020E959 /* AnsweringText_2x.png */, 295 | 00985482237DA9470020E959 /* DrawingClassification1_2x.png */, 296 | 0098547C237DA9470020E959 /* ImageClassification1_2x.png */, 
297 | 00985480237DA9470020E959 /* ImageClassification2_2x.png */, 298 | 0098547B237DA9470020E959 /* ImageClassification3_2x.png */, 299 | 0098547F237DA9470020E959 /* ImageClassification4_2x.png */, 300 | 0098547D237DA9470020E959 /* ObjectDetectionRealTime1_2x.png */, 301 | 0098547E237DA9470020E959 /* ObjectDetectionRealTime2_2x.png */, 302 | 00985483237DA9470020E959 /* Segmentation_2x.png */, 303 | 003A68B9238EB24F0045A550 /* ArtStyle.jpg */, 304 | ); 305 | path = MLModels; 306 | sourceTree = ""; 307 | }; 308 | 00FC24AE238E455C007B4E7A /* ArtStyles */ = { 309 | isa = PBXGroup; 310 | children = ( 311 | 00FC24B0238E45AA007B4E7A /* FNS-Candy.mlmodel */, 312 | 00FC24B2238E45AA007B4E7A /* FNS-Feathers.mlmodel */, 313 | 00FC24AF238E45AA007B4E7A /* FNS-La-Muse.mlmodel */, 314 | 00FC24B3238E45AA007B4E7A /* FNS-Mosaic.mlmodel */, 315 | 00FC24B4238E45AA007B4E7A /* FNS-The-Scream.mlmodel */, 316 | 00FC24B1238E45AA007B4E7A /* FNS-Udnie.mlmodel */, 317 | 00FC24BB238E45D8007B4E7A /* ArtStylesModel.swift */, 318 | 00FC24CE238E4CC8007B4E7A /* StyleInput.swift */, 319 | 00FC24D0238E4EFC007B4E7A /* ArtStyleView.swift */, 320 | 00FC24D2238E742D007B4E7A /* ArtStylesMeta.swift */, 321 | 00FC24D4238E749D007B4E7A /* styles.json */, 322 | 00FC24BD238E4CA6007B4E7A /* CoreMLHelpers */, 323 | ); 324 | path = ArtStyles; 325 | sourceTree = ""; 326 | }; 327 | 00FC24BD238E4CA6007B4E7A /* CoreMLHelpers */ = { 328 | isa = PBXGroup; 329 | children = ( 330 | 00FC24BE238E4CA6007B4E7A /* Math.swift */, 331 | 00FC24BF238E4CA6007B4E7A /* UIImage+CVPixelBuffer.swift */, 332 | 00FC24C0238E4CA6007B4E7A /* Array.swift */, 333 | 00FC24C1238E4CA6007B4E7A /* Predictions.swift */, 334 | 00FC24C2238E4CA6007B4E7A /* MultiArray.swift */, 335 | 00FC24C3238E4CA6007B4E7A /* CVPixelBuffer+Helpers.swift */, 336 | 00FC24C4238E4CA6007B4E7A /* NonMaxSuppression.swift */, 337 | 00FC24C5238E4CA6007B4E7A /* MLMultiArray+Image.swift */, 338 | ); 339 | path = CoreMLHelpers; 340 | sourceTree = ""; 341 | }; 342 | /* End PBXGroup section */ 343 | 344 | /* Begin PBXNativeTarget section */ 345 | 00F804B2237D7AB7007C27C6 /* HelloCoreML */ = { 346 | isa = PBXNativeTarget; 347 | buildConfigurationList = 00F804C7237D7ABA007C27C6 /* Build configuration list for PBXNativeTarget "HelloCoreML" */; 348 | buildPhases = ( 349 | 00F804AF237D7AB7007C27C6 /* Sources */, 350 | 00F804B0237D7AB7007C27C6 /* Frameworks */, 351 | 00F804B1237D7AB7007C27C6 /* Resources */, 352 | ); 353 | buildRules = ( 354 | ); 355 | dependencies = ( 356 | ); 357 | name = HelloCoreML; 358 | productName = HelloCoreML; 359 | productReference = 00F804B3237D7AB7007C27C6 /* HelloCoreML.app */; 360 | productType = "com.apple.product-type.application"; 361 | }; 362 | /* End PBXNativeTarget section */ 363 | 364 | /* Begin PBXProject section */ 365 | 00F804AB237D7AB7007C27C6 /* Project object */ = { 366 | isa = PBXProject; 367 | attributes = { 368 | LastSwiftUpdateCheck = 1120; 369 | LastUpgradeCheck = 1120; 370 | ORGANIZATIONNAME = RobinChao; 371 | TargetAttributes = { 372 | 00F804B2237D7AB7007C27C6 = { 373 | CreatedOnToolsVersion = 11.2; 374 | }; 375 | }; 376 | }; 377 | buildConfigurationList = 00F804AE237D7AB7007C27C6 /* Build configuration list for PBXProject "HelloCoreML" */; 378 | compatibilityVersion = "Xcode 9.3"; 379 | developmentRegion = en; 380 | hasScannedForEncodings = 0; 381 | knownRegions = ( 382 | en, 383 | Base, 384 | ); 385 | mainGroup = 00F804AA237D7AB7007C27C6; 386 | productRefGroup = 00F804B4237D7AB7007C27C6 /* Products */; 387 | projectDirPath = ""; 388 | projectRoot = ""; 389 | 
targets = ( 390 | 00F804B2237D7AB7007C27C6 /* HelloCoreML */, 391 | ); 392 | }; 393 | /* End PBXProject section */ 394 | 395 | /* Begin PBXResourcesBuildPhase section */ 396 | 00F804B1237D7AB7007C27C6 /* Resources */ = { 397 | isa = PBXResourcesBuildPhase; 398 | buildActionMask = 2147483647; 399 | files = ( 400 | 0098547A237DA73D0020E959 /* DepthPrediction_2x.png in Resources */, 401 | 0098548B237DA9480020E959 /* DrawingClassification1_2x.png in Resources */, 402 | 0098548C237DA9480020E959 /* Segmentation_2x.png in Resources */, 403 | 00985486237DA9470020E959 /* ObjectDetectionRealTime1_2x.png in Resources */, 404 | 00F804C3237D7ABA007C27C6 /* LaunchScreen.storyboard in Resources */, 405 | 00985485237DA9470020E959 /* ImageClassification1_2x.png in Resources */, 406 | 00F804C0237D7ABA007C27C6 /* Preview Assets.xcassets in Resources */, 407 | 00FC24D5238E749D007B4E7A /* styles.json in Resources */, 408 | 0098548A237DA9480020E959 /* AnsweringText_2x.png in Resources */, 409 | 003A68BA238EB24F0045A550 /* ArtStyle.jpg in Resources */, 410 | 00985489237DA9470020E959 /* ImageClassification2_2x.png in Resources */, 411 | 00F804E5237D7AE0007C27C6 /* charleyrivers_feature.jpg in Resources */, 412 | 00F804BD237D7ABA007C27C6 /* Assets.xcassets in Resources */, 413 | 00F8052E237D956A007C27C6 /* mlmodels.json in Resources */, 414 | 00985484237DA9470020E959 /* ImageClassification3_2x.png in Resources */, 415 | 00985487237DA9470020E959 /* ObjectDetectionRealTime2_2x.png in Resources */, 416 | 00985488237DA9470020E959 /* ImageClassification4_2x.png in Resources */, 417 | ); 418 | runOnlyForDeploymentPostprocessing = 0; 419 | }; 420 | /* End PBXResourcesBuildPhase section */ 421 | 422 | /* Begin PBXSourcesBuildPhase section */ 423 | 00F804AF237D7AB7007C27C6 /* Sources */ = { 424 | isa = PBXSourcesBuildPhase; 425 | buildActionMask = 2147483647; 426 | files = ( 427 | 00CDEA52237E9995003A4FA2 /* FCRNDepthPredictionView.swift in Sources */, 428 | 00FC24CB238E4CA6007B4E7A /* CVPixelBuffer+Helpers.swift in Sources */, 429 | 00F80531237D987F007C27C6 /* MLMetaModel.swift in Sources */, 430 | 00F804B7237D7AB7007C27C6 /* AppDelegate.swift in Sources */, 431 | 00F804B9237D7AB7007C27C6 /* SceneDelegate.swift in Sources */, 432 | 00FC24CA238E4CA6007B4E7A /* MultiArray.swift in Sources */, 433 | 00CDEA5C237FED05003A4FA2 /* MNISTClassificationModel.swift in Sources */, 434 | 00FC24CF238E4CC8007B4E7A /* StyleInput.swift in Sources */, 435 | 00CDEA6223801316003A4FA2 /* ImageClassifierView.swift in Sources */, 436 | 00FC24B7238E45AA007B4E7A /* FNS-Udnie.mlmodel in Sources */, 437 | 00FC24C7238E4CA6007B4E7A /* UIImage+CVPixelBuffer.swift in Sources */, 438 | 00CDEA04237E4FD0003A4FA2 /* YOLOv3.mlmodel in Sources */, 439 | 00FC24B5238E45AA007B4E7A /* FNS-La-Muse.mlmodel in Sources */, 440 | 00CDE9FA237E4E04003A4FA2 /* FCRN.mlmodel in Sources */, 441 | 00CDEA5E23800C7F003A4FA2 /* ImageClassificationModel.swift in Sources */, 442 | 00FC24C9238E4CA6007B4E7A /* Predictions.swift in Sources */, 443 | 00CDE9F7237E4DF7003A4FA2 /* Resnet50.mlmodel in Sources */, 444 | 00CDEA7023821C5E003A4FA2 /* ViewController.swift in Sources */, 445 | 00CDEA602380103A003A4FA2 /* ImagePicker.swift in Sources */, 446 | 00F804F6237D7D85007C27C6 /* CategoryRow.swift in Sources */, 447 | 00CDEA54237EE0C8003A4FA2 /* HeatmapView.swift in Sources */, 448 | 00FC24B6238E45AA007B4E7A /* FNS-Candy.mlmodel in Sources */, 449 | 00CDE9F5237E4D45003A4FA2 /* MobileNetV2.mlmodel in Sources */, 450 | 00FC24C8238E4CA6007B4E7A /* Array.swift in Sources */, 451 | 
00FC24D1238E4EFC007B4E7A /* ArtStyleView.swift in Sources */, 452 | 00CDEA56237EEC88003A4FA2 /* HeatmapViewPistProcessor.swift in Sources */, 453 | 00F804EF237D7B0F007C27C6 /* Data.swift in Sources */, 454 | 00CDEA50237E8A50003A4FA2 /* LiveImageViewController.swift in Sources */, 455 | 00F80521237D8196007C27C6 /* CircleImage.swift in Sources */, 456 | 00CDEA4E237E8284003A4FA2 /* VideoCapture.swift in Sources */, 457 | 00985491237DB9570020E959 /* MNISTClassifier.mlmodel in Sources */, 458 | 00FC24C6238E4CA6007B4E7A /* Math.swift in Sources */, 459 | 00CDEA58237EFEB1003A4FA2 /* DrawView.swift in Sources */, 460 | 00FC24D3238E742D007B4E7A /* ArtStylesMeta.swift in Sources */, 461 | 00FC24CC238E4CA6007B4E7A /* NonMaxSuppression.swift in Sources */, 462 | 00CDEA7423822105003A4FA2 /* Home.swift in Sources */, 463 | 00FC24B9238E45AA007B4E7A /* FNS-Mosaic.mlmodel in Sources */, 464 | 00FC24BC238E45D8007B4E7A /* ArtStylesModel.swift in Sources */, 465 | 00CDEA6F23821C5E003A4FA2 /* VisionObjectRecognitionViewController.swift in Sources */, 466 | 00CDEA7223821CEA003A4FA2 /* ObjectDetectionView.swift in Sources */, 467 | 00FC24B8238E45AA007B4E7A /* FNS-Feathers.mlmodel in Sources */, 468 | 00FC24CD238E4CA6007B4E7A /* MLMultiArray+Image.swift in Sources */, 469 | 00FC24BA238E45AA007B4E7A /* FNS-The-Scream.mlmodel in Sources */, 470 | 00CDE9FC237E4E42003A4FA2 /* SqueezeNet.mlmodel in Sources */, 471 | 00CDEA5A237F0549003A4FA2 /* MNISTClassifierView.swift in Sources */, 472 | ); 473 | runOnlyForDeploymentPostprocessing = 0; 474 | }; 475 | /* End PBXSourcesBuildPhase section */ 476 | 477 | /* Begin PBXVariantGroup section */ 478 | 00F804C1237D7ABA007C27C6 /* LaunchScreen.storyboard */ = { 479 | isa = PBXVariantGroup; 480 | children = ( 481 | 00F804C2237D7ABA007C27C6 /* Base */, 482 | ); 483 | name = LaunchScreen.storyboard; 484 | sourceTree = ""; 485 | }; 486 | /* End PBXVariantGroup section */ 487 | 488 | /* Begin XCBuildConfiguration section */ 489 | 00F804C5237D7ABA007C27C6 /* Debug */ = { 490 | isa = XCBuildConfiguration; 491 | buildSettings = { 492 | ALWAYS_SEARCH_USER_PATHS = NO; 493 | CLANG_ANALYZER_NONNULL = YES; 494 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 495 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 496 | CLANG_CXX_LIBRARY = "libc++"; 497 | CLANG_ENABLE_MODULES = YES; 498 | CLANG_ENABLE_OBJC_ARC = YES; 499 | CLANG_ENABLE_OBJC_WEAK = YES; 500 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 501 | CLANG_WARN_BOOL_CONVERSION = YES; 502 | CLANG_WARN_COMMA = YES; 503 | CLANG_WARN_CONSTANT_CONVERSION = YES; 504 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 505 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 506 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 507 | CLANG_WARN_EMPTY_BODY = YES; 508 | CLANG_WARN_ENUM_CONVERSION = YES; 509 | CLANG_WARN_INFINITE_RECURSION = YES; 510 | CLANG_WARN_INT_CONVERSION = YES; 511 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 512 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 513 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 514 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 515 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 516 | CLANG_WARN_STRICT_PROTOTYPES = YES; 517 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 518 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 519 | CLANG_WARN_UNREACHABLE_CODE = YES; 520 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 521 | COPY_PHASE_STRIP = NO; 522 | DEBUG_INFORMATION_FORMAT = dwarf; 523 | ENABLE_STRICT_OBJC_MSGSEND = YES; 524 | ENABLE_TESTABILITY = YES; 525 | GCC_C_LANGUAGE_STANDARD = gnu11; 526 | GCC_DYNAMIC_NO_PIC = NO; 
527 | GCC_NO_COMMON_BLOCKS = YES; 528 | GCC_OPTIMIZATION_LEVEL = 0; 529 | GCC_PREPROCESSOR_DEFINITIONS = ( 530 | "DEBUG=1", 531 | "$(inherited)", 532 | ); 533 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 534 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 535 | GCC_WARN_UNDECLARED_SELECTOR = YES; 536 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 537 | GCC_WARN_UNUSED_FUNCTION = YES; 538 | GCC_WARN_UNUSED_VARIABLE = YES; 539 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 540 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 541 | MTL_FAST_MATH = YES; 542 | ONLY_ACTIVE_ARCH = YES; 543 | SDKROOT = iphoneos; 544 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 545 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 546 | }; 547 | name = Debug; 548 | }; 549 | 00F804C6237D7ABA007C27C6 /* Release */ = { 550 | isa = XCBuildConfiguration; 551 | buildSettings = { 552 | ALWAYS_SEARCH_USER_PATHS = NO; 553 | CLANG_ANALYZER_NONNULL = YES; 554 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 555 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 556 | CLANG_CXX_LIBRARY = "libc++"; 557 | CLANG_ENABLE_MODULES = YES; 558 | CLANG_ENABLE_OBJC_ARC = YES; 559 | CLANG_ENABLE_OBJC_WEAK = YES; 560 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 561 | CLANG_WARN_BOOL_CONVERSION = YES; 562 | CLANG_WARN_COMMA = YES; 563 | CLANG_WARN_CONSTANT_CONVERSION = YES; 564 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 565 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 566 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 567 | CLANG_WARN_EMPTY_BODY = YES; 568 | CLANG_WARN_ENUM_CONVERSION = YES; 569 | CLANG_WARN_INFINITE_RECURSION = YES; 570 | CLANG_WARN_INT_CONVERSION = YES; 571 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 572 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 573 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 574 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 575 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 576 | CLANG_WARN_STRICT_PROTOTYPES = YES; 577 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 578 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 579 | CLANG_WARN_UNREACHABLE_CODE = YES; 580 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 581 | COPY_PHASE_STRIP = NO; 582 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 583 | ENABLE_NS_ASSERTIONS = NO; 584 | ENABLE_STRICT_OBJC_MSGSEND = YES; 585 | GCC_C_LANGUAGE_STANDARD = gnu11; 586 | GCC_NO_COMMON_BLOCKS = YES; 587 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 588 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 589 | GCC_WARN_UNDECLARED_SELECTOR = YES; 590 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 591 | GCC_WARN_UNUSED_FUNCTION = YES; 592 | GCC_WARN_UNUSED_VARIABLE = YES; 593 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 594 | MTL_ENABLE_DEBUG_INFO = NO; 595 | MTL_FAST_MATH = YES; 596 | SDKROOT = iphoneos; 597 | SWIFT_COMPILATION_MODE = wholemodule; 598 | SWIFT_OPTIMIZATION_LEVEL = "-O"; 599 | VALIDATE_PRODUCT = YES; 600 | }; 601 | name = Release; 602 | }; 603 | 00F804C8237D7ABA007C27C6 /* Debug */ = { 604 | isa = XCBuildConfiguration; 605 | buildSettings = { 606 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 607 | CODE_SIGN_STYLE = Automatic; 608 | DEVELOPMENT_ASSET_PATHS = "\"HelloCoreML/Preview Content\""; 609 | DEVELOPMENT_TEAM = H4WQ3ZHR2W; 610 | ENABLE_PREVIEWS = YES; 611 | INFOPLIST_FILE = HelloCoreML/Info.plist; 612 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 613 | LD_RUNPATH_SEARCH_PATHS = ( 614 | "$(inherited)", 615 | "@executable_path/Frameworks", 616 | ); 617 | PRODUCT_BUNDLE_IDENTIFIER = "com.robin.apple-samplecode.Vision-ML-Example"; 618 | PRODUCT_NAME = "$(TARGET_NAME)"; 619 | SWIFT_VERSION = 5.0; 620 | 
TARGETED_DEVICE_FAMILY = "1,2"; 621 | }; 622 | name = Debug; 623 | }; 624 | 00F804C9237D7ABA007C27C6 /* Release */ = { 625 | isa = XCBuildConfiguration; 626 | buildSettings = { 627 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 628 | CODE_SIGN_STYLE = Automatic; 629 | DEVELOPMENT_ASSET_PATHS = "\"HelloCoreML/Preview Content\""; 630 | DEVELOPMENT_TEAM = H4WQ3ZHR2W; 631 | ENABLE_PREVIEWS = YES; 632 | INFOPLIST_FILE = HelloCoreML/Info.plist; 633 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 634 | LD_RUNPATH_SEARCH_PATHS = ( 635 | "$(inherited)", 636 | "@executable_path/Frameworks", 637 | ); 638 | PRODUCT_BUNDLE_IDENTIFIER = "com.robin.apple-samplecode.Vision-ML-Example"; 639 | PRODUCT_NAME = "$(TARGET_NAME)"; 640 | SWIFT_VERSION = 5.0; 641 | TARGETED_DEVICE_FAMILY = "1,2"; 642 | }; 643 | name = Release; 644 | }; 645 | /* End XCBuildConfiguration section */ 646 | 647 | /* Begin XCConfigurationList section */ 648 | 00F804AE237D7AB7007C27C6 /* Build configuration list for PBXProject "HelloCoreML" */ = { 649 | isa = XCConfigurationList; 650 | buildConfigurations = ( 651 | 00F804C5237D7ABA007C27C6 /* Debug */, 652 | 00F804C6237D7ABA007C27C6 /* Release */, 653 | ); 654 | defaultConfigurationIsVisible = 0; 655 | defaultConfigurationName = Release; 656 | }; 657 | 00F804C7237D7ABA007C27C6 /* Build configuration list for PBXNativeTarget "HelloCoreML" */ = { 658 | isa = XCConfigurationList; 659 | buildConfigurations = ( 660 | 00F804C8237D7ABA007C27C6 /* Debug */, 661 | 00F804C9237D7ABA007C27C6 /* Release */, 662 | ); 663 | defaultConfigurationIsVisible = 0; 664 | defaultConfigurationName = Release; 665 | }; 666 | /* End XCConfigurationList section */ 667 | }; 668 | rootObject = 00F804AB237D7AB7007C27C6 /* Project object */; 669 | } 670 | -------------------------------------------------------------------------------- /HelloCoreML.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /HelloCoreML.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /HelloCoreML/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/.DS_Store -------------------------------------------------------------------------------- /HelloCoreML/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/14. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @UIApplicationMain 12 | class AppDelegate: UIResponder, UIApplicationDelegate { 13 | 14 | 15 | 16 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 17 | // Override point for customization after application launch. 
18 | return true 19 | } 20 | 21 | // MARK: UISceneSession Lifecycle 22 | 23 | func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration { 24 | // Called when a new scene session is being created. 25 | // Use this method to select a configuration to create the new scene with. 26 | return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role) 27 | } 28 | 29 | func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set) { 30 | // Called when the user discards a scene session. 31 | // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions. 32 | // Use this method to release any resources that were specific to the discarded scenes, as they will not return. 33 | } 34 | 35 | 36 | } 37 | 38 | -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/Feathers.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "Feathers.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | 
"version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/Feathers.imageset/Feathers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Assets.xcassets/Feathers.imageset/Feathers.png -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/Udanie.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "Udanie.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/Udanie.imageset/Udanie.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Assets.xcassets/Udanie.imageset/Udanie.png -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/candy.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "candy.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/candy.imageset/candy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Assets.xcassets/candy.imageset/candy.png -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/cellphone-wallpaper.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "cellphone-wallpaper.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/cellphone-wallpaper.imageset/cellphone-wallpaper.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Assets.xcassets/cellphone-wallpaper.imageset/cellphone-wallpaper.png -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/girl.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | 
"idiom" : "universal", 5 | "filename" : "girl.jpg", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/girl.imageset/girl.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Assets.xcassets/girl.imageset/girl.jpg -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/mosaicImg.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "mosaicImg.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/mosaicImg.imageset/mosaicImg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Assets.xcassets/mosaicImg.imageset/mosaicImg.png -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/museImg.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "museImg.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/museImg.imageset/museImg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Assets.xcassets/museImg.imageset/museImg.png -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/screamImg.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "screamImg.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "version" : 1, 19 | "author" : "xcode" 20 | } 21 | } -------------------------------------------------------------------------------- /HelloCoreML/Assets.xcassets/screamImg.imageset/screamImg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Assets.xcassets/screamImg.imageset/screamImg.png 
-------------------------------------------------------------------------------- /HelloCoreML/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /HelloCoreML/Home/CategoryRow.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CategoryRow.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/14. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | 11 | struct CategoryRow: View { 12 | var categoryName: String 13 | var items: [MLMetaModel] 14 | 15 | var body: some View { 16 | VStack(alignment: .leading) { 17 | Text(self.categoryName) 18 | .font(.headline) 19 | .padding(.leading, 15) 20 | .padding(.top, 5) 21 | 22 | ScrollView(.horizontal, showsIndicators: false) { 23 | HStack(alignment: .top, spacing: CGFloat(0)) { 24 | ForEach(self.items) { model in 25 | NavigationLink(destination: self.contentView(model: model)){ 26 | CategoryItem(model: model) 27 | } 28 | } 29 | } 30 | } 31 | .frame(height: CGFloat(185)) 32 | } 33 | } 34 | 35 | func contentView(model: MLMetaModel) -> AnyView { 36 | if model.category == .Images { 37 | switch model.id { 38 | case 1001: 39 | return AnyView(FCRNDepthPredictionView()) 40 | case 1002: 41 | return AnyView(MNISTClassifierView().environmentObject(MNISTClassificationModel())) 42 | case 1004: 43 | return AnyView(ImageClassifierView().environmentObject(ImageClassificationModel(mlModel: MobileNetV2().model))) 44 | case 1005: 45 | return AnyView(ImageClassifierView().environmentObject(ImageClassificationModel(mlModel: Resnet50().model))) 46 | case 1006: 47 | return AnyView(ImageClassifierView().environmentObject(ImageClassificationModel(mlModel: SqueezeNet().model))) 48 | case 1008: 49 | return AnyView(ObjectDetectionView()) 50 | default: 51 | return AnyView(CircleImage(image: ImageStore.shared.image(name: "charleyrivers_feature.jpg"))) 52 | } 53 | }else if model.category == .ArtStyles{ 54 | switch model.id { 55 | case 1001: 56 | return AnyView(ArtStyleView()) 57 | default: 58 | return AnyView(CircleImage(image: ImageStore.shared.image(name: "charleyrivers_feature.jpg"))) 59 | } 60 | } 61 | return AnyView(CircleImage(image: ImageStore.shared.image(name: "charleyrivers_feature.jpg"))) 62 | } 63 | } 64 | 65 | struct CategoryItem: View { 66 | var model: MLMetaModel 67 | var body: some View { 68 | VStack(alignment: .leading) { 69 | model.coverImage 70 | .renderingMode(.original) 71 | .resizable() 72 | .frame(width: 205, height: 155) 73 | .cornerRadius(5) 74 | Text(model.support == 1 ? model.name : "[WIP]---\(model.name)") 75 | .foregroundColor(.primary) 76 | .font(.caption) 77 | } 78 | .padding(.leading, 15) 79 | } 80 | } 81 | 82 | 83 | struct CategoryRow_Previews: PreviewProvider { 84 | static var previews: some View { 85 | CategoryRow( 86 | categoryName: mlModelData[0].category.rawValue, 87 | items: Array(mlModelData.prefix(4)) 88 | ) 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /HelloCoreML/Home/Home.swift: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | A view showing featured landmarks above a list of all of the landmarks. 
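 (Header comment kept from Apple's Landmarks SwiftUI sample; in this project the rows below the header list Core ML demo models grouped by category rather than landmarks.)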
6 | */ 7 | 8 | import SwiftUI 9 | 10 | struct CategoryHome: View { 11 | var categories: [String: [MLMetaModel]] { 12 | Dictionary( 13 | grouping: mlModelData, 14 | by: { $0.category.rawValue } 15 | ) 16 | } 17 | 18 | var body: some View { 19 | NavigationView { 20 | List { 21 | Header(headerImageName: "charleyrivers_feature.jpg") 22 | .scaledToFill() 23 | .frame(height: 250) 24 | .clipped() 25 | .listRowInsets(EdgeInsets()) 26 | 27 | ForEach(categories.keys.sorted(), id: \.self) { key in 28 | CategoryRow(categoryName: key, items: self.categories[key]!) 29 | } 30 | .listRowInsets(EdgeInsets()) 31 | } 32 | .navigationBarTitle(Text("SwiftUI + CoreML")) 33 | } 34 | } 35 | } 36 | 37 | struct Header: View { 38 | var headerImageName: String 39 | var body: some View { 40 | ImageStore.shared.image(name: headerImageName).resizable() 41 | } 42 | } 43 | 44 | struct CategoryHome_Previews: PreviewProvider { 45 | static var previews: some View { 46 | CategoryHome() 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /HelloCoreML/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | $(PRODUCT_BUNDLE_PACKAGE_TYPE) 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | UIApplicationSceneManifest 24 | 25 | UIApplicationSupportsMultipleScenes 26 | 27 | UISceneConfigurations 28 | 29 | UIWindowSceneSessionRoleApplication 30 | 31 | 32 | UISceneConfigurationName 33 | Default Configuration 34 | UISceneDelegateClassName 35 | $(PRODUCT_MODULE_NAME).SceneDelegate 36 | 37 | 38 | 39 | 40 | UILaunchStoryboardName 41 | LaunchScreen 42 | UIRequiredDeviceCapabilities 43 | 44 | armv7 45 | 46 | UISupportedInterfaceOrientations 47 | 48 | UIInterfaceOrientationPortrait 49 | UIInterfaceOrientationLandscapeLeft 50 | UIInterfaceOrientationLandscapeRight 51 | 52 | NSCameraUsageDescription 53 | Please give the permission to use Camera for this app feature. 
54 | UISupportedInterfaceOrientations~ipad 55 | 56 | UIInterfaceOrientationPortrait 57 | UIInterfaceOrientationPortraitUpsideDown 58 | UIInterfaceOrientationLandscapeLeft 59 | UIInterfaceOrientationLandscapeRight 60 | 61 | 62 | 63 | -------------------------------------------------------------------------------- /HelloCoreML/MLModels/AnsweringText_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/AnsweringText_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/ArtStyle.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/ArtStyle.jpg -------------------------------------------------------------------------------- /HelloCoreML/MLModels/DepthPrediction_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/DepthPrediction_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/DrawingClassification1_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/DrawingClassification1_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/ImageClassification1_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/ImageClassification1_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/ImageClassification2_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/ImageClassification2_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/ImageClassification3_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/ImageClassification3_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/ImageClassification4_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/ImageClassification4_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/ObjectDetectionRealTime1_2x.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/ObjectDetectionRealTime1_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/ObjectDetectionRealTime2_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/ObjectDetectionRealTime2_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/Segmentation_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MLModels/Segmentation_2x.png -------------------------------------------------------------------------------- /HelloCoreML/MLModels/mlmodels.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "FCRN-DepthPrediction", 4 | "category": "Images", 5 | "subtitle": "Depth Estimation", 6 | "desc": "Predict the depth from a single image.", 7 | "id": 1001, 8 | "cover": "DepthPrediction_2x.png", 9 | "support": 1 10 | }, 11 | { 12 | "name": "MNIST", 13 | "category": "Images", 14 | "subtitle": "Drawing Classification", 15 | "desc": "Classify a single handwritten digit (supports digits 0-9).", 16 | "id": 1002, 17 | "cover": "ImageClassification4_2x.png", 18 | "support": 1 19 | }, 20 | { 21 | "name": "DrawingClassifier", 22 | "category": "Images", 23 | "subtitle": "Drawing Classification", 24 | "desc": "Drawing classifier that learns to recognize new drawings based on a K-Nearest Neighbors model (KNN).", 25 | "id": 1003, 26 | "cover": "DrawingClassification1_2x.png", 27 | "support": 0 28 | }, 29 | { 30 | "name": "MobileNetV2", 31 | "category": "Images", 32 | "subtitle": "Image Classification", 33 | "desc": "The MobileNetv2 architecture trained to classify the dominant object in a camera frame or image.", 34 | "id": 1004, 35 | "cover": "ImageClassification3_2x.png", 36 | "support": 1 37 | }, 38 | { 39 | "name": "Resnet50", 40 | "category": "Images", 41 | "subtitle": "Image Classification", 42 | "desc": "A Residual Neural Network that will classify the dominant object in a camera frame or image.", 43 | "id": 1005, 44 | "cover": "ImageClassification1_2x.png", 45 | "support": 1 46 | }, 47 | { 48 | "name": "SqueezeNet", 49 | "category": "Images", 50 | "subtitle": "Image Classification", 51 | "desc": "A small Deep Neural Network architecture that classifies the dominant object in a camera frame or image.", 52 | "id": 1006, 53 | "cover": "ImageClassification2_2x.png", 54 | "support": 1 55 | }, 56 | { 57 | "name": "DeeplabV3", 58 | "category": "Images", 59 | "subtitle": "Image Segmentation", 60 | "desc": "Segment the pixels of a camera frame or image into a predefined set of classes.", 61 | "id": 1007, 62 | "cover": "Segmentation_2x.png", 63 | "support": 0 64 | }, 65 | { 66 | "name": "YOLOv3", 67 | "category": "Images", 68 | "subtitle": "Object Detection", 69 | "desc": "Locate and classify 80 different types of objects present in a camera frame or image.", 70 | "id": 1008, 71 | "cover": "ObjectDetectionRealTime2_2x.png", 72 | "support": 1 73 | }, 74 | { 75 | "name": "BERT-SQuAD", 76 | "category": "Text", 77 | "subtitle": "Question Answering", 78 | "desc": "Find answers to questions about 
paragraphs of text.", 79 | "id": 1001, 80 | "cover": "AnsweringText_2x.png", 81 | "support": 0 82 | }, 83 | { 84 | "name": "Neural Style Art", 85 | "category": "ArtStyles", 86 | "subtitle": "Neural Style Art", 87 | "desc": "Use Neural Network stylize Image/Video frame to Art.", 88 | "id": 1001, 89 | "cover": "ArtStyle.jpg", 90 | "support": 1 91 | }, 92 | ] 93 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/MachineLearning/.DS_Store -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/ArtStyleView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ArtStyleView.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/27. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | import CoreML 11 | 12 | struct ArtStyleView: View { 13 | let modelMetas: [ArtStylesMeta] = load("styles.json") 14 | let styleModel = ArtStylesModel() 15 | 16 | @State private var isPresented = false 17 | @State private var takePhoto = false 18 | @State private var sourceImage: UIImage? 19 | 20 | var body: some View { 21 | VStack(alignment: .leading) { 22 | self.sourceImage == nil ? AnyView(ArtStylePlaceholdView()) : ZStack { 23 | GeometryReader { geo in 24 | Image(uiImage: self.sourceImage!) 25 | .resizable() 26 | .aspectRatio(contentMode: .fill) 27 | .frame(width: geo.size.width) 28 | .padding(.bottom, 10.0) 29 | } 30 | }.toAnyView() 31 | HStack { 32 | Button(action: { 33 | self.takePhoto = false 34 | self.isPresented.toggle() 35 | }, label: { 36 | Image(systemName: "photo") 37 | .foregroundColor(Color.primary) 38 | }).font(.title) 39 | .padding(.leading, 20.0) 40 | Spacer() 41 | .frame(height: 45.0, alignment: .center) 42 | Button(action: { 43 | self.takePhoto = true 44 | self.isPresented.toggle() 45 | }, label: { 46 | Image(systemName: "camera") 47 | .foregroundColor(Color.primary) 48 | }).font(.title) 49 | .padding(.trailing, 20.0) 50 | } 51 | .frame(maxWidth: .infinity, maxHeight: 45.0, alignment: .center) 52 | .background(Color.black.opacity(0.2)) 53 | 54 | ScrollView(.horizontal, showsIndicators: false) { 55 | HStack(alignment: .top, spacing: CGFloat(0)) { 56 | ForEach(self.modelMetas) { meta in 57 | ArtStyleModelView(meta: meta) 58 | .onTapGesture { 59 | self.processArtStyle(meta: meta) 60 | } 61 | } 62 | } 63 | } 64 | .frame(height: CGFloat(120)) 65 | .background(Color.black.opacity(0.2)) 66 | } 67 | .sheet(isPresented: self.$isPresented) { 68 | ShowImagePicker(image: self.$sourceImage, takePhoto: self.$takePhoto) 69 | } 70 | .navigationBarTitle(Text("Art Style Image"), displayMode: .inline) 71 | } 72 | 73 | private func processArtStyle(meta: ArtStylesMeta) { 74 | guard let readiedImage = self.sourceImage else { 75 | print("Error: No Picture") 76 | return 77 | } 78 | styleModel.processImage(readiedImage, style: meta.style) { resultImage in 79 | self.sourceImage = resultImage 80 | } 81 | } 82 | } 83 | 84 | struct ArtStylePlaceholdView: View { 85 | var body: some View { 86 | ZStack { 87 | Image(systemName: "photo.fill") 88 | .resizable() 89 | .aspectRatio(contentMode: .fit) 90 | .foregroundColor(Color.init(.lightGray)) 91 | .shadow(color: .secondary, radius: 5) 92 | }.padding() 
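        // Placeholder shown while `sourceImage` is nil, i.e. before the user picks a photo or takes one with the camera.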
93 | } 94 | } 95 | 96 | struct ArtStyleModelView: View { 97 | var meta: ArtStylesMeta 98 | 99 | var body: some View { 100 | VStack(alignment: .leading) { 101 | Image(meta.cover) 102 | .renderingMode(.original) 103 | .resizable() 104 | .frame(width: 80, height: 80) 105 | .cornerRadius(5) 106 | HStack { 107 | Spacer() 108 | Text(meta.name) 109 | .foregroundColor(.white) 110 | .font(.caption) 111 | Spacer() 112 | } 113 | 114 | 115 | } 116 | .padding(.leading, 5) 117 | .padding(.trailing, 5) 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/ArtStylesMeta.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ArtStylesMeta.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/27. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | 11 | 12 | struct ArtStylesMeta: Hashable, Codable, Identifiable { 13 | var id: Int 14 | var name: String 15 | var cover: String 16 | var style: ArtStyle 17 | } 18 | 19 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/ArtStylesModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ArtStylesModel.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/27. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import CoreML 10 | import Vision 11 | import ImageIO 12 | import UIKit 13 | 14 | enum ArtStyle: Int, CaseIterable, Codable, Hashable { 15 | case Mosaic = 0 16 | case Scream = 1 17 | case Muse = 2 18 | case Udanie = 3 19 | case Candy = 4 20 | case Feathers = 5 21 | } 22 | 23 | final class ArtStylesModel { 24 | @Published var sourceImage: UIImage? = nil 25 | @Published var resultImage: UIImage? = nil 26 | 27 | 28 | 29 | 30 | var style: ArtStyle = .Mosaic 31 | 32 | var models: [MLModel] = [] 33 | private let imageSize = 720 34 | 35 | 36 | var museModel: FNS_La_Muse! 37 | var candy:FNS_Candy! 38 | var Feathers:FNS_Feathers! 39 | var udanieModel:FNS_Udnie! 40 | var mosaic:FNS_Mosaic! 41 | var screamModel:FNS_The_Scream! 42 | 43 | init() { 44 | do { 45 | let pathMuse = Bundle.main.path(forResource: "FNS-La-Muse", ofType: "mlmodelc") 46 | let pathCandy = Bundle.main.path(forResource: "FNS-Candy", ofType: "mlmodelc") 47 | let pathFeathers = Bundle.main.path(forResource: "FNS-Feathers", ofType: "mlmodelc") 48 | let pathUdanie = Bundle.main.path(forResource: "FNS-Udnie", ofType: "mlmodelc") 49 | let pathMosaic = Bundle.main.path(forResource: "FNS-Mosaic", ofType: "mlmodelc") 50 | let pathScream = Bundle.main.path(forResource: "FNS-The-Scream", ofType: "mlmodelc") 51 | 52 | museModel = try FNS_La_Muse(contentsOf:URL(fileURLWithPath: pathMuse!) ) 53 | candy = try FNS_Candy(contentsOf:URL(fileURLWithPath: pathCandy!) ) 54 | Feathers = try FNS_Feathers(contentsOf:URL(fileURLWithPath: pathFeathers!) ) 55 | udanieModel = try FNS_Udnie(contentsOf:URL(fileURLWithPath: pathUdanie!) ) 56 | mosaic = try FNS_Mosaic(contentsOf:URL(fileURLWithPath: pathMosaic!) ) 57 | screamModel = try FNS_The_Scream(contentsOf:URL(fileURLWithPath: pathScream!) ) 58 | 59 | models.append(mosaic.model) 60 | models.append(screamModel.model) 61 | models.append(museModel.model) 62 | models.append(udanieModel.model) 63 | models.append(candy.model) 64 | models.append(Feathers.model) 65 | } catch let error { 66 | print("Error: ArtStyle Model Initializer error. 
\(error)") 67 | } 68 | } 69 | 70 | func processImage(_ image: UIImage, style: ArtStyle, compeletion: (_ result: UIImage?) -> ()) { 71 | let model = models[style.rawValue] 72 | if let pixelBuffered = image.pixelBuffer(width: 720, height: 720) { 73 | let input = ArtStyleInput(input: pixelBuffered) 74 | let outFeatures = try! model.prediction(from: input) 75 | let output = outFeatures.featureValue(for: "outputImage")!.imageBufferValue! 76 | if let result = UIImage(pixelBuffer: output) { 77 | compeletion(result) 78 | }else{ 79 | compeletion(nil) 80 | } 81 | } 82 | } 83 | 84 | private func stylizeImage(cgImage: CGImage, model: MLModel) -> CGImage { 85 | let input = ArtStyleInput(input: pixelBuffer(cgImage: cgImage, width: imageSize, height: imageSize)) 86 | let outFeatures = try! model.prediction(from: input) 87 | let output = outFeatures.featureValue(for: "outputImage")!.imageBufferValue! 88 | CVPixelBufferLockBaseAddress(output, .readOnly) 89 | let width = CVPixelBufferGetWidth(output) 90 | let height = CVPixelBufferGetHeight(output) 91 | let data = CVPixelBufferGetBaseAddress(output)! 92 | 93 | let outContext = CGContext(data: data, 94 | width: width, 95 | height: height, 96 | bitsPerComponent: 8, 97 | bytesPerRow: CVPixelBufferGetBytesPerRow(output), 98 | space: CGColorSpaceCreateDeviceRGB(), 99 | bitmapInfo: CGImageByteOrderInfo.order32Little.rawValue | CGImageAlphaInfo.noneSkipFirst.rawValue)! 100 | let outImage = outContext.makeImage()! 101 | CVPixelBufferUnlockBaseAddress(output, .readOnly) 102 | 103 | return outImage 104 | } 105 | ///Method which converts given CGImage to CVPixelBuffer. 106 | private func pixelBuffer(cgImage: CGImage, width: Int, height: Int) -> CVPixelBuffer { 107 | var pixelBuffer: CVPixelBuffer? = nil 108 | let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA , nil, &pixelBuffer) 109 | if status != kCVReturnSuccess { 110 | fatalError("Cannot create pixel buffer for image") 111 | } 112 | 113 | CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags.init(rawValue: 0)) 114 | let data = CVPixelBufferGetBaseAddress(pixelBuffer!) 115 | let rgbColorSpace = CGColorSpaceCreateDeviceRGB() 116 | let bitmapInfo = CGBitmapInfo(rawValue: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.noneSkipFirst.rawValue) 117 | let context = CGContext(data: data, width: width, height: height, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: bitmapInfo.rawValue) 118 | 119 | context?.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height)) 120 | CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0)) 121 | 122 | return pixelBuffer! 123 | } 124 | 125 | } 126 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/CoreMLHelpers/Array.swift: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2017 M.I. 
Hollemans 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to 6 | deal in the Software without restriction, including without limitation the 7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 8 | sell copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in 12 | all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | IN THE SOFTWARE. 21 | */ 22 | 23 | import Swift 24 | 25 | extension Array where Element: Comparable { 26 | /** 27 | Returns the index and value of the largest element in the array. 28 | */ 29 | public func argmax() -> (Int, Element) { 30 | precondition(self.count > 0) 31 | var maxIndex = 0 32 | var maxValue = self[0] 33 | for i in 1.. maxValue { 35 | maxValue = self[i] 36 | maxIndex = i 37 | } 38 | } 39 | return (maxIndex, maxValue) 40 | } 41 | 42 | /** 43 | Returns the indices of the array's elements in sorted order. 44 | */ 45 | public func argsort(by areInIncreasingOrder: (Element, Element) -> Bool) -> [Array.Index] { 46 | return self.indices.sorted { areInIncreasingOrder(self[$0], self[$1]) } 47 | } 48 | 49 | /** 50 | Returns a new array containing the elements at the specified indices. 51 | */ 52 | public func gather(indices: [Array.Index]) -> [Element] { 53 | var a = [Element]() 54 | for i in indices { a.append(self[i]) } 55 | return a 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/CoreMLHelpers/CVPixelBuffer+Helpers.swift: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2017 M.I. Hollemans 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to 6 | deal in the Software without restriction, including without limitation the 7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 8 | sell copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in 12 | all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | IN THE SOFTWARE. 
21 | */ 22 | 23 | import Foundation 24 | import Accelerate 25 | import CoreImage 26 | 27 | /** 28 | Creates a RGB pixel buffer of the specified width and height. 29 | */ 30 | public func createPixelBuffer(width: Int, height: Int) -> CVPixelBuffer? { 31 | var pixelBuffer: CVPixelBuffer? 32 | let status = CVPixelBufferCreate(nil, width, height, 33 | kCVPixelFormatType_32BGRA, nil, 34 | &pixelBuffer) 35 | if status != kCVReturnSuccess { 36 | print("Error: could not create resized pixel buffer", status) 37 | return nil 38 | } 39 | return pixelBuffer 40 | } 41 | 42 | /** 43 | First crops the pixel buffer, then resizes it. 44 | */ 45 | public func resizePixelBuffer(_ srcPixelBuffer: CVPixelBuffer, 46 | cropX: Int, 47 | cropY: Int, 48 | cropWidth: Int, 49 | cropHeight: Int, 50 | scaleWidth: Int, 51 | scaleHeight: Int) -> CVPixelBuffer? { 52 | 53 | CVPixelBufferLockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) 54 | guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer) else { 55 | print("Error: could not get pixel buffer base address") 56 | return nil 57 | } 58 | let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer) 59 | let offset = cropY*srcBytesPerRow + cropX*4 60 | var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset), 61 | height: vImagePixelCount(cropHeight), 62 | width: vImagePixelCount(cropWidth), 63 | rowBytes: srcBytesPerRow) 64 | 65 | let destBytesPerRow = scaleWidth*4 66 | guard let destData = malloc(scaleHeight*destBytesPerRow) else { 67 | print("Error: out of memory") 68 | return nil 69 | } 70 | var destBuffer = vImage_Buffer(data: destData, 71 | height: vImagePixelCount(scaleHeight), 72 | width: vImagePixelCount(scaleWidth), 73 | rowBytes: destBytesPerRow) 74 | 75 | let error = vImageScale_ARGB8888(&srcBuffer, &destBuffer, nil, vImage_Flags(0)) 76 | CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) 77 | if error != kvImageNoError { 78 | print("Error:", error) 79 | free(destData) 80 | return nil 81 | } 82 | 83 | let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in 84 | if let ptr = ptr { 85 | free(UnsafeMutableRawPointer(mutating: ptr)) 86 | } 87 | } 88 | 89 | let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer) 90 | var dstPixelBuffer: CVPixelBuffer? 91 | let status = CVPixelBufferCreateWithBytes(nil, scaleWidth, scaleHeight, 92 | pixelFormat, destData, 93 | destBytesPerRow, releaseCallback, 94 | nil, nil, &dstPixelBuffer) 95 | if status != kCVReturnSuccess { 96 | print("Error: could not create new pixel buffer") 97 | free(destData) 98 | return nil 99 | } 100 | return dstPixelBuffer 101 | } 102 | 103 | /** 104 | Resizes a CVPixelBuffer to a new width and height. 105 | */ 106 | public func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer, 107 | width: Int, height: Int) -> CVPixelBuffer? { 108 | return resizePixelBuffer(pixelBuffer, cropX: 0, cropY: 0, 109 | cropWidth: CVPixelBufferGetWidth(pixelBuffer), 110 | cropHeight: CVPixelBufferGetHeight(pixelBuffer), 111 | scaleWidth: width, scaleHeight: height) 112 | } 113 | 114 | /** 115 | Resizes a CVPixelBuffer to a new width and height. 
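 Unlike the vImage-based overload above, this variant renders with Core Image into the caller-supplied `output` buffer, so a preallocated buffer can be reused (for example across video frames).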
116 | */ 117 | public func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer, 118 | width: Int, height: Int, 119 | output: CVPixelBuffer, context: CIContext) { 120 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer) 121 | let sx = CGFloat(width) / CGFloat(CVPixelBufferGetWidth(pixelBuffer)) 122 | let sy = CGFloat(height) / CGFloat(CVPixelBufferGetHeight(pixelBuffer)) 123 | let scaleTransform = CGAffineTransform(scaleX: sx, y: sy) 124 | let scaledImage = ciImage.transformed(by: scaleTransform) 125 | context.render(scaledImage, to: output) 126 | } 127 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/CoreMLHelpers/MLMultiArray+Image.swift: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2017 M.I. Hollemans 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to 6 | deal in the Software without restriction, including without limitation the 7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 8 | sell copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in 12 | all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | IN THE SOFTWARE. 21 | */ 22 | 23 | import Foundation 24 | import CoreML 25 | import UIKit 26 | extension MLMultiArray { 27 | /** 28 | Converts the multi-array to a UIImage. 29 | */ 30 | public func image(offset: T, scale: T) -> UIImage? { 31 | return MultiArray(self).image(offset: offset, scale: scale) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/CoreMLHelpers/Math.swift: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2017 M.I. Hollemans 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to 6 | deal in the Software without restriction, including without limitation the 7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 8 | sell copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in 12 | all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | IN THE SOFTWARE. 21 | */ 22 | 23 | import Foundation 24 | 25 | public func clamp(_ x: T, min: T, max: T) -> T { 26 | if x < min { return min } 27 | if x > max { return max } 28 | return x 29 | } 30 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/CoreMLHelpers/MultiArray.swift: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2017 M.I. Hollemans 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to 6 | deal in the Software without restriction, including without limitation the 7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 8 | sell copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in 12 | all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | IN THE SOFTWARE. 21 | */ 22 | 23 | import Foundation 24 | import CoreML 25 | import Swift 26 | import UIKit 27 | 28 | public protocol MultiArrayType: Comparable { 29 | static var multiArrayDataType: MLMultiArrayDataType { get } 30 | static func +(lhs: Self, rhs: Self) -> Self 31 | static func *(lhs: Self, rhs: Self) -> Self 32 | init(_: Int) 33 | var toUInt8: UInt8 { get } 34 | } 35 | 36 | extension Double: MultiArrayType { 37 | public static var multiArrayDataType: MLMultiArrayDataType { return .double } 38 | public var toUInt8: UInt8 { return UInt8(self) } 39 | } 40 | 41 | extension Float: MultiArrayType { 42 | public static var multiArrayDataType: MLMultiArrayDataType { return .float32 } 43 | public var toUInt8: UInt8 { return UInt8(self) } 44 | } 45 | 46 | extension Int32: MultiArrayType { 47 | public static var multiArrayDataType: MLMultiArrayDataType { return .int32 } 48 | public var toUInt8: UInt8 { return UInt8(self) } 49 | } 50 | 51 | /** 52 | Wrapper around MLMultiArray to make it more Swifty. 53 | */ 54 | public struct MultiArray { 55 | public let array: MLMultiArray 56 | public let pointer: UnsafeMutablePointer 57 | 58 | private(set) public var strides: [Int] 59 | private(set) public var shape: [Int] 60 | 61 | /** 62 | Creates a new multi-array filled with all zeros. 63 | */ 64 | public init(shape: [Int]) { 65 | let m = try! MLMultiArray(shape: shape as [NSNumber], dataType: T.multiArrayDataType) 66 | self.init(m) 67 | memset(pointer, 0, MemoryLayout.stride * count) 68 | } 69 | 70 | /** 71 | Creates a new multi-array initialized with the specified value. 
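 Illustrative example (element type and shape assumed): `MultiArray<Float>(shape: [3, 8, 8], initial: 1)` creates a 3×8×8 array filled with ones.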
72 | */ 73 | public init(shape: [Int], initial: T) { 74 | self.init(shape: shape) 75 | for i in 0..(OpaquePointer(array.dataPointer)) 92 | } 93 | 94 | /** 95 | Returns the number of elements in the entire array. 96 | */ 97 | public var count: Int { 98 | return shape.reduce(1, *) 99 | } 100 | 101 | public subscript(a: Int) -> T { 102 | get { return pointer[a] } 103 | set { pointer[a] = newValue } 104 | } 105 | 106 | public subscript(a: Int, b: Int) -> T { 107 | get { return pointer[a*strides[0] + b*strides[1]] } 108 | set { pointer[a*strides[0] + b*strides[1]] = newValue } 109 | } 110 | 111 | public subscript(a: Int, b: Int, c: Int) -> T { 112 | get { return pointer[a*strides[0] + b*strides[1] + c*strides[2]] } 113 | set { pointer[a*strides[0] + b*strides[1] + c*strides[2]] = newValue } 114 | } 115 | 116 | public subscript(a: Int, b: Int, c: Int, d: Int) -> T { 117 | get { return pointer[a*strides[0] + b*strides[1] + c*strides[2] + d*strides[3]] } 118 | set { pointer[a*strides[0] + b*strides[1] + c*strides[2] + d*strides[3]] = newValue } 119 | } 120 | 121 | public subscript(a: Int, b: Int, c: Int, d: Int, e: Int) -> T { 122 | get { return pointer[a*strides[0] + b*strides[1] + c*strides[2] + d*strides[3] + e*strides[4]] } 123 | set { pointer[a*strides[0] + b*strides[1] + c*strides[2] + d*strides[3] + e*strides[4]] = newValue } 124 | } 125 | 126 | public subscript(indices: [Int]) -> T { 127 | get { return pointer[offset(for: indices)] } 128 | set { pointer[offset(for: indices)] = newValue } 129 | } 130 | 131 | func offset(for indices: [Int]) -> Int { 132 | var offset = 0 133 | for i in 0.. MultiArray { 144 | precondition(order.count == strides.count) 145 | var newShape = shape 146 | var newStrides = strides 147 | for i in 0.. MultiArray { 158 | let newCount = dimensions.reduce(1, *) 159 | precondition(newCount == count, "Cannot reshape \(shape) to \(dimensions)") 160 | 161 | var newStrides = [Int](repeating: 0, count: dimensions.count) 162 | newStrides[dimensions.count - 1] = 1 163 | for i in stride(from: dimensions.count - 1, to: 0, by: -1) { 164 | newStrides[i - 1] = newStrides[i] * dimensions[i] 165 | } 166 | 167 | return MultiArray(array, dimensions, newStrides) 168 | } 169 | } 170 | 171 | extension MultiArray: CustomStringConvertible { 172 | public var description: String { 173 | return description([]) 174 | } 175 | 176 | func description(_ indices: [Int]) -> String { 177 | func indent(_ x: Int) -> String { 178 | return String(repeating: " ", count: x) 179 | } 180 | 181 | // This function is called recursively for every dimension. 182 | // Add an entry for this dimension to the end of the array. 183 | var indices = indices + [0] 184 | 185 | let d = indices.count - 1 // the current dimension 186 | let N = shape[d] // how many elements in this dimension 187 | 188 | var s = "[" 189 | if indices.count < shape.count { // not last dimension yet? 190 | for i in 0.. UIImage? { 227 | if shape.count == 3, let (b, w, h) = toRawBytesRGBA(offset: offset, scale: scale) { 228 | return UIImage.fromByteArrayRGBA(b, width: w, height: h) 229 | } else if shape.count == 2, let (b, w, h) = toRawBytesGray(offset: offset, scale: scale) { 230 | return UIImage.fromByteArrayGray(b, width: w, height: h) 231 | } else { 232 | return nil 233 | } 234 | } 235 | 236 | /** 237 | Converts the multi-array into an array of RGBA pixels. 238 | 239 | - Note: The multi-array must have shape (3, height, width). If your array 240 | has a different shape, use `reshape()` or `transpose()` first. 
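 Illustrative call (offset and scale values assumed): `if let (bytes, width, height) = m.toRawBytesRGBA(offset: 0, scale: 255) { ... }`.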
241 | */ 242 | public func toRawBytesRGBA(offset: T, scale: T) 243 | -> (bytes: [UInt8], width: Int, height: Int)? { 244 | guard shape.count == 3 else { 245 | print("Expected a multi-array with 3 dimensions, got \(shape)") 246 | return nil 247 | } 248 | guard shape[0] == 3 else { 249 | print("Expected first dimension to have 3 channels, got \(shape[0])") 250 | return nil 251 | } 252 | 253 | let height = shape[1] 254 | let width = shape[2] 255 | var bytes = [UInt8](repeating: 0, count: height * width * 4) 256 | 257 | for h in 0.. (bytes: [UInt8], width: Int, height: Int)? { 281 | guard shape.count == 2 else { 282 | print("Expected a multi-array with 2 dimensions, got \(shape)") 283 | return nil 284 | } 285 | 286 | let height = shape[0] 287 | let width = shape[1] 288 | var bytes = [UInt8](repeating: 0, count: height * width) 289 | 290 | for h in 0.. UIImage? { 307 | guard shape.count == 3 else { 308 | print("Expected a multi-array with 3 dimensions, got \(shape)") 309 | return nil 310 | } 311 | guard channel >= 0 && channel < shape[0] else { 312 | print("Channel must be between 0 and \(shape[0] - 1)") 313 | return nil 314 | } 315 | 316 | let height = shape[1] 317 | let width = shape[2] 318 | var a = MultiArray(shape: [height, width]) 319 | for y in 0.. Float { 32 | let areaA = a.width * a.height 33 | if areaA <= 0 { return 0 } 34 | 35 | let areaB = b.width * b.height 36 | if areaB <= 0 { return 0 } 37 | 38 | let intersectionMinX = max(a.minX, b.minX) 39 | let intersectionMinY = max(a.minY, b.minY) 40 | let intersectionMaxX = min(a.maxX, b.maxX) 41 | let intersectionMaxY = min(a.maxY, b.maxY) 42 | let intersectionArea = max(intersectionMaxY - intersectionMinY, 0) * 43 | max(intersectionMaxX - intersectionMinX, 0) 44 | return Float(intersectionArea / (areaA + areaB - intersectionArea)) 45 | } 46 | 47 | public typealias NMSPrediction = (classIndex: Int, score: Float, rect: CGRect) 48 | 49 | /** 50 | Removes bounding boxes that overlap too much with other boxes that have 51 | a higher score. 52 | */ 53 | public func nonMaxSuppression(predictions: [NMSPrediction], iouThreshold: Float, maxBoxes: Int) -> [Int] { 54 | return nonMaxSuppression(predictions: predictions, 55 | indices: Array(predictions.indices), 56 | iouThreshold: iouThreshold, 57 | maxBoxes: maxBoxes) 58 | } 59 | 60 | /** 61 | Removes bounding boxes that overlap too much with other boxes that have 62 | a higher score. 63 | 64 | Based on code from https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/non_max_suppression_op.cc 65 | 66 | - Note: This version of NMS ignores the class of the bounding boxes. Since it 67 | selects the bounding boxes in a greedy fashion, if a certain class has many 68 | boxes that are selected, then it is possible none of the boxes of the other 69 | classes get selected. 70 | 71 | - Parameters: 72 | - predictions: an array of bounding boxes and their scores 73 | - indices: which predictions to look at 74 | - iouThreshold: used to decide whether boxes overlap too much 75 | - maxBoxes: the maximum number of boxes that will be selected 76 | 77 | - Returns: the array indices of the selected bounding boxes 78 | */ 79 | public func nonMaxSuppression(predictions: [NMSPrediction], 80 | indices: [Int], 81 | iouThreshold: Float, 82 | maxBoxes: Int) -> [Int] { 83 | 84 | // Sort the boxes based on their confidence scores, from high to low. 
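    // For example, with iouThreshold = 0.5 a lower-scoring box is suppressed once it shares more than half of its union area with an already-selected box.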
85 | let sortedIndices = indices.sorted { predictions[$0].score > predictions[$1].score } 86 | 87 | var selected: [Int] = [] 88 | 89 | // Loop through the bounding boxes, from highest score to lowest score, 90 | // and determine whether or not to keep each box. 91 | for i in 0..= maxBoxes { break } 93 | 94 | var shouldSelect = true 95 | let boxA = predictions[sortedIndices[i]] 96 | 97 | // Does the current box overlap one of the selected boxes more than the 98 | // given threshold amount? Then it's too similar, so don't keep it. 99 | for j in 0.. iouThreshold { 102 | shouldSelect = false 103 | break 104 | } 105 | } 106 | 107 | // This bounding box did not overlap too much with any previously selected 108 | // bounding box, so we'll keep it. 109 | if shouldSelect { 110 | selected.append(sortedIndices[i]) 111 | } 112 | } 113 | 114 | return selected 115 | } 116 | 117 | /** 118 | Multi-class version of non maximum suppression. 119 | 120 | Where `nonMaxSuppression()` does not look at the class of the predictions at 121 | all, the multi-class version first selects the best bounding boxes for each 122 | class, and then keeps the best ones of those. 123 | 124 | With this method you can usually expect to see at least one bounding box for 125 | each class (unless all the scores for a given class are really low). 126 | 127 | Based on code from: https://github.com/tensorflow/models/blob/master/object_detection/core/post_processing.py 128 | 129 | - Parameters: 130 | - numClasses: the number of classes 131 | - predictions: an array of bounding boxes and their scores 132 | - scoreThreshold: used to only keep bounding boxes with a high enough score 133 | - iouThreshold: used to decide whether boxes overlap too much 134 | - maxPerClass: the maximum number of boxes that will be selected per class 135 | - maxTotal: maximum number of boxes that will be selected over all classes 136 | 137 | - Returns: the array indices of the selected bounding boxes 138 | */ 139 | public func nonMaxSuppressionMultiClass(numClasses: Int, 140 | predictions: [NMSPrediction], 141 | scoreThreshold: Float, 142 | iouThreshold: Float, 143 | maxPerClass: Int, 144 | maxTotal: Int) -> [Int] { 145 | var selectedBoxes: [Int] = [] 146 | 147 | // Look at all the classes one-by-one. 148 | for c in 0.. scoreThreshold { 158 | filteredBoxes.append(p) 159 | } 160 | } 161 | } 162 | 163 | // Only keep the best bounding boxes for this class. 164 | let nmsBoxes = nonMaxSuppression(predictions: predictions, 165 | indices: filteredBoxes, 166 | iouThreshold: iouThreshold, 167 | maxBoxes: maxPerClass) 168 | 169 | // Add the indices of the surviving boxes to the big list. 170 | selectedBoxes.append(contentsOf: nmsBoxes) 171 | } 172 | 173 | // Sort all the surviving boxes by score and only keep the best ones. 174 | let sortedBoxes = selectedBoxes.sorted { predictions[$0].score > predictions[$1].score } 175 | return Array(sortedBoxes.prefix(maxTotal)) 176 | } 177 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/CoreMLHelpers/Predictions.swift: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2017 M.I. 
Hollemans 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to 6 | deal in the Software without restriction, including without limitation the 7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 8 | sell copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in 12 | all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | IN THE SOFTWARE. 21 | */ 22 | 23 | import Vision 24 | 25 | /** 26 | Returns the top `k` predictions from Core ML classification results as an 27 | array of `(String, Double)` pairs. 28 | */ 29 | public func top(_ k: Int, _ prob: [String: Double]) -> [(String, Double)] { 30 | return Array(prob.map { x in (x.key, x.value) } 31 | .sorted(by: { a, b -> Bool in a.1 > b.1 }) 32 | .prefix(through: min(k, prob.count) - 1)) 33 | } 34 | 35 | /** 36 | Returns the top `k` predictions from Vision classification results as an 37 | array of `(String, Double)` pairs. 38 | */ 39 | public func top(_ k: Int, _ observations: [VNClassificationObservation]) -> [(String, Double)] { 40 | // The Vision observations are sorted by confidence already. 41 | return observations.prefix(through: min(k, observations.count) - 1) 42 | .map { ($0.identifier, Double($0.confidence)) } 43 | } 44 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/CoreMLHelpers/UIImage+CVPixelBuffer.swift: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (c) 2017 M.I. Hollemans 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to 6 | deal in the Software without restriction, including without limitation the 7 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 8 | sell copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in 12 | all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 19 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 20 | IN THE SOFTWARE. 21 | */ 22 | 23 | import UIKit 24 | import VideoToolbox 25 | 26 | extension UIImage { 27 | /** 28 | Resizes the image to width x height and converts it to an RGB CVPixelBuffer. 
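 This is how `ArtStylesModel.processImage` prepares model input, e.g. `image.pixelBuffer(width: 720, height: 720)`.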
29 | */ 30 | public func pixelBuffer(width: Int, height: Int) -> CVPixelBuffer? { 31 | return pixelBuffer(width: width, height: height, 32 | pixelFormatType: kCVPixelFormatType_32ARGB, 33 | colorSpace: CGColorSpaceCreateDeviceRGB(), 34 | alphaInfo: .noneSkipFirst) 35 | } 36 | 37 | /** 38 | Resizes the image to width x height and converts it to a grayscale CVPixelBuffer. 39 | */ 40 | public func pixelBufferGray(width: Int, height: Int) -> CVPixelBuffer? { 41 | return pixelBuffer(width: width, height: height, 42 | pixelFormatType: kCVPixelFormatType_OneComponent8, 43 | colorSpace: CGColorSpaceCreateDeviceGray(), 44 | alphaInfo: .none) 45 | } 46 | 47 | func pixelBuffer(width: Int, height: Int, pixelFormatType: OSType, 48 | colorSpace: CGColorSpace, alphaInfo: CGImageAlphaInfo) -> CVPixelBuffer? { 49 | var maybePixelBuffer: CVPixelBuffer? 50 | let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, 51 | kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] 52 | let status = CVPixelBufferCreate(kCFAllocatorDefault, 53 | width, 54 | height, 55 | pixelFormatType, 56 | attrs as CFDictionary, 57 | &maybePixelBuffer) 58 | 59 | guard status == kCVReturnSuccess, let pixelBuffer = maybePixelBuffer else { 60 | return nil 61 | } 62 | 63 | CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) 64 | let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer) 65 | 66 | guard let context = CGContext(data: pixelData, 67 | width: width, 68 | height: height, 69 | bitsPerComponent: 8, 70 | bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), 71 | space: colorSpace, 72 | bitmapInfo: alphaInfo.rawValue) 73 | else { 74 | return nil 75 | } 76 | 77 | UIGraphicsPushContext(context) 78 | context.translateBy(x: 0, y: CGFloat(height)) 79 | context.scaleBy(x: 1, y: -1) 80 | self.draw(in: CGRect(x: 0, y: 0, width: width, height: height)) 81 | UIGraphicsPopContext() 82 | 83 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) 84 | return pixelBuffer 85 | } 86 | } 87 | 88 | extension UIImage { 89 | /** 90 | Creates a new UIImage from a CVPixelBuffer. 91 | NOTE: This only works for RGB pixel buffers, not for grayscale. 92 | */ 93 | public convenience init?(pixelBuffer: CVPixelBuffer) { 94 | var cgImage: CGImage? 95 | VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage) 96 | 97 | if let cgImage = cgImage { 98 | self.init(cgImage: cgImage) 99 | } else { 100 | return nil 101 | } 102 | } 103 | 104 | /** 105 | Creates a new UIImage from a CVPixelBuffer, using Core Image. 106 | */ 107 | public convenience init?(pixelBuffer: CVPixelBuffer, context: CIContext) { 108 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer) 109 | let rect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), 110 | height: CVPixelBufferGetHeight(pixelBuffer)) 111 | if let cgImage = context.createCGImage(ciImage, from: rect) { 112 | self.init(cgImage: cgImage) 113 | } else { 114 | return nil 115 | } 116 | } 117 | } 118 | 119 | extension UIImage { 120 | /** 121 | Creates a new UIImage from an array of RGBA bytes. 122 | */ 123 | @nonobjc public class func fromByteArrayRGBA(_ bytes: [UInt8], 124 | width: Int, 125 | height: Int, 126 | scale: CGFloat = 0, 127 | orientation: UIImage.Orientation = .up) -> UIImage? 
{ 128 | return fromByteArray(bytes, width: width, height: height, 129 | scale: scale, orientation: orientation, 130 | bytesPerRow: width * 4, 131 | colorSpace: CGColorSpaceCreateDeviceRGB(), 132 | alphaInfo: .premultipliedLast) 133 | } 134 | 135 | /** 136 | Creates a new UIImage from an array of grayscale bytes. 137 | */ 138 | @nonobjc public class func fromByteArrayGray(_ bytes: [UInt8], 139 | width: Int, 140 | height: Int, 141 | scale: CGFloat = 0, 142 | orientation: UIImage.Orientation = .up) -> UIImage? { 143 | return fromByteArray(bytes, width: width, height: height, 144 | scale: scale, orientation: orientation, 145 | bytesPerRow: width, 146 | colorSpace: CGColorSpaceCreateDeviceGray(), 147 | alphaInfo: .none) 148 | } 149 | 150 | @nonobjc class func fromByteArray(_ bytes: [UInt8], 151 | width: Int, 152 | height: Int, 153 | scale: CGFloat, 154 | orientation: UIImage.Orientation, 155 | bytesPerRow: Int, 156 | colorSpace: CGColorSpace, 157 | alphaInfo: CGImageAlphaInfo) -> UIImage? { 158 | var image: UIImage? 159 | bytes.withUnsafeBytes { ptr in 160 | if let context = CGContext(data: UnsafeMutableRawPointer(mutating: ptr.baseAddress!), 161 | width: width, 162 | height: height, 163 | bitsPerComponent: 8, 164 | bytesPerRow: bytesPerRow, 165 | space: colorSpace, 166 | bitmapInfo: alphaInfo.rawValue), 167 | let cgImage = context.makeImage() { 168 | image = UIImage(cgImage: cgImage, scale: scale, orientation: orientation) 169 | } 170 | } 171 | return image 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/StyleInput.swift: -------------------------------------------------------------------------------- 1 | // 2 | // StyleInput.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/27. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import CoreML 10 | 11 | 12 | 13 | class ArtStyleInput: MLFeatureProvider { 14 | var input: CVPixelBuffer 15 | 16 | var featureNames: Set { 17 | get { 18 | return ["inputImage"] 19 | } 20 | } 21 | 22 | func featureValue(for featureName: String) -> MLFeatureValue? { 23 | if featureName == "inputImage" { 24 | return MLFeatureValue(pixelBuffer: input) 25 | } 26 | return nil 27 | } 28 | 29 | init(input: CVPixelBuffer) { 30 | self.input = input 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ArtStyles/styles.json: -------------------------------------------------------------------------------- 1 | 2 | [ 3 | { 4 | "id": 1001, 5 | "style": 4, 6 | "name": "Candy", 7 | "cover": "candy" 8 | }, 9 | { 10 | "id": 1002, 11 | "style": 5, 12 | "name": "Feathers", 13 | "cover": "Feathers" 14 | }, 15 | { 16 | "id": 1003, 17 | "style": 2, 18 | "name": "Muse", 19 | "cover": "museImg" 20 | }, 21 | { 22 | "id": 1004, 23 | "style": 0, 24 | "name": "Mosaic", 25 | "cover": "mosaicImg" 26 | }, 27 | { 28 | "id": 1005, 29 | "style": 1, 30 | "name": "Scream", 31 | "cover": "screamImg" 32 | }, 33 | { 34 | "id": 1006, 35 | "style": 3, 36 | "name": "Udnie", 37 | "cover": "Udanie" 38 | } 39 | ] 40 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/FCRN-DepthPrediction /FCRNDepthPredictionView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FCRN-DepthPredictionView.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/15. 6 | // Copyright © 2019 RobinChao. 
All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | 11 | struct FCRNDepthPredictionView: View { 12 | var body: some View { 13 | VStack { 14 | Spacer() 15 | LiveImageViewController() 16 | Spacer() 17 | } 18 | .background(Color.gray) 19 | .navigationBarTitle(Text("FCRN-DepthPrediction"), displayMode: .inline) 20 | } 21 | } 22 | 23 | struct ContentView_Previews: PreviewProvider { 24 | static var previews: some View { 25 | FCRNDepthPredictionView() 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/FCRN-DepthPrediction /HeatmapView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // HeatmapView.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/15. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | import UIKit 11 | 12 | class HeatmapView: UIView { 13 | var heatmap: Array<Array<Double>>? = nil { 14 | didSet { 15 | self.setNeedsDisplay() 16 | } 17 | } 18 | 19 | override func draw(_ rect: CGRect) { 20 | if let context = UIGraphicsGetCurrentContext(){ 21 | context.clear(rect) 22 | 23 | guard let heatmap = self.heatmap else { 24 | return 25 | } 26 | 27 | let size = self.bounds.size 28 | let heatmap_w = heatmap.count 29 | let heatmap_h = heatmap.first?.count ?? 0 30 | let width = size.width / CGFloat(heatmap_w) 31 | let height = size.height / CGFloat(heatmap_h) 32 | 33 | for j in 0 ..< heatmap_h { 34 | for i in 0 ..< heatmap_w { 35 | let value = heatmap[i][j] 36 | var alpha = CGFloat(value) 37 | if alpha > 1 { 38 | alpha = 1 39 | } else if alpha < 0 { 40 | alpha = 0 41 | } 42 | 43 | let rect = CGRect(x: CGFloat(i) * width, y: CGFloat(j) * height, width: width, height: height) 44 | 45 | let color = UIColor(white: 1 - alpha, alpha: 1) 46 | let bpath = UIBezierPath(rect: rect) 47 | 48 | color.set() 49 | bpath.fill() 50 | } 51 | } 52 | } 53 | 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/FCRN-DepthPrediction /HeatmapViewPistProcessor.swift: -------------------------------------------------------------------------------- 1 | // 2 | // HeatmapViewPistProcessor.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/15. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import CoreML 10 | 11 | class HeatmapViewPistProcessor { 12 | func convertTo2DArray(from heatmaps: MLMultiArray) -> Array<Array<Double>> { 13 | guard heatmaps.shape.count >= 3 else { 14 | print("Error: heatmap's shape is invalid.
\(heatmaps.shape)") 15 | return [] 16 | } 17 | 18 | let _ /*keypoint_number*/ = heatmaps.shape[0].intValue 19 | let heatmap_w = heatmaps.shape[1].intValue 20 | let heatmap_h = heatmaps.shape[2].intValue 21 | 22 | var convertedHeatmap: Array> = Array(repeating: Array(repeating: 0.0, count: heatmap_w), count: heatmap_h) 23 | 24 | var minimumValue = Double.greatestFiniteMagnitude 25 | var maximumValue = -Double.greatestFiniteMagnitude 26 | 27 | for i in 0 ..< heatmap_w { 28 | for j in 0 ..< heatmap_h { 29 | let index = i * (heatmap_h) + j 30 | let confidence = heatmaps[index].doubleValue 31 | guard confidence > 0 else { continue } 32 | convertedHeatmap[j][i] = confidence 33 | 34 | if minimumValue > confidence { 35 | minimumValue = confidence 36 | } 37 | if maximumValue < confidence { 38 | maximumValue = confidence 39 | } 40 | } 41 | } 42 | 43 | let minmaxGap = maximumValue - minimumValue 44 | for i in 0 ..< heatmap_w { 45 | for j in 0 ..< heatmap_h { 46 | convertedHeatmap[j][i] = (convertedHeatmap[j][i] - minimumValue) / minmaxGap 47 | } 48 | } 49 | return convertedHeatmap 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/FCRN-DepthPrediction /LiveImageViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // LiveImageViewController.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/15. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | import Vision 11 | 12 | final class LiveImageViewController: UIViewController { 13 | var videoPreview: UIView! = { 14 | return UIView(frame: CGRect(x: 0, y: 0, width: UIScreen.main.bounds.size.width, height: 450)) 15 | }() 16 | 17 | var drawingView: HeatmapView! = { 18 | return HeatmapView(frame: CGRect(x: 0, y: 455, width: UIScreen.main.bounds.size.width, height: 450)) 19 | }() 20 | 21 | var videoCapture: VideoCapture! 22 | 23 | let estimationModel = FCRN() 24 | 25 | var request: VNCoreMLRequest? 26 | var visionModel: VNCoreMLModel? 27 | 28 | let postprocessor = HeatmapViewPistProcessor() 29 | 30 | override func viewDidLoad() { 31 | super.viewDidLoad() 32 | 33 | view.addSubview(videoPreview) 34 | view.addSubview(drawingView) 35 | 36 | setupModel() 37 | setupCamera() 38 | } 39 | 40 | override func viewWillAppear(_ animated: Bool) { 41 | super.viewWillAppear(animated) 42 | videoCapture.start() 43 | } 44 | 45 | override func viewWillDisappear(_ animated: Bool) { 46 | super.viewWillDisappear(animated) 47 | videoCapture.stop() 48 | } 49 | 50 | func setupModel() { 51 | if let visionModel = try? 
VNCoreMLModel(for: estimationModel.model) { 52 | self.visionModel = visionModel 53 | request = VNCoreMLRequest(model: visionModel, completionHandler: visionRequestDidComplete) 54 | request?.imageCropAndScaleOption = .scaleFill 55 | } else { 56 | fatalError("Error: Setup Vision Model error") 57 | } 58 | } 59 | 60 | func setupCamera() { 61 | videoCapture = VideoCapture() 62 | videoCapture.delegate = self 63 | videoCapture.fps = 50 64 | videoCapture.setup(sessionPreset: .medium) { (success) in 65 | if success { 66 | if let previewLayer = self.videoCapture.previewLayer { 67 | self.videoPreview.layer.addSublayer(previewLayer) 68 | self.resizePreviewLayer() 69 | } 70 | 71 | self.videoCapture.start() 72 | } 73 | } 74 | } 75 | 76 | override func viewDidLayoutSubviews() { 77 | super.viewDidLayoutSubviews() 78 | resizePreviewLayer() 79 | } 80 | 81 | func resizePreviewLayer() { 82 | videoCapture.previewLayer?.frame = videoPreview.bounds 83 | } 84 | } 85 | 86 | extension LiveImageViewController: VideoCaptureDelegate { 87 | func videoCapture(_ capture: VideoCapture, didCaptureViewFrame pixelBuffer: CVPixelBuffer?) { 88 | if let pixelBuffer = pixelBuffer { 89 | predict(with: pixelBuffer) 90 | } 91 | } 92 | } 93 | 94 | 95 | extension LiveImageViewController { 96 | func predict(with pixelBuffer: CVPixelBuffer) { 97 | guard let request = request else { 98 | fatalError("Error: VNCoreMLRequest error") 99 | } 100 | 101 | let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]) 102 | try? handler.perform([request]) 103 | } 104 | 105 | 106 | func visionRequestDidComplete(request: VNRequest, error: Error?) { 107 | if let observations = request.results as? [VNCoreMLFeatureValueObservation], 108 | let heatmap = observations.first?.featureValue.multiArrayValue { 109 | 110 | let convertedHeatmap = postprocessor.convertTo2DArray(from: heatmap) 111 | 112 | DispatchQueue.main.async { [weak self] in 113 | self?.drawingView.heatmap = convertedHeatmap 114 | } 115 | } 116 | } 117 | } 118 | 119 | 120 | 121 | extension LiveImageViewController: UIViewControllerRepresentable { 122 | public typealias UIViewControllerType = LiveImageViewController 123 | 124 | func makeUIViewController(context: UIViewControllerRepresentableContext) -> LiveImageViewController.UIViewControllerType { 125 | return LiveImageViewController() 126 | } 127 | 128 | func updateUIViewController(_ uiViewController: LiveImageViewController, context: UIViewControllerRepresentableContext) { 129 | 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ImageClassifier/ImageClassificationModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ImageClassificationModel.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/16. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import CoreML 11 | import Vision 12 | import ImageIO 13 | 14 | 15 | final class ImageClassificationModel: ObservableObject { 16 | @Published var image: UIImage? 
= nil 17 | @Published var classificationResult: String = "" 18 | var model: MLModel = MobileNetV2().model 19 | 20 | init(mlModel model:MLModel) { 21 | self.model = model 22 | } 23 | 24 | private lazy var classificationRequest: VNCoreMLRequest = { 25 | do { 26 | let model = try VNCoreMLModel(for: self.model) 27 | return VNCoreMLRequest(model: model) { [weak self] (request, error) in 28 | self?.handleClassification(for: request, error: error) 29 | } 30 | } catch { 31 | fatalError("Error: Can not load Vision ML Model. \(error).") 32 | } 33 | }() 34 | 35 | func startPredictClassification() { 36 | self.classificationResult = "Classifying..." 37 | let orientation = CGImagePropertyOrientation(rawValue: UInt32(self.image!.imageOrientation.rawValue)) 38 | 39 | guard let ciImage = CIImage(image: self.image!) else { 40 | fatalError("Unable to create \(CIImage.self) from \(String(describing: image))") 41 | } 42 | 43 | DispatchQueue.global(qos: .userInitiated).async { 44 | let handler = VNImageRequestHandler(ciImage: ciImage, orientation: orientation ?? .up) 45 | do { 46 | try handler.perform([self.classificationRequest]) 47 | } catch { 48 | print("Failed to perform classification.\n\(error.localizedDescription)") 49 | } 50 | } 51 | } 52 | 53 | private func handleClassification(for request: VNRequest, error: Error?) { 54 | DispatchQueue.main.async { 55 | guard let results = request.results else { 56 | self.classificationResult = "Unable to classify image.\n\(error!.localizedDescription)" 57 | return 58 | } 59 | let observations = results as! [VNClassificationObservation] 60 | 61 | if observations.isEmpty { 62 | self.classificationResult = "Nothing recognized!" 63 | } else { 64 | let topClassifications = observations.prefix(2) 65 | let descriptions = topClassifications.map { classification in 66 | return String(format: "%.2f %@", classification.confidence, classification.identifier) 67 | } 68 | self.classificationResult = "Classification:\n" + descriptions.joined(separator: "\n") 69 | } 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ImageClassifier/ImageClassifierView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ImageClassifierView.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/16. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | 11 | 12 | struct ImageClassifierView: View { 13 | @EnvironmentObject var classificationModel: ImageClassificationModel 14 | @State private var isPresented = false 15 | @State private var takePhoto = false 16 | 17 | fileprivate func classification() { 18 | self.classificationModel.startPredictClassification() 19 | } 20 | 21 | var body: some View { 22 | VStack { 23 | self.classificationModel.image == nil ? PlaceholdView().toAnyView() : ZStack { 24 | Image(uiImage: self.classificationModel.image!) 
25 | .resizable() 26 | .aspectRatio(contentMode: .fill) 27 | .onTapGesture { 28 | self.classification() 29 | } 30 | 31 | Text(self.classificationModel.classificationResult.localizedCapitalized) 32 | .foregroundColor(.white) 33 | .font(.system(size: 22)) 34 | .shadow(color: .black, radius: 1, x: 2, y: 2) 35 | .padding() 36 | .background(Rectangle() 37 | .foregroundColor(Color.init(.systemBackground)) 38 | .opacity(0.33) 39 | .cornerRadius(10)) 40 | 41 | }.toAnyView() 42 | HStack { 43 | Button(action: { 44 | self.takePhoto = false 45 | self.classificationModel.classificationResult = "Tap to Classify" 46 | self.isPresented.toggle() 47 | }, label: { 48 | Image(systemName: "photo") 49 | }).font(.title) 50 | Spacer() 51 | .frame(width: 250, height: 44, alignment: .trailing) 52 | Button(action: { 53 | self.takePhoto = true 54 | self.classificationModel.classificationResult = "Tap to Classify" 55 | self.isPresented.toggle() 56 | }, label: { 57 | Image(systemName: "camera") 58 | }).font(.title) 59 | }.padding() 60 | .frame(maxWidth: .infinity) 61 | .background(Color.gray.opacity(0.2)).toAnyView() 62 | } 63 | .sheet(isPresented: self.$isPresented) { 64 | ShowImagePicker(image: self.$classificationModel.image, takePhoto: self.$takePhoto) 65 | } 66 | .navigationBarTitle(Text("Image Classifier"), displayMode: .inline) 67 | .onDisappear { 68 | self.classificationModel.image = nil 69 | } 70 | } 71 | } 72 | 73 | 74 | struct PlaceholdView: View { 75 | var body: some View { 76 | ZStack { 77 | Image(systemName: "photo.fill") 78 | .resizable() 79 | .aspectRatio(contentMode: .fit) 80 | .foregroundColor(Color.init(.systemRed)) 81 | .shadow(color: .secondary, radius: 5) 82 | }.padding() 83 | } 84 | } 85 | 86 | 87 | extension View { 88 | func toAnyView() -> AnyView { 89 | AnyView(self) 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/ImageClassifier/ImagePicker.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ImagePicker.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/16. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | 11 | final class ImagePickerCoordinator: NSObject { 12 | @Binding var image: UIImage? 13 | @Binding var takePhoto: Bool 14 | 15 | init(image: Binding, takePhoto: Binding) { 16 | _image = image 17 | _takePhoto = takePhoto 18 | } 19 | } 20 | 21 | struct ShowImagePicker: UIViewControllerRepresentable { 22 | typealias UIViewControllerType = UIImagePickerController 23 | 24 | @Binding var image: UIImage? 
25 | @Binding var takePhoto: Bool 26 | 27 | func makeCoordinator() -> ImagePickerCoordinator { 28 | ImagePickerCoordinator(image: $image, takePhoto: $takePhoto) 29 | } 30 | 31 | func makeUIViewController(context: UIViewControllerRepresentableContext) -> ShowImagePicker.UIViewControllerType { 32 | let pickerControoler = UIImagePickerController() 33 | pickerControoler.delegate = context.coordinator 34 | 35 | guard UIImagePickerController.isSourceTypeAvailable(.camera) else { 36 | return pickerControoler 37 | } 38 | 39 | switch self.takePhoto { 40 | case true: 41 | pickerControoler.sourceType = .camera 42 | case false: 43 | pickerControoler.sourceType = .photoLibrary 44 | } 45 | 46 | pickerControoler.allowsEditing = true 47 | 48 | return pickerControoler 49 | } 50 | 51 | func updateUIViewController(_ uiViewController: ShowImagePicker.UIViewControllerType, context: UIViewControllerRepresentableContext) { 52 | 53 | } 54 | } 55 | 56 | 57 | extension ImagePickerCoordinator: UINavigationControllerDelegate, UIImagePickerControllerDelegate { 58 | func imagePickerControllerDidCancel(_ picker: UIImagePickerController) { 59 | picker.dismiss(animated: true) 60 | } 61 | 62 | func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) { 63 | guard let uiImage = info[UIImagePickerController.InfoKey.originalImage] as? UIImage else { 64 | return 65 | } 66 | 67 | self.image = uiImage 68 | picker.dismiss(animated: true) 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/MNISTClassifier/DrawView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // DrawView.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/15. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import SwiftUI 11 | 12 | final class DrawView: UIView { 13 | var linewidth = CGFloat(15) { 14 | didSet { 15 | setNeedsDisplay() 16 | } 17 | } 18 | var color = UIColor.white { 19 | didSet { 20 | setNeedsDisplay() 21 | } 22 | } 23 | 24 | 25 | var lines: [Line] = [] 26 | var lastPoint: CGPoint! 27 | 28 | override func touchesBegan(_ touches: Set, with event: UIEvent?) { 29 | lastPoint = touches.first!.location(in: self) 30 | } 31 | 32 | override func touchesMoved(_ touches: Set, with event: UIEvent?) { 33 | let newPoint = touches.first!.location(in: self) 34 | 35 | lines.append(Line(start: lastPoint, end: newPoint)) 36 | lastPoint = newPoint 37 | 38 | setNeedsDisplay() 39 | } 40 | 41 | override func draw(_ rect: CGRect) { 42 | super.draw(rect) 43 | 44 | let drawPath = UIBezierPath() 45 | drawPath.lineCapStyle = .round 46 | 47 | for line in lines { 48 | drawPath.move(to: line.start) 49 | drawPath.addLine(to: line.end) 50 | } 51 | drawPath.lineWidth = linewidth 52 | color.set() 53 | drawPath.stroke() 54 | } 55 | 56 | func getViewContext() -> CGContext? { 57 | let colorSpace: CGColorSpace = CGColorSpaceCreateDeviceGray() 58 | 59 | let bitmapInfo = CGImageAlphaInfo.none.rawValue 60 | 61 | let context = CGContext(data: nil, 62 | width: 28, 63 | height: 28, 64 | bitsPerComponent: 8, 65 | bytesPerRow: 28, 66 | space: colorSpace, 67 | bitmapInfo: bitmapInfo) 68 | 69 | context!.translateBy(x: 0, y: 28) 70 | context!.scaleBy(x: 28 / self.frame.size.width, y: -28 / self.frame.size.height) 71 | 72 | self.layer.render(in: context!) 
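// renders the canvas layer into the 28x28 grayscale context created above; the caller turns it into a CGImage for MNIST classification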
73 | 74 | return context 75 | } 76 | 77 | func clear() { 78 | self.lines = [] 79 | self.backgroundColor = UIColor.black 80 | setNeedsDisplay() 81 | } 82 | } 83 | 84 | 85 | class Line { 86 | var start, end: CGPoint 87 | 88 | init(start: CGPoint, end: CGPoint) { 89 | self.start = start 90 | self.end = end 91 | } 92 | } 93 | 94 | 95 | struct DrawCanvasView: UIViewRepresentable{ 96 | let drawView = DrawView() 97 | 98 | func makeUIView(context: Context) -> DrawView { 99 | return drawView 100 | } 101 | 102 | func updateUIView(_ uiView: DrawView, context: Context) { 103 | drawView.setNeedsDisplay() 104 | } 105 | 106 | func getConext() -> CGContext? { 107 | return drawView.getViewContext() 108 | } 109 | 110 | func earsing(){ 111 | drawView.clear() 112 | } 113 | 114 | } 115 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/MNISTClassifier/MNISTClassificationModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // MNISTClassificationModel.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/16. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | import UIKit 9 | import CoreML 10 | import Vision 11 | import ImageIO 12 | 13 | 14 | final class MNISTClassificationModel: ObservableObject { 15 | @Published var cgImage: CGImage? = nil 16 | @Published var classificationText: String = "" 17 | 18 | private lazy var mnistClassificationRequest: VNCoreMLRequest = { 19 | do { 20 | let model = try VNCoreMLModel(for: MNISTClassifier().model) 21 | let request = VNCoreMLRequest(model: model) { [weak self](request, error) in 22 | self?.handleClassification(for: request, error: error) 23 | } 24 | return request 25 | } catch { 26 | fatalError("Error: Can not load Vision ML Model. \(error).") 27 | } 28 | }() 29 | 30 | func startPredictClassification() { 31 | self.classificationText = "Classifying..." 32 | let ciImage = CIImage(cgImage: self.cgImage!) 33 | let handler = VNImageRequestHandler(ciImage: ciImage, options: [:]) 34 | do { 35 | try handler.perform([self.mnistClassificationRequest]) 36 | } catch { 37 | print("Error: Image Request Perform error \(error).") 38 | } 39 | } 40 | 41 | private func handleClassification(for request: VNRequest, error: Error?) { 42 | DispatchQueue.main.async { 43 | guard let results = request.results else { 44 | self.classificationText = "Unable to classify image.\n\(error!.localizedDescription)" 45 | return 46 | } 47 | let observations = results as! [VNClassificationObservation] 48 | 49 | if observations.isEmpty { 50 | self.classificationText = "Nothing recognized!" 51 | } else { 52 | guard let best = observations.first else { 53 | fatalError("Error: Can not get best result.") 54 | } 55 | self.classificationText = best.identifier 56 | } 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/MNISTClassifier/MNISTClassifierView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // MNISTClassifierView.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/16. 6 | // Copyright © 2019 RobinChao. All rights reserved. 
7 | // 8 | 9 | import SwiftUI 10 | import CoreML 11 | import Vision 12 | 13 | struct MNISTClassifierView: View { 14 | private let drawCanvasView = DrawCanvasView() 15 | 16 | @EnvironmentObject var mnistModel: MNISTClassificationModel 17 | 18 | fileprivate func classification() { 19 | self.mnistModel.startPredictClassification() 20 | } 21 | 22 | var body: some View { 23 | VStack { 24 | drawCanvasView 25 | .background(Color.black) 26 | .frame(height: CGFloat(450)) 27 | HStack(alignment: .top) { 28 | Button(action: { 29 | self.drawCanvasView.earsing() 30 | }) { 31 | HStack { 32 | Image(systemName: "rectangle.grid.1x2.fill") 33 | Text("Clear") 34 | } 35 | .padding() 36 | .background(Color.yellow) 37 | .cornerRadius(5) 38 | } 39 | Spacer() 40 | .frame(width: 50) 41 | Button(action: { 42 | guard let context = self.drawCanvasView.getConext(), let inputImage = context.makeImage() else { 43 | fatalError("Error: Get context or make image fail.") 44 | } 45 | self.mnistModel.cgImage = inputImage 46 | self.classification() 47 | }) { 48 | Image(systemName: "rectangle.grid.1x2.fill") 49 | Text("Predict") 50 | } 51 | .padding() 52 | .foregroundColor(Color.red) 53 | .background(Color.blue) 54 | .cornerRadius(5) 55 | } 56 | Spacer() 57 | VStack { 58 | Text(self.mnistModel.classificationText) 59 | .bold() 60 | .font(.system(size: 150)) 61 | } 62 | .padding() 63 | .frame(width: 300, height: 300, alignment: .center) 64 | .background(Color.gray) 65 | } 66 | .background(Color.white) 67 | .navigationBarTitle(Text("MNISTClassifier"), displayMode: .inline) 68 | } 69 | } 70 | 71 | 72 | 73 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/Object Detection/ObjectDetectionView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ObjectDetectionView.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/18. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import SwiftUI 10 | import CoreML 11 | 12 | 13 | struct ObjectDetectionView: View { 14 | var body: some View { 15 | VStack { 16 | Spacer() 17 | VisionObjectRecognitionViewController() 18 | Spacer() 19 | } 20 | .background(Color.gray) 21 | .navigationBarTitle(Text("ObjectDetection"), displayMode: .inline) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/Object Detection/ViewController.swift: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Contains the view controller for the Breakfast Finder. 6 | */ 7 | 8 | import UIKit 9 | import AVFoundation 10 | import Vision 11 | 12 | class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 13 | 14 | var bufferSize: CGSize = .zero 15 | var rootLayer: CALayer! = nil 16 | 17 | private var previewView: UIView! 18 | private let session = AVCaptureSession() 19 | private var previewLayer: AVCaptureVideoPreviewLayer! 
= nil 20 | private let videoDataOutput = AVCaptureVideoDataOutput() 21 | 22 | private let videoDataOutputQueue = DispatchQueue(label: "VideoDataOutput", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem) 23 | 24 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 25 | // to be implemented in the subclass 26 | } 27 | 28 | override func viewDidLoad() { 29 | super.viewDidLoad() 30 | previewView = UIView(frame: CGRect(x: 0, y: 0, width: UIScreen.main.bounds.size.width, height: UIScreen.main.bounds.size.height)) 31 | view.addSubview(previewView) 32 | setupAVCapture() 33 | } 34 | 35 | override func didReceiveMemoryWarning() { 36 | super.didReceiveMemoryWarning() 37 | // Dispose of any resources that can be recreated. 38 | } 39 | 40 | func setupAVCapture() { 41 | var deviceInput: AVCaptureDeviceInput! 42 | 43 | // Select a video device, make an input 44 | let videoDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .back).devices.first 45 | do { 46 | deviceInput = try AVCaptureDeviceInput(device: videoDevice!) 47 | } catch { 48 | print("Could not create video device input: \(error)") 49 | return 50 | } 51 | 52 | session.beginConfiguration() 53 | session.sessionPreset = .vga640x480 // Model image size is smaller. 54 | 55 | // Add a video input 56 | guard session.canAddInput(deviceInput) else { 57 | print("Could not add video device input to the session") 58 | session.commitConfiguration() 59 | return 60 | } 61 | session.addInput(deviceInput) 62 | if session.canAddOutput(videoDataOutput) { 63 | session.addOutput(videoDataOutput) 64 | // Add a video data output 65 | videoDataOutput.alwaysDiscardsLateVideoFrames = true 66 | videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] 67 | videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue) 68 | } else { 69 | print("Could not add video data output to the session") 70 | session.commitConfiguration() 71 | return 72 | } 73 | let captureConnection = videoDataOutput.connection(with: .video) 74 | // Always process the frames 75 | captureConnection?.isEnabled = true 76 | do { 77 | try videoDevice!.lockForConfiguration() 78 | let dimensions = CMVideoFormatDescriptionGetDimensions((videoDevice?.activeFormat.formatDescription)!) 
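// read the camera's active format so bufferSize matches the frames later handed to Vision for bounding-box scaling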
79 | bufferSize.width = CGFloat(dimensions.width) 80 | bufferSize.height = CGFloat(dimensions.height) 81 | videoDevice!.unlockForConfiguration() 82 | } catch { 83 | print(error) 84 | } 85 | session.commitConfiguration() 86 | previewLayer = AVCaptureVideoPreviewLayer(session: session) 87 | previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill 88 | rootLayer = previewView.layer 89 | previewLayer.frame = rootLayer.bounds 90 | rootLayer.addSublayer(previewLayer) 91 | } 92 | 93 | func startCaptureSession() { 94 | session.startRunning() 95 | } 96 | 97 | // Clean up capture setup 98 | func teardownAVCapture() { 99 | previewLayer.removeFromSuperlayer() 100 | previewLayer = nil 101 | } 102 | 103 | func captureOutput(_ captureOutput: AVCaptureOutput, didDrop didDropSampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 104 | // print("frame dropped") 105 | } 106 | 107 | public func exifOrientationFromDeviceOrientation() -> CGImagePropertyOrientation { 108 | let curDeviceOrientation = UIDevice.current.orientation 109 | let exifOrientation: CGImagePropertyOrientation 110 | 111 | switch curDeviceOrientation { 112 | case UIDeviceOrientation.portraitUpsideDown: // Device oriented vertically, home button on the top 113 | exifOrientation = .left 114 | case UIDeviceOrientation.landscapeLeft: // Device oriented horizontally, home button on the right 115 | exifOrientation = .upMirrored 116 | case UIDeviceOrientation.landscapeRight: // Device oriented horizontally, home button on the left 117 | exifOrientation = .down 118 | case UIDeviceOrientation.portrait: // Device oriented vertically, home button on the bottom 119 | exifOrientation = .up 120 | default: 121 | exifOrientation = .up 122 | } 123 | return exifOrientation 124 | } 125 | } 126 | 127 | -------------------------------------------------------------------------------- /HelloCoreML/MachineLearning/Object Detection/VisionObjectRecognitionViewController.swift: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | Contains the object recognition view controller for the Breakfast Finder. 6 | */ 7 | 8 | import UIKit 9 | import AVFoundation 10 | import Vision 11 | import SwiftUI 12 | 13 | final class VisionObjectRecognitionViewController: ViewController { 14 | var objectDetectionModel: MLModel = YOLOv3().model 15 | 16 | private var detectionOverlay: CALayer! = nil 17 | 18 | // Vision parts 19 | private var requests = [VNRequest]() 20 | 21 | func setupVision(){ 22 | // Setup Vision parts 23 | do { 24 | let visionModel = try VNCoreMLModel(for: objectDetectionModel) 25 | let objectRecognition = VNCoreMLRequest(model: visionModel, completionHandler: { (request, error) in 26 | DispatchQueue.main.async(execute: { 27 | // perform all the UI updates on the main queue 28 | if let results = request.results { 29 | self.drawVisionRequestResults(results) 30 | } 31 | }) 32 | }) 33 | self.requests = [objectRecognition] 34 | } catch let error as NSError { 35 | print("Model loading went wrong: \(error)") 36 | } 37 | } 38 | 39 | func drawVisionRequestResults(_ results: [Any]) { 40 | CATransaction.begin() 41 | CATransaction.setValue(kCFBooleanTrue, forKey: kCATransactionDisableActions) 42 | detectionOverlay.sublayers = nil // remove all the old recognized objects 43 | for observation in results where observation is VNRecognizedObjectObservation { 44 | guard let objectObservation = observation as? 
VNRecognizedObjectObservation else { 45 | continue 46 | } 47 | // Select only the label with the highest confidence. 48 | let topLabelObservation = objectObservation.labels[0] 49 | let objectBounds = VNImageRectForNormalizedRect(objectObservation.boundingBox, Int(bufferSize.width), Int(bufferSize.height)) 50 | 51 | let shapeLayer = self.createRoundedRectLayerWithBounds(objectBounds) 52 | 53 | let textLayer = self.createTextSubLayerInBounds(objectBounds, 54 | identifier: topLabelObservation.identifier, 55 | confidence: topLabelObservation.confidence) 56 | shapeLayer.addSublayer(textLayer) 57 | detectionOverlay.addSublayer(shapeLayer) 58 | } 59 | self.updateLayerGeometry() 60 | CATransaction.commit() 61 | } 62 | 63 | override func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 64 | guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { 65 | return 66 | } 67 | 68 | let exifOrientation = exifOrientationFromDeviceOrientation() 69 | 70 | let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: exifOrientation, options: [:]) 71 | do { 72 | try imageRequestHandler.perform(self.requests) 73 | } catch { 74 | print(error) 75 | } 76 | } 77 | 78 | override func setupAVCapture() { 79 | super.setupAVCapture() 80 | 81 | // setup Vision parts 82 | setupLayers() 83 | updateLayerGeometry() 84 | setupVision() 85 | 86 | // start the capture 87 | startCaptureSession() 88 | } 89 | 90 | func setupLayers() { 91 | detectionOverlay = CALayer() // container layer that has all the renderings of the observations 92 | detectionOverlay.name = "DetectionOverlay" 93 | detectionOverlay.bounds = CGRect(x: 0.0, 94 | y: 0.0, 95 | width: bufferSize.width, 96 | height: bufferSize.height) 97 | detectionOverlay.position = CGPoint(x: rootLayer.bounds.midX, y: rootLayer.bounds.midY) 98 | rootLayer.addSublayer(detectionOverlay) 99 | } 100 | 101 | func updateLayerGeometry() { 102 | let bounds = rootLayer.bounds 103 | var scale: CGFloat 104 | 105 | let xScale: CGFloat = bounds.size.width / bufferSize.height 106 | let yScale: CGFloat = bounds.size.height / bufferSize.width 107 | 108 | scale = fmax(xScale, yScale) 109 | if scale.isInfinite { 110 | scale = 1.0 111 | } 112 | CATransaction.begin() 113 | CATransaction.setValue(kCFBooleanTrue, forKey: kCATransactionDisableActions) 114 | 115 | // rotate the layer into screen orientation and scale and mirror 116 | detectionOverlay.setAffineTransform(CGAffineTransform(rotationAngle: CGFloat(.pi / 2.0)).scaledBy(x: scale, y: -scale)) 117 | // center the layer 118 | detectionOverlay.position = CGPoint (x: bounds.midX, y: bounds.midY) 119 | 120 | CATransaction.commit() 121 | 122 | } 123 | 124 | func createTextSubLayerInBounds(_ bounds: CGRect, identifier: String, confidence: VNConfidence) -> CATextLayer { 125 | let textLayer = CATextLayer() 126 | textLayer.name = "Object Label" 127 | let formattedString = NSMutableAttributedString(string: String(format: "\(identifier)\nConfidence: %.2f", confidence)) 128 | let largeFont = UIFont(name: "Helvetica", size: 24.0)! 
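// applied below only to the identifier portion of the label text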
129 | formattedString.addAttributes([NSAttributedString.Key.font: largeFont], range: NSRange(location: 0, length: identifier.count)) 130 | textLayer.string = formattedString 131 | textLayer.bounds = CGRect(x: 0, y: 0, width: bounds.size.height - 10, height: bounds.size.width - 10) 132 | textLayer.position = CGPoint(x: bounds.midX, y: bounds.midY) 133 | textLayer.shadowOpacity = 0.7 134 | textLayer.shadowOffset = CGSize(width: 2, height: 2) 135 | textLayer.foregroundColor = CGColor(colorSpace: CGColorSpaceCreateDeviceRGB(), components: [0.0, 0.0, 0.0, 1.0]) 136 | textLayer.contentsScale = 2.0 // retina rendering 137 | // rotate the layer into screen orientation and scale and mirror 138 | textLayer.setAffineTransform(CGAffineTransform(rotationAngle: CGFloat(.pi / 2.0)).scaledBy(x: 1.0, y: -1.0)) 139 | return textLayer 140 | } 141 | 142 | func createRoundedRectLayerWithBounds(_ bounds: CGRect) -> CALayer { 143 | let shapeLayer = CALayer() 144 | shapeLayer.bounds = bounds 145 | shapeLayer.position = CGPoint(x: bounds.midX, y: bounds.midY) 146 | shapeLayer.name = "Found Object" 147 | shapeLayer.backgroundColor = CGColor(colorSpace: CGColorSpaceCreateDeviceRGB(), components: [1.0, 1.0, 0.2, 0.4]) 148 | shapeLayer.cornerRadius = 7 149 | return shapeLayer 150 | } 151 | 152 | } 153 | 154 | 155 | extension VisionObjectRecognitionViewController: UIViewControllerRepresentable { 156 | public typealias UIViewControllerType = VisionObjectRecognitionViewController 157 | 158 | func makeUIViewController(context: UIViewControllerRepresentableContext) -> VisionObjectRecognitionViewController.UIViewControllerType { 159 | return VisionObjectRecognitionViewController() 160 | } 161 | 162 | func updateUIViewController(_ uiViewController: VisionObjectRecognitionViewController, context: UIViewControllerRepresentableContext) { 163 | 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /HelloCoreML/Models/Data.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Data.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/14. 6 | // Copyright © 2019 RobinChao. All rights reserved. 
7 | // 8 | 9 | import Foundation 10 | import SwiftUI 11 | import ImageIO 12 | 13 | 14 | 15 | let mlModelData: [MLMetaModel] = load("mlmodels.json") 16 | 17 | 18 | func load(_ filename: String, as type: T.Type = T.self) -> T { 19 | let data: Data 20 | 21 | guard let file = Bundle.main.url(forResource: filename, withExtension: nil) 22 | else { 23 | fatalError("Couldn't find \(filename) in main bundle.") 24 | } 25 | 26 | do { 27 | data = try Data(contentsOf: file) 28 | } catch { 29 | fatalError("Couldn't load \(filename) from main bundle:\n\(error)") 30 | } 31 | 32 | do { 33 | let decoder = JSONDecoder() 34 | return try decoder.decode(T.self, from: data) 35 | } catch { 36 | fatalError("Couldn't parse \(filename) as \(T.self):\n\(error)") 37 | } 38 | } 39 | 40 | final class ImageStore { 41 | typealias _ImageDictionary = [String: CGImage] 42 | fileprivate var images: _ImageDictionary = [:] 43 | 44 | fileprivate static var scale = 2 45 | 46 | static var shared = ImageStore() 47 | 48 | func image(name: String) -> Image { 49 | let index = _guaranteeImage(name: name) 50 | 51 | return Image(images.values[index], scale: CGFloat(ImageStore.scale), label: Text(verbatim: name)) 52 | } 53 | 54 | static func loadImage(name: String) -> CGImage { 55 | guard 56 | let url = Bundle.main.url(forResource: name, withExtension: nil), 57 | let imageSource = CGImageSourceCreateWithURL(url as NSURL, nil), 58 | let image = CGImageSourceCreateImageAtIndex(imageSource, 0, nil) 59 | else { 60 | fatalError("Couldn't load image \(name) from main bundle.") 61 | } 62 | return image 63 | } 64 | 65 | fileprivate func _guaranteeImage(name: String) -> _ImageDictionary.Index { 66 | if let index = images.index(forKey: name) { return index } 67 | 68 | images[name] = ImageStore.loadImage(name: name) 69 | return images.index(forKey: name)! 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /HelloCoreML/Models/MLMetaModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // MLModel.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/14. 6 | // Copyright © 2019 RobinChao. All rights reserved. 
7 | // 8 | 9 | import SwiftUI 10 | 11 | struct MLMetaModel: Hashable, Codable, Identifiable { 12 | var id: Int 13 | var name: String 14 | fileprivate var cover: String 15 | var category: Category 16 | var subtitle: String 17 | var desc: String 18 | var support: Int 19 | 20 | 21 | enum Category: String, CaseIterable, Codable, Hashable { 22 | case Images = "Images" 23 | case Text = "Text" 24 | case ArtStyles = "ArtStyles" 25 | } 26 | } 27 | 28 | extension MLMetaModel { 29 | var coverImage: Image { 30 | ImageStore.shared.image(name: cover) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /HelloCoreML/Preview Content/Preview Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /HelloCoreML/Resources/charleyrivers_feature.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/HelloCoreML/Resources/charleyrivers_feature.jpg -------------------------------------------------------------------------------- /HelloCoreML/SceneDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SceneDelegate.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/14. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import SwiftUI 11 | 12 | class SceneDelegate: UIResponder, UIWindowSceneDelegate { 13 | 14 | var window: UIWindow? 15 | 16 | 17 | func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { 18 | // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`. 19 | // If using a storyboard, the `window` property will automatically be initialized and attached to the scene. 20 | // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead). 21 | 22 | // Create the SwiftUI view that provides the window contents. 23 | if let windowScene = scene as? UIWindowScene { 24 | let window = UIWindow(windowScene: windowScene) 25 | window.rootViewController = UIHostingController(rootView: CategoryHome()) 26 | self.window = window 27 | window.makeKeyAndVisible() 28 | } 29 | } 30 | 31 | func sceneDidDisconnect(_ scene: UIScene) { 32 | // Called as the scene is being released by the system. 33 | // This occurs shortly after the scene enters the background, or when its session is discarded. 34 | // Release any resources associated with this scene that can be re-created the next time the scene connects. 35 | // The scene may re-connect later, as its session was not neccessarily discarded (see `application:didDiscardSceneSessions` instead). 36 | } 37 | 38 | func sceneDidBecomeActive(_ scene: UIScene) { 39 | // Called when the scene has moved from an inactive state to an active state. 40 | // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive. 41 | } 42 | 43 | func sceneWillResignActive(_ scene: UIScene) { 44 | // Called when the scene will move from an active state to an inactive state. 45 | // This may occur due to temporary interruptions (ex. an incoming phone call). 
46 | } 47 | 48 | func sceneWillEnterForeground(_ scene: UIScene) { 49 | // Called as the scene transitions from the background to the foreground. 50 | // Use this method to undo the changes made on entering the background. 51 | } 52 | 53 | func sceneDidEnterBackground(_ scene: UIScene) { 54 | // Called as the scene transitions from the foreground to the background. 55 | // Use this method to save data, release shared resources, and store enough scene-specific state information 56 | // to restore the scene back to its current state. 57 | } 58 | 59 | 60 | } 61 | 62 | -------------------------------------------------------------------------------- /HelloCoreML/Supporting Views/CircleImage.swift: -------------------------------------------------------------------------------- 1 | /* 2 | See LICENSE folder for this sample’s licensing information. 3 | 4 | Abstract: 5 | A view that clips an image to a circle and adds a stroke and shadow. 6 | */ 7 | 8 | import SwiftUI 9 | 10 | struct CircleImage: View { 11 | var image: Image 12 | 13 | var body: some View { 14 | image 15 | .clipShape(Circle()) 16 | .overlay(Circle().stroke(Color.white, lineWidth: 4)) 17 | .shadow(radius: 10) 18 | } 19 | } 20 | 21 | struct CircleImage_Previews: PreviewProvider { 22 | static var previews: some View { 23 | CircleImage(image: Image("turtlerock")) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /HelloCoreML/Supporting Views/VideoCapture.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewCapture.swift 3 | // HelloCoreML 4 | // 5 | // Created by Robin on 2019/11/15. 6 | // Copyright © 2019 RobinChao. All rights reserved. 7 | // 8 | 9 | import AVFoundation 10 | import CoreVideo 11 | 12 | public protocol VideoCaptureDelegate: class { 13 | func videoCapture(_ capture: VideoCapture, didCaptureViewFrame pixelBuffer: CVPixelBuffer?) 14 | } 15 | 16 | public class VideoCapture: NSObject { 17 | public var previewLayer: AVCaptureVideoPreviewLayer? 18 | public weak var delegate: VideoCaptureDelegate? 19 | public var fps = 15 20 | 21 | let captureSession = AVCaptureSession() 22 | let videoOutput = AVCaptureVideoDataOutput() 23 | let queue = DispatchQueue(label: "com.robin.hello-coreml") 24 | 25 | public func setup(sessionPreset: AVCaptureSession.Preset = .vga640x480, completion: @escaping (Bool) -> Void) { 26 | self.setupCamera(sessionPreset: sessionPreset) { (success) in 27 | completion(success) 28 | } 29 | } 30 | 31 | func setupCamera(sessionPreset: AVCaptureSession.Preset, completion: @escaping (_ success: Bool) -> Void) { 32 | captureSession.beginConfiguration() 33 | captureSession.sessionPreset = sessionPreset 34 | 35 | guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else { 36 | print("Error: no video device available") 37 | return 38 | } 39 | 40 | guard let videoInput = try? 
AVCaptureDeviceInput(device: captureDevice) else { 41 | print("Error: could not create AVCaptureDeviceInput") 42 | return 43 | } 44 | 45 | if captureSession.canAddInput(videoInput) { 46 | captureSession.addInput(videoInput) 47 | } 48 | 49 | let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 50 | previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill 51 | previewLayer.connection?.videoOrientation = .portrait 52 | self.previewLayer = previewLayer 53 | 54 | let settings: [String : Any] = [ 55 | kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32BGRA) 56 | ] 57 | 58 | videoOutput.videoSettings = settings 59 | videoOutput.alwaysDiscardsLateVideoFrames = true 60 | videoOutput.setSampleBufferDelegate(self, queue: queue) 61 | 62 | if captureSession.canAddOutput(videoOutput) { 63 | captureSession.addOutput(videoOutput) 64 | } 65 | 66 | videoOutput.connection(with: AVMediaType.video)?.videoOrientation = .portrait 67 | 68 | captureSession.commitConfiguration() 69 | 70 | let success = true 71 | completion(success) 72 | } 73 | 74 | public func start() { 75 | if !captureSession.isRunning { 76 | captureSession.startRunning() 77 | } 78 | } 79 | 80 | public func stop() { 81 | if captureSession.isRunning { 82 | captureSession.stopRunning() 83 | } 84 | } 85 | } 86 | 87 | 88 | extension VideoCapture : AVCaptureVideoDataOutputSampleBufferDelegate { 89 | public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 90 | let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) 91 | delegate?.videoCapture(self, didCaptureViewFrame: imageBuffer) 92 | } 93 | 94 | public func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 95 | print("Dropped frame") 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Robin 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CoreML-Practice-with-SwiftUI 2 | 3 | **This Project is IN PROGRESS** 4 | 5 | This is a CoreML Practice with SwiftUI. 
The pretrained models come from [Apple CoreML Models](https://developer.apple.com/machine-learning/models/). 6 | 7 | ![](./apple-machine-learning-models.png) 8 | 9 | ## Included Demos 10 | 11 | * **FCRN-DepthPrediction** [FCRN-DepthPrediction](https://github.com/iro-cp/FCRN-DepthPrediction): Predicts depth from a single image. 12 | * **MNISTClassifier** [MNIST](http://yann.lecun.com/exdb/mnist/): Classifies a single handwritten digit (0-9). 13 | * **ImageClassifier** 14 | * [MobileNetV2](https://github.com/tensorflow/models/tree/master/research/slim/nets/mobilenet): The MobileNetV2 architecture, trained to classify the dominant object in a camera frame or image. 15 | * [Resnet50](https://github.com/fchollet/deep-learning-models/blob/master/resnet50.py): A residual neural network that classifies the dominant object in a camera frame or image. 16 | * [SqueezeNet](https://github.com/DeepScale/SqueezeNet): A small deep neural network architecture that classifies the dominant object in a camera frame or image. 17 | * **Object Detection** [YOLOv3](https://github.com/pjreddie/darknet): Locates and classifies 80 different types of objects in a camera frame or image. 18 | 19 | ## How to use 20 | 21 | 1. Clone the repository and open `HelloCoreML.xcodeproj` in Xcode. 22 | 2. The `.mlmodel` files are excluded by `.gitignore`, so download the models the demos reference (the code directly uses FCRN, MNISTClassifier, MobileNetV2, and YOLOv3; Resnet50 and SqueezeNet can be swapped into the image classifier) from [Apple CoreML Models](https://developer.apple.com/machine-learning/models/) and add them to the HelloCoreML target. 23 | 3. Build and run; the depth-prediction and object-detection demos use the camera, so run them on a physical device. 24 | -------------------------------------------------------------------------------- /apple-machine-learning-models.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yak0xff/CoreML-Practice-with-SwiftUI/329064c413d978f35a6217fbf160033c38d4b463/apple-machine-learning-models.png --------------------------------------------------------------------------------