├── FastViTSample
├── FastViTSample
│ ├── models
│ │ └── .keep
│ ├── Assets.xcassets
│ │ ├── Contents.json
│ │ ├── AccentColor.colorset
│ │ │ └── Contents.json
│ │ └── AppIcon.appiconset
│ │ │ └── Contents.json
│ ├── Preview Content
│ │ └── Preview Assets.xcassets
│ │ │ └── Contents.json
│ ├── FastViTSample.entitlements
│ ├── FastViTApp.swift
│ ├── CameraView.swift
│ ├── CoreImageExtensions.swift
│ ├── DataModel.swift
│ ├── ViewfinderView.swift
│ └── Camera.swift
├── FastViTSample.xcodeproj
│ ├── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── IDEWorkspaceChecks.plist
│ └── project.pbxproj
└── README.md
├── depth-anything-example
├── DepthApp
│ ├── models
│ │ └── .keep
│ ├── Assets.xcassets
│ │ ├── Contents.json
│ │ ├── AccentColor.colorset
│ │ │ └── Contents.json
│ │ └── AppIcon.appiconset
│ │ │ └── Contents.json
│ ├── Preview Content
│ │ └── Preview Assets.xcassets
│ │ │ └── Contents.json
│ ├── DepthView.swift
│ ├── DepthApp.entitlements
│ ├── DepthApp.swift
│ ├── CameraView.swift
│ ├── ViewfinderView.swift
│ ├── DataModel.swift
│ └── Camera.swift
├── DepthSample.xcodeproj
│ ├── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── IDEWorkspaceChecks.plist
│ ├── xcshareddata
│ │ └── xcschemes
│ │ │ ├── DepthApp.xcscheme
│ │ │ └── DepthCLI.xcscheme
│ └── project.pbxproj
├── Common
│ └── CoreImageExtensions.swift
├── README.md
└── DepthCLI
│ └── MainCommand.swift
├── tutorials
└── images
│ ├── unpatched-units.png
│ └── patched-on-iphone.png
├── SemanticSegmentationSample
├── SemanticSegmentationSample
│ ├── Assets.xcassets
│ │ ├── Contents.json
│ │ ├── AccentColor.colorset
│ │ │ └── Contents.json
│ │ └── AppIcon.appiconset
│ │ │ └── Contents.json
│ ├── Preview Content
│ │ └── Preview Assets.xcassets
│ │ │ └── Contents.json
│ ├── SegmentationView.swift
│ ├── SemanticSegmentationSample.entitlements
│ ├── SemanticSegmentationSampleApp.swift
│ ├── CameraView.swift
│ ├── ViewfinderView.swift
│ ├── DataModel.swift
│ └── Camera.swift
├── SemanticSegmentationSample.xcodeproj
│ ├── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── IDEWorkspaceChecks.plist
│ ├── xcshareddata
│ │ └── xcschemes
│ │ │ └── SemanticSegmentationCLI.xcscheme
│ └── project.pbxproj
├── Common
│ ├── SemanticMapToColor.metal
│ ├── PostProcessing.swift
│ ├── CoreImageExtensions.swift
│ └── SemanticMapToImage.swift
├── README.md
└── SemanticSegmentationCLI
│ └── MainCommand.swift
├── .gitignore
├── README.md
└── LICENSE
/FastViTSample/FastViTSample/models/.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/models/.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tutorials/images/unpatched-units.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huggingface/coreml-examples/HEAD/tutorials/images/unpatched-units.png
--------------------------------------------------------------------------------
/tutorials/images/patched-on-iphone.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huggingface/coreml-examples/HEAD/tutorials/images/patched-on-iphone.png
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "self:">
   </FileRef>
</Workspace>
--------------------------------------------------------------------------------
/depth-anything-example/DepthSample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "self:">
   </FileRef>
</Workspace>
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "self:">
   </FileRef>
</Workspace>
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | **/xcuserdata/
3 | **/*.mlpackage/
4 |
5 | ## Build generated
6 | .build/
7 | build/
8 | Staging/
9 |
10 | ## Various settings
11 | *.pbxuser
12 | *.mode1v3
13 | *.mode2v3
14 |
15 | # Swift Package Manager
16 | Packages/
17 | Package.pins
18 | Package.resolved
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>IDEDidComputeMac32BitWarning</key>
	<true/>
</dict>
</plist>
--------------------------------------------------------------------------------
/depth-anything-example/DepthSample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>IDEDidComputeMac32BitWarning</key>
	<true/>
</dict>
</plist>
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>IDEDidComputeMac32BitWarning</key>
	<true/>
</dict>
</plist>
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/DepthView.swift:
--------------------------------------------------------------------------------
import AVFoundation
import SwiftUI

/// Renders the most recent depth-map image, or nothing while no frame is available.
struct DepthView: View {
    /// Depth image produced by the model; `nil` until the first prediction arrives.
    @Binding var image: Image?

    var body: some View {
        if let image {
            image
                .resizable()
                .aspectRatio(contentMode: .fill)
        }
    }
}

#Preview {
    DepthView(image: .constant(Image(systemName: "circle.rectangle.filled.pattern.diagonalline")))
}
17 |
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/DepthApp.entitlements:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>com.apple.security.app-sandbox</key>
	<true/>
	<key>com.apple.security.device.camera</key>
	<true/>
	<key>com.apple.security.files.user-selected.read-only</key>
	<true/>
</dict>
</plist>
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample/FastViTSample.entitlements:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>com.apple.security.app-sandbox</key>
	<true/>
	<key>com.apple.security.device.camera</key>
	<true/>
	<key>com.apple.security.files.user-selected.read-only</key>
	<true/>
</dict>
</plist>
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample/SegmentationView.swift:
--------------------------------------------------------------------------------
import AVFoundation
import SwiftUI

/// Renders the most recent segmentation overlay, or nothing while no frame is available.
struct SegmentationView: View {
    /// Colorized segmentation map; `nil` until the first prediction arrives.
    @Binding var image: Image?

    var body: some View {
        if let image {
            image
                .resizable()
                .aspectRatio(contentMode: .fill)
        }
    }
}

#Preview {
    SegmentationView(image: .constant(Image(systemName: "circle.rectangle.filled.pattern.diagonalline")))
}
17 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample/SemanticSegmentationSample.entitlements:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>com.apple.security.app-sandbox</key>
	<true/>
	<key>com.apple.security.device.camera</key>
	<true/>
	<key>com.apple.security.files.user-selected.read-only</key>
	<true/>
</dict>
</plist>
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample/FastViTApp.swift:
--------------------------------------------------------------------------------
import SwiftUI

/// Application entry point for the FastViT classification sample.
@main
struct CameraApp: App {
    init() {
        // Configure the shared navigation-bar appearance once, before any UI is built.
        UINavigationBar.applyCustomAppearance()
    }

    var body: some Scene {
        WindowGroup {
            CameraView()
        }
    }
}

fileprivate extension UINavigationBar {
    /// Applies a translucent ultra-thin blur background to every navigation bar
    /// via the UIKit appearance proxy (standard, compact, and scroll-edge variants).
    static func applyCustomAppearance() {
        let appearance = UINavigationBarAppearance()
        appearance.backgroundEffect = UIBlurEffect(style: .systemUltraThinMaterial)
        UINavigationBar.appearance().standardAppearance = appearance
        UINavigationBar.appearance().compactAppearance = appearance
        UINavigationBar.appearance().scrollEdgeAppearance = appearance
    }
}
25 |
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/DepthApp.swift:
--------------------------------------------------------------------------------
import SwiftUI

/// Application entry point for the Depth Anything sample.
@main
struct CameraApp: App {
    init() {
        // Configure the shared navigation-bar appearance once, before any UI is built.
        UINavigationBar.applyCustomAppearance()
    }

    var body: some Scene {
        WindowGroup {
            CameraView()
        }
    }
}

fileprivate extension UINavigationBar {
    /// Applies a translucent ultra-thin blur background to every navigation bar
    /// via the UIKit appearance proxy (standard, compact, and scroll-edge variants).
    static func applyCustomAppearance() {
        let appearance = UINavigationBarAppearance()
        appearance.backgroundEffect = UIBlurEffect(style: .systemUltraThinMaterial)
        UINavigationBar.appearance().standardAppearance = appearance
        UINavigationBar.appearance().compactAppearance = appearance
        UINavigationBar.appearance().scrollEdgeAppearance = appearance
    }
}
25 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample/SemanticSegmentationSampleApp.swift:
--------------------------------------------------------------------------------
import SwiftUI

/// Application entry point for the DETR semantic-segmentation sample.
@main
struct CameraApp: App {
    init() {
        // Configure the shared navigation-bar appearance once, before any UI is built.
        UINavigationBar.applyCustomAppearance()
    }

    var body: some Scene {
        WindowGroup {
            CameraView()
        }
    }
}

fileprivate extension UINavigationBar {
    /// Applies a translucent ultra-thin blur background to every navigation bar
    /// via the UIKit appearance proxy (standard, compact, and scroll-edge variants).
    static func applyCustomAppearance() {
        let appearance = UINavigationBarAppearance()
        appearance.backgroundEffect = UIBlurEffect(style: .systemUltraThinMaterial)
        UINavigationBar.appearance().standardAppearance = appearance
        UINavigationBar.appearance().compactAppearance = appearance
        UINavigationBar.appearance().scrollEdgeAppearance = appearance
    }
}
25 |
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | },
8 | {
9 | "appearances" : [
10 | {
11 | "appearance" : "luminosity",
12 | "value" : "dark"
13 | }
14 | ],
15 | "idiom" : "universal",
16 | "platform" : "ios",
17 | "size" : "1024x1024"
18 | },
19 | {
20 | "appearances" : [
21 | {
22 | "appearance" : "luminosity",
23 | "value" : "tinted"
24 | }
25 | ],
26 | "idiom" : "universal",
27 | "platform" : "ios",
28 | "size" : "1024x1024"
29 | }
30 | ],
31 | "info" : {
32 | "author" : "xcode",
33 | "version" : 1
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample/CameraView.swift:
--------------------------------------------------------------------------------
import SwiftUI

/// Root view: a full-screen camera viewfinder that also displays the latest
/// classification result via `ViewfinderView`.
struct CameraView: View {
    // Owns the camera capture session and the model's latest prediction.
    @StateObject private var model = DataModel()

    var body: some View {
        NavigationStack {
            GeometryReader { geometry in
                VStack {
                    ViewfinderView(session: model.camera.captureSession, prediction: $model.prediction)
                        .frame(width: geometry.size.width, height: geometry.size.height).clipped()
                }
            }
            // Starts the camera when the view appears; the task is cancelled with the view.
            .task {
                await model.camera.start()
            }
            .navigationTitle("Camera")
            .navigationBarTitleDisplayMode(.inline)
            .navigationBarHidden(true)
            .statusBar(hidden: true)
            .ignoresSafeArea()
        }
    }
}
25 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/Common/SemanticMapToColor.metal:
--------------------------------------------------------------------------------
#include <metal_stdlib>
using namespace metal;

/// Converts a hue in [0, 1] to a fully saturated RGB color.
float3 hue2rgb(float hue) {
    float r = fabs(hue * 6.0 - 3.0) - 1.0;
    float g = 2.0 - fabs(hue * 6.0 - 2.0);
    float b = 2.0 - fabs(hue * 6.0 - 4.0);
    return saturate(float3(r, g, b));
}

/// Maps each class id in `semantic_map` to a distinct hue and writes the
/// resulting color into `image`. One thread per output pixel.
/// NOTE(review): the original texture template arguments were lost to angle-bracket
/// stripping; `<uint, access::read>` / `<float, access::write>` restored to match
/// the `.read(gid).r` and `.write(...)` usage below — confirm against the project.
kernel void SemanticMapToColor(texture2d<uint, access::read> semantic_map [[ texture(0) ]],
                               texture2d<float, access::write> image [[ texture(1) ]],
                               const device uint &n_classes [[ buffer(0) ]],
                               uint2 gid [[thread_position_in_grid]]) {
    uint class_id = semantic_map.read(gid).r;
    // Spread class ids evenly across the hue circle.
    float hue = float(class_id) / float(n_classes);
    float3 rgb = hue2rgb(hue);
    image.write(float4(rgb, 1.0), gid);
}
20 |
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/CameraView.swift:
--------------------------------------------------------------------------------
import SwiftUI

/// Root view: camera viewfinder on the top half of the screen, live depth-map
/// output on the bottom half.
struct CameraView: View {
    // Owns the camera capture session and the model's latest depth image.
    @StateObject private var model = DataModel()

    var body: some View {
        NavigationStack {
            GeometryReader { geometry in
                VStack {
                    ViewfinderView(session: model.camera.captureSession)
                        .frame(width: geometry.size.width, height: geometry.size.height / 2).clipped()
                    DepthView(image: $model.depthImage).background(.black)
                        .frame(width: geometry.size.width, height: geometry.size.height / 2).clipped()
                }
            }
            // Starts the camera when the view appears; the task is cancelled with the view.
            .task {
                await model.camera.start()
            }
            .navigationTitle("Camera")
            .navigationBarTitleDisplayMode(.inline)
            .navigationBarHidden(true)
            .statusBar(hidden: true)
            .ignoresSafeArea()
        }
    }
}
27 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/SemanticSegmentationSample/CameraView.swift:
--------------------------------------------------------------------------------
import SwiftUI

/// Root view: camera viewfinder on the top half of the screen, live semantic
/// segmentation output on the bottom half.
struct CameraView: View {
    // Owns the camera capture session and the model's latest segmentation image.
    @StateObject private var model = DataModel()

    var body: some View {
        NavigationStack {
            GeometryReader { geometry in
                VStack {
                    ViewfinderView(session: model.camera.captureSession)
                        .frame(width: geometry.size.width, height: geometry.size.height / 2).clipped()
                    SegmentationView(image: $model.segmentationImage).background(.black)
                        .frame(width: geometry.size.width, height: geometry.size.height / 2).clipped()
                }
            }
            // Starts the camera when the view appears; the task is cancelled with the view.
            .task {
                await model.camera.start()
            }
            .navigationTitle("Camera")
            .navigationBarTitleDisplayMode(.inline)
            .navigationBarHidden(true)
            .statusBar(hidden: true)
            .ignoresSafeArea()
        }
    }
}
27 |
--------------------------------------------------------------------------------
/depth-anything-example/DepthApp/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | },
8 | {
9 | "idiom" : "mac",
10 | "scale" : "1x",
11 | "size" : "16x16"
12 | },
13 | {
14 | "idiom" : "mac",
15 | "scale" : "2x",
16 | "size" : "16x16"
17 | },
18 | {
19 | "idiom" : "mac",
20 | "scale" : "1x",
21 | "size" : "32x32"
22 | },
23 | {
24 | "idiom" : "mac",
25 | "scale" : "2x",
26 | "size" : "32x32"
27 | },
28 | {
29 | "idiom" : "mac",
30 | "scale" : "1x",
31 | "size" : "128x128"
32 | },
33 | {
34 | "idiom" : "mac",
35 | "scale" : "2x",
36 | "size" : "128x128"
37 | },
38 | {
39 | "idiom" : "mac",
40 | "scale" : "1x",
41 | "size" : "256x256"
42 | },
43 | {
44 | "idiom" : "mac",
45 | "scale" : "2x",
46 | "size" : "256x256"
47 | },
48 | {
49 | "idiom" : "mac",
50 | "scale" : "1x",
51 | "size" : "512x512"
52 | },
53 | {
54 | "idiom" : "mac",
55 | "scale" : "2x",
56 | "size" : "512x512"
57 | }
58 | ],
59 | "info" : {
60 | "author" : "xcode",
61 | "version" : 1
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/FastViTSample/README.md:
--------------------------------------------------------------------------------
1 | # FastViT Sample
2 |
3 | This sample demonstrates the use of [FastViT](https://github.com/apple/ml-fastvit) converted to Core ML using [coremltools](https://github.com/apple/coremltools). FastViT is a small and very fast model for image classification.
4 |
5 | ## Instructions
6 |
7 | 1. [Download FastViTT8F16.mlpackage](#download) from the Hugging Face Hub and place it inside the `models` folder of the project.
2. Open `FastViTSample.xcodeproj` in Xcode.
9 | 3. Build & run the project!
10 |
11 | The FastViT model was introduced in the paper [FastViT: A Fast Hybrid Vision Transformer using Structural Reparameterization](https://arxiv.org/abs/2303.14189) by Pavan Kumar Anasosalu Vasu et al. and first released in [this repository](https://github.com/apple/ml-fastvit).
12 |
13 | ## Download
14 |
15 | Core ML packages are available in:
16 | - [apple/coreml-FastViT-T8](https://huggingface.co/apple/coreml-FastViT-T8). Small version (4M parameters).
17 | - [apple/coreml-FastViT-MA36](https://huggingface.co/apple/coreml-FastViT-MA36). Larger version (44M parameters) with better accuracy.
18 |
19 | Install `huggingface-cli`
20 |
21 | ```bash
22 | brew install huggingface-cli
23 | ```
24 |
25 | Download `FastViTT8F16.mlpackage` to the `models` directory:
26 |
27 | ```bash
28 | huggingface-cli download \
29 | --local-dir models --local-dir-use-symlinks False \
30 | apple/coreml-FastViT-T8 \
31 | --include "FastViTT8F16.mlpackage/*"
32 | ```
33 |
34 | FastViT-T8 is the smallest version of the model, with 4M parameters. You can also try the larger and more accurate FastViT-MA36 (44M parameters), downloading it from [apple/coreml-FastViT-MA36](https://huggingface.co/apple/coreml-FastViT-MA36).
35 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/README.md:
--------------------------------------------------------------------------------
1 | # Semantic Segmentation Sample with DETR
2 |
3 | This sample demonstrates the use of [DETR](https://huggingface.co/facebook/detr-resnet-50) converted to Core ML. It allows semantic segmentation on iOS devices, where each pixel in an image is classified according to the most probable category it belongs to.
4 |
5 | We leverage [coremltools](https://github.com/apple/coremltools) for model conversion and compression. You can read more about it [here](https://apple.github.io/coremltools/docs-guides/source/opt-palettization-api.html).
6 |
7 | ## Instructions
8 |
9 | 1. [Download DETRResnet50SemanticSegmentationF16.mlpackage](#download) from the Hugging Face Hub and place it inside the `models` folder of the project.
10 | 2. Open `SemanticSegmentationSample.xcodeproj` in Xcode.
11 | 3. Build & run the project!
12 |
13 | DEtection TRansformer (DETR) was introduced in the paper [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872) by Carion et al. and first released in [this repository](https://github.com/facebookresearch/detr).
14 |
15 | ## Download
16 |
17 | Core ML packages are available in [apple/coreml-detr-semantic-segmentation](https://huggingface.co/apple/coreml-detr-semantic-segmentation).
18 | Install `huggingface-cli`
19 |
20 | ```bash
21 | brew install huggingface-cli
22 | ```
23 |
24 | Download `DETRResnet50SemanticSegmentationF16.mlpackage` to the `models` directory:
25 |
26 | ```bash
27 | huggingface-cli download \
28 | --local-dir models --local-dir-use-symlinks False \
29 | apple/coreml-detr-semantic-segmentation \
30 | --include "DETRResnet50SemanticSegmentationF16.mlpackage/*"
31 | ```
32 |
33 | To download all the model versions, including quantized ones, skip the `--include` argument.
34 |
--------------------------------------------------------------------------------
/FastViTSample/FastViTSample/CoreImageExtensions.swift:
--------------------------------------------------------------------------------
import CoreImage
import ImageIO
import UniformTypeIdentifiers

extension CIImage {
    /// Returns a copy of the image scaled to `size`, with its origin moved to (0, 0).
    ///
    /// Scaling is anisotropic: the aspect ratio is preserved only when `size`
    /// matches the original extent's ratio.
    func resized(to size: CGSize) -> CIImage {
        let outputScaleX = size.width / extent.width
        let outputScaleY = size.height / extent.height
        var outputImage = self.transformed(by: CGAffineTransform(scaleX: outputScaleX, y: outputScaleY))
        // Scaling can leave the extent offset from the origin; translate it back to (0, 0).
        outputImage = outputImage.transformed(
            by: CGAffineTransform(translationX: -outputImage.extent.origin.x, y: -outputImage.extent.origin.y)
        )
        return outputImage
    }
}

extension CIContext {
    /// Renders an image into a newly allocated pixel buffer.
    ///
    /// - Parameters:
    ///   - image: The image to render; the buffer is sized to its extent.
    ///   - pixelFormat: A `kCVPixelFormatType_*` constant for the new buffer.
    /// - Returns: The rendered buffer, or `nil` if allocation fails.
    func render(_ image: CIImage, pixelFormat: OSType) -> CVPixelBuffer? {
        // Use a plain optional plus a guard instead of an implicitly unwrapped
        // local, so a failed allocation can never be dereferenced.
        var output: CVPixelBuffer?
        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            Int(image.extent.width),
            Int(image.extent.height),
            pixelFormat,
            nil,
            &output
        )
        guard status == kCVReturnSuccess, let buffer = output else {
            return nil
        }
        render(image, to: buffer)
        return buffer
    }

    /// Writes the image to `url` as a PNG file.
    ///
    /// Terminates with `fatalError` if rendering or destination creation fails,
    /// matching this sample's fail-fast style for programmer errors.
    func writePNG(_ image: CIImage, to url: URL) {
        // Guard instead of force-unwrapping, for a diagnosable failure message
        // consistent with the destination check below.
        guard let outputCGImage = createCGImage(image, from: image.extent) else {
            fatalError("Failed to create a CGImage from the CIImage.")
        }
        guard let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.png.identifier as CFString, 1, nil) else {
            fatalError("Failed to create an image destination.")
        }
        CGImageDestinationAddImage(destination, outputCGImage, nil)
        CGImageDestinationFinalize(destination)
    }
}
47 |
--------------------------------------------------------------------------------
/depth-anything-example/Common/CoreImageExtensions.swift:
--------------------------------------------------------------------------------
import CoreImage
import ImageIO
import UniformTypeIdentifiers

extension CIImage {
    /// Returns a copy of the image scaled to `size`, with its origin moved to (0, 0).
    ///
    /// Scaling is anisotropic: the aspect ratio is preserved only when `size`
    /// matches the original extent's ratio.
    func resized(to size: CGSize) -> CIImage {
        let outputScaleX = size.width / extent.width
        let outputScaleY = size.height / extent.height
        var outputImage = self.transformed(by: CGAffineTransform(scaleX: outputScaleX, y: outputScaleY))
        // Scaling can leave the extent offset from the origin; translate it back to (0, 0).
        outputImage = outputImage.transformed(
            by: CGAffineTransform(translationX: -outputImage.extent.origin.x, y: -outputImage.extent.origin.y)
        )
        return outputImage
    }
}

extension CIContext {
    /// Renders an image into a newly allocated pixel buffer.
    ///
    /// - Parameters:
    ///   - image: The image to render; the buffer is sized to its extent.
    ///   - pixelFormat: A `kCVPixelFormatType_*` constant for the new buffer.
    /// - Returns: The rendered buffer, or `nil` if allocation fails.
    func render(_ image: CIImage, pixelFormat: OSType) -> CVPixelBuffer? {
        // Use a plain optional plus a guard instead of an implicitly unwrapped
        // local, so a failed allocation can never be dereferenced.
        var output: CVPixelBuffer?
        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            Int(image.extent.width),
            Int(image.extent.height),
            pixelFormat,
            nil,
            &output
        )
        guard status == kCVReturnSuccess, let buffer = output else {
            return nil
        }
        render(image, to: buffer)
        return buffer
    }

    /// Writes the image to `url` as a PNG file.
    ///
    /// Terminates with `fatalError` if rendering or destination creation fails,
    /// matching this sample's fail-fast style for programmer errors.
    func writePNG(_ image: CIImage, to url: URL) {
        // Guard instead of force-unwrapping, for a diagnosable failure message
        // consistent with the destination check below.
        guard let outputCGImage = createCGImage(image, from: image.extent) else {
            fatalError("Failed to create a CGImage from the CIImage.")
        }
        guard let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.png.identifier as CFString, 1, nil) else {
            fatalError("Failed to create an image destination.")
        }
        CGImageDestinationAddImage(destination, outputCGImage, nil)
        CGImageDestinationFinalize(destination)
    }
}
47 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/Common/PostProcessing.swift:
--------------------------------------------------------------------------------
import CoreML
import CoreImage
import CoreImage.CIFilterBuiltins

/// Errors thrown while post-processing DETR segmentation outputs.
enum PostProcessorError : Error {
    /// The model metadata lacks the class-label payload this processor requires.
    case missingModelMetadata
    /// The semantic map could not be converted to a color image.
    case colorConversionError
}

/// Converts raw DETR semantic-segmentation predictions into displayable images,
/// using class labels read from the model's own metadata.
class DETRPostProcessor {
    /// Number of raw classes, including empty ones with no labels
    let numClasses: Int

    /// Map from semantic id to class label
    let ids2Labels: [Int : String]

    /// Reads the class-label list from the model's creator-defined metadata.
    ///
    /// - Parameter model: A DETR model whose metadata contains a
    ///   `com.apple.coreml.model.preview.params` JSON payload with a `labels` array.
    /// - Throws: `PostProcessorError.missingModelMetadata` when the metadata or
    ///   its JSON payload is absent or malformed.
    init(model: MLModel) throws {
        struct ClassList: Codable {
            var labels: [String]
        }

        guard let userFields = model.modelDescription.metadata[MLModelMetadataKey.creatorDefinedKey] as? [String : String],
              let params = userFields["com.apple.coreml.model.preview.params"] else {
            throw PostProcessorError.missingModelMetadata
        }
        guard let jsonData = params.data(using: .utf8),
              let classList = try? JSONDecoder().decode(ClassList.self, from: jsonData) else {
            throw PostProcessorError.missingModelMetadata
        }
        let rawLabels = classList.labels

        // Filter out empty categories whose label is "--"; keys are the original
        // indices from `enumerated()`, so semantic ids stay stable after filtering.
        let ids2Labels = Dictionary(uniqueKeysWithValues: rawLabels.enumerated().filter { $1 != "--" })

        self.numClasses = rawLabels.count
        self.ids2Labels = ids2Labels
    }

    /// Creates a new CIImage from the raw semantic predictions returned by the model.
    ///
    /// NOTE(review): the generic argument of `MLShapedArray` was lost to
    /// angle-bracket stripping; `<Int32>` restored to match the integer class-id
    /// map consumed by `SemanticMapToImage` — confirm against the project.
    /// - Throws: `PostProcessorError.colorConversionError` when the conversion fails.
    func semanticImage(semanticPredictions: MLShapedArray<Int32>) throws -> CIImage {
        guard let image = try SemanticMapToImage.shared?.mapToImage(semanticMap: semanticPredictions, numClasses: numClasses) else {
            throw PostProcessorError.colorConversionError
        }
        return image
    }
}
47 |
--------------------------------------------------------------------------------
/SemanticSegmentationSample/Common/CoreImageExtensions.swift:
--------------------------------------------------------------------------------
import CoreImage
import CoreImage.CIFilterBuiltins
import ImageIO
import UniformTypeIdentifiers

extension CIImage {
    /// Returns a copy of the image scaled to `size`, with its origin moved to (0, 0).
    ///
    /// Scaling is anisotropic: the aspect ratio is preserved only when `size`
    /// matches the original extent's ratio.
    func resized(to size: CGSize) -> CIImage {
        let outputScaleX = size.width / extent.width
        let outputScaleY = size.height / extent.height
        var outputImage = self.transformed(by: CGAffineTransform(scaleX: outputScaleX, y: outputScaleY))
        // Scaling can leave the extent offset from the origin; translate it back to (0, 0).
        outputImage = outputImage.transformed(
            by: CGAffineTransform(translationX: -outputImage.extent.origin.x, y: -outputImage.extent.origin.y)
        )
        return outputImage
    }

    /// Returns the image with its alpha channel multiplied by `alpha`,
    /// or `self` unchanged when `alpha` is 1.
    ///
    /// NOTE(review): the generic clause was lost to angle-bracket stripping;
    /// `<T: BinaryFloatingPoint>` restored to satisfy both the `!= 1` comparison
    /// and the `CGFloat(alpha)` conversion below — confirm against the project.
    public func withAlpha<T: BinaryFloatingPoint>(_ alpha: T) -> CIImage? {
        guard alpha != 1 else { return self }

        // Scale only the alpha component; RGB rows of the matrix keep defaults.
        let filter = CIFilter.colorMatrix()
        filter.inputImage = self
        filter.aVector = CIVector(x: 0, y: 0, z: 0, w: CGFloat(alpha))

        return filter.outputImage
    }
}

extension CIContext {
    /// Renders an image into a newly allocated pixel buffer.
    ///
    /// - Parameters:
    ///   - image: The image to render; the buffer is sized to its extent.
    ///   - pixelFormat: A `kCVPixelFormatType_*` constant for the new buffer.
    /// - Returns: The rendered buffer, or `nil` if allocation fails.
    func render(_ image: CIImage, pixelFormat: OSType) -> CVPixelBuffer? {
        // Use a plain optional plus a guard instead of an implicitly unwrapped
        // local, so a failed allocation can never be dereferenced.
        var output: CVPixelBuffer?
        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            Int(image.extent.width),
            Int(image.extent.height),
            pixelFormat,
            nil,
            &output
        )
        guard status == kCVReturnSuccess, let buffer = output else {
            return nil
        }
        render(image, to: buffer)
        return buffer
    }

    /// Writes the image to `url` as a PNG file.
    ///
    /// Terminates with `fatalError` if rendering or destination creation fails,
    /// matching this sample's fail-fast style for programmer errors.
    func writePNG(_ image: CIImage, to url: URL) {
        // Guard instead of force-unwrapping, for a diagnosable failure message
        // consistent with the destination check below.
        guard let outputCGImage = createCGImage(image, from: image.extent) else {
            fatalError("Failed to create a CGImage from the CIImage.")
        }
        guard let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.png.identifier as CFString, 1, nil) else {
            fatalError("Failed to create an image destination.")
        }
        CGImageDestinationAddImage(destination, outputCGImage, nil)
        CGImageDestinationFinalize(destination)
    }
}
58 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # CoreML Examples
2 |
3 | This repository contains a collection of CoreML demo apps, with optimized models for the Apple Neural Engine™️. It also hosts tutorials and other resources you can use in your own projects.
4 |
5 | ## Demo Apps
6 |
7 | ||||
8 | | ------------- | ------------- | ------------- |
9 | |