├── Resources
│   ├── blurring.jpeg
│   └── sharpening.jpeg
├── .gitignore
├── GenerateSwiftUICoreImage
│   ├── GenerateSwiftUICoreImage.xcodeproj
│   │   ├── project.xcworkspace
│   │   │   ├── contents.xcworkspacedata
│   │   │   └── xcshareddata
│   │   │       └── IDEWorkspaceChecks.plist
│   │   └── project.pbxproj
│   └── GenerateSwiftUICoreImage
│       ├── GenerateSwiftUICoreImage.entitlements
│       ├── GenerateSwiftUICoreImageApp.swift
│       └── ContentView.swift
├── .swiftpm
│   └── xcode
│       └── package.xcworkspace
│           └── xcshareddata
│               └── IDEWorkspaceChecks.plist
├── LICENSE.txt
├── Package.swift
├── Sources
│   ├── Image-Extensions.swift
│   └── CIImage-Extensions.swift
├── Generator
│   ├── FunctionMinima.json
│   ├── MissingParameterDocumentation.json
│   ├── docLookup.json
│   ├── abstracts.json
│   └── CIImage-Generation.swift
└── README.md

/Resources/blurring.jpeg:
--------------------------------------------------------------------------------
 https://raw.githubusercontent.com/danwood/SwiftUICoreImage/HEAD/Resources/blurring.jpeg
--------------------------------------------------------------------------------
/Resources/sharpening.jpeg:
--------------------------------------------------------------------------------
 https://raw.githubusercontent.com/danwood/SwiftUICoreImage/HEAD/Resources/sharpening.jpeg
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | /*.xcodeproj
5 | xcuserdata/
6 | DerivedData/
7 | .swiftpm/config/registries.json
8 | .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
9 | .netrc
--------------------------------------------------------------------------------
/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>com.apple.security.app-sandbox</key>
6 | 	<true/>
7 | 	<key>com.apple.security.files.user-selected.read-only</key>
8 | 	<true/>
9 | </dict>
10 | </plist>
11 | 
--------------------------------------------------------------------------------
/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImageApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  GenerateSwiftUICoreImageApp.swift
3 | //  GenerateSwiftUICoreImage
4 | //
5 | //  Created by Dan Wood on 6/25/24.
6 | // 7 | 8 | import SwiftUI 9 | 10 | @main 11 | struct GenerateSwiftUICoreImageApp: App { 12 | var body: some Scene { 13 | let _ = dumpFilters() 14 | let _ = dumpUnknownProperties() 15 | WindowGroup { 16 | ContentView() 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/ContentView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ContentView.swift 3 | // GenerateSwiftUICoreImage 4 | // 5 | // Created by Dan Wood on 6/25/24. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct ContentView: View { 11 | var body: some View { 12 | VStack { 13 | Image(systemName: "globe") 14 | .imageScale(.large) 15 | .foregroundStyle(.tint) 16 | Text("See console output") 17 | } 18 | .padding() 19 | } 20 | } 21 | 22 | #Preview { 23 | ContentView() 24 | } 25 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Dan Wood 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /Package.swift: -------------------------------------------------------------------------------- 1 | // swift-tools-version: 5.7 2 | // The swift-tools-version declares the minimum version of Swift required to build this package. 3 | 4 | import PackageDescription 5 | 6 | let package = Package( 7 | name: "SwiftUICoreImage", 8 | platforms: [ 9 | .macOS(.v10_15), 10 | .iOS(.v13) 11 | ], 12 | products: [ 13 | // Products define the executables and libraries a package produces, and make them visible to other packages. 14 | .library( 15 | name: "SwiftUICoreImage", 16 | targets: ["SwiftUICoreImage"]), 17 | ], 18 | dependencies: [ 19 | // Dependencies declare other packages that this package depends on. 20 | // .package(url: /* package url */, from: "1.0.0"), 21 | ], 22 | targets: [ 23 | // Targets are the basic building blocks of a package. A target can define a module or a test suite. 24 | // Targets can depend on other targets in this package, and on products in packages this package depends on. 
25 | .target( 26 | name: "SwiftUICoreImage", 27 | dependencies: [], 28 | path: "Sources") 29 | ] 30 | ) 31 | -------------------------------------------------------------------------------- /Sources/Image-Extensions.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Image-Extensions.swift 3 | // SwiftUI Core Image 4 | // 5 | // Created by Dan Wood on 5/9/23. 6 | // 7 | 8 | import Foundation 9 | import CoreGraphics 10 | import CoreImage 11 | import SwiftUI 12 | 13 | public extension Image { 14 | private static let context = CIContext(options: nil) 15 | 16 | init(ciImage: CIImage) { 17 | 18 | #if canImport(UIKit) 19 | // Note that making a UIImage and then using that to initialize the Image doesn't seem to work, but CGImage is fine. 20 | if let cgImage = Self.context.createCGImage(ciImage, from: ciImage.extent) { 21 | self.init(cgImage, scale: 1.0, orientation: .up, label: Text("")) 22 | } else { 23 | self.init(systemName: "questionmark") 24 | } 25 | #elseif canImport(AppKit) 26 | // Looks like the NSCIImageRep is slightly better optimized for repeated runs, 27 | // I'm guessing that it doesn't actually render the bitmap unless it needs to. 28 | let rep = NSCIImageRep(ciImage: ciImage) 29 | guard rep.size.width <= 10000, rep.size.height <= 10000 else { // simple test to make sure we don't have overflow extent 30 | self.init(nsImage: NSImage()) 31 | return 32 | } 33 | let nsImage = NSImage(size: rep.size) // size affects aspect ratio but not resolution 34 | nsImage.addRepresentation(rep) 35 | self.init(nsImage: nsImage) 36 | #endif 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /Generator/FunctionMinima.json: -------------------------------------------------------------------------------- 1 | { 2 | "areaAverage": "11.0", 3 | "areaHistogram": "11.0", 4 | "areaMaximum": "11.0", 5 | "areaMaximumAlpha": "11.0", 6 | "areaMinimum": "11.0", 7 | "areaMinimumAlpha": "11.0", 8 | "areaMinMax": "11.0", 9 | "areaMinMaxRed": "11.0", 10 | "bumpDistortion": "11.0", 11 | "bumpDistortionLinear": "11.0", 12 | "circleSplashDistortion": "11.0", 13 | "circularWrap": "11.0", 14 | "colorAbsoluteDifference": "11.0", 15 | "colorThreshold": "11.0", 16 | "colorThresholdOtsu": "11.0", 17 | "columnAverage": "11.0", 18 | "displacementDistortion": "11.0", 19 | "droste": "11.0", 20 | "glassDistortion": "11.0", 21 | "glassLozenge": "11.0", 22 | "histogramDisplay": "11.0", 23 | "holeDistortion": "11.0", 24 | "kMeans": "11.0", 25 | "lightTunnel": "11.0", 26 | "ninePartStretched": "11.0", 27 | "ninePartTiled": "11.0", 28 | "pinchDistortion": "11.0", 29 | "rowAverage": "11.0", 30 | "stretchCrop": "11.0", 31 | "torusLensDistortion": "11.0", 32 | "twirlDistortion": "11.0", 33 | "vortexDistortion": "11.0", 34 | "convolutionRGB3X3": "12.0", 35 | "convolutionRGB5X5": "12.0", 36 | "convolutionRGB7X7": "12.0", 37 | "convolutionRGB9Horizontal": "12.0", 38 | "convolutionRGB9Vertical": "12.0", 39 | "linearLightBlendMode": "12.0", 40 | "personSegmentation": "12.0", 41 | "vividLightBlendMode": "12.0", 42 | "areaLogarithmicHistogram": "13.0", 43 | "convertLabToRGB": "13.0", 44 | "convertRGBtoLab": "13.0", 45 | 46 | "colorCubesMixedWithMask": "13.0", 47 | "colorCubeWithColorSpace": "13.0", 48 | "colorCube": "13.0", 49 | 50 | "attributedTextImageGenerator": "13.0", 51 | "textImageGenerator": "13.0" 52 | } 53 | -------------------------------------------------------------------------------- /Generator/MissingParameterDocumentation.json: 
--------------------------------------------------------------------------------
1 | {
2 | 	"CIBokehBlur" : {
3 | 		"softness" : "The softness of the bokeh effect"
4 | 	},
5 | 	"CIDepthBlurEffect" : {
6 | 		"_NOTE" : "THIS WAS GLEANED FROM WWDC2017#508 VIDEO; NO OFFICIAL APPLE DOCUMENTATION FOUND. NOT ACTUALLY USED HERE BECAUSE OF SEVERAL BUILT-IN INITIALIZERS.",
7 | 		"aperture" : "Simulated lens aperture to adjust blur for unfocused elements",
8 | 		"auxDataMetadata" : "UNKNOWN",
9 | 		"calibrationData" : "UNKNOWN",
10 | 		"chinPositions" : "Vector of up to 4 x,y positions indicating where peoples' chins are",
11 | 		"disparityImage" : "Grayscale image indicating depth; lighter is nearer.",
12 | 		"focusRectangle" : "Part of the rectangle to make sure to put into focus",
13 | 		"gainMap" : "UNKNOWN",
14 | 		"leftEyePositions" : "Vector of up to 4 x,y positions indicating where peoples' left eyes are",
15 | 		"lumaNoiseScale" : "UNKNOWN",
16 | 		"nosePositions" : "Vector of up to 4 x,y positions indicating where peoples' noses are",
17 | 		"rightEyePositions" : "Vector of up to 4 x,y positions indicating where peoples' right eyes are",
18 | 		"scaleFactor" : "Integrated downsampling (rather than doing it later) since this is computationally expensive",
19 | 		"shape" : "UNKNOWN"
20 | 	},
21 | 	"CIDepthOfField" : {
22 | 		"point0" : "A set of coordinates marking the first point to be focused on",
23 | 		"point1" : "A set of coordinates marking the second point to be focused on",
24 | 		"unsharpMaskIntensity" : "The intensity of the unsharp mask effect",
25 | 		"unsharpMaskRadius" : "The radius of the unsharp mask effect applied to the in-focus area of effect"
26 | 	},
27 | 	"CIDroste" : {
28 | 		"insetPoint0" : "The x and y position that defines the first inset point",
29 | 		"insetPoint1" : "The x and y position that defines the second inset point",
30 | 		"periodicity" : "The amount of intervals",
31 | 		"rotation" : "The angle of the rotation, in radians",
32 | 		"strands" : "The amount of strands",
33 | 		"zoom" : "The zoom of the effect"
34 | 	},
35 | 	"CIEdgePreserveUpsampleFilter" : {
36 | 		"lumaSigma" : "Influence of the input image’s luma information on the upsampling operation",
37 | 		"smallImage" : "An image representing the reference for scaling the input image with the type CIImage",
38 | 		"spatialSigma" : "The influence of the input image’s spatial information on the upsampling operation"
39 | 	},
40 | 	"CIGuidedFilter" : {
41 | 		"_NOTE" : "THIS WAS GLEANED FROM DESCRIPTIONS OF THIS FILTER IN GENERAL; NO OFFICIAL APPLE DOCUMENTATION FOUND",
42 | 		"epsilon" : "Smoothness. A higher value means more smoothing."
43 | 	},
44 | 	"CIHueSaturationValueGradient" : {
45 | 		"dither" : "A boolean value specifying whether to distort the generated output",
46 | 		"softness" : "The softness of the generated color wheel"
47 | 	},
48 | 	"CILenticularHaloGenerator" : {
49 | 		"haloOverlap" : "The overlap of red, green, and blue halos. A value of 1 results in a full overlap."
50 | }, 51 | "CIPerspectiveCorrection" : { 52 | "crop" : "A rectangle that specifies the extent of the corrected image" 53 | }, 54 | "CIToneCurve" : { 55 | "point0" : "A vector containing the position of the first point of the tone curve", 56 | "point1" : "A vector containing the position of the second point of the tone curve", 57 | "point2" : "A vector containing the position of the third point of the tone curve", 58 | "point3" : "A vector containing the position of the fourth point of the tone curve", 59 | "point4" : "A vector containing the position of the fifth point of the tone curve" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /Sources/CIImage-Extensions.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CIImage-Extensions.swift 3 | // SwiftUI Core Image 4 | // 5 | // Created by Dan Wood on 5/9/23. 6 | // 7 | 8 | import SwiftUI 9 | import CoreImage 10 | import CoreImage.CIFilterBuiltins 11 | 12 | public extension CIImage { 13 | 14 | // Pretty fast. Subsequent invocations are cached. 15 | convenience init(_ name: String, bundle: Bundle? = nil) { 16 | #if canImport(UIKit) 17 | if let uiImage = UIImage(named: name, in: bundle, with: nil) { 18 | self.init(uiImage: uiImage) 19 | } else { 20 | self.init() 21 | } 22 | #elseif canImport(AppKit) 23 | let nsImage: NSImage? 24 | if let bundle { 25 | nsImage = bundle.image(forResource: name) 26 | } else { 27 | nsImage = NSImage(named: name) 28 | } 29 | if let nsImage { 30 | self.init(nsImage: nsImage) 31 | } else { 32 | self.init() 33 | } 34 | #endif 35 | } 36 | 37 | #if canImport(UIKit) 38 | convenience init(uiImage: UIImage) { 39 | if let cgImage = uiImage.cgImage { 40 | self.init(cgImage: cgImage) 41 | } else { 42 | self.init() 43 | } 44 | } 45 | #elseif canImport(AppKit) 46 | convenience init(nsImage: NSImage) { 47 | if let cgImage = nsImage.cgImage(forProposedRect: nil, context: nil, hints: nil) { // TODO: Maybe consider NSGraphicsContext 48 | self.init(cgImage: cgImage) 49 | } else { 50 | self.init() 51 | } 52 | } 53 | #endif 54 | 55 | /// Useful for debugging when chaining multiple CIImage modifiers together. 56 | func logExtent(file: String = #file, line: Int = #line) -> CIImage { 57 | NSLog("\(file):\(line) \(self.extent)") 58 | return self 59 | } 60 | 61 | } 62 | 63 | // MARK: USEFUL EXTENSIONS FOR WORKING IN A SWIFTUI-LIKE FASHION 64 | 65 | public extension CIImage { 66 | 67 | /// Save the extent and then re-crop to that extent after applying whatever is in the closure 68 | func recropping(apply: (CIImage) -> CIImage) -> CIImage { 69 | let savedExtent: CGRect = extent 70 | let newCIImage = apply(self) 71 | let cropped = newCIImage.cropped(to: savedExtent) 72 | return cropped 73 | } 74 | 75 | /// Apply to whatever is in the closure. Useful if the current image is used as a parameter to a new image process. 76 | func replacing(apply: (CIImage) -> CIImage) -> CIImage { 77 | let newCIImage = apply(self) 78 | return newCIImage 79 | } 80 | 81 | /// Resize an image down so it fully fills the container, cropping in the center as needed. 82 | @available(macOS 10.15, *) 83 | func scaledToFill(_ size: CGSize?) 
-> CIImage { 84 | guard let size else { return self } 85 | let currentSize = extent.size 86 | let largerRatio: CGFloat = max(size.width / currentSize.width, size.height / currentSize.height) 87 | let newSize: CGSize = CGSize(width: currentSize.width * largerRatio, height: currentSize.height * largerRatio) 88 | // Scale to the larger of two ratios so it fills 89 | let scaled = self.lanczosScaleTransform(scale: Float(largerRatio)) 90 | let clamped = scaled.clampedToExtent() 91 | let cropped = clamped.cropped(to: CGRect(x: (newSize.width - size.width) / 2, 92 | y: (newSize.height - size.height) / 2, 93 | width: size.width, height: size.height)) 94 | return cropped 95 | } 96 | 97 | /// Resize an image down so it fully fits in container, centered as needed. No cropping. 98 | @available(macOS 10.15, *) 99 | func scaledToFit(_ size: CGSize?) -> CIImage { 100 | guard let size else { return self } 101 | let currentSize = extent.size 102 | let smallerRatio: CGFloat = min(size.width / currentSize.width, size.height / currentSize.height) 103 | let newSize: CGSize = CGSize(width: currentSize.width * smallerRatio, height: currentSize.height * smallerRatio) 104 | // Scale to the smaller of two ratios so it fits 105 | let scaled = self.lanczosScaleTransform(scale: Float(smallerRatio)) 106 | let clamped = scaled.clampedToExtent() 107 | let cropped = clamped.cropped(to: CGRect(origin: .zero, size: newSize)) 108 | return cropped 109 | } 110 | 111 | /// convenience, to be similar to SwiftUI view offset 112 | func offset(by offset: CGSize) -> CIImage { 113 | guard offset != .zero else { return self } 114 | return self.transformed(by: CGAffineTransform(translationX: offset.width, y: offset.height)) 115 | } 116 | 117 | } 118 | 119 | // MARK: OVERLOADS OF EXISTING CIIMAGE OPERATIONS SO WE CAN PASS IN 'ACTIVE' BOOLEAN TO BE ABLE TO HAVE INERT MODIFIER 120 | 121 | public extension CIImage { 122 | 123 | // Don't overload these; already a way to pass in arguments to get an inert modifier 124 | //open func transformed(by matrix: CGAffineTransform) -> CIImage // pass in CGAffineTransform.identity 125 | //open func transformed(by matrix: CGAffineTransform, highQualityDownsample: Bool) -> CIImage // pass in CGAffineTransform.identity 126 | //open func composited(over dest: CIImage) -> CIImage // pass in empty image 127 | //open func cropped(to rect: CGRect) -> CIImage // Pass in CGRect.infinite 128 | //open func clamped(to rect: CGRect) -> CIImage // Pass in CGRect.infinite 129 | //open func settingProperties(_ properties: [AnyHashable : Any]) -> CIImage // Pass in empty to add no properties 130 | 131 | // Maybe not worth dealing with. 132 | //open func oriented(forExifOrientation orientation: Int32) -> CIImage 133 | //open func oriented(_ orientation: CGImagePropertyOrientation) -> CIImage 134 | //open func matchedToWorkingSpace(from colorSpace: CGColorSpace) -> CIImage? 135 | //open func matchedFromWorkingSpace(to colorSpace: CGColorSpace) -> CIImage? 136 | //open func insertingIntermediate() -> CIImage 137 | //open func insertingIntermediate(cache: Bool) -> CIImage 138 | //open func convertingWorkingSpaceToLab() -> CIImage 139 | //open func convertingLabToWorkingSpace() -> CIImage 140 | 141 | // Doesn't really apply since the whole point is to have image modifiers for all the filters. 
142 | 	//open func applyingFilter(_ filterName: String, parameters params: [String : Any]) -> CIImage
143 | 	//open func applyingFilter(_ filterName: String) -> CIImage
144 | 
145 | 	// Don't implement because we have an equivalent operation already. Sigma is just the pixel radius.
146 | 	//open func applyingGaussianBlur(sigma: Double) -> CIImage
147 | 
148 | 	// OK these get an active overload.
149 | 
150 | 	/* Return a new infinite image by replicating the edge pixels of the receiver image. */
151 | 	@available(macOS 10.10, *)
152 | 	func clampedToExtent(active: Bool = true) -> CIImage {
153 | 		guard active else { return self }
154 | 		return clampedToExtent()
155 | 	}
156 | 
157 | 	/* Return a new image by multiplying the receiver's RGB values by its alpha. */
158 | 	@available(macOS 10.12, *)
159 | 	func premultiplyingAlpha(active: Bool = true) -> CIImage {
160 | 		guard active else { return self }
161 | 		return premultiplyingAlpha()
162 | 	}
163 | 
164 | 	/* Return a new image by dividing the receiver's RGB values by its alpha. */
165 | 	@available(macOS 10.12, *)
166 | 	func unpremultiplyingAlpha(active: Bool = true) -> CIImage {
167 | 		guard active else { return self }
168 | 		return unpremultiplyingAlpha()
169 | 	}
170 | 
171 | 	/* Return a new image with alpha set to 1 within the rectangle and 0 outside. */
172 | 	@available(macOS 10.12, *)
173 | 	func settingAlphaOne(in extent: CGRect, active: Bool = true) -> CIImage {
174 | 		guard active else { return self }
175 | 		return settingAlphaOne(in: extent)
176 | 	}
177 | 
178 | 	/* Returns a new image by changing the receiver's sample mode to bilinear interpolation. */
179 | 	@available(macOS 10.13, *)
180 | 	func samplingLinear(active: Bool = true) -> CIImage {
181 | 		guard active else { return self }
182 | 		return samplingLinear()
183 | 	}
184 | 
185 | 	/* Returns a new image by changing the receiver's sample mode to nearest neighbor. */
186 | 	@available(macOS 10.13, *)
187 | 	func samplingNearest(active: Bool = true) -> CIImage { // equivalent to CISampleNearest filter
188 | 		guard active else { return self }
189 | 		return samplingNearest()
190 | 	}
191 | }
192 | 
193 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SwiftUICoreImage
2 | 
3 | Help for using Core Image within the context of SwiftUI. Also useful even without SwiftUI.
4 | 
5 | ## Introduction
6 | 
7 | Core Image is a wonderful image-processing toolkit in macOS and iOS, but it's a bit clunky to use. Even after Apple added Swift APIs to many of the filters ([CoreImage.CIFilterBuiltins](https://developer.apple.com/documentation/coreimage/methods_and_protocols_for_filter_creation)), it's still pretty tedious to chain filters to images.
8 | 
9 | The purpose of this package is to provide an easier way to chain multiple filters to CIImage instances and then render them into SwiftUI (or any other context — SwiftUI is not needed).
10 | 
11 | ```Swift
12 | Image(ciImage: CIImage("Bernie.jpeg")
13 |     .sepiaTone(intensity: sepia)
14 |     .recropping { image in
15 |         image
16 |             .clampedToExtent(active: clamped)
17 |             .gaussianBlur(radius: gaussianBlurRadius)
18 |     }
19 | )
20 | .resizable()
21 | .aspectRatio(contentMode: .fit)
22 | ```
23 | 
24 | ## Manifest
25 | 
26 | Included in this package are:
27 | 
28 | * CIImage-Filters.swift
29 |   * about 200 modifiers on `CIImage` that return a new modified `CIImage` (or the original if unmodified)
30 |   * 20 static functions that return a newly generated `CIImage`
31 |   * Includes filters up to iOS 18, macOS 15
32 | * CIImage-Extensions.swift
33 |   * Convenience initializers for `CIImage` from a resource name and from an `NSImage`/`UIImage`
34 |   * Modifiers for `CIImage` to return cropped, scaled, etc. to be easier to work with SwiftUI
35 |   * Overloads of several built-in `CIImage` modifier functions that take an `active` boolean parameter
36 | * Image-Extensions.swift
37 |   * Convenience initializer to create a SwiftUI `Image` from a `CIImage`
38 | 
39 | ## How This Works
40 | 
41 | Similarly to how SwiftUI view modifiers each return a modified `View` instance, these modifiers on `CIImage` take care of the Core Image chaining by creating a corresponding `CIFilter`, hooking up the `inputImage` for you, and returning the resulting `outputImage`.
42 | 
43 | When creating SwiftUI code, I think it's important that you can use [Inert Modifiers](https://developer.apple.com/videos/play/wwdc2021/10022/?time=2303) in which you pass in some parameter that causes the modifier to have no effect. (For instance, specifying opacity of 1.0 or padding of 0.0 to a view.)
44 | 
45 | In this code, I've made sure that each of our image modifiers comes with an inert form: in some cases it's passing in a parameter that clearly has no effect (e.g. zero intensity, zero radius); or it's a nil background image when combining with another image; or a boolean `active` parameter. If the parameter(s) specified would cause no change in the image, then the identity (self) is returned forthwith.
46 | 
47 | The contents of CIImage-Filters.swift are generated source code, using code that I've included in this repository (`CIImage-Generation.swift`, not included in the package import). This loops through the Core Image metadata that Apple provides (`CIFilter.filterNames(inCategories: nil)`). Unfortunately this list is somewhat out of date and contains a number of inconsistencies that I've done my best to overcome. There are some JSON files that provide additional metadata such as a list of the functions that actually do have online documentation — 56 functions aren't documented so some guesswork is needed — or repairs to missing or obsolete documentation. You probably won't need to run this code unless you have some special requirements or the list has been updated in a future OS release.
48 | 
49 | ## Using With SwiftUI
50 | 
51 | Remember that Core Image operations are really just a "recipe" for the processing steps; the actual work is not performed until the image needs to be rendered to a bitmap.
52 | 
53 | Instead of creating a SwiftUI `Image` using a [built-in initializer](https://developer.apple.com/documentation/swiftui/image) from a resource name or other image type (`CGImage`, `NSImage`, `UIImage`), this code provides a new initializer to create an `Image` from a `CIImage`. When SwiftUI needs to render the image, the Core Image is rendered to the screen.
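For example, a view can drive a filter parameter from state; because the chain is only a recipe, no pixels are processed until SwiftUI actually draws the `Image`. This is a minimal sketch (reusing the resource name from the introduction example, and assuming the generated `sepiaTone(intensity:)` modifier takes a `Float` like the underlying filter; an intensity of zero keeps the modifier inert):

```Swift
import SwiftUI
import SwiftUICoreImage

struct SepiaDemo: View {
    @State private var sepia: Float = 0.0   // 0.0 is the inert value: sepiaTone returns the original image

    var body: some View {
        VStack {
            Image(ciImage: CIImage("Bernie.jpeg")   // resource name borrowed from the example above
                .sepiaTone(intensity: sepia)
            )
            .resizable()
            .aspectRatio(contentMode: .fit)

            Slider(value: $sepia, in: 0...1)        // changing the value re-evaluates the chain
        }
    }
}
```

Dragging the slider away from zero applies the sepia filter; dragging it back to zero simply returns the untouched image.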
54 | 
55 | Your typical approach, then, will be to create an `Image`, passing in a `CIImage` created using one of the [built-in initializers](https://developer.apple.com/documentation/coreimage/ciimage) or the convenience methods included here to create from a resource name or another image type.
56 | 
57 | Then, just chain modifiers to that `CIImage` to indicate what to modify.
58 | 
59 | Many modifiers are simple. For instance:
60 | 
61 | ```Swift
62 | Image(ciImage: CIImage("Halloween.jpeg")
63 |     .xRay()
64 | )
65 | ```
66 | 
67 | If you wish to toggle whether the filter is applied, use the `active` parameter (default value of `true`):
68 | 
69 | ```Swift
70 | Image(ciImage: CIImage("Halloween.jpeg")
71 |     .xRay(active: isMachineOn)
72 | )
73 | ```
74 | 
75 | Chain any number of modifiers found in `CIImage-Filters.swift` to construct the desired result.
76 | 
77 | ### Image Scaling
78 | 
79 | Many Core Image filters use pixel values for parameters. Therefore, you may need to scale an image to an appropriate size _before_ applying operations. For example, applying a 10-pixel-radius blur to a 6000⨉4000 image that is then scaled down to 300⨉200 might not yield what you want; perhaps you want to first scale the image to 300⨉200 and then apply the 10-pixel-radius blur.
80 | 
81 | Core Image provides a scaling operation (`CILanczosScaleTransform` and `lanczosScaleTransform()`), but this package also includes more convenient alternatives: `scaledToFill()` and `scaledToFit()`, where you pass in the dimensions you want.
82 | 
83 | A typical use of this works well in conjunction with `GeometryReader`. For example:
84 | 
85 | ```Swift
86 | GeometryReader { geo in
87 |     let geoSize: CGSize = geo.frame(in: .local).integral.size
88 |     // Resize image to double the frame size, assuming we are on a retina display
89 |     let newSize: CGSize = CGSize(width: geoSize.width * 2,
90 |                                  height: geoSize.height * 2)
91 | 
92 |     Image(ciImage: CIImage("M83.jpeg")
93 |         .scaledToFit(newSize)
94 |         .sharpenLuminance(sharpness: 1.0, radius: 5)
95 |     )
96 |     .resizable() // Make sure retina image is scaled to fit
97 |     .aspectRatio(contentMode: .fit)
98 | }
99 | ```
100 | ![Compare original, sharpened without pre-scaling, sharpened after pre-scaling](./Resources/sharpening.jpeg)
101 | 
102 | 
103 | ## Using Without SwiftUI
104 | 
105 | SwiftUI is not needed at all. Just create a `CIImage` and perform operations. Then, render to a bitmap.
106 | 
107 | ```Swift
108 | let tiledImage: CIImage = CIImage("HeyGoodMorning.png")
109 |     .triangleTile(center: .zero, angle: 0.356, width: 2.0)
110 | 
111 | imageView.image = UIImage(ciImage: tiledImage)
112 | ```
113 | 
114 | ## Other Notes
115 | 
116 | If you've used Core Image, you'll know that sometimes you need to play with the extent of an image, e.g. clamping an image to have infinite edges before applying a Gaussian blur, then re-cropping to the image's original extent. To accomplish this, you can use the **`recropping`** modifier, which is followed by a closure. The operation saves the extent of the image, applies whatever is in the closure, and then re-crops to that extent. In the example below, the image in `ciImage` is converted into an image whose edge pixel colors extend infinitely in all directions, then it is blurred, and then, upon exit from the closure, the returned image is re-cropped.
117 | 
118 | ```Swift
119 | ciImage
120 |     .recropping { image in
121 |         image
122 |             .clampedToExtent()
123 |             .gaussianBlur(radius: 10)
124 |     }
125 | ```
126 | 
127 | ![Compare unblurred, improper blurring, and proper blurring](./Resources/blurring.jpeg)
128 | 
129 | The `recropping` modifier is also useful if you find that a filter (e.g. `comicEffect()`) has grown your image's extent slightly and you want to clamp it to its original size.
130 | 
131 | Another useful operation is **`replacing`**. It is much like `recropping`, except that it does not mess with the extent of the image. You pass in a closure, which starts with the image you were working with; your closure returns a new image. This can be useful when working with the compositing operations in Core Image, which require a *background* image to be passed in. What if your chain of operations is on the background image, and you want to overlay something on top? Just wrap your operation in `.replacing` and return the composited image.
132 | 
133 | ```Swift
134 | ciImage
135 |     .replacing { backgroundImage in
136 |         ciImage2
137 |             .sourceAtopCompositing(backgroundImage: backgroundImage)
138 |     }
139 | ```
140 | 
141 | In this case, the image in `ciImage2` is the foreground image, placed atop the `backgroundImage`, then returned to the chain of operations.
142 | 
143 | ## Using Package
144 | 
145 | In Xcode, choose File > Add Packages…, then enter the URL of this repository into the search bar, and continue from there.
146 | 
147 | In your code:
148 | 
149 | ```Swift
150 | import SwiftUICoreImage
151 | ```
152 | 
153 | That's it!
154 | 
155 | ---
156 | 
157 | ## Future Improvements
158 | 
159 | Rather than generating repetitive code, it would be nice to define some macros that expand to the repetitive code!
160 | The advantage of this is that one could import the macro package and define only the filters they want,
161 | rather than defining all 200+ mostly-unused filters.
162 | 
163 | [Apparently](https://forums.swift.org/t/macros-attached-macros-to-methods-and-functions/65531/6)
164 | this would require [Function Body Macros](https://github.com/swiftlang/swift-evolution/blob/main/proposals/0415-function-body-macros.md)
165 | which are not available in Swift 5.x but might make it into Swift 6.0.
166 | 
167 | Ideally we would specify something like this:
168 | ```
169 | @CoreImageExtension
170 | func pixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage
171 | ```
172 | 
173 | And this would fill in the body with some code that does the following (a sketch of such an expanded body appears at the end of this README):
174 | * Guard statement to return self if the active flag is false (or another inert modifier value; maybe we'd have to indicate which parameter this is?)
175 | * Create the built-in CIFilter object based on the name of the function being expanded
176 | * Set all the parameters as specified in the function parameters. Maybe we need some way to indicate parameters that need to be cast to another type
177 | * Return the outputImage from the filter
178 | 
179 | ---
180 | 
181 | Please file an issue or pull request if you can think of an improvement to the code or documentation of the generated filters,
182 | or find any other helpful utilities for manipulating Core Images in this toolkit!
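As referenced above, here is a rough sketch of the body that such a macro (or today's code generator) would need to produce for the `pixellate` example, following the pattern described in "How This Works". It is a hypothetical illustration, not the actual generated source; it assumes `import CoreImage.CIFilterBuiltins` and lives inside an extension on `CIImage`:

```Swift
func pixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage {
    guard active else { return self }       // inert modifier: return the receiver untouched
    let filter = CIFilter.pixellate()       // built-in filter corresponding to the function name
    filter.inputImage = self                // hook up the receiver as the input image
    filter.center = center
    filter.scale = scale
    return filter.outputImage ?? self       // fall back to the original image if the filter fails
}
```

The guard at the top is what makes `active: false` an inert modifier, returning the identity without ever constructing a filter.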
183 | -------------------------------------------------------------------------------- /Generator/docLookup.json: -------------------------------------------------------------------------------- 1 | { 2 | "accordionFoldTransition": "3228263-accordionfoldtransition", 3 | "additionCompositing": "3228264-additioncompositing", 4 | "affineClamp": "3228265-affineclamp", 5 | "affineTile": "3228266-affinetile", 6 | "areaAverage": "3547111-areaaverage", 7 | "areaHistogram": "3547112-areahistogram", 8 | "areaLogarithmicHistogram": "4401848-arealogarithmichistogram", 9 | "areaMaximum": "3547114-areamaximum", 10 | "areaMaximumAlpha": "3547113-areamaximumalpha", 11 | "areaMinMax": "3547115-areaminmax", 12 | "areaMinMaxRed": "3547116-areaminmaxred", 13 | "areaMinimum": "3547118-areaminimum", 14 | "areaMinimumAlpha": "3547117-areaminimumalpha", 15 | "attributedTextImageGenerator": "3228267-attributedtextimagegenerator", 16 | "aztecCodeGenerator": "3228268-azteccodegenerator", 17 | "barcodeGenerator": "3228269-barcodegenerator", 18 | "barsSwipeTransition": "3228270-barsswipetransition", 19 | "bicubicScaleTransform": "3228271-bicubicscaletransform", 20 | "blendWithAlphaMask": "3228272-blendwithalphamask", 21 | "blendWithBlueMask": "3228273-blendwithbluemask", 22 | "blendWithMask": "3228274-blendwithmask", 23 | "blendWithRedMask": "3228275-blendwithredmask", 24 | "bloom": "3228276-bloom", 25 | "blurredRectangleGenerator": "4401849-blurredrectanglegenerator", 26 | "bokehBlur": "3228277-bokehblur", 27 | "boxBlur": "3228278-boxblur", 28 | "bumpDistortion": "4401850-bumpdistortion", 29 | "bumpDistortionLinear": "4401851-bumpdistortionlinear", 30 | "cannyEdgeDetector": "4401852-cannyedgedetector", 31 | "checkerboardGenerator": "3228279-checkerboardgenerator", 32 | "circleSplashDistortion": "4401853-circlesplashdistortion", 33 | "circularScreen": "3228280-circularscreen", 34 | "circularWrap": "4401854-circularwrap", 35 | "cmykHalftone": "3228259-cmykhalftone", 36 | "code128BarcodeGenerator": "3228281-code128barcodegenerator", 37 | "colorAbsoluteDifference": "3547119-colorabsolutedifference", 38 | "colorBlendMode": "3228282-colorblendmode", 39 | "colorBurnBlendMode": "3228283-colorburnblendmode", 40 | "colorClamp": "3228284-colorclamp", 41 | "colorControls": "3228285-colorcontrols", 42 | "colorCrossPolynomial": "3228286-colorcrosspolynomial", 43 | "colorCube": "3228287-colorcube", 44 | "colorCubeWithColorSpace": "3228288-colorcubewithcolorspace", 45 | "colorCubesMixedWithMask": "3228289-colorcubesmixedwithmask", 46 | "colorCurves": "3228290-colorcurves", 47 | "colorDodgeBlendMode": "3228291-colordodgeblendmode", 48 | "colorInvert": "3228292-colorinvert", 49 | "colorMap": "3228293-colormap", 50 | "colorMatrix": "3228294-colormatrix", 51 | "colorMonochrome": "3228295-colormonochrome", 52 | "colorPolynomial": "3228296-colorpolynomial", 53 | "colorPosterize": "3228297-colorposterize", 54 | "colorThreshold": "3547120-colorthreshold", 55 | "colorThresholdOtsu": "4401855-colorthresholdotsu", 56 | "columnAverage": "3547121-columnaverage", 57 | "comicEffect": "3228298-comiceffect", 58 | "convertLabToRGB": "4401856-convertlabtorgb", 59 | "convertRGBtoLab": "4401857-convertrgbtolab", 60 | "convolution3X3": "3228299-convolution3x3", 61 | "convolution5X5": "3228300-convolution5x5", 62 | "convolution7X7": "3228301-convolution7x7", 63 | "convolution9Horizontal": "3228302-convolution9horizontal", 64 | "convolution9Vertical": "3228303-convolution9vertical", 65 | "convolutionRGB3X3": "4401858-convolutionrgb3x3", 66 | 
"convolutionRGB5X5": "4401859-convolutionrgb5x5", 67 | "convolutionRGB7X7": "4401860-convolutionrgb7x7", 68 | "convolutionRGB9Horizontal": "4401861-convolutionrgb9horizontal", 69 | "convolutionRGB9Vertical": "4401862-convolutionrgb9vertical", 70 | "copyMachineTransition": "3228304-copymachinetransition", 71 | "coreMLModel": "3228305-coremlmodel", 72 | "crystallize": "3228306-crystallize", 73 | "darkenBlendMode": "3228307-darkenblendmode", 74 | "depthOfField": "3228308-depthoffield", 75 | "depthToDisparity": "3228309-depthtodisparity", 76 | "differenceBlendMode": "3228310-differenceblendmode", 77 | "discBlur": "3228311-discblur", 78 | "disintegrateWithMaskTransition": "3228312-disintegratewithmasktransition", 79 | "disparityToDepth": "3228313-disparitytodepth", 80 | "displacementDistortion": "4401863-displacementdistortion", 81 | "dissolveTransition": "3228314-dissolvetransition", 82 | "dither": "3228315-dither", 83 | "divideBlendMode": "3228316-divideblendmode", 84 | "documentEnhancer": "3228317-documentenhancer", 85 | "dotScreen": "3228318-dotscreen", 86 | "droste": "4401864-droste", 87 | "edgePreserveUpsample": "3228319-edgepreserveupsample", 88 | "edgeWork": "3228320-edgework", 89 | "edges": "3228321-edges", 90 | "eightfoldReflectedTile": "3228322-eightfoldreflectedtile", 91 | "exclusionBlendMode": "3228323-exclusionblendmode", 92 | "exposureAdjust": "3228324-exposureadjust", 93 | "falseColor": "3228325-falsecolor", 94 | "flashTransition": "3228326-flashtransition", 95 | "fourfoldReflectedTile": "3228327-fourfoldreflectedtile", 96 | "fourfoldRotatedTile": "3228328-fourfoldrotatedtile", 97 | "fourfoldTranslatedTile": "3228329-fourfoldtranslatedtile", 98 | "gaborGradients": "3325508-gaborgradients", 99 | "gammaAdjust": "3228330-gammaadjust", 100 | "gaussianBlur": "3228331-gaussianblur", 101 | "gaussianGradient": "3228332-gaussiangradient", 102 | "glassDistortion": "4401865-glassdistortion", 103 | "glassLozenge": "4401866-glasslozenge", 104 | "glideReflectedTile": "3228333-glidereflectedtile", 105 | "gloom": "3228334-gloom", 106 | "hardLightBlendMode": "3228335-hardlightblendmode", 107 | "hatchedScreen": "3228336-hatchedscreen", 108 | "heightFieldFromMask": "3228337-heightfieldfrommask", 109 | "hexagonalPixellate": "3228338-hexagonalpixellate", 110 | "highlightShadowAdjust": "3228339-highlightshadowadjust", 111 | "histogramDisplay": "3547122-histogramdisplay", 112 | "holeDistortion": "4401867-holedistortion", 113 | "hueAdjust": "3228340-hueadjust", 114 | "hueBlendMode": "3228341-hueblendmode", 115 | "hueSaturationValueGradient": "3228342-huesaturationvaluegradient", 116 | "kMeans": "3547110-kmeans", 117 | "kaleidoscope": "3228343-kaleidoscope", 118 | "keystoneCorrectionCombined": "3325509-keystonecorrectioncombined", 119 | "keystoneCorrectionHorizontal": "3325510-keystonecorrectionhorizontal", 120 | "keystoneCorrectionVertical": "3325511-keystonecorrectionvertical", 121 | "labDeltaE": "3228260-labdeltae", 122 | "lanczosScaleTransform": "3228344-lanczosscaletransform", 123 | "lenticularHaloGenerator": "3228345-lenticularhalogenerator", 124 | "lightTunnel": "4401868-lighttunnel", 125 | "lightenBlendMode": "3228346-lightenblendmode", 126 | "lineOverlay": "3228347-lineoverlay", 127 | "lineScreen": "3228348-linescreen", 128 | "linearBurnBlendMode": "3228349-linearburnblendmode", 129 | "linearDodgeBlendMode": "3228350-lineardodgeblendmode", 130 | "linearGradient": "3228351-lineargradient", 131 | "linearLightBlendMode": "4401869-linearlightblendmode", 132 | "linearToSRGBToneCurve": 
"3228352-lineartosrgbtonecurve", 133 | "luminosityBlendMode": "3228353-luminosityblendmode", 134 | "maskToAlpha": "3228354-masktoalpha", 135 | "maskedVariableBlur": "3228355-maskedvariableblur", 136 | "maximumComponent": "3228356-maximumcomponent", 137 | "maximumCompositing": "3228357-maximumcompositing", 138 | "median": "3228358-median", 139 | "meshGenerator": "3228359-meshgenerator", 140 | "minimumComponent": "3228360-minimumcomponent", 141 | "minimumCompositing": "3228361-minimumcompositing", 142 | "mix": "3228362-mix", 143 | "modTransition": "3228363-modtransition", 144 | "morphologyGradient": "3228364-morphologygradient", 145 | "morphologyMaximum": "3228365-morphologymaximum", 146 | "morphologyMinimum": "3228366-morphologyminimum", 147 | "morphologyRectangleMaximum": "3228367-morphologyrectanglemaximum", 148 | "morphologyRectangleMinimum": "3228368-morphologyrectangleminimum", 149 | "motionBlur": "3228369-motionblur", 150 | "multiplyBlendMode": "3228370-multiplyblendmode", 151 | "multiplyCompositing": "3228371-multiplycompositing", 152 | "ninePartStretched": "4401871-ninepartstretched", 153 | "ninePartTiled": "4401872-nineparttiled", 154 | "noiseReduction": "3228372-noisereduction", 155 | "opTile": "3228373-optile", 156 | "overlayBlendMode": "3228374-overlayblendmode", 157 | "pageCurlTransition": "3228375-pagecurltransition", 158 | "pageCurlWithShadowTransition": "3228376-pagecurlwithshadowtransition", 159 | "paletteCentroid": "3228377-palettecentroid", 160 | "palettize": "3228378-palettize", 161 | "parallelogramTile": "3228379-parallelogramtile", 162 | "pdf417BarcodeGenerator": "3228261-pdf417barcodegenerator", 163 | "personSegmentation": "4401873-personsegmentation", 164 | "perspectiveCorrection": "3228380-perspectivecorrection", 165 | "perspectiveRotate": "3325512-perspectiverotate", 166 | "perspectiveTile": "3228381-perspectivetile", 167 | "perspectiveTransform": "3228382-perspectivetransform", 168 | "perspectiveTransformWithExtent": "3228383-perspectivetransformwithextent", 169 | "photoEffectChrome": "3228384-photoeffectchrome", 170 | "photoEffectFade": "3228385-photoeffectfade", 171 | "photoEffectInstant": "3228386-photoeffectinstant", 172 | "photoEffectMono": "3228387-photoeffectmono", 173 | "photoEffectNoir": "3228388-photoeffectnoir", 174 | "photoEffectProcess": "3228389-photoeffectprocess", 175 | "photoEffectTonal": "3228390-photoeffecttonal", 176 | "photoEffectTransfer": "3228391-photoeffecttransfer", 177 | "pinLightBlendMode": "3228392-pinlightblendmode", 178 | "pinchDistortion": "4401874-pinchdistortion", 179 | "pixellate": "3228393-pixellate", 180 | "pointillize": "3228394-pointillize", 181 | "qrCodeGenerator": "3228262-qrcodegenerator", 182 | "radialGradient": "3228395-radialgradient", 183 | "randomGenerator": "3228396-randomgenerator", 184 | "rippleTransition": "3228397-rippletransition", 185 | "roundedRectangleGenerator": "3335007-roundedrectanglegenerator", 186 | "roundedRectangleStrokeGenerator": "4401875-roundedrectanglestrokegenerator", 187 | "rowAverage": "3547123-rowaverage", 188 | "sRGBToneCurveToLinear": "3228398-srgbtonecurvetolinear", 189 | "saliencyMap": "3228399-saliencymap", 190 | "saturationBlendMode": "3228400-saturationblendmode", 191 | "screenBlendMode": "3228401-screenblendmode", 192 | "sepiaTone": "3228402-sepiatone", 193 | "shadedMaterial": "3228403-shadedmaterial", 194 | "sharpenLuminance": "3228404-sharpenluminance", 195 | "sixfoldReflectedTile": "3228405-sixfoldreflectedtile", 196 | "sixfoldRotatedTile": "3228406-sixfoldrotatedtile", 197 | 
"smoothLinearGradient": "3228407-smoothlineargradient", 198 | "sobelGradients": "4401876-sobelgradients", 199 | "softLightBlendMode": "3228408-softlightblendmode", 200 | "sourceAtopCompositing": "3228409-sourceatopcompositing", 201 | "sourceInCompositing": "3228410-sourceincompositing", 202 | "sourceOutCompositing": "3228411-sourceoutcompositing", 203 | "sourceOverCompositing": "3228412-sourceovercompositing", 204 | "spotColor": "3228413-spotcolor", 205 | "spotLight": "3228414-spotlight", 206 | "starShineGenerator": "3228415-starshinegenerator", 207 | "straighten": "3228416-straighten", 208 | "stretchCrop": "4401877-stretchcrop", 209 | "stripesGenerator": "3228417-stripesgenerator", 210 | "subtractBlendMode": "3228418-subtractblendmode", 211 | "sunbeamsGenerator": "3228419-sunbeamsgenerator", 212 | "swipeTransition": "3228420-swipetransition", 213 | "temperatureAndTint": "3228421-temperatureandtint", 214 | "textImageGenerator": "3228422-textimagegenerator", 215 | "thermal": "3228423-thermal", 216 | "toneCurve": "3228424-tonecurve", 217 | "torusLensDistortion": "4401879-toruslensdistortion", 218 | "triangleKaleidoscope": "3228425-trianglekaleidoscope", 219 | "triangleTile": "3228426-triangletile", 220 | "twelvefoldReflectedTile": "3228427-twelvefoldreflectedtile", 221 | "twirlDistortion": "4401880-twirldistortion", 222 | "unsharpMask": "3228428-unsharpmask", 223 | "vibrance": "3228429-vibrance", 224 | "vignette": "3228431-vignette", 225 | "vignetteEffect": "3228430-vignetteeffect", 226 | "vividLightBlendMode": "4401881-vividlightblendmode", 227 | "vortexDistortion": "4401882-vortexdistortion", 228 | "whitePointAdjust": "3228432-whitepointadjust", 229 | "xRay": "3228433-xray", 230 | "zoomBlur": "3228434-zoomblur" 231 | } 232 | -------------------------------------------------------------------------------- /Generator/abstracts.json: -------------------------------------------------------------------------------- 1 | { 2 | "CIAccordionFoldTransition": "Transitions from one image to another of differing dimensions by unfolding and crossfading.", 3 | "CIAdditionCompositing": "Adds color components to achieve a brightening effect.", 4 | "CIAffineClamp": "Performs an affine transform on a source image and then clamps the pixels at the edge of the transformed image, extending them outwards.", 5 | "CIAffineTile": "Applies an affine transform to an image and then tiles the transformed image.", 6 | "CIAffineTransform": "Applies an affine transform to an image.", 7 | "CIAreaAverage": "Returns a single-pixel image that contains the average color for the region of interest.", 8 | "CIAreaHistogram": "Returns a 1D image (inputCount wide by one pixel high) that contains the component-wise histogram computed for the specified rectangular area.", 9 | "CIAreaMaximum": "Returns a single-pixel image that contains the maximum color components for the region of interest.", 10 | "CIAreaMaximumAlpha": "Returns a single-pixel image that contains the color vector with the maximum alpha value for the region of interest.", 11 | "CIAreaMinimum": "Returns a single-pixel image that contains the minimum color components for the region of interest.", 12 | "CIAreaMinimumAlpha": "Returns a single-pixel image that contains the color vector with the minimum alpha value for the region of interest.", 13 | "CIAztecCodeGenerator": "Generates an Aztec code (two-dimensional barcode) from input data.", 14 | "CIBarsSwipeTransition": "Transitions from one image to another by passing a bar over the source image.", 15 | 
"CIBlendWithAlphaMask": "Uses alpha values from a mask to interpolate between an image and the background.", 16 | "CIBlendWithMask": "Uses values from a grayscale mask to interpolate between an image and the background.", 17 | "CIBloom": "Softens edges and applies a pleasant glow to an image.", 18 | "CIBoxBlur": "Blurs an image using a box-shaped convolution kernel.", 19 | "CIBumpDistortion": "Creates a bump that originates at a specified point in the image.", 20 | "CIBumpDistortionLinear": "Creates a concave or convex distortion that originates from a line in the image.", 21 | "CICheckerboardGenerator": "Generates a checkerboard pattern.", 22 | "CICircleSplashDistortion": "Distorts the pixels starting at the circumference of a circle and emanating outward.", 23 | "CICircularScreen": "Simulates a circular-shaped halftone screen.", 24 | "CICircularWrap": "Wraps an image around a transparent circle.", 25 | "CICMYKHalftone": "Creates a color, halftoned rendition of the source image, using cyan, magenta, yellow, and black inks over a white page.", 26 | "CICode128BarcodeGenerator": "Generates a Code 128 one-dimensional barcode from input data.", 27 | "CIColorBlendMode": "Uses the luminance values of the background with the hue and saturation values of the source image.", 28 | "CIColorBurnBlendMode": "Darkens the background image samples to reflect the source image samples.", 29 | "CIColorClamp": "Modifies color values to keep them within a specified range.", 30 | "CIColorControls": "Adjusts saturation, brightness, and contrast values.", 31 | "CIColorCrossPolynomial": "Modifies the pixel values in an image by applying a set of polynomial cross-products.", 32 | "CIColorCube": "Uses a three-dimensional color table to transform the source image pixels.", 33 | "CIColorCubeWithColorSpace": "Uses a three-dimensional color table to transform the source image pixels and maps the result to a specified color space.", 34 | "CIColorDodgeBlendMode": "Brightens the background image samples to reflect the source image samples.", 35 | "CIColorInvert": "Inverts the colors in an image.", 36 | "CIColorMap": "Performs a nonlinear transformation of source color values using mapping values provided in a table.", 37 | "CIColorMatrix": "Multiplies source color values and adds a bias factor to each color component.", 38 | "CIColorMonochrome": "Remaps colors so they fall within shades of a single color.", 39 | "CIColorPolynomial": "Modifies the pixel values in an image by applying a set of cubic polynomials.", 40 | "CIColorPosterize": "Remaps red, green, and blue color components to the number of brightness values you specify for each color component.", 41 | "CIColumnAverage": "Returns a 1-pixel high image that contains the average color for each scan column.", 42 | "CIComicEffect": "Simulates a comic book drawing by outlining edges and applying a color halftone effect.", 43 | "CIConstantColorGenerator": "Generates a solid color.", 44 | "CIConvolution3X3": "Modifies pixel values by performing a 3x3 matrix convolution.", 45 | "CIConvolution5X5": "Modifies pixel values by performing a 5x5 matrix convolution.", 46 | "CIConvolution7X7": "Modifies pixel values by performing a 7x7 matrix convolution.", 47 | "CIConvolution9Horizontal": "Modifies pixel values by performing a 9-element horizontal convolution.", 48 | "CIConvolution9Vertical": "Modifies pixel values by performing a 9-element vertical convolution.", 49 | "CICopyMachineTransition": "Transitions from one image to another by simulating the effect of a copy machine.", 
50 | "CICrop": "Applies a crop to an image.", 51 | "CICrystallize": "Creates polygon-shaped color blocks by aggregating source pixel-color values.", 52 | "CIDarkenBlendMode": "Creates composite image samples by choosing the darker samples (from either the source image or the background).", 53 | "CIDepthOfField": "Simulates a depth of field effect.", 54 | "CIDifferenceBlendMode": "Subtracts either the source image sample color from the background image sample color, or the reverse, depending on which sample has the greater brightness value.", 55 | "CIDiscBlur": "Blurs an image using a disc-shaped convolution kernel.", 56 | "CIDisintegrateWithMaskTransition": "Transitions from one image to another using the shape defined by a mask.", 57 | "CIDisplacementDistortion": "Applies the grayscale values of the second image to the first image.", 58 | "CIDissolveTransition": "Uses a dissolve to transition from one image to another.", 59 | "CIDivideBlendMode": "Divides the background image sample color from the source image sample color.", 60 | "CIDotScreen": "Simulates the dot patterns of a halftone screen.", 61 | "CIDroste": "Recursively draws a portion of an image in imitation of an M. C. Escher drawing.", 62 | "CIEdges": "Finds all edges in an image and displays them in color.", 63 | "CIEdgeWork": "Produces a stylized black-and-white rendition of an image that looks similar to a woodblock cutout.", 64 | "CIEightfoldReflectedTile": "Produces a tiled image from a source image by applying an 8-way reflected symmetry.", 65 | "CIExclusionBlendMode": "Produces an effect similar to that produced by the CIDifferenceBlendMode filter but with lower contrast.", 66 | "CIExposureAdjust": "Adjusts the exposure setting for an image similar to the way you control exposure for a camera when you change the F-stop.", 67 | "CIFalseColor": "Maps luminance to a color ramp of two colors.", 68 | "CIFlashTransition": "Transitions from one image to another by creating a flash.", 69 | "CIFourfoldReflectedTile": "Produces a tiled image from a source image by applying a 4-way reflected symmetry.", 70 | "CIFourfoldRotatedTile": "Produces a tiled image from a source image by rotating the source image at increments of 90 degrees.", 71 | "CIFourfoldTranslatedTile": "Produces a tiled image from a source image by applying 4 translation operations.", 72 | "CIGammaAdjust": "Adjusts midtone brightness.", 73 | "CIGaussianBlur": "Spreads source pixels by an amount specified by a Gaussian distribution.", 74 | "CIGaussianGradient": "Generates a gradient that varies from one color to another using a Gaussian distribution.", 75 | "CIGlassDistortion": "Distorts an image by applying a glass-like texture.", 76 | "CIGlassLozenge": "Creates a lozenge-shaped lens and distorts the portion of the image over which the lens is placed.", 77 | "CIGlideReflectedTile": "Produces a tiled image from a source image by translating and smearing the image.", 78 | "CIGloom": "Dulls the highlights of an image.", 79 | "CIHardLightBlendMode": "Either multiplies or screens colors, depending on the source image sample color.", 80 | "CIHatchedScreen": "Simulates the hatched pattern of a halftone screen.", 81 | "CIHeightFieldFromMask": "Produces a continuous three-dimensional, loft-shaped height field from a grayscale mask.", 82 | "CIHexagonalPixellate": "Maps an image to colored hexagons whose color is defined by the replaced pixels.", 83 | "CIHighlightShadowAdjust": "Adjust the tonal mapping of an image while preserving spatial detail.", 84 | 
"CIHistogramDisplayFilter": "Generates a histogram image from the output of the CIAreaHistogram filter.", 85 | "CIHoleDistortion": "Creates a circular area that pushes the image pixels outward, distorting those pixels closest to the circle the most.", 86 | "CIHueAdjust": "Changes the overall hue, or tint, of the source pixels.", 87 | "CIHueBlendMode": "Uses the luminance and saturation values of the background image with the hue of the input image.", 88 | "CIKaleidoscope": "Produces a kaleidoscopic image from a source image by applying 12-way symmetry.", 89 | "CILanczosScaleTransform": "Produces a high-quality, scaled version of a source image.", 90 | "CILenticularHaloGenerator": "Simulates a lens flare.", 91 | "CILightenBlendMode": "Creates composite image samples by choosing the lighter samples (either from the source image or the background).", 92 | "CILightTunnel": "Rotates a portion of the input image specified by the center and radius parameters to give a tunneling effect.", 93 | "CILinearBurnBlendMode": "Darkens the background image samples to reflect the source image samples while also increasing contrast.", 94 | "CILinearDodgeBlendMode": "Brightens the background image samples to reflect the source image samples while also increasing contrast.", 95 | "CILinearGradient": "Generates a gradient that varies along a linear axis between two defined endpoints.", 96 | "CILinearToSRGBToneCurve": "Maps color intensity from a linear gamma curve to the sRGB color space.", 97 | "CILineOverlay": "Creates a sketch that outlines the edges of an image in black.", 98 | "CILineScreen": "Simulates the line pattern of a halftone screen.", 99 | "CILuminosityBlendMode": "Uses the hue and saturation of the background image with the luminance of the input image.", 100 | "CIMaskedVariableBlur": "Blurs the source image according to the brightness levels in a mask image.", 101 | "CIMaskToAlpha": "Converts a grayscale image to a white image that is masked by alpha.", 102 | "CIMaximumComponent": "Returns a grayscale image from max(r,g,b).", 103 | "CIMaximumCompositing": "Computes the maximum value, by color component, of two input images and creates an output image using the maximum values.", 104 | "CIMedianFilter": "Computes the median value for a group of neighboring pixels and replaces each pixel value with the median.", 105 | "CIMinimumComponent": "Returns a grayscale image from min(r,g,b).", 106 | "CIMinimumCompositing": "Computes the minimum value, by color component, of two input images and creates an output image using the minimum values.", 107 | "CIModTransition": "Transitions from one image to another by revealing the target image through irregularly shaped holes.", 108 | "CIMotionBlur": "Blurs an image to simulate the effect of using a camera that moves a specified angle and distance while capturing the image.", 109 | "CIMultiplyBlendMode": "Multiplies the input image samples with the background image samples.", 110 | "CIMultiplyCompositing": "Multiplies the color component of two input images and creates an output image using the multiplied values.", 111 | "CINoiseReduction": "Reduces noise using a threshold value to define what is considered noise.", 112 | "CIOpTile": "Segments an image, applying any specified scaling and rotation, and then assembles the image again to give an op art appearance.", 113 | "CIOverlayBlendMode": "Either multiplies or screens the input image samples with the background image samples, depending on the background color.", 114 | "CIPageCurlTransition": "Transitions from 
one image to another by simulating a curling page, revealing the new image as the page curls.", 115 | "CIPageCurlWithShadowTransition": "Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.", 116 | "CIParallelogramTile": "Warps an image by reflecting it in a parallelogram, and then tiles the result.", 117 | "CIPDF417BarcodeGenerator": "Generates a PDF417 code (two-dimensional barcode) from input data.", 118 | "CIPerspectiveCorrection": "Applies a perspective correction, transforming an arbitrary quadrilateral region in the source image to a rectangular output image.", 119 | "CIPerspectiveTile": "Applies a perspective transform to an image and then tiles the result.", 120 | "CIPerspectiveTransform": "Alters the geometry of an image to simulate the observer changing viewing position.", 121 | "CIPerspectiveTransformWithExtent": "Alters the geometry of a portion of an image to simulate the observer changing viewing position.", 122 | "CIPhotoEffectChrome": "Applies a preconfigured set of effects that imitate vintage photography film with exaggerated color.", 123 | "CIPhotoEffectFade": "Applies a preconfigured set of effects that imitate vintage photography film with diminished color.", 124 | "CIPhotoEffectInstant": "Applies a preconfigured set of effects that imitate vintage photography film with distorted colors.", 125 | "CIPhotoEffectMono": "Applies a preconfigured set of effects that imitate black-and-white photography film with low contrast.", 126 | "CIPhotoEffectNoir": "Applies a preconfigured set of effects that imitate black-and-white photography film with exaggerated contrast.", 127 | "CIPhotoEffectProcess": "Applies a preconfigured set of effects that imitate vintage photography film with emphasized cool colors.", 128 | "CIPhotoEffectTonal": "Applies a preconfigured set of effects that imitate black-and-white photography film without significantly altering contrast.", 129 | "CIPhotoEffectTransfer": "Applies a preconfigured set of effects that imitate vintage photography film with emphasized warm colors.", 130 | "CIPinchDistortion": "Creates a rectangular area that pinches source pixels inward, distorting those pixels closest to the rectangle the most.", 131 | "CIPinLightBlendMode": "Conditionally replaces background image samples with source image samples depending on the brightness of the source image samples.", 132 | "CIPixellate": "Makes an image blocky by mapping the image to colored squares whose color is defined by the replaced pixels.", 133 | "CIPointillize": "Renders the source image in a pointillistic style.", 134 | "CIQRCodeGenerator": "Generates a Quick Response code (two-dimensional barcode) from input data.", 135 | "CIRadialGradient": "Generates a gradient that varies radially between two circles having the same center.", 136 | "CIRandomGenerator": "Generates an image of infinite extent whose pixel values are made up of four independent, uniformly-distributed random numbers in the 0 to 1 range.", 137 | "CIRippleTransition": "Transitions from one image to another by creating a circular wave that expands from the center point, revealing the new image in the wake of the wave.", 138 | "CIRowAverage": "Returns a 1-pixel high image that contains the average color for each scan row.", 139 | "CISaturationBlendMode": "Uses the luminance and hue values of the background image with the saturation of the input image.", 140 | "CIScreenBlendMode": "Multiplies the inverse of the input image samples with the inverse of the 
background image samples.", 141 | "CISepiaTone": "Maps the colors of an image to various shades of brown.", 142 | "CIShadedMaterial": "Produces a shaded image from a height field.", 143 | "CISharpenLuminance": "Increases image detail by sharpening.", 144 | "CISixfoldReflectedTile": "Produces a tiled image from a source image by applying a 6-way reflected symmetry.", 145 | "CISixfoldRotatedTile": "Produces a tiled image from a source image by rotating the source image at increments of 60 degrees.", 146 | "CISmoothLinearGradient": "Generates a gradient that uses an S-curve function to blend colors along a linear axis between two defined endpoints.", 147 | "CISoftLightBlendMode": "Either darkens or lightens colors, depending on the input image sample color.", 148 | "CISourceAtopCompositing": "Places the input image over the background image, then uses the luminance of the background image to determine what to show.", 149 | "CISourceInCompositing": "Uses the background image to define what to leave in the input image, effectively cropping the input image.", 150 | "CISourceOutCompositing": "Uses the background image to define what to take out of the input image.", 151 | "CISourceOverCompositing": "Places the input image over the input background image.", 152 | "CISpotColor": "Replaces one or more color ranges with spot colors.", 153 | "CISpotLight": "Applies a directional spotlight effect to an image.", 154 | "CISRGBToneCurveToLinear": "Maps color intensity from the sRGB color space to a linear gamma curve.", 155 | "CIStarShineGenerator": "Generates a starburst pattern that is similar to a supernova; can be used to simulate a lens flare.", 156 | "CIStraightenFilter": "Rotates the source image by the specified angle in radians.", 157 | "CIStretchCrop": "Distorts an image by stretching and or cropping it to fit a target size.", 158 | "CIStripesGenerator": "Generates a stripe pattern.", 159 | "CISubtractBlendMode": "Subtracts the background image sample color from the source image sample color.", 160 | "CISunbeamsGenerator": "Generates a sun effect.", 161 | "CISwipeTransition": "Transitions from one image to another by simulating a swiping action.", 162 | "CITemperatureAndTint": "Adapts the reference white point for an image.", 163 | "CIToneCurve": "Adjusts tone response of the R, G, and B channels of an image.", 164 | "CITorusLensDistortion": "Creates a torus-shaped lens and distorts the portion of the image over which the lens is placed.", 165 | "CITriangleKaleidoscope": "Maps a triangular portion of an input image to create a kaleidoscope effect.", 166 | "CITriangleTile": "Maps a triangular portion of image to a triangular area and then tiles the result.", 167 | "CITwelvefoldReflectedTile": "Produces a tiled image from a source image by rotating the source image at increments of 30 degrees.", 168 | "CITwirlDistortion": "Rotates pixels around a point to give a twirling effect.", 169 | "CIUnsharpMask": "Increases the contrast of the edges between pixels of different colors in an image.", 170 | "CIVibrance": "Adjusts the saturation of an image while keeping pleasing skin tones.", 171 | "CIVignette": "Reduces the brightness of an image at the periphery.", 172 | "CIVignetteEffect": "Modifies the brightness of an image around the periphery of a specified region.", 173 | "CIVortexDistortion": "Rotates pixels around a point to simulate a vortex.", 174 | "CIWhitePointAdjust": "Adjusts the reference white point for an image and maps all colors in the source using the new reference.", 175 | "CIZoomBlur": 
"Simulates the effect of zooming the camera while capturing the image." 176 | } 177 | -------------------------------------------------------------------------------- /GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 56; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | CE67B0D12C2C82A4003E692B /* docLookup.json in Resources */ = {isa = PBXBuildFile; fileRef = CE67B0D02C2C82A4003E692B /* docLookup.json */; }; 11 | CE9A3AC02C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */; }; 12 | CE9A3AC22C2B75090085C241 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AC12C2B75090085C241 /* ContentView.swift */; }; 13 | CE9A3AE82C2B75260085C241 /* abstracts.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD12C2B75260085C241 /* abstracts.json */; }; 14 | CE9A3AE92C2B75260085C241 /* CIImage-Generation.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */; }; 15 | CE9A3AEB2C2B75260085C241 /* FunctionMinima.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD42C2B75260085C241 /* FunctionMinima.json */; }; 16 | CE9A3AEC2C2B75260085C241 /* MissingParameterDocumentation.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */; }; 17 | CE9A3AED2C2B75260085C241 /* CIImage-Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */; }; 18 | CE9A3AEE2C2B75260085C241 /* CIImage-Filters.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */; }; 19 | CE9A3AEF2C2B75260085C241 /* Image-Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */; }; 20 | /* End PBXBuildFile section */ 21 | 22 | /* Begin PBXFileReference section */ 23 | CE67B0D02C2C82A4003E692B /* docLookup.json */ = {isa = PBXFileReference; lastKnownFileType = text.json; path = docLookup.json; sourceTree = ""; }; 24 | CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = GenerateSwiftUICoreImage.app; sourceTree = BUILT_PRODUCTS_DIR; }; 25 | CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GenerateSwiftUICoreImageApp.swift; sourceTree = ""; }; 26 | CE9A3AC12C2B75090085C241 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; 27 | CE9A3AD12C2B75260085C241 /* abstracts.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = abstracts.json; sourceTree = ""; }; 28 | CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Generation.swift"; sourceTree = ""; }; 29 | CE9A3AD42C2B75260085C241 /* FunctionMinima.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = FunctionMinima.json; sourceTree = ""; }; 30 | CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = MissingParameterDocumentation.json; sourceTree = ""; }; 31 | CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Extensions.swift"; sourceTree = ""; }; 32 | CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CIImage-Filters.swift"; sourceTree = ""; }; 33 | CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Image-Extensions.swift"; sourceTree = ""; }; 34 | CE9A3AF42C2B7B6A0085C241 /* GenerateSwiftUICoreImage.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = GenerateSwiftUICoreImage.entitlements; sourceTree = ""; }; 35 | /* End PBXFileReference section */ 36 | 37 | /* Begin PBXFrameworksBuildPhase section */ 38 | CE9A3AB92C2B75090085C241 /* Frameworks */ = { 39 | isa = PBXFrameworksBuildPhase; 40 | buildActionMask = 2147483647; 41 | files = ( 42 | ); 43 | runOnlyForDeploymentPostprocessing = 0; 44 | }; 45 | /* End PBXFrameworksBuildPhase section */ 46 | 47 | /* Begin PBXGroup section */ 48 | CE9A3AB32C2B75090085C241 = { 49 | isa = PBXGroup; 50 | children = ( 51 | CE9A3ABE2C2B75090085C241 /* GenerateSwiftUICoreImage */, 52 | CE9A3AD62C2B75260085C241 /* Generator */, 53 | CE9A3ADA2C2B75260085C241 /* Sources */, 54 | CE9A3ABD2C2B75090085C241 /* Products */, 55 | ); 56 | sourceTree = ""; 57 | }; 58 | CE9A3ABD2C2B75090085C241 /* Products */ = { 59 | isa = PBXGroup; 60 | children = ( 61 | CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */, 62 | ); 63 | name = Products; 64 | sourceTree = ""; 65 | }; 66 | CE9A3ABE2C2B75090085C241 /* GenerateSwiftUICoreImage */ = { 67 | isa = PBXGroup; 68 | children = ( 69 | CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */, 70 | CE9A3AC12C2B75090085C241 /* ContentView.swift */, 71 | CE9A3AF42C2B7B6A0085C241 /* GenerateSwiftUICoreImage.entitlements */, 72 | ); 73 | path = GenerateSwiftUICoreImage; 74 | sourceTree = ""; 75 | }; 76 | CE9A3AD62C2B75260085C241 /* Generator */ = { 77 | isa = PBXGroup; 78 | children = ( 79 | CE67B0D02C2C82A4003E692B /* docLookup.json */, 80 | CE9A3AD12C2B75260085C241 /* abstracts.json */, 81 | CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */, 82 | CE9A3AD42C2B75260085C241 /* FunctionMinima.json */, 83 | CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */, 84 | ); 85 | name = Generator; 86 | path = ../Generator; 87 | sourceTree = ""; 88 | }; 89 | CE9A3ADA2C2B75260085C241 /* Sources */ = { 90 | isa = PBXGroup; 91 | children = ( 92 | CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */, 93 | CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */, 94 | CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */, 95 | ); 96 | name = Sources; 97 | path = ../Sources; 98 | sourceTree = ""; 99 | }; 100 | /* End PBXGroup section */ 101 | 102 | /* Begin PBXNativeTarget section */ 103 | CE9A3ABB2C2B75090085C241 /* GenerateSwiftUICoreImage */ = { 104 | isa = PBXNativeTarget; 105 | buildConfigurationList = CE9A3ACB2C2B750B0085C241 /* Build configuration list for PBXNativeTarget "GenerateSwiftUICoreImage" */; 106 | buildPhases = ( 107 | CE9A3AB82C2B75090085C241 /* Sources */, 108 | CE9A3AB92C2B75090085C241 /* Frameworks */, 109 | CE9A3ABA2C2B75090085C241 /* Resources */, 110 | ); 111 | buildRules = ( 112 | ); 113 
| dependencies = ( 114 | ); 115 | name = GenerateSwiftUICoreImage; 116 | productName = GenerateSwiftUICoreImage; 117 | productReference = CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */; 118 | productType = "com.apple.product-type.application"; 119 | }; 120 | /* End PBXNativeTarget section */ 121 | 122 | /* Begin PBXProject section */ 123 | CE9A3AB42C2B75090085C241 /* Project object */ = { 124 | isa = PBXProject; 125 | attributes = { 126 | BuildIndependentTargetsInParallel = 1; 127 | LastSwiftUpdateCheck = 1540; 128 | LastUpgradeCheck = 1540; 129 | TargetAttributes = { 130 | CE9A3ABB2C2B75090085C241 = { 131 | CreatedOnToolsVersion = 15.4; 132 | }; 133 | }; 134 | }; 135 | buildConfigurationList = CE9A3AB72C2B75090085C241 /* Build configuration list for PBXProject "GenerateSwiftUICoreImage" */; 136 | compatibilityVersion = "Xcode 14.0"; 137 | developmentRegion = en; 138 | hasScannedForEncodings = 0; 139 | knownRegions = ( 140 | en, 141 | Base, 142 | ); 143 | mainGroup = CE9A3AB32C2B75090085C241; 144 | productRefGroup = CE9A3ABD2C2B75090085C241 /* Products */; 145 | projectDirPath = ""; 146 | projectRoot = ""; 147 | targets = ( 148 | CE9A3ABB2C2B75090085C241 /* GenerateSwiftUICoreImage */, 149 | ); 150 | }; 151 | /* End PBXProject section */ 152 | 153 | /* Begin PBXResourcesBuildPhase section */ 154 | CE9A3ABA2C2B75090085C241 /* Resources */ = { 155 | isa = PBXResourcesBuildPhase; 156 | buildActionMask = 2147483647; 157 | files = ( 158 | CE9A3AE82C2B75260085C241 /* abstracts.json in Resources */, 159 | CE9A3AEC2C2B75260085C241 /* MissingParameterDocumentation.json in Resources */, 160 | CE9A3AEB2C2B75260085C241 /* FunctionMinima.json in Resources */, 161 | CE67B0D12C2C82A4003E692B /* docLookup.json in Resources */, 162 | ); 163 | runOnlyForDeploymentPostprocessing = 0; 164 | }; 165 | /* End PBXResourcesBuildPhase section */ 166 | 167 | /* Begin PBXSourcesBuildPhase section */ 168 | CE9A3AB82C2B75090085C241 /* Sources */ = { 169 | isa = PBXSourcesBuildPhase; 170 | buildActionMask = 2147483647; 171 | files = ( 172 | CE9A3AE92C2B75260085C241 /* CIImage-Generation.swift in Sources */, 173 | CE9A3AC22C2B75090085C241 /* ContentView.swift in Sources */, 174 | CE9A3AEE2C2B75260085C241 /* CIImage-Filters.swift in Sources */, 175 | CE9A3AEF2C2B75260085C241 /* Image-Extensions.swift in Sources */, 176 | CE9A3AC02C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift in Sources */, 177 | CE9A3AED2C2B75260085C241 /* CIImage-Extensions.swift in Sources */, 178 | ); 179 | runOnlyForDeploymentPostprocessing = 0; 180 | }; 181 | /* End PBXSourcesBuildPhase section */ 182 | 183 | /* Begin XCBuildConfiguration section */ 184 | CE9A3AC92C2B750B0085C241 /* Debug */ = { 185 | isa = XCBuildConfiguration; 186 | buildSettings = { 187 | ALWAYS_SEARCH_USER_PATHS = NO; 188 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; 189 | CLANG_ANALYZER_NONNULL = YES; 190 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 191 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 192 | CLANG_ENABLE_MODULES = YES; 193 | CLANG_ENABLE_OBJC_ARC = YES; 194 | CLANG_ENABLE_OBJC_WEAK = YES; 195 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 196 | CLANG_WARN_BOOL_CONVERSION = YES; 197 | CLANG_WARN_COMMA = YES; 198 | CLANG_WARN_CONSTANT_CONVERSION = YES; 199 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 200 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 201 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 202 | CLANG_WARN_EMPTY_BODY = YES; 203 | CLANG_WARN_ENUM_CONVERSION = YES; 204 | 
CLANG_WARN_INFINITE_RECURSION = YES; 205 | CLANG_WARN_INT_CONVERSION = YES; 206 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 207 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 208 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 209 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 210 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 211 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 212 | CLANG_WARN_STRICT_PROTOTYPES = YES; 213 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 214 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 215 | CLANG_WARN_UNREACHABLE_CODE = YES; 216 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 217 | COPY_PHASE_STRIP = NO; 218 | DEBUG_INFORMATION_FORMAT = dwarf; 219 | ENABLE_STRICT_OBJC_MSGSEND = YES; 220 | ENABLE_TESTABILITY = YES; 221 | ENABLE_USER_SCRIPT_SANDBOXING = YES; 222 | GCC_C_LANGUAGE_STANDARD = gnu17; 223 | GCC_DYNAMIC_NO_PIC = NO; 224 | GCC_NO_COMMON_BLOCKS = YES; 225 | GCC_OPTIMIZATION_LEVEL = 0; 226 | GCC_PREPROCESSOR_DEFINITIONS = ( 227 | "DEBUG=1", 228 | "$(inherited)", 229 | ); 230 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 231 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 232 | GCC_WARN_UNDECLARED_SELECTOR = YES; 233 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 234 | GCC_WARN_UNUSED_FUNCTION = YES; 235 | GCC_WARN_UNUSED_VARIABLE = YES; 236 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES; 237 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 238 | MTL_FAST_MATH = YES; 239 | ONLY_ACTIVE_ARCH = YES; 240 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; 241 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 242 | }; 243 | name = Debug; 244 | }; 245 | CE9A3ACA2C2B750B0085C241 /* Release */ = { 246 | isa = XCBuildConfiguration; 247 | buildSettings = { 248 | ALWAYS_SEARCH_USER_PATHS = NO; 249 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; 250 | CLANG_ANALYZER_NONNULL = YES; 251 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 252 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 253 | CLANG_ENABLE_MODULES = YES; 254 | CLANG_ENABLE_OBJC_ARC = YES; 255 | CLANG_ENABLE_OBJC_WEAK = YES; 256 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 257 | CLANG_WARN_BOOL_CONVERSION = YES; 258 | CLANG_WARN_COMMA = YES; 259 | CLANG_WARN_CONSTANT_CONVERSION = YES; 260 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 261 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 262 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 263 | CLANG_WARN_EMPTY_BODY = YES; 264 | CLANG_WARN_ENUM_CONVERSION = YES; 265 | CLANG_WARN_INFINITE_RECURSION = YES; 266 | CLANG_WARN_INT_CONVERSION = YES; 267 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 268 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 269 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 270 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 271 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 272 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 273 | CLANG_WARN_STRICT_PROTOTYPES = YES; 274 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 275 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 276 | CLANG_WARN_UNREACHABLE_CODE = YES; 277 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 278 | COPY_PHASE_STRIP = NO; 279 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 280 | ENABLE_NS_ASSERTIONS = NO; 281 | ENABLE_STRICT_OBJC_MSGSEND = YES; 282 | ENABLE_USER_SCRIPT_SANDBOXING = YES; 283 | GCC_C_LANGUAGE_STANDARD = gnu17; 284 | GCC_NO_COMMON_BLOCKS = YES; 285 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 286 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 287 | GCC_WARN_UNDECLARED_SELECTOR = YES; 288 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 289 | GCC_WARN_UNUSED_FUNCTION = YES; 290 | 
GCC_WARN_UNUSED_VARIABLE = YES; 291 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES; 292 | MTL_ENABLE_DEBUG_INFO = NO; 293 | MTL_FAST_MATH = YES; 294 | SWIFT_COMPILATION_MODE = wholemodule; 295 | }; 296 | name = Release; 297 | }; 298 | CE9A3ACC2C2B750B0085C241 /* Debug */ = { 299 | isa = XCBuildConfiguration; 300 | buildSettings = { 301 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 302 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 303 | CODE_SIGN_ENTITLEMENTS = GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements; 304 | "CODE_SIGN_IDENTITY[sdk=macosx*]" = "Apple Development"; 305 | CODE_SIGN_STYLE = Automatic; 306 | CURRENT_PROJECT_VERSION = 1; 307 | DEVELOPMENT_ASSET_PATHS = "\"GenerateSwiftUICoreImage/Preview Content\""; 308 | DEVELOPMENT_TEAM = 3SP7MRA6P9; 309 | ENABLE_HARDENED_RUNTIME = YES; 310 | ENABLE_PREVIEWS = YES; 311 | GENERATE_INFOPLIST_FILE = YES; 312 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; 313 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES; 314 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES; 315 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES; 316 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES; 317 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES; 318 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault; 319 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; 320 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; 321 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; 322 | IPHONEOS_DEPLOYMENT_TARGET = 17.5; 323 | LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; 324 | "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; 325 | MACOSX_DEPLOYMENT_TARGET = 14.5; 326 | MARKETING_VERSION = 1.0; 327 | PRODUCT_BUNDLE_IDENTIFIER = com.gigliwood.GenerateSwiftUICoreImage; 328 | PRODUCT_NAME = "$(TARGET_NAME)"; 329 | SDKROOT = auto; 330 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx"; 331 | SWIFT_EMIT_LOC_STRINGS = YES; 332 | SWIFT_VERSION = 5.0; 333 | TARGETED_DEVICE_FAMILY = "1,2"; 334 | }; 335 | name = Debug; 336 | }; 337 | CE9A3ACD2C2B750B0085C241 /* Release */ = { 338 | isa = XCBuildConfiguration; 339 | buildSettings = { 340 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 341 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 342 | CODE_SIGN_ENTITLEMENTS = GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements; 343 | "CODE_SIGN_IDENTITY[sdk=macosx*]" = "Apple Development"; 344 | CODE_SIGN_STYLE = Automatic; 345 | CURRENT_PROJECT_VERSION = 1; 346 | DEVELOPMENT_ASSET_PATHS = "\"GenerateSwiftUICoreImage/Preview Content\""; 347 | DEVELOPMENT_TEAM = 3SP7MRA6P9; 348 | ENABLE_HARDENED_RUNTIME = YES; 349 | ENABLE_PREVIEWS = YES; 350 | GENERATE_INFOPLIST_FILE = YES; 351 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES; 352 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES; 353 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES; 354 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = 
YES; 355 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES; 356 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES; 357 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault; 358 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault; 359 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; 360 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; 361 | IPHONEOS_DEPLOYMENT_TARGET = 17.5; 362 | LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; 363 | "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; 364 | MACOSX_DEPLOYMENT_TARGET = 14.5; 365 | MARKETING_VERSION = 1.0; 366 | PRODUCT_BUNDLE_IDENTIFIER = com.gigliwood.GenerateSwiftUICoreImage; 367 | PRODUCT_NAME = "$(TARGET_NAME)"; 368 | SDKROOT = auto; 369 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx"; 370 | SWIFT_EMIT_LOC_STRINGS = YES; 371 | SWIFT_VERSION = 5.0; 372 | TARGETED_DEVICE_FAMILY = "1,2"; 373 | }; 374 | name = Release; 375 | }; 376 | /* End XCBuildConfiguration section */ 377 | 378 | /* Begin XCConfigurationList section */ 379 | CE9A3AB72C2B75090085C241 /* Build configuration list for PBXProject "GenerateSwiftUICoreImage" */ = { 380 | isa = XCConfigurationList; 381 | buildConfigurations = ( 382 | CE9A3AC92C2B750B0085C241 /* Debug */, 383 | CE9A3ACA2C2B750B0085C241 /* Release */, 384 | ); 385 | defaultConfigurationIsVisible = 0; 386 | defaultConfigurationName = Release; 387 | }; 388 | CE9A3ACB2C2B750B0085C241 /* Build configuration list for PBXNativeTarget "GenerateSwiftUICoreImage" */ = { 389 | isa = XCConfigurationList; 390 | buildConfigurations = ( 391 | CE9A3ACC2C2B750B0085C241 /* Debug */, 392 | CE9A3ACD2C2B750B0085C241 /* Release */, 393 | ); 394 | defaultConfigurationIsVisible = 0; 395 | defaultConfigurationName = Release; 396 | }; 397 | /* End XCConfigurationList section */ 398 | }; 399 | rootObject = CE9A3AB42C2B75090085C241 /* Project object */; 400 | } 401 | -------------------------------------------------------------------------------- /Generator/CIImage-Generation.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CIImage-Generation.swift 3 | // SwiftUI Core Image 4 | // 5 | // Created by Dan Wood on 4/27/23. 6 | // 7 | // When executed, this outputs Swift code that can be pasted into the file "CIImage+Generated.swift". 8 | // 9 | // This will run under iOS or macOS and the resulting code is almost the same. Notably in affineClamp and affineTile the default values are not 10 | // the same. Also as noted in the documentation that we generate, the `cubeDimension` parameter has a different range between iOS and macOS. 
11 | 12 | import Foundation 13 | import CoreImage 14 | import CoreImage.CIFilterBuiltins 15 | 16 | #if canImport(UIKit) 17 | private typealias AffineTransform = CGAffineTransform 18 | #elseif canImport(AppKit) 19 | private typealias AffineTransform = NSAffineTransform 20 | #endif 21 | 22 | private var unknownProperties: [String: [String: String]] = [:] 23 | 24 | func dumpFilters() { 25 | 26 | /* 27 | 28 | New documentation base found at 29 | https://developer.apple.com/documentation/coreimage 30 | or 31 | https://developer.apple.com/documentation/coreimage/cifilter 32 | 33 | 15 categories. Open each in a tab. Select all, copy, paste into rich text TextEdit doc. Save as HTML. 34 | 35 | Copy this source, then in terminal, grep out the lines I want: 36 | 37 | pbpaste | grep 'class func' | grep 'any CIFilter ' | sort | uniq > ~/Desktop/AllFunctions.html 38 | 39 | (There are a few duplicated functions; not gonna worry about those right now) 40 | 41 | In BBEdit, remove the stuff before the 42 | 43 | From that, in BBEdit, grep replace all lines: 44 | 45 | ^.+class func ([^>]+)\(\) -> any CIFilter & ([^>]+)

46 | to: 47 | 48 | "\2": "\1", 49 | 50 | and then… 51 | 52 | ^.+class func ([^(]+).+? any CIFilter & ([^<]+)

53 | 54 | to: 55 | 56 | "\2": "\1", 57 | 58 | 59 | Save as RawLookup.json to Desktop 60 | 61 | cat ~/Desktop/RawLookup.json | sort | uniq > ~/Desktop/docLookup.json 62 | 63 | Now edit to include { and } and remove last comma 64 | 65 | This file lets us know the documentation URL fragment to append to https://developer.apple.com/documentation/coreimage/cifilter/ 66 | */ 67 | 68 | guard let url = Bundle.main.url(forResource: "docLookup", withExtension: "json"), 69 | let data = try? Data(contentsOf: url), 70 | let json = try? JSONSerialization.jsonObject(with: data, options: []), 71 | let docLookup: [String: String] = json as? [String: String] 72 | else { print("// 🛑 can't load docLookup.json"); return } 73 | 74 | 75 | /* 76 | Load abstracts for all functions that are documented on the OLD reference page. Still, some of these descriptions are a bit more descriptive than the built-in descriptions. 77 | 78 | Possible improvement: scrape the same pages that are used above to generate docLookup.json to get the most up-to-date abstracts from the web. 79 | 80 | Start with 81 | https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/ 82 | 83 | auto-expand all symbols 84 | 85 | get HTML source 86 | in BBEdit change all instances (with Grep) of: 87 | +href="#//apple_ref/doc/filter/ci/([^"]+)"\n +title="([^"]+)"> 88 | to: 89 | •"\1": "\2", 90 | 91 | Sort, extract lines starting with • 92 | Paste and preserve formatting into abstracts.json; fix the last line. 93 | Look for any little tweaks that may be needed. 94 | 95 | */ 96 | guard let url = Bundle.main.url(forResource: "abstracts", withExtension: "json"), 97 | let data = try? Data(contentsOf: url), 98 | let json = try? JSONSerialization.jsonObject(with: data, options: []), 99 | let abstractLookup: [String: String] = json as? [String: String] 100 | else { print("// 🛑 can't load abstracts.json"); return } 101 | 102 | /* 103 | A dictionary mapping filters (pretty function names) to override iOS versions when we have noted that the core image functions (or occasionally parameters of them) required newer OSs. 104 | 105 | Not sure where we got this originally! We may need to update some of these. 106 | 107 | */ 108 | guard let url = Bundle.main.url(forResource: "FunctionMinima", withExtension: "json"), 109 | let data = try? Data(contentsOf: url), 110 | let json = try? JSONSerialization.jsonObject(with: data, options: []), 111 | let functionMinima: [String: String] = json as? [String: String] 112 | else { print("// 🛑 can't load FunctionMinima.json"); return } 113 | 114 | /* Generate this list by running the code; it finds inputs missing documentation, replacing them with "_____TODO_____". Update the MissingParameterDocumentation.json file as this is improved. Documentation can come from whatever sources can be scraped together; use the "_NOTE" key just to notate how we found the information. 115 | */ 116 | guard let url = Bundle.main.url(forResource: "MissingParameterDocumentation", withExtension: "json"), 117 | let data = try? Data(contentsOf: url), 118 | let json = try? JSONSerialization.jsonObject(with: data, options: []), 119 | let forUnknownProperties = json as?
[String: [String: String]] 120 | else { print("// 🛑 can't load MissingParameterDocumentation.json"); return } 121 | unknownProperties = forUnknownProperties 122 | 123 | let ciFilterList = CIFilter.filterNames(inCategories: nil) 124 | 125 | var generators: [String: CIFilter] = [:] 126 | var imageToImage: [String: CIFilter] = [:] 127 | 128 | for filterName in ciFilterList { 129 | 130 | guard let filter = CIFilter(name: filterName) else { print("// 🛑 can't instantiate \(filterName)"); continue } 131 | 132 | if !filter.inputKeys.contains(kCIInputImageKey) { 133 | generators[filterName] = filter 134 | } else if filter.outputKeys.contains(kCIOutputImageKey) { 135 | imageToImage[filterName] = filter 136 | } else { 137 | print("// 🛑 Don't know what to do with \(filterName) - outputKeys = \(filter.outputKeys)") 138 | } 139 | } 140 | 141 | print("//") 142 | print("// Automatically generated by CIImage-Generation.swift - do not edit") 143 | print("//") 144 | print("") 145 | print("import Foundation") 146 | print("import CoreImage") 147 | print("import CoreImage.CIFilterBuiltins") 148 | print("import CoreML") 149 | print("import AVFoundation") 150 | print("") 151 | print("public extension CIImage {") 152 | print("") 153 | print("//") 154 | print("// MARK: IMAGE-TO-IMAGE FILTERS") 155 | print("//") 156 | for filterName in imageToImage.keys.sorted() { 157 | guard let filter: CIFilter = imageToImage[filterName] else { continue } 158 | outputImageToImage(filter, abstractLookup: abstractLookup, docLookup: docLookup, functionMinima: functionMinima) 159 | } 160 | print("") 161 | print("//") 162 | print("// MARK: GENERATORS") 163 | print("//") 164 | for filterName in generators.keys.sorted() { 165 | guard let filter: CIFilter = generators[filterName] else { continue } 166 | outputGeneratorFilter(filter, abstractLookup: abstractLookup, docLookup: docLookup, functionMinima: functionMinima) 167 | } 168 | 169 | // End of class extension 170 | print("}") 171 | print("\n\n\n\n\n\n\n") 172 | } 173 | 174 | // Use this to start collecting properties needing some documentation, to then put into MissingParameterDocumentation.json 175 | func dumpUnknownProperties() { 176 | do { 177 | let theJSONData = try JSONSerialization.data( 178 | withJSONObject: unknownProperties, 179 | options: [.sortedKeys, .prettyPrinted] 180 | ) 181 | if let theJSONText = String(data: theJSONData, 182 | encoding: String.Encoding.utf8) { 183 | print("\n\n\n_________________________\n\nDumped properties missing documentation = \n\n\n\(theJSONText)") 184 | } else { 185 | print("Unable to convert data to JSON") 186 | } 187 | } 188 | catch { 189 | print(error) 190 | } 191 | } 192 | 193 | private func outputGeneratorFilter(_ filter: CIFilter, abstractLookup: [String: String], docLookup: [String: String], functionMinima: [String: String]) { 194 | let filterName = filter.name 195 | 196 | let filtersThatAlreadyHaveInitializer: [String: String] = ["CIConstantColorGenerator": "init(color: CIColor)"] 197 | 198 | if let existingFunction: String = filtersThatAlreadyHaveInitializer[filterName] { 199 | print("// ℹ️ \(filterName) already has a CIImage initializer: \(existingFunction)") 200 | return 201 | } 202 | 203 | outputDocumentation(filter, isGenerator: true, abstractLookup: abstractLookup, docLookup: docLookup) 204 | outputOSVersion(filter, functionMinima: functionMinima) 205 | outputImageFunction(filter, isGenerator: true) 206 | } 207 | 208 | private func outputDocumentation(_ filter: CIFilter, isGenerator: Bool, abstractLookup: [String: String], 
docLookup: [String: String]) { 209 | 210 | let filterName = filter.name 211 | let description: String? = CIFilter.localizedDescription(forFilterName: filterName) 212 | let categories: Array = filter.attributes[kCIAttributeFilterCategories] as? Array ?? [] 213 | let filterDisplayName: String = filter.attributes[kCIAttributeFilterDisplayName] as? String ?? "" 214 | let documentationURL: URL? = filter.attributes[kCIAttributeReferenceDocumentation] as? URL 215 | 216 | // https://developer.apple.com/documentation/xcode/writing-symbol-documentation-in-your-source-files 217 | print("\n/// \(filterDisplayName)") 218 | print("///") 219 | if let description { 220 | if let abstract = abstractLookup[filterName], !abstract.hasPrefix("Returns "), abstract.count > description.count { 221 | // Replace description with longer abstract scraped from the website, unless it starts with 'Returns ' since we use that for the output. 222 | print("/// \(abstract)") 223 | } else { 224 | print("/// \(description)") 225 | } 226 | print("///") 227 | } 228 | 229 | // Convert, for example, CIAccordionFoldTransition to accordionFoldTransition 230 | let functionFilterNameCapitalized = filterName.dropFirst(2) 231 | var functionFilterName = (functionFilterNameCapitalized.first?.lowercased() ?? "") + functionFilterNameCapitalized.dropFirst() 232 | 233 | let manualNameLookup = ["CICMYKHalftone": "cmykHalftone", "CIPDF417BarcodeGenerator": "pdf417BarcodeGenerator", "CIQRCodeGenerator": "qrCodeGenerator"] 234 | if let foundManualLookup = manualNameLookup[filterName] { 235 | functionFilterName = foundManualLookup 236 | } 237 | 238 | // These are still in beta, so I'm not seeing them on the main category lists. https://developer.apple.com/documentation/coreimage/cifilter 239 | let manualURLLookup = ["CIAreaBoundsRed": "4401847-areaboundsred", 240 | "CIMaximumScaleTransform": "4401870-maximumscaletransform", 241 | "CIToneMapHeadroom": "4401878-tonemapheadroom", 242 | "CIAreaAlphaWeightedHistogram": "4401846-areaalphaweightedhistogram" 243 | ] 244 | 245 | let newDocURLFragment: String? 
246 | if let manualURLFragment = manualURLLookup[filterName] { 247 | newDocURLFragment = manualURLFragment 248 | } else { 249 | newDocURLFragment = docLookup[functionFilterName] 250 | } 251 | 252 | if let newDocURLFragment { 253 | print("/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/\(newDocURLFragment))") 254 | } else { 255 | let withoutSuffix = functionFilterName.replacingOccurrences(of: "Filter", with: "", options: [.backwards, .anchored]) 256 | if let newDocURLFragment = docLookup[withoutSuffix] { 257 | print("/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/\(newDocURLFragment))") 258 | } else { 259 | print("/// ⚠️ No documentation available for \(filterName)") 260 | } 261 | } 262 | 263 | if let documentationURL { 264 | if nil != abstractLookup[filterName] { 265 | let urlFragment: String 266 | #if canImport(UIKit) 267 | urlFragment = "http://developer.apple.com/library/ios" 268 | #elseif canImport(AppKit) 269 | urlFragment = "http://developer.apple.com/library/mac" 270 | #endif 271 | 272 | var urlString: String = documentationURL.absoluteString.replacingOccurrences(of: urlFragment, 273 | with: "https://developer.apple.com/library/archive", 274 | options: .anchored) 275 | urlString = urlString.replacingOccurrences(of: "https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html", with: "https://t.ly/Gyd6") 276 | 277 | 278 | 279 | print("/// [Classic Documentation](\(urlString))") 280 | } 281 | 282 | // Special cases for documentation 283 | if filterName == "CIDepthBlurEffect" { 284 | // Some helpful hints since this is otherwise undocumented 285 | print("/// [WWDC Video](https://devstreaming-cdn.apple.com/videos/wwdc/2017/508wdyl5rm2jy9z8/508/508_hd_image_editing_with_depth.mp4)") 286 | print("/// [WWDC Slides](https://devstreaming-cdn.apple.com/videos/wwdc/2017/508wdyl5rm2jy9z8/508/508_image_editing_with_depth.pdf)") 287 | } else if filterName == "CICoreMLModelFilter" { 288 | print("/// [WWDC Video](https://developer.apple.com/videos/play/wwdc2018-719/?time=2378)") 289 | } 290 | print("///") 291 | } 292 | if categories.count == 1, let category = categories.first { 293 | print("/// Category: \(CIFilter.localizedName(forCategory: category))") 294 | print("///") 295 | } else if categories.count > 1 { 296 | let prettyList: String = categories.map { CIFilter.localizedName(forCategory: $0) }.joined(separator: ", ") 297 | print("/// Categories: \(prettyList)") 298 | print("///") 299 | } 300 | print("///") 301 | print("/// - Parameters:") 302 | 303 | var adjustedInputKeys = filter.inputKeys.filter { $0 != kCIInputImageKey } 304 | if !isGenerator && filter.identityInputKeys.isEmpty && !filter.inputKeys.contains("inputBackgroundImage") { 305 | adjustedInputKeys.append("active") 306 | } 307 | for inputKey in adjustedInputKeys { 308 | guard inputKey != "active" else { 309 | print("/// - active: should this filter be applied") 310 | continue 311 | } 312 | guard let attributes = filter.attributes[inputKey] as? [String: AnyObject], 313 | let attributeClass = attributes[kCIAttributeClass] as? String 314 | else { 315 | print("/// - \(inputKey): 🛑 couldn't get input attributes") 316 | continue 317 | } 318 | 319 | let displayName: String = attributes[kCIAttributeDisplayName] as? String ?? "" // space-separated 320 | let longerInput: String = parameterName(displayName: displayName, filterName: filterName) 321 | var description: String = attributes[kCIAttributeDescription] as? 
String ?? "[unknown]" 322 | 323 | if nil == attributes[kCIAttributeDescription] { 324 | 325 | // TEMPORARY CODE TO COLLECT UNKNOWN PROPERTIES 326 | var foundUnknownPropertiesForFilter: [String: String] = unknownProperties[filterName] ?? [:] 327 | if nil == foundUnknownPropertiesForFilter[longerInput] { 328 | foundUnknownPropertiesForFilter[longerInput] = "_____TODO_____" 329 | } 330 | unknownProperties[filterName] = foundUnknownPropertiesForFilter 331 | 332 | if let missingParameters: [String: String] = unknownProperties[filterName], 333 | let replacementDocumentation: String = missingParameters[longerInput] { 334 | description = replacementDocumentation 335 | } 336 | } 337 | // Remove rounding information since we are passing in integers directly. 338 | description = description.replacing(" The value will be rounded to the nearest odd integer.", with: "") 339 | description = description.replacing(" Set to nil for automatic.", with: "") 340 | // Fix this weird ObjC style documentation 341 | description = description.replacing("Force a compact style Aztec code to @YES or @NO.", 342 | with: "A Boolean that specifies whether to force a compact style Aztec code.") 343 | description = description.replacing("Force compaction style to @YES or @NO.", 344 | with: "A Boolean value specifying whether to force compaction style.") 345 | 346 | print("/// - \(longerInput): \(description)", terminator: "") 347 | 348 | // For numbers, show the range on the same line 349 | switch attributeClass { 350 | case "NSNumber": 351 | guard attributes[kCIAttributeType] as? String != kCIAttributeTypeBoolean, longerInput != "extrapolate" else { break } 352 | guard longerInput != "cubeDimension" else { 353 | // Special case. MacOS and iOS report different values so show that here 354 | print("(2...64 iOS; 2...128 macOS)", terminator: "") 355 | break 356 | } 357 | let minimumValue: Float? = (attributes[kCIAttributeMin] as? NSNumber)?.floatValue 358 | let maximumValue: Float? = (attributes[kCIAttributeMax] as? NSNumber)?.floatValue 359 | // Ignore very large maximum value since it's not practical 360 | if let minimumValue, let maximumValue, maximumValue < 0x0800_0000_00000_0000 { 361 | print(" (\(minimumValue.format5)...\(maximumValue.format5))", terminator: "") 362 | } else if let minimumValue { 363 | print(" (\(minimumValue.format5)...)", terminator: "") 364 | } else if let maximumValue, maximumValue < 0x0800_0000_00000_0000 { 365 | print(" (...\(maximumValue.format5))", terminator: "") 366 | } 367 | 368 | default: 369 | break 370 | } 371 | print("") // finish up the line 372 | 373 | } 374 | 375 | 376 | if filter.outputKeys.contains(kCIOutputImageKey) { 377 | if isGenerator { 378 | if let abstract: String = abstractLookup[filterName], 379 | let match = abstract.firstMatch(of: /^Generates*\h/) { 380 | let abstractWithoutReturnsPrefix = abstract[match.range.upperBound...] 381 | let sentences = Array(abstractWithoutReturnsPrefix.split(separator: /\./)) 382 | let firstSentence = sentences.first ?? abstractWithoutReturnsPrefix 383 | print("/// - Returns: \(firstSentence)") 384 | } else if let description, 385 | let match = description.firstMatch(of: /^Generates*\h/) { 386 | let descriptionWithoutReturnsPrefix = description[match.range.upperBound...] 387 | let sentences = Array(descriptionWithoutReturnsPrefix.split(separator: /\./)) 388 | let firstSentence = sentences.first ?? 
descriptionWithoutReturnsPrefix 389 | print("/// - Returns: \(firstSentence)") 390 | } else { 391 | print("/// - Returns: new `CIImage`") 392 | } 393 | } else { 394 | var returnInfo: String 395 | if var abstract = abstractLookup[filterName], abstract.hasPrefix("Returns ") { 396 | abstract = String(abstract.dropFirst(8)) 397 | abstract = abstract.replacingOccurrences(of: ".", with: "", options: [.anchored, .backwards]) // remove any ending period 398 | returnInfo = abstract 399 | } else { 400 | returnInfo = "processed new `CIImage`" 401 | } 402 | if filter.identityInputKeys.isEmpty && filter.inputKeys.contains("inputBackgroundImage") { 403 | // Append info about when active is false 404 | returnInfo += ", or identity if `backgroundImage` is nil" 405 | } else if filter.identityInputKeys.isEmpty { 406 | // Append info about when active is false 407 | returnInfo += ", or identity if `active` is false" 408 | } else { 409 | // Append info about identity parameters 410 | returnInfo += " or identity if parameters result in no operation applied" 411 | 412 | // TODO: colorCrossPolynomial broken 413 | } 414 | print("/// - Returns: \(returnInfo)") 415 | 416 | } 417 | } 418 | } 419 | 420 | private func outputOSVersion(_ filter: CIFilter, functionMinima: [String: String]) { 421 | 422 | let filterName = filter.name 423 | var macOSVersion: String? = filter.attributes[kCIAttributeFilterAvailable_Mac] as? String 424 | if nil == Float(macOSVersion ?? "") { 425 | if filterName == "CIHistogramDisplayFilter" { 426 | macOSVersion = "10.9" // repair "10.?" with 10.9 from documentation 427 | } 428 | } 429 | 430 | if nil != macOSVersion?.firstMatch(of: /10\.[0-9]+/) && macOSVersion != "10.15" { 431 | macOSVersion = "10.15" // For minimum version of SwiftUI and most filter functions 432 | } 433 | 434 | var iOSVersion: String? = filter.attributes[kCIAttributeFilterAvailable_iOS] as? String 435 | if Float(iOSVersion ?? "") ?? 0 < 13 { 436 | iOSVersion = "13" // minimum version for SwiftUI and most filter functions 437 | } 438 | 439 | // Override versions of our functions when we have noted that the core image functions (or occasionally parameters of them) required newer OSs 440 | if let functionMinimum = functionMinima[filter.name.prettyFunction] { 441 | macOSVersion = functionMinimum 442 | if let convertedFromMacVersion = ["11.0": "14", "12.0": "15", "13.0": "16"][functionMinimum] { 443 | iOSVersion = convertedFromMacVersion 444 | } 445 | } 446 | 447 | if let macOSVersion, let iOSVersion { 448 | print("@available(iOS \(iOSVersion), macOS \(macOSVersion), *)") 449 | } 450 | } 451 | 452 | private func outputImageFunctionHeader(_ filter: CIFilter, isGenerator: Bool) { 453 | let filterName: String = filter.name 454 | let filterFunction: String = filterName.prettyFunction 455 | 456 | print("\(isGenerator ? "static " : "")func \(filterFunction)(", terminator: "") 457 | 458 | var inputParams: [String] = filter.inputKeys 459 | .filter { $0 != kCIInputImageKey } 460 | .map { inputKey in 461 | (inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? 
[:])) } // tuple of the inputKey and its attributes 462 | .compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in 463 | parameterStatement(inputKey: inputKey, inputAttributes: inputAttributes, filterName: filterName) 464 | } 465 | 466 | if !isGenerator && filter.identityInputKeys.isEmpty && !filter.inputKeys.contains("inputBackgroundImage"), 467 | let attributesForActiveParam: [String: AnyObject] = .some([kCIAttributeDisplayName: "Active" as NSString, 468 | kCIAttributeClass: "NSNumber" as NSString, 469 | kCIAttributeType: kCIAttributeTypeBoolean as NSString, 470 | kCIAttributeDefault: true as AnyObject, 471 | kCIAttributeIdentity: true as AnyObject]), 472 | let activeParameterStatement: String = parameterStatement(inputKey: "active", inputAttributes: attributesForActiveParam, filterName: filterName) { 473 | inputParams.append(activeParameterStatement) 474 | } 475 | let inputParamsOnOneLine = inputParams.joined(separator: ", ") 476 | let forceMultiLines: Bool = inputParamsOnOneLine.contains("//") 477 | if inputParamsOnOneLine.count + filterFunction.count >= 100 || forceMultiLines { 478 | print(inputParams.joined(separator: ",\n "), terminator: forceMultiLines ? "\n" : "") 479 | } else { 480 | print(inputParamsOnOneLine, terminator: "") 481 | } 482 | print(") -> CIImage {") 483 | } 484 | 485 | private func outputImageDictionaryFunction(_ filter: CIFilter, isGenerator: Bool) { 486 | 487 | assert(!isGenerator) // not supported for generators; none known to be needed 488 | let filterName: String = filter.name 489 | 490 | outputImageFunctionHeader(filter, isGenerator: isGenerator) 491 | 492 | outputIdentityGuards(filter) 493 | 494 | print(" // Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.") 495 | print(" guard let filter = CIFilter(name: \"\(filter.name)\", parameters: [", terminator: "") 496 | 497 | let otherInputSettingStatements: [String] = filter.inputKeys 498 | .filter { $0 != kCIInputImageKey } 499 | .map { inputKey in 500 | (inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) } // tuple of the inputKey and its attributes 501 | .compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in 502 | guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String 503 | else { return nil } 504 | let inputName: String = parameterName(displayName: displayName, filterName: filterName) 505 | return " \"\(inputKey)\": \(inputName)," 506 | } 507 | 508 | if !otherInputSettingStatements.isEmpty { 509 | print("\n") 510 | print(otherInputSettingStatements.joined(separator: "\n")) 511 | print(" ", terminator: "") 512 | } else { 513 | print(":", terminator: "") 514 | } 515 | 516 | print("]) else { return self }") 517 | print(" return filter.outputImage ?? CIImage.empty()") 518 | 519 | print("}") 520 | 521 | } 522 | 523 | private func outputIdentityGuards(_ filter: CIFilter) { 524 | let filterName: String = filter.name 525 | // doesn't make sense to have an identity function for generators 526 | // Guards for identity/inert values 527 | let identityComparisons: String 528 | 529 | if filter.identityInputKeys.isEmpty { 530 | if filter.inputKeys.contains("inputBackgroundImage") { 531 | identityComparisons = "let backgroundImage" 532 | } else { 533 | identityComparisons = "active" 534 | } 535 | } else { 536 | identityComparisons = filter.inputKeys 537 | .filter { $0 != kCIInputImageKey } 538 | .map { inputKey in 539 | (inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? 
[:])) } // tuple of the inputKey and its attributes 540 | .compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in 541 | guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String, 542 | let identityValue: Any = inputAttributes[kCIAttributeIdentity] 543 | else { return nil } 544 | 545 | let attributeType: String? = inputAttributes[kCIAttributeType] as? String 546 | let inputName: String = parameterName(displayName: displayName, filterName: filterName) 547 | guard hasReasonableDefaultValue(identityValue, attributeType: attributeType, inputName: inputName) 548 | else { return nil } 549 | 550 | let identityValueFormatted: String = formatSmart(identityValue, attributeType: attributeType, inputName: inputName, filterName: filterName) 551 | return "\(inputName) != \(identityValueFormatted)" 552 | } 553 | .joined(separator: " || ") 554 | } 555 | if !identityComparisons.isEmpty { 556 | print(" guard \(identityComparisons) else { return self }") 557 | print("") 558 | } 559 | } 560 | 561 | private func outputImageFunction(_ filter: CIFilter, isGenerator: Bool) { 562 | let filterName: String = filter.name 563 | let filterFunction: String = filterName.prettyFunction 564 | 565 | outputImageFunctionHeader(filter, isGenerator: isGenerator) 566 | 567 | if !isGenerator { 568 | outputIdentityGuards(filter) 569 | } 570 | print(" let filter = CIFilter.\(filterFunction)() // \(filterName)") 571 | if !isGenerator { 572 | print(" filter.inputImage = self") 573 | } 574 | 575 | let otherInputSettingStatements: String = filter.inputKeys 576 | .filter { $0 != kCIInputImageKey } 577 | .map { inputKey in 578 | (inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) } // tuple of the inputKey and its attributes 579 | .compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in 580 | guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String 581 | else { return nil } 582 | let inputName: String = parameterName(displayName: displayName, filterName: filterName) 583 | let attributeType: String? = inputAttributes[kCIAttributeType] as? String 584 | 585 | // Special case - barcode generators, for some reason, want all their parameters as Float. Let's upgrade it here to keep the API simple. 586 | if nil != filterFunction.firstMatch(of: /(?i)codeGenerator$/), 587 | let className = inputAttributes[kCIAttributeClass] as? String, 588 | let attributeType = inputAttributes[kCIAttributeType] as? String, 589 | className == "NSNumber" { 590 | if attributeType == kCIAttributeTypeBoolean { 591 | return " filter.\(inputName) = Float(\(inputName) ? 1 : 0)" 592 | } else { 593 | return " filter.\(inputName) = Float(\(inputName))" 594 | } 595 | } 596 | 597 | // Annoying to have these negative cases, but the instances where 598 | // we need to wrap in a float are much more numerous! 
599 | if !(filterFunction == "kMeans" && inputName == "count"), // this function's parameter wants an integer so leave alone 600 | !(filterFunction == "cannyEdgeDetector" && inputName == "hysteresisPasses"), 601 | !(filterFunction == "personSegmentation" && inputName == "qualityLevel"), 602 | 603 | attributeType == kCIAttributeTypeInteger || attributeType == kCIAttributeTypeCount { 604 | return " filter.\(inputName) = Float(\(inputName))" // We pass in Int, but function wants a Float 605 | } 606 | // fall through 607 | return " filter.\(inputName) = \(inputName)" 608 | } 609 | .joined(separator: "\n") 610 | 611 | print(otherInputSettingStatements) 612 | print(" return filter.outputImage ?? CIImage.empty()") 613 | print("}") 614 | } 615 | 616 | private func outputImageToImage(_ filter: CIFilter, abstractLookup: [String: String], docLookup: [String: String], functionMinima: [String: String]) { 617 | 618 | let filterName = filter.name 619 | 620 | let filtersWithoutSwiftAPI: Set = ["CICameraCalibrationLensCorrection", "CIGuidedFilter"] 621 | let filtersThatAlreadyHaveImageExtension: [String: String] = ["CIAffineTransform": "transformed(by: CGAffineTransform)", 622 | "CICrop": "cropped(to: CGRect)", 623 | "CIClamp": "clamped(to: CGRect)", 624 | "CISampleNearest": "samplingNearest()", 625 | // https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest 626 | "CIDepthBlurEffect": "depthBlurEffectFilter(for...)" 627 | // https://developer.apple.com/documentation/coreimage/cicontext#4375374 628 | ] 629 | 630 | let filtersThatAlreadyHaveImageExtensionDoc: [String: String] = ["CISampleNearest": "https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest", 631 | "CIDepthBlurEffect": "https://developer.apple.com/documentation/coreimage/cicontext#4375374"] 632 | 633 | if let existingFunction: String = filtersThatAlreadyHaveImageExtension[filterName] { 634 | print("") 635 | print("// ℹ️ \(filterName) already has a CIImage method: func \(existingFunction) -> CIImage") 636 | if let existingFunctionURL = filtersThatAlreadyHaveImageExtensionDoc[filterName] { 637 | print("// \(existingFunctionURL)") 638 | } 639 | print("") 640 | return 641 | } 642 | outputDocumentation(filter, isGenerator: false, abstractLookup: abstractLookup, docLookup: docLookup) 643 | outputOSVersion(filter, functionMinima: functionMinima) 644 | 645 | if filtersWithoutSwiftAPI.contains(filterName) { 646 | outputImageDictionaryFunction(filter, isGenerator: false) 647 | } else { 648 | outputImageFunction(filter, isGenerator: false) 649 | } 650 | } 651 | 652 | 653 | // convert long name like "Gray Component Replacement" to input name used in CoreImage.CIFilterBuiltins. And fix a bunch of inconsistencies. 
654 | private func parameterName(displayName: String, filterName: String) -> String {
655 |     let words: [String] = displayName.components(separatedBy: " ").map { $0.capitalized }
656 |     let removeSpaces: String = words.joined(separator: "")
657 |     var result: String = removeSpaces.prefix(1).lowercased() + removeSpaces.dropFirst()
658 |     if result == "texture" {
659 |         result = "textureImage"
660 |     } else if result == "b" {
661 |         result = "parameterB"
662 |     } else if result == "c" {
663 |         result = "parameterC"
664 |     } else if result == "means" {
665 |         result = "inputMeans"
666 |     } else if result == "redVector" {
667 |         result = "rVector"
668 |     } else if result == "greenVector" {
669 |         result = "gVector"
670 |     } else if result == "blueVector" {
671 |         result = "bVector"
672 |     } else if result == "alphaVector" {
673 |         result = "aVector"
674 |     } else if result == "maximumStriationRadius" {
675 |         result = "maxStriationRadius"
676 |     } else if result == "color1" {
677 |         result = "color0"
678 |     } else if result == "color2" {
679 |         result = "color1"
680 |     } else if result == "radius1" {
681 |         result = "radius0"
682 |     } else if result == "radius2" {
683 |         result = "radius1"
684 |     } else if result == "image2" && filterName == "CIColorAbsoluteDifference" { // only substitute for this function
685 |         result = "inputImage2"
686 |     } else if result.hasSuffix(".") {
687 |         result = String(result.dropLast(1)) // to deal with a data anomaly where "." is at the end of the parameter name
688 |     }
689 |     return result
690 | }
691 | 
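// Editor's illustration (not part of the generator): a rough sketch of the mappings
// parameterName(displayName:filterName:) is expected to produce, derived from the code above.
// The filter names passed here are placeholders; they only matter for the
// CIColorAbsoluteDifference special case.
private func parameterNameExamples() {
    assert(parameterName(displayName: "Gray Component Replacement", filterName: "CIPlaceholderFilter") == "grayComponentReplacement")
    assert(parameterName(displayName: "Texture", filterName: "CIPlaceholderFilter") == "textureImage")      // remapped
    assert(parameterName(displayName: "Radius 1", filterName: "CIPlaceholderFilter") == "radius0")          // 1-based attribute name to 0-based property name
    assert(parameterName(displayName: "Image 2", filterName: "CIColorAbsoluteDifference") == "inputImage2") // per-filter special case
}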
"CGPoint" 727 | : "CIVector" // CIVector tends to have no attribute type 728 | case "NSAffineTransform": 729 | convertedClass = "CGAffineTransform" 730 | case "NSData": 731 | convertedClass = "Data" 732 | case "NSString": 733 | convertedClass = "String" 734 | case "NSArray": 735 | convertedClass = "[Any]" 736 | case "CGImageMetadataRef": 737 | convertedClass = "CGImageMetadata" 738 | case "NSObject": 739 | if inputName == "colorSpace" { 740 | convertedClass = "CGColorSpace" 741 | } else { 742 | convertedClass = attributeClass // Unexpected case 743 | print("\n// 🛑 unknown attributeClass \(attributeClass) with \(inputName), \(attributeType ?? "")") 744 | } 745 | case "NSValue": 746 | if attributeType == kCIAttributeTypeTransform { 747 | convertedClass = "CGAffineTransform" 748 | } else { 749 | convertedClass = attributeClass // Unexpected case 750 | print("\n// 🛑 unknown attributeClass \(attributeClass) with \(inputName), \(attributeType ?? "")") 751 | } 752 | default: 753 | // Other cases where the class is the same: CIImage, CIColor, etc. 754 | convertedClass = attributeClass 755 | } 756 | if inputName == "backgroundImage" && convertedClass == "CIImage" { 757 | convertedClass = "CIImage?" // make optional, for our special identity handling 758 | } 759 | var defaultStatement: String = "" 760 | if let defaultValue: AnyObject = inputAttributes[kCIAttributeDefault] { 761 | 762 | if hasReasonableDefaultValue(defaultValue, attributeType: attributeType, inputName: inputName) { 763 | let defaultValueString = formatSmart(defaultValue, attributeType: attributeType, inputName: inputName, filterName: filterName) 764 | if !defaultValueString.isEmpty { 765 | defaultStatement = " = \(defaultValueString)" 766 | } 767 | } 768 | } 769 | return "\(inputName): \(convertedClass)\(defaultStatement)" 770 | } 771 | 772 | // Look at value and/or context. 773 | private func hasReasonableDefaultValue(_ value: Any, attributeType: String?, inputName: String) -> Bool { 774 | if nil != value as? Data { 775 | return false // Not feasible to have data anyhow 776 | } else if let number = value as? NSNumber { 777 | if attributeType == kCIAttributeTypeDistance { 778 | return number == 0 779 | } else if attributeType == kCIAttributeTypeInteger { 780 | return false 781 | } else if attributeType == kCIAttributeTypeCount { 782 | return false 783 | } else if attributeType == kCIAttributeTypeBoolean { 784 | return true 785 | } else if attributeType == kCIAttributeTypeAngle { 786 | return number.doubleValue <= Double.pi // avoid those weird angles that don't make any sense 787 | } else if attributeType == kCIAttributeTypeScalar { 788 | return true // not sure 789 | } 790 | } else if let defaultVector = value as? CIVector { 791 | 792 | if defaultVector.count > 4 { 793 | return false 794 | } 795 | if attributeType == kCIAttributeTypeRectangle { 796 | return defaultVector == CIVector(x: 0, y: 0, z: 0, w: 0) // only keep zero rectangle 797 | } else if attributeType == kCIAttributeTypePosition3 { 798 | return false 799 | } else if attributeType == kCIAttributeTypePosition { 800 | return defaultVector.x < 50 && defaultVector.y < 50 // seems like 50+ values are arbitrary coordinates 801 | } else if attributeType == kCIAttributeTypeOffset { 802 | return defaultVector.x != 0 && defaultVector.y != 0 // any non-zero points seem pretty arbitrary 803 | } 804 | } else if let color = value as? CIColor { 805 | return color == CIColor.black 806 | || color == CIColor.white 807 | || color == CIColor.clear 808 | } else if nil != value as? 
772 | // Look at value and/or context.
773 | private func hasReasonableDefaultValue(_ value: Any, attributeType: String?, inputName: String) -> Bool {
774 |     if nil != value as? Data {
775 |         return false // Not feasible to have data anyhow
776 |     } else if let number = value as? NSNumber {
777 |         if attributeType == kCIAttributeTypeDistance {
778 |             return number == 0
779 |         } else if attributeType == kCIAttributeTypeInteger {
780 |             return false
781 |         } else if attributeType == kCIAttributeTypeCount {
782 |             return false
783 |         } else if attributeType == kCIAttributeTypeBoolean {
784 |             return true
785 |         } else if attributeType == kCIAttributeTypeAngle {
786 |             return number.doubleValue <= Double.pi // avoid those weird angles that don't make any sense
787 |         } else if attributeType == kCIAttributeTypeScalar {
788 |             return true // not sure
789 |         }
790 |     } else if let defaultVector = value as? CIVector {
791 | 
792 |         if defaultVector.count > 4 {
793 |             return false
794 |         }
795 |         if attributeType == kCIAttributeTypeRectangle {
796 |             return defaultVector == CIVector(x: 0, y: 0, z: 0, w: 0) // only keep zero rectangle
797 |         } else if attributeType == kCIAttributeTypePosition3 {
798 |             return false
799 |         } else if attributeType == kCIAttributeTypePosition {
800 |             return defaultVector.x < 50 && defaultVector.y < 50 // seems like 50+ values are arbitrary coordinates
801 |         } else if attributeType == kCIAttributeTypeOffset {
802 |             return defaultVector.x != 0 && defaultVector.y != 0 // any non-zero points seem pretty arbitrary
803 |         }
804 |     } else if let color = value as? CIColor {
805 |         return color == CIColor.black
806 |             || color == CIColor.white
807 |             || color == CIColor.clear
808 |     } else if nil != value as? AffineTransform {
809 |         return true
810 |     } else if nil != value as? String {
811 |         return true
812 |     } else if inputName == "colorSpace" { // it's a CFType so not so easy to compare
813 |         return true
814 |     } else {
815 |         print("\n🛑 \(attributeType ?? "") \(inputName) -> \(value) \((value as? AnyObject)?.className)")
816 |         return true // not sure yet
817 |     }
818 |     return false
819 | }
820 | 
821 | 
822 | private func formatSmart(_ value: Any, attributeType: String?, inputName: String, filterName: String?) -> String {
823 |     var result: String = ""
824 |     if let number = value as? NSNumber {
825 |         if attributeType == kCIAttributeTypeBoolean || inputName == "extrapolate" { // Hack - missing info
826 |             result = number.boolValue.description
827 |         } else {
828 |             result = number.formatSmart
829 |         }
830 |     } else if let defaultVector = value as? CIVector {
831 | 
832 |         if attributeType == kCIAttributeTypeRectangle {
833 |             result = defaultVector.formatRectSmart
834 |         } else if attributeType == kCIAttributeTypePosition {
835 |             result = defaultVector.formatPointSmart
836 |         } else {
837 |             result = defaultVector.formatVectorSmart
838 |         }
839 |     } else if let color = value as? CIColor {
840 |         result = color.formatSmart
841 |     } else if let string = value as? String {
842 |         result = "\"" + string.replacingOccurrences(of: "\"", with: "\\\"") + "\""
843 |     } else if inputName == "colorSpace" {
844 |         if CFGetTypeID(value as AnyObject) == CGColorSpace.typeID {
845 |             let colorspace: CGColorSpace = value as! CGColorSpace
846 |             if let name: String = colorspace.name as? String {
847 |                 var newName = name.replacing(/^kCGColorSpace/, with: "")
848 |                 newName = newName.prefix(1).lowercased() + newName.dropFirst()
849 |                 result = "CGColorSpace(name: CGColorSpace." + newName + ")!"
850 |             }
851 |         }
852 |     } else if let transform = value as? AffineTransform {
853 |         let transformIdentity: AffineTransform
854 |         #if canImport(UIKit)
855 |         transformIdentity = CGAffineTransform.identity
856 |         #elseif canImport(AppKit)
857 |         transformIdentity = NSAffineTransform() as AffineTransform
858 |         #endif
859 | 
860 |         // Special case these filters to default to identity. Their default values are weird!
861 |         if transform == transformIdentity || filterName == "CIAffineClamp" || filterName == "CIAffineTile" {
862 |             result = "CGAffineTransform.identity"
863 |         } else {
864 |             #if canImport(UIKit)
865 |             let t: CGAffineTransform = transform
866 |             result = "CGAffineTransform(a: \(t.a.format5), b: \(t.b.format5), c: \(t.c.format5), d: \(t.d.format5), tx: \(t.tx.format5), ty: \(t.ty.format5))"
867 |             #elseif canImport(AppKit)
868 |             let t: NSAffineTransformStruct = transform.transformStruct
869 |             result = "CGAffineTransform(a: \(t.m11.format5), b: \(t.m12.format5), c: \(t.m21.format5), d: \(t.m22.format5), tx: \(t.tX.format5), ty: \(t.tY.format5))"
870 |             #endif
871 |         }
872 |     } else {
873 |         print("\n🛑 \(attributeType ?? "") \(inputName) -> \(value) \((value as? AnyObject)?.className)")
874 |         result = String(describing: value)
875 |     }
876 |     return result
877 | }
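// Editor's illustration (not part of the generator): a sketch of the default-value strings
// formatSmart(_:attributeType:inputName:filterName:) is expected to emit for a few inputs,
// derived from the branches above. The input names used here are placeholders.
private func formatSmartExamples() {
    assert(formatSmart(NSNumber(value: Double.pi), attributeType: kCIAttributeTypeAngle, inputName: "angle", filterName: nil) == ".pi")
    assert(formatSmart(CIVector(x: 0, y: 0), attributeType: kCIAttributeTypePosition, inputName: "center", filterName: nil) == ".zero")
    assert(formatSmart(CIColor.white, attributeType: kCIAttributeTypeColor, inputName: "color", filterName: nil) == "CIColor.white")
}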
AnyObject)?.className)") 874 | result = String(describing: value) 875 | } 876 | return result 877 | } 878 | 879 | // https://unicode-org.github.io/icu/userguide/strings/regexp.html 880 | 881 | private extension String { 882 | var prettyFunction: String { 883 | let result: String = self.replacing(/^CI/, with: "").replacing(/Filter$/, with: "") 884 | return result.fixingCamelCase 885 | } 886 | 887 | // AbcDef -> abcDef but ABcdef -> aBcdef, ABCDEF -> abcDef - keep the last 888 | var fixingCamelCase: String { 889 | if nil != self.firstMatch(of: /^[A-Z][^A-Z]/) 890 | || self.hasPrefix("SRGB") // special case 891 | { 892 | // Just one uppercase characters, so make it lowercase and append the rest 893 | return self.prefix(1).lowercased() + self.dropFirst() 894 | } else if let foundUppercaseMatch: Regex.RegexOutput>.Match = self.firstMatch(of: /^[A-Z]{2,}/) { 895 | // FIXME: Might need some tweaking to deal with complex characters. But since we are just modifying ASCII, this simple case is fine. 896 | // More than one, so make all but the last character lowercased, so that the last character there stays capitalized. 897 | let lowercasedPrefix = self[foundUppercaseMatch.range].lowercased() 898 | let remaining = self.dropFirst(lowercasedPrefix.count) 899 | if nil != remaining.firstMatch(of: /^[a-z]/) { // lowercase letter after uppercase, the usual. Keep last uppercase from prefix 900 | return String(lowercasedPrefix.dropLast()) + self.dropFirst(lowercasedPrefix.count - 1) 901 | } else { 902 | // Unusual; characters after uppercase is not a lowercase character, e.g. a number. Keep all the uppercase characters. 903 | return String(lowercasedPrefix) + self.dropFirst(lowercasedPrefix.count) 904 | } 905 | } 906 | return self 907 | 908 | } 909 | } 910 | 911 | // Format numbers with UP TO five decimal places 912 | 913 | private extension Float { 914 | var format5: String { 915 | let formatter = NumberFormatter() 916 | formatter.numberStyle = .decimal 917 | #if canImport(UIKit) 918 | formatter.numberStyle = .none 919 | #elseif canImport(AppKit) 920 | formatter.hasThousandSeparators = false 921 | #endif 922 | formatter.maximumFractionDigits = 5 923 | let number = NSNumber(value: self) 924 | return formatter.string(from: number) ?? "" 925 | } 926 | } 927 | private extension Double { 928 | var format5: String { 929 | let formatter = NumberFormatter() 930 | formatter.numberStyle = .decimal 931 | #if canImport(UIKit) 932 | formatter.numberStyle = .none 933 | #elseif canImport(AppKit) 934 | formatter.hasThousandSeparators = false 935 | #endif 936 | formatter.maximumFractionDigits = 5 937 | let number = NSNumber(value: self) 938 | return formatter.string(from: number) ?? "" 939 | } 940 | } 941 | private extension CGFloat { 942 | var format5: String { 943 | let formatter = NumberFormatter() 944 | formatter.numberStyle = .decimal 945 | #if canImport(UIKit) 946 | formatter.numberStyle = .none 947 | #elseif canImport(AppKit) 948 | formatter.hasThousandSeparators = false 949 | #endif 950 | formatter.maximumFractionDigits = 5 951 | let number = NSNumber(value: self) 952 | return formatter.string(from: number) ?? "" 953 | } 954 | } 955 | 956 | private extension NSNumber { 957 | 958 | var format5: String { 959 | let formatter = NumberFormatter() 960 | formatter.numberStyle = .decimal 961 | #if canImport(UIKit) 962 | formatter.numberStyle = .none 963 | #elseif canImport(AppKit) 964 | formatter.hasThousandSeparators = false 965 | #endif 966 | formatter.maximumFractionDigits = 5 967 | return formatter.string(from: self) ?? 
"" 968 | } 969 | 970 | var formatSmart: String { 971 | let result: String 972 | switch self.doubleValue { 973 | case Double.pi: 974 | result = ".pi" 975 | case Double.pi/2: 976 | result = ".pi/2" 977 | case Double.pi * 18: 978 | result = ".pi*18" // for vortexDistortion 979 | 980 | // What about triangleKaleidoscope 5.924285296593801 981 | default: 982 | result = self.format5 983 | } 984 | return result 985 | } 986 | } 987 | private extension CIVector { 988 | var formatPointSmart: String { 989 | if x == 0 && y == 0 { 990 | return ".zero" 991 | } else { 992 | return ".init(x: \(x.format5), y: \(y.format5))" 993 | } 994 | } 995 | 996 | // The CGRect structure’s X, Y, height and width values are stored in the vector’s X, Y, Z and W properties. 997 | var formatRectSmart: String { 998 | if x == 0 && y == 0 && z == 0 && w == 0 { 999 | return ".zero" 1000 | } else { 1001 | return ".init(x: \(x.format5), y: \(y.format5), width: \(w.format5), height: \(z.format5))" 1002 | } 1003 | } 1004 | var formatVectorSmart: String { 1005 | switch count { 1006 | case 0: 1007 | return ".init()" 1008 | case 1: 1009 | return ".init(x: \(x.format5))" 1010 | case 2: 1011 | return ".init(x: \(x.format5), y: \(y.format5))" 1012 | case 3: 1013 | return ".init(x: \(x.format5), y: \(y.format5), z: \(z.format5))" 1014 | case 4: 1015 | return ".init(x: \(x.format5), y: \(y.format5), z: \(z.format5), w: \(w.format5))" 1016 | default: 1017 | return "🛑 no vector initializer for count > 4" 1018 | } 1019 | } 1020 | } 1021 | private extension CIColor { 1022 | var formatSmart: String { 1023 | 1024 | switch self { 1025 | case CIColor.black: return "CIColor.black" // Include "CIColor." so it's compatible with older OS 1026 | case CIColor.white: return "CIColor.white" 1027 | case CIColor.gray: return "CIColor.gray" 1028 | case CIColor.red: return "CIColor.red" 1029 | case CIColor.green: return "CIColor.green" 1030 | case CIColor.blue: return "CIColor.blue" 1031 | case CIColor.cyan: return "CIColor.cyan" 1032 | case CIColor.magenta: return "CIColor.magenta" 1033 | case CIColor.yellow: return "CIColor.yellow" 1034 | case CIColor.clear: return "CIColor.clear" 1035 | default: 1036 | let colorSpaceName: String = colorSpace.name as? String ?? "" // e.g. kCGColorSpaceDeviceRGB 1037 | let colorSpaceNameSuffix: String = colorSpaceName.replacing(/^kCGColorSpace/, with: "") 1038 | let colorSpaceNameFormatted = "CGColorSpace." + colorSpaceNameSuffix.prefix(1).lowercased() + colorSpaceNameSuffix.dropFirst() 1039 | let colorSpaceSRGB: String = CGColorSpace.sRGB as String 1040 | 1041 | // Some issues with kCGColorSpaceDeviceRGB since we would have to create that. Let's just ignore. 
1042 | if alpha != 1.0 && colorSpaceName != colorSpaceSRGB 1043 | && colorSpaceName != "kCGColorSpaceDeviceRGB" { 1044 | return "CIColor(red: \(red), green: \(green), blue: \(blue), alpha: \(alpha), colorSpace: \(colorSpaceNameFormatted))" 1045 | } else if alpha == 1.0 && colorSpaceName != colorSpaceSRGB 1046 | && colorSpaceName != "kCGColorSpaceDeviceRGB" { 1047 | return "CIColor(red: \(red), green: \(green), blue: \(blue), colorSpace: \(colorSpaceNameFormatted))" 1048 | } else 1049 | if alpha != 1.0 { 1050 | return "CIColor(red: \(red), green: \(green), blue: \(blue), alpha: \(alpha))" 1051 | } else { 1052 | return "CIColor(red: \(red), green: \(green), blue: \(blue))" 1053 | } 1054 | } 1055 | } 1056 | 1057 | } 1058 | 1059 | private extension CIFilter { 1060 | var identityInputKeys: [String] { 1061 | inputKeys 1062 | .filter { $0 != kCIInputImageKey } 1063 | .map { inputKey in 1064 | (inputKey, (attributes[inputKey] as? [String: AnyObject] ?? [:])) } // tuple of the inputKey and its attributes 1065 | .compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in 1066 | guard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String, 1067 | let identityValue: Any = inputAttributes[kCIAttributeIdentity] 1068 | else { return nil } 1069 | 1070 | let attributeType: String? = inputAttributes[kCIAttributeType] as? String 1071 | let inputName: String = parameterName(displayName: displayName, filterName: self.name) 1072 | guard hasReasonableDefaultValue(identityValue, attributeType: attributeType, inputName: inputName) 1073 | else { return nil } 1074 | 1075 | return inputKey 1076 | } 1077 | } 1078 | } 1079 | 1080 | --------------------------------------------------------------------------------
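Editor's note: the extension methods this generator prints end up in Sources/CIImage-Extensions.swift. As a rough, hypothetical sketch of the shape of that output (the exact method names, parameters, and defaults are derived from the filter attributes at generation time and may differ), a generated modifier for CIGaussianBlur would look roughly like the following, mirroring the template in outputIdentityGuards and outputImageFunction above. The names gaussianBlurSketch and blurredSketch are placeholders, not the package's actual API.

import CoreImage
import CoreImage.CIFilterBuiltins

extension CIImage {
    // Hypothetical generated modifier for CIGaussianBlur (illustrative only).
    func gaussianBlurSketch(radius: Float) -> CIImage {
        guard radius != 0 else { return self }   // identity guard: skip the filter when it would be a no-op
        let filter = CIFilter.gaussianBlur()     // CIGaussianBlur
        filter.inputImage = self
        filter.radius = radius
        return filter.outputImage ?? CIImage.empty()
    }
}

// Usage: generated modifiers chain like SwiftUI view modifiers.
func blurredSketch(_ input: CIImage) -> CIImage {
    input.gaussianBlurSketch(radius: 5)
}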