├── README.md ├── VideoEditorSwiftUI.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ └── xcshareddata │ │ └── IDEWorkspaceChecks.plist └── xcuserdata │ └── bogdanzykov.xcuserdatad │ └── xcschemes │ └── xcschememanagement.plist ├── VideoEditorSwiftUI ├── Assets.xcassets │ ├── AccentColor.colorset │ │ └── Contents.json │ ├── AppIcon.appiconset │ │ └── Contents.json │ ├── Contents.json │ └── simpleImage.imageset │ │ ├── Contents.json │ │ └── images22233.jpeg ├── ContentView.swift ├── Models │ ├── AudioModel.swift │ ├── FilteredImage.swift │ ├── TextBox.swift │ ├── ToolModel.swift │ ├── Video.swift │ ├── VideoItem.swift │ └── VideoQuality.swift ├── Preview Content │ └── Preview Assets.xcassets │ │ └── Contents.json ├── Service │ ├── Camera │ │ └── CameraManager.swift │ ├── CoreData │ │ ├── AudioEntity+Ext.swift │ │ ├── CoreDataContainer.xcdatamodeld │ │ │ └── CoreDataContainer.xcdatamodel │ │ │ │ └── contents │ │ ├── CoreDataManager.swift │ │ ├── PersistenceController.swift │ │ └── ProjectEntity+Ext.swift │ ├── Player │ │ └── VideoPlayerManager.swift │ └── Recorder │ │ └── AudioRecorderManager.swift ├── Utils │ ├── Extensions │ │ ├── AVAssets+Ext.swift │ │ ├── AVAudioSession+Ext.swift │ │ ├── Color.swift │ │ ├── FileManager.swift │ │ ├── Preview.swift │ │ ├── TimeInterval+Ext.swift │ │ ├── UIImage+Ext.swift │ │ └── View+Ext.swift │ └── Helpers │ │ ├── Helpers.swift │ │ └── VideoEditor.swift ├── VideoEditorSwiftUIApp.swift ├── ViewModels │ ├── EditorViewModel.swift │ ├── ExporterViewModel.swift │ ├── FiltersViewModel.swift │ ├── RootViewModel.swift │ └── TextEditorViewModel.swift └── Views │ ├── Camera │ ├── CameraPreviewView.swift │ └── RecordVideoView.swift │ ├── EditorView │ ├── MainEditorView.swift │ ├── PlayerHolderView.swift │ ├── Swipe.swift │ ├── TimeLineView.swift │ └── VideoExporterBottomSheetView.swift │ ├── RootView │ └── RootView.swift │ ├── ToolsView │ ├── Audio │ │ └── AudioSheetView.swift │ ├── Corrections │ │ └── CorrectionsToolView.swift │ ├── Crop │ │ ├── CropSheetView.swift │ │ └── CropView.swift │ ├── CropperView │ │ ├── CropperRatio.swift │ │ └── TestCroppedView.swift │ ├── CutView │ │ └── ThumbnailsSliderView.swift │ ├── Filters │ │ └── FiltersView.swift │ ├── Frames │ │ └── FramesToolView.swift │ ├── Speed │ │ └── VideoSpeedSlider.swift │ ├── Text │ │ ├── TextEditorView.swift │ │ ├── TextOverlayView.swift │ │ └── TextToolsView.swift │ └── ToolsSectionView.swift │ └── ViewComponents │ ├── Buttons │ ├── AudioButtonView.swift │ ├── RecorderButtonView.swift │ └── ToolButtonView.swift │ ├── Player │ └── PlayerView.swift │ ├── SheetView.swift │ └── Sliders │ ├── CustomSlider.swift │ ├── NewTimelineSlider.swift │ ├── RangeSliderView.swift │ ├── SliderView.swift │ └── TimeLineSlider.swift └── screenshots ├── editor_screen.png ├── export_screen.png ├── fullscreen.png ├── mainScreen.png ├── tool_audio.png ├── tool_corrections.png ├── tool_crop.png ├── tool_cut.png ├── tool_filters.png ├── tool_frame.png ├── tool_speed.png └── tool_text.png /README.md: -------------------------------------------------------------------------------- 1 | # VideoEditorSwiftUI 2 | 3 | Video editing application with great functionality of tools and the ability to export video in different formats. 
4 | 5 | ## Features 6 | 7 | - **Creating a video project and saving its progress** 8 | - **Cropping videos** 9 | - **Changing the video duration** 10 | - **Adding filters and effects to videos** 11 | - **Adding text to a video** 12 | - **Recording and editing audio** 13 | - **Adding frames to videos** 14 | - **Saving or sharing videos in different sizes** 15 | 16 | ## Includes 17 | 18 | - SwiftUI 19 | - iOS 16+ 20 | - MVVM 21 | - Combine 22 | - Core Data 23 | - AVFoundation 24 | - AVKit 25 | 26 | ## 📹 Video 27 | [![Preview](http://img.youtube.com/vi/JJZzDcUuOcw/0.jpg)](https://www.youtube.com/watch?v=JJZzDcUuOcw) 28 | 29 | ## Screenshots 📷 30 | 31 | ### Projects and editor views 32 | 33 |
34 | _(screenshots of the projects and editor views — see the `screenshots` folder)_ 38 |
39 | 40 | 41 | ### Editor tools 42 | 43 |
44 | _(screenshots of the editor tools: audio, corrections, crop, cut, filters, frame, speed and text — see the `screenshots` folder)_ 52 |
53 | 54 | 55 | 56 | 57 | ### 58 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI.xcodeproj/xcuserdata/bogdanzykov.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | VideoEditorSwiftUI.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | } 6 | ], 7 | "info" : { 8 | "author" : "xcode", 9 | "version" : 1 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "platform" : "ios", 6 | "size" : "1024x1024" 7 | } 8 | ], 9 | "info" : { 10 | "author" : "xcode", 11 | "version" : 1 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Assets.xcassets/simpleImage.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "images22233.jpeg", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "author" : "xcode", 19 | "version" : 1 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Assets.xcassets/simpleImage.imageset/images22233.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/VideoEditorSwiftUI/Assets.xcassets/simpleImage.imageset/images22233.jpeg -------------------------------------------------------------------------------- /VideoEditorSwiftUI/ContentView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ContentView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 
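// Thin wrapper view that presents MainEditorView.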
6 | // 7 | 8 | import SwiftUI 9 | 10 | struct ContentView: View { 11 | var body: some View { 12 | MainEditorView() 13 | } 14 | } 15 | 16 | struct ContentView_Previews: PreviewProvider { 17 | static var previews: some View { 18 | ContentView() 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Models/AudioModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AudioModel.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 04.05.2023. 6 | // 7 | 8 | import SwiftUI 9 | import AVKit 10 | 11 | struct Audio: Identifiable, Equatable{ 12 | 13 | var id: UUID = UUID() 14 | var url: URL 15 | var duration: Double 16 | var volume: Float = 1.0 17 | 18 | var asset: AVAsset{ 19 | AVAsset(url: url) 20 | } 21 | 22 | func createSimples(_ size: CGFloat) -> [AudioSimple]{ 23 | let simplesCount = Int(size / 3) 24 | return (1...simplesCount).map({.init(id: $0)}) 25 | } 26 | 27 | mutating func setVolume(_ value: Float){ 28 | volume = value 29 | } 30 | 31 | struct AudioSimple: Identifiable{ 32 | var id: Int 33 | var size: CGFloat = CGFloat((5...25).randomElement() ?? 5) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Models/FilteredImage.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FilteredImage.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 26.04.2023. 6 | // 7 | 8 | import Foundation 9 | import SwiftUI 10 | import CoreImage 11 | 12 | struct FilteredImage: Identifiable{ 13 | var id: UUID = UUID() 14 | var image: UIImage 15 | var filter: CIFilter 16 | } 17 | 18 | 19 | enum CorrectionType: String, CaseIterable{ 20 | case brightness = "Brightness" 21 | case contrast = "Contrast" 22 | case saturation = "Saturation" 23 | 24 | var key: String{ 25 | switch self { 26 | case .brightness: return kCIInputBrightnessKey 27 | case .contrast: return kCIInputContrastKey 28 | case .saturation: return kCIInputSaturationKey 29 | } 30 | } 31 | } 32 | 33 | struct ColorCorrection{ 34 | var brightness: Double = 0 35 | var contrast: Double = 0 36 | var saturation: Double = 0 37 | } 38 | 39 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Models/TextBox.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TextBox.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 28.04.2023. 
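// A single draggable text overlay: content, font size, colors, on-screen offset and the time range in which it is visible.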
6 | // 7 | 8 | import Foundation 9 | import SwiftUI 10 | 11 | 12 | struct TextBox: Identifiable{ 13 | 14 | var id: UUID = UUID() 15 | var text: String = "" 16 | var fontSize: CGFloat = 20 17 | var lastFontSize: CGFloat = .zero 18 | var bgColor: Color = .white 19 | var fontColor: Color = .black 20 | var timeRange: ClosedRange = 0...3 21 | var offset: CGSize = .zero 22 | var lastOffset: CGSize = .zero 23 | 24 | 25 | } 26 | 27 | 28 | extension TextBox: Equatable{} 29 | 30 | 31 | extension TextBox{ 32 | static let texts: [TextBox] = 33 | 34 | [ 35 | 36 | .init(text: "Test1", fontSize: 38, bgColor: .red, fontColor: .white, timeRange: 0...2), 37 | .init(text: "Test2", fontSize: 38, bgColor: .secondary, fontColor: .white, timeRange: 2...6), 38 | .init(text: "Test3", fontSize: 38, bgColor: .black, fontColor: .red, timeRange: 3...6), 39 | .init(text: "Test4", fontSize: 38, bgColor: .black, fontColor: .blue, timeRange: 5...6), 40 | .init(text: "Test5", fontSize: 38, bgColor: .black, fontColor: .white, timeRange: 1...6), 41 | ] 42 | 43 | static let simple = TextBox(text: "Test", fontSize: 38, bgColor: .black, fontColor: .white, timeRange: 1...3) 44 | } 45 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Models/ToolModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ToolModel.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 6 | // 7 | 8 | import Foundation 9 | 10 | 11 | enum ToolEnum: Int, CaseIterable{ 12 | case cut, speed, crop, audio, text, filters, corrections, frames 13 | 14 | 15 | var title: String{ 16 | switch self { 17 | case .cut: return "Cut" 18 | case .speed: return "Speed" 19 | case .crop: return "Crop" 20 | case .audio: return "Audio" 21 | case .text: return "Text" 22 | case .filters: return "Filters" 23 | case .corrections: return "Corrections" 24 | case .frames: return "Frames" 25 | } 26 | } 27 | 28 | var image: String{ 29 | switch self { 30 | case .cut: return "scissors" 31 | case .speed: return "timer" 32 | case .crop: return "crop" 33 | case .audio: return "waveform" 34 | case .text: return "t.square.fill" 35 | case .filters: return "camera.filters" 36 | case .corrections: return "circle.righthalf.filled" 37 | case .frames: return "person.crop.artframe" 38 | } 39 | } 40 | 41 | var timeState: TimeLineViewState{ 42 | switch self{ 43 | case .audio: return .audio 44 | case .text: return .text 45 | default: return .empty 46 | } 47 | } 48 | } 49 | 50 | 51 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Models/Video.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Video.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 19.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | import AVKit 10 | 11 | struct Video: Identifiable{ 12 | 13 | var id: UUID = UUID() 14 | var url: URL 15 | var asset: AVAsset 16 | let originalDuration: Double 17 | var rangeDuration: ClosedRange 18 | var thumbnailsImages = [ThumbnailImage]() 19 | var rate: Float = 1.0 20 | var rotation: Double = 0 21 | var frameSize: CGSize = .zero 22 | var geometrySize: CGSize = .zero 23 | var isMirror: Bool = false 24 | var toolsApplied = [Int]() 25 | var filterName: String? = nil 26 | var colorCorrection = ColorCorrection() 27 | var videoFrames: VideoFrames? = nil 28 | var textBoxes: [TextBox] = [] 29 | var audio: Audio? 
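// Volume of the original video track; a recorded `audio` clip keeps its own volume.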
30 | var volume: Float = 1.0 31 | 32 | var totalDuration: Double{ 33 | rangeDuration.upperBound - rangeDuration.lowerBound 34 | } 35 | 36 | init(url: URL){ 37 | self.url = url 38 | self.asset = AVAsset(url: url) 39 | self.originalDuration = asset.videoDuration() 40 | self.rangeDuration = 0...originalDuration 41 | } 42 | 43 | init(url: URL, rangeDuration: ClosedRange, rate: Float = 1.0, rotation: Double = 0){ 44 | self.url = url 45 | self.asset = AVAsset(url: url) 46 | self.originalDuration = asset.videoDuration() 47 | self.rangeDuration = rangeDuration 48 | self.rate = rate 49 | self.rotation = rotation 50 | } 51 | 52 | mutating func updateThumbnails(_ geo: GeometryProxy){ 53 | let imagesCount = thumbnailCount(geo) 54 | 55 | var offset: Float64 = 0 56 | for i in 0.. Bool{ 106 | toolsApplied.contains(tool.rawValue) 107 | } 108 | 109 | 110 | private func thumbnailCount(_ geo: GeometryProxy) -> Int { 111 | 112 | let num = Double(geo.size.width - 32) / Double(70 / 1.5) 113 | 114 | return Int(ceil(num)) 115 | } 116 | 117 | 118 | static var mock: Video = .init(url:URL(string: "https://www.google.com/")!, rangeDuration: 0...250) 119 | } 120 | 121 | 122 | extension Video: Equatable{ 123 | 124 | static func == (lhs: Video, rhs: Video) -> Bool { 125 | lhs.id == rhs.id 126 | } 127 | } 128 | 129 | extension Double{ 130 | func nextAngle() -> Double { 131 | var next = Int(self) + 90 132 | if next >= 360 { 133 | next = 0 134 | } else if next < 0 { 135 | next = 360 - abs(next % 360) 136 | } 137 | return Double(next) 138 | } 139 | } 140 | 141 | 142 | 143 | struct ThumbnailImage: Identifiable{ 144 | var id: UUID = UUID() 145 | var image: UIImage? 146 | 147 | 148 | init(image: UIImage? = nil) { 149 | self.image = image?.resize(to: .init(width: 250, height: 350)) 150 | } 151 | } 152 | 153 | 154 | struct VideoFrames{ 155 | var scaleValue: Double = 0 156 | var frameColor: Color = .white 157 | 158 | var scale: Double{ 159 | 1 - scaleValue 160 | } 161 | 162 | var isActive: Bool{ 163 | scaleValue > 0 164 | } 165 | 166 | mutating func reset(){ 167 | scaleValue = 0 168 | frameColor = .white 169 | } 170 | } 171 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Models/VideoItem.swift: -------------------------------------------------------------------------------- 1 | // 2 | // VideoItem.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 6 | // 7 | 8 | import AVKit 9 | import SwiftUI 10 | 11 | 12 | struct VideoItem: Transferable { 13 | let url: URL 14 | 15 | static var transferRepresentation: some TransferRepresentation { 16 | FileRepresentation(contentType: .movie) { movie in 17 | SentTransferredFile(movie.url) 18 | } importing: { received in 19 | let id = UUID().uuidString 20 | let copy = URL.documentsDirectory.appending(path: "\(id).mp4") 21 | 22 | if FileManager.default.fileExists(atPath: copy.path()) { 23 | try FileManager.default.removeItem(at: copy) 24 | } 25 | 26 | try FileManager.default.copyItem(at: received.file, to: copy) 27 | return Self.init(url: copy) 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Models/VideoQuality.swift: -------------------------------------------------------------------------------- 1 | // 2 | // VideoQuality.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 24.04.2023. 
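// Export quality presets: resolution, frame rate and bitrate factor used when exporting.
// Rough size example using megaBytesPerSecond below: .medium → 5 * 1280 * 720 bits/s ≈ 0.55 MB/s, so a 60 s clip is roughly 33 MB by this estimate.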
6 | // 7 | 8 | import Foundation 9 | import AVKit 10 | 11 | enum VideoQuality: Int, CaseIterable{ 12 | 13 | case low, medium, high 14 | 15 | 16 | var exportPresetName: String { 17 | switch self { 18 | case .low: 19 | return AVAssetExportPresetMediumQuality 20 | case .high, .medium: 21 | return AVAssetExportPresetHighestQuality 22 | } 23 | } 24 | 25 | var title: String{ 26 | switch self { 27 | case .low: return "qHD - 480" 28 | case .medium: return "HD - 720p" 29 | case .high: return "Full HD - 1080p" 30 | } 31 | } 32 | 33 | var subtitle: String{ 34 | switch self { 35 | case .low: return "Fast loading and small size, low quality" 36 | case .medium: return "Optimal size to quality ratio" 37 | case .high: return "Ideal for publishing on social networks" 38 | } 39 | } 40 | 41 | var size: CGSize{ 42 | switch self { 43 | case .low: return .init(width: 854, height: 480) 44 | case .medium: return .init(width: 1280, height: 720) 45 | case .high: return .init(width: 1920, height: 1080) 46 | } 47 | } 48 | 49 | var frameRate: Double{ 50 | switch self { 51 | case .low, .medium: return 30 52 | case .high: return 60 53 | } 54 | } 55 | 56 | var bitrate: Double{ 57 | switch self { 58 | case .low: return 2.5 59 | case .medium: return 5 60 | case .high: return 8 61 | } 62 | } 63 | 64 | 65 | var megaBytesPerSecond: Double { 66 | let totalPixels = self.size.width * self.size.height 67 | let bitsPerSecond = bitrate * Double(totalPixels) 68 | let bytesPerSecond = bitsPerSecond / 8.0 // Convert to bytes 69 | 70 | return bytesPerSecond / (1024 * 1024) 71 | } 72 | 73 | 74 | func calculateVideoSize(duration: Double) -> Double? { 75 | duration * megaBytesPerSecond 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Preview Content/Preview Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Service/Camera/CameraManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CameraPreviewView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | import AVFoundation 10 | 11 | 12 | 13 | final class CameraManager: NSObject, ObservableObject{ 14 | 15 | enum Status{ 16 | case unconfigurate 17 | case configurate 18 | case unauthorized 19 | case faild 20 | } 21 | 22 | @Published var error: CameraError? 23 | @Published var session = AVCaptureSession() 24 | @Published var finalURL: URL? 25 | @Published var recordedDuration: Double = .zero 26 | @Published var cameraPosition: AVCaptureDevice.Position = .front 27 | 28 | let maxDuration: Double = 100 // sec 29 | private var timer: Timer? 
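// Serial queue on which the capture session is configured, started and stopped.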
30 | private let sessionQueue = DispatchQueue(label: "com.VideoEditorSwiftUI") 31 | private let videoOutput = AVCaptureMovieFileOutput() 32 | private var status: Status = .unconfigurate 33 | 34 | var isRecording: Bool{ 35 | videoOutput.isRecording 36 | } 37 | 38 | override init(){ 39 | super.init() 40 | config() 41 | } 42 | 43 | private func config(){ 44 | checkPermissions() 45 | sessionQueue.async { 46 | self.configCaptureSession() 47 | self.session.startRunning() 48 | } 49 | } 50 | 51 | func controllSession(start: Bool){ 52 | guard status == .configurate else { 53 | config() 54 | return 55 | } 56 | sessionQueue.async { 57 | if start{ 58 | if !self.session.isRunning{ 59 | self.session.startRunning() 60 | } 61 | }else{ 62 | self.session.stopRunning() 63 | } 64 | } 65 | } 66 | 67 | private func setError(_ error: CameraError?){ 68 | DispatchQueue.main.async { 69 | self.error = error 70 | } 71 | } 72 | 73 | ///Check user permissions 74 | private func checkPermissions(){ 75 | switch AVCaptureDevice.authorizationStatus(for: .video){ 76 | 77 | case .notDetermined: 78 | sessionQueue.suspend() 79 | AVCaptureDevice.requestAccess(for: .video) { aurhorized in 80 | if !aurhorized{ 81 | self.status = .unauthorized 82 | self.setError(.deniedAuthorization) 83 | } 84 | self.sessionQueue.resume() 85 | } 86 | case .restricted: 87 | status = .unauthorized 88 | setError(.restrictedAuthorization) 89 | case .denied: 90 | status = .unauthorized 91 | setError(.deniedAuthorization) 92 | 93 | case .authorized: break 94 | @unknown default: 95 | status = .unauthorized 96 | setError(.unknowAuthorization) 97 | } 98 | } 99 | 100 | ///Configuring a session and adding video, audio input and adding video output 101 | private func configCaptureSession(){ 102 | guard status == .unconfigurate else { 103 | return 104 | } 105 | session.beginConfiguration() 106 | 107 | session.sessionPreset = .hd1280x720 108 | 109 | let device = getCameraDevice(for: .back) 110 | let audioDevice = AVCaptureDevice.default(for: .audio) 111 | 112 | guard let camera = device, let audio = audioDevice else { 113 | setError(.cameraUnavalible) 114 | status = .faild 115 | return 116 | } 117 | 118 | do{ 119 | let cameraInput = try AVCaptureDeviceInput(device: camera) 120 | let audioInput = try AVCaptureDeviceInput(device: audio) 121 | 122 | if session.canAddInput(cameraInput) && session.canAddInput(audioInput){ 123 | session.addInput(audioInput) 124 | session.addInput(cameraInput) 125 | }else{ 126 | setError(.cannotAddInput) 127 | status = .faild 128 | return 129 | } 130 | }catch{ 131 | setError(.createCaptureInput(error)) 132 | status = .faild 133 | return 134 | } 135 | 136 | if session.canAddOutput(videoOutput){ 137 | session.addOutput(videoOutput) 138 | }else{ 139 | setError(.cannotAddInput) 140 | status = .faild 141 | return 142 | } 143 | 144 | session.commitConfiguration() 145 | } 146 | 147 | 148 | private func getCameraDevice(for position: AVCaptureDevice.Position) -> AVCaptureDevice? 
{ 149 | let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInTripleCamera, .builtInTelephotoCamera, .builtInDualCamera, .builtInTrueDepthCamera, .builtInDualWideCamera], mediaType: AVMediaType.video, position: .unspecified) 150 | for device in discoverySession.devices { 151 | if device.position == position { 152 | return device 153 | } 154 | } 155 | return nil 156 | } 157 | 158 | func stopRecord(){ 159 | print("stop") 160 | timer?.invalidate() 161 | videoOutput.stopRecording() 162 | } 163 | 164 | func startRecording(){ 165 | ///Temporary URL for recording Video 166 | let tempURL = NSTemporaryDirectory() + "\(Date().ISO8601Format()).mov" 167 | print(tempURL) 168 | videoOutput.startRecording(to: URL(fileURLWithPath: tempURL), recordingDelegate: self) 169 | startTimer() 170 | } 171 | 172 | // func set(_ delegate: AVCaptureVideoDataOutputSampleBufferDelegate, 173 | // queue: DispatchQueue){ 174 | // sessionQueue.async { 175 | // self.videoOutput.setSampleBufferDelegate(delegate, queue: queue) 176 | // } 177 | // } 178 | 179 | 180 | } 181 | 182 | 183 | 184 | extension CameraManager{ 185 | 186 | private func onTimerFires(){ 187 | 188 | if recordedDuration <= maxDuration && videoOutput.isRecording{ 189 | print("🟢 RECORDING") 190 | recordedDuration += 1 191 | } 192 | if recordedDuration >= maxDuration && videoOutput.isRecording{ 193 | stopRecord() 194 | } 195 | } 196 | 197 | private func startTimer(){ 198 | if timer == nil { 199 | timer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] (timer) in 200 | self?.onTimerFires() 201 | } 202 | } 203 | } 204 | } 205 | 206 | 207 | 208 | extension CameraManager: AVCaptureFileOutputRecordingDelegate{ 209 | func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) { 210 | print(outputFileURL) 211 | if let error{ 212 | self.error = .outputError(error) 213 | }else{ 214 | self.finalURL = outputFileURL 215 | } 216 | } 217 | 218 | 219 | } 220 | 221 | 222 | 223 | enum CameraError: Error{ 224 | case deniedAuthorization 225 | case restrictedAuthorization 226 | case unknowAuthorization 227 | case cameraUnavalible 228 | case cannotAddInput 229 | case createCaptureInput(Error) 230 | case outputError(Error) 231 | } 232 | 233 | 234 | extension Int { 235 | 236 | func secondsToTime() -> String { 237 | 238 | let (m,s) = ((self % 3600) / 60, (self % 3600) % 60) 239 | let m_string = m < 10 ? "0\(m)" : "\(m)" 240 | let s_string = s < 10 ? "0\(s)" : "\(s)" 241 | 242 | return "\(m_string):\(s_string)" 243 | } 244 | } 245 | 246 | extension Double{ 247 | 248 | func formatterTimeString() -> String{ 249 | let minutes = Int(self / 60) 250 | let seconds = Int(self.truncatingRemainder(dividingBy: 60)) 251 | let milliseconds = Int((self.truncatingRemainder(dividingBy: 1)) * 10) 252 | return "\(minutes):\(String(format: "%02d", seconds)).\(milliseconds)" 253 | } 254 | 255 | } 256 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Service/CoreData/AudioEntity+Ext.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AudioEntity+Ext.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 04.05.2023. 
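// Maps AudioEntity rows to the in-memory Audio model and creates new rows from a URL and duration.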
6 | // 7 | 8 | import Foundation 9 | import CoreData 10 | 11 | 12 | extension AudioEntity{ 13 | 14 | 15 | var audioModel: Audio?{ 16 | guard let urlStr = url, let url = URL(string: urlStr) else { return nil } 17 | return .init(url: url, duration: duration) 18 | } 19 | 20 | static func createAudio(context: NSManagedObjectContext, url: String, duration: Double) -> AudioEntity{ 21 | let entity = AudioEntity(context: context) 22 | entity.duration = duration 23 | entity.url = url 24 | return entity 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Service/CoreData/CoreDataContainer.xcdatamodeld/CoreDataContainer.xcdatamodel/contents: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Service/CoreData/CoreDataManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CoreDataManager.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 6 | // 7 | 8 | import Foundation 9 | import CoreData 10 | 11 | struct CoreDataManager { 12 | 13 | let mainContext: NSManagedObjectContext 14 | 15 | init(mainContext: NSManagedObjectContext) { 16 | self.mainContext = mainContext 17 | } 18 | 19 | 20 | func fetchProjects() -> [ProjectEntity] { 21 | let fetchRequest = ProjectEntity.request() 22 | 23 | do { 24 | let projects = try mainContext.fetch(fetchRequest) 25 | return projects 26 | } catch let error { 27 | print("Failed to fetch FoodEntity: \(error)") 28 | } 29 | return [] 30 | } 31 | 32 | } 33 | 34 | //MARK: - Account 35 | extension CoreDataManager{ 36 | 37 | 38 | // func updateAccount(account: AccountEntity){ 39 | // AccountEntity.updateAccount(for: account) 40 | // } 41 | // 42 | // func createAccount(title: String, currencyCode: String, color: String, balance: Double, members: Set) -> AccountEntity{ 43 | // AccountEntity.create(title: title, currencyCode: currencyCode, balance: balance, color: color, members: members, context: mainContext) 44 | // } 45 | 46 | } 47 | 48 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Service/CoreData/PersistenceController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // PersistenceController.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 6 | // 7 | 8 | import Foundation 9 | import CoreData 10 | 11 | struct PersistenceController { 12 | static let shared = PersistenceController() 13 | 14 | // Convenience 15 | var viewContext: NSManagedObjectContext { 16 | container.viewContext 17 | } 18 | 19 | let container: NSPersistentContainer 20 | 21 | 22 | init(inMemory: Bool = false) { 23 | 24 | container = NSPersistentContainer(name: "CoreDataContainer") 25 | 26 | if inMemory { 27 | container.persistentStoreDescriptions.first!.url = URL(fileURLWithPath: "/dev/null") 28 | } 29 | 30 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 31 | if let error = error as NSError? 
{ 32 | fatalError("Unresolved error \(error), \(error.userInfo)") 33 | } 34 | }) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Service/CoreData/ProjectEntity+Ext.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ProjectEntity+Ext.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 6 | // 7 | 8 | import Foundation 9 | import CoreData 10 | import SwiftUI 11 | 12 | 13 | extension ProjectEntity{ 14 | 15 | 16 | 17 | var videoURL: URL?{ 18 | guard let url else {return nil} 19 | return FileManager().createVideoPath(with: url) 20 | } 21 | 22 | 23 | var wrappedTextBoxes: [TextBox]{ 24 | wrappedBoxes.compactMap { entity -> TextBox? in 25 | if let text = entity.text, let bgColor = entity.bgColor, 26 | let fontColor = entity.fontColor{ 27 | return .init(text: text, fontSize: entity.fontSize, bgColor: Color(hex: bgColor), fontColor: Color(hex: fontColor), timeRange: (entity.lowerTime...entity.upperTime), offset: .init(width: entity.offsetX, height: entity.offsetY)) 28 | } 29 | return nil 30 | } 31 | } 32 | 33 | 34 | private var wrappedBoxes: Set { 35 | get { (textBoxes as? Set) ?? [] } 36 | set { textBoxes = newValue as NSSet } 37 | } 38 | 39 | var wrappedTools: [Int]{ 40 | appliedTools?.components(separatedBy: ",").compactMap({Int($0)}) ?? [] 41 | } 42 | 43 | var wrappedColor: Color{ 44 | guard let frameColor else { return .blue } 45 | return Color(hex: frameColor) 46 | } 47 | 48 | var uiImage: UIImage{ 49 | if let id, let uImage = FileManager().retrieveImage(with: id){ 50 | return uImage 51 | }else{ 52 | return UIImage(systemName: "exclamationmark.circle")! 53 | } 54 | } 55 | 56 | 57 | static func request() -> NSFetchRequest { 58 | let request = NSFetchRequest(entityName: "ProjectEntity") 59 | request.sortDescriptors = [NSSortDescriptor(key: "createAt", ascending: true)] 60 | return request 61 | } 62 | 63 | 64 | static func createTextBoxes(context: NSManagedObjectContext, boxes: [TextBox]) -> [TextBoxEntity]{ 65 | 66 | boxes.map { box -> TextBoxEntity in 67 | let entity = TextBoxEntity(context: context) 68 | let offset = box.offset 69 | entity.text = box.text 70 | entity.bgColor = box.bgColor.toHex() 71 | entity.fontColor = box.fontColor.toHex() 72 | entity.fontSize = box.fontSize 73 | entity.lowerTime = box.timeRange.lowerBound 74 | entity.upperTime = box.timeRange.upperBound 75 | entity.offsetX = offset.width 76 | entity.offsetY = offset.height 77 | 78 | return entity 79 | } 80 | 81 | } 82 | 83 | 84 | static func create(video: Video, context: NSManagedObjectContext){ 85 | let project = ProjectEntity(context: context) 86 | let id = UUID().uuidString 87 | if let image = video.thumbnailsImages.first?.image{ 88 | FileManager.default.saveImage(with: id, image: image) 89 | } 90 | project.id = id 91 | project.createAt = Date.now 92 | project.url = video.url.lastPathComponent 93 | project.rotation = video.rotation 94 | project.rate = Double(video.rate) 95 | project.isMirror = video.isMirror 96 | project.filterName = video.filterName 97 | project.lowerBound = video.rangeDuration.lowerBound 98 | project.upperBound = video.rangeDuration.upperBound 99 | project.textBoxes = [] 100 | 101 | context.saveContext() 102 | } 103 | 104 | 105 | static func update(for video: Video, project: ProjectEntity){ 106 | if let context = project.managedObjectContext { 107 | project.isMirror = video.isMirror 108 | project.lowerBound = video.rangeDuration.lowerBound 109 | 
project.upperBound = video.rangeDuration.upperBound 110 | project.filterName = video.filterName 111 | project.saturation = video.colorCorrection.saturation 112 | project.contrast = video.colorCorrection.contrast 113 | project.brightness = video.colorCorrection.brightness 114 | project.appliedTools = video.toolsApplied.map({String($0)}).joined(separator: ",") 115 | project.rotation = video.rotation 116 | project.rate = Double(video.rate) 117 | project.frameColor = video.videoFrames?.frameColor.toHex() 118 | project.frameScale = video.videoFrames?.scaleValue ?? 0 119 | let boxes = createTextBoxes(context: context, boxes: video.textBoxes) 120 | project.wrappedBoxes = Set(boxes) 121 | 122 | if let audio = video.audio{ 123 | project.audio = AudioEntity.createAudio(context: context, 124 | url: audio.url.absoluteString, 125 | duration: audio.duration) 126 | }else{ 127 | project.audio = nil 128 | } 129 | 130 | context.saveContext() 131 | } 132 | } 133 | 134 | static func remove(_ item: ProjectEntity){ 135 | if let context = item.managedObjectContext, let id = item.id, let url = item.url{ 136 | let manager = FileManager.default 137 | manager.deleteImage(with: id) 138 | manager.deleteVideo(with: url) 139 | context.delete(item) 140 | context.saveContext() 141 | } 142 | } 143 | 144 | } 145 | 146 | 147 | extension NSManagedObjectContext { 148 | 149 | func saveContext (){ 150 | if self.hasChanges { 151 | do{ 152 | try self.save() 153 | } catch { 154 | let nsError = error as NSError 155 | fatalError("Unresolved error \(nsError), \(nsError.userInfo)") 156 | } 157 | } 158 | } 159 | } 160 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Service/Player/VideoPlayerManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // VideoPlayerManager.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 6 | // 7 | 8 | import Foundation 9 | import Combine 10 | import AVKit 11 | import PhotosUI 12 | import SwiftUI 13 | 14 | 15 | final class VideoPlayerManager: ObservableObject{ 16 | 17 | @Published var currentTime: Double = .zero 18 | @Published var selectedItem: PhotosPickerItem? 19 | @Published var loadState: LoadState = .unknown 20 | @Published private(set) var videoPlayer = AVPlayer() 21 | @Published private(set) var audioPlayer = AVPlayer() 22 | @Published private(set) var isPlaying: Bool = false 23 | private var isSetAudio: Bool = false 24 | private var cancellable = Set() 25 | private var timeObserver: Any? 26 | private var currentDurationRange: ClosedRange? 
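// currentDurationRange (above) holds the video's trimmed range; play() and the periodic time observer clamp playback to its bounds.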
27 | 28 | 29 | deinit { 30 | removeTimeObserver() 31 | } 32 | 33 | init(){ 34 | onSubsUrl() 35 | } 36 | 37 | 38 | var scrubState: PlayerScrubState = .reset { 39 | didSet { 40 | switch scrubState { 41 | case .scrubEnded(let seekTime): 42 | pause() 43 | seek(seekTime, player: videoPlayer) 44 | if isSetAudio{ 45 | seek(seekTime, player: audioPlayer) 46 | } 47 | default : break 48 | } 49 | } 50 | } 51 | 52 | func action(_ video: Video){ 53 | self.currentDurationRange = video.rangeDuration 54 | if isPlaying{ 55 | pause() 56 | }else{ 57 | play(video.rate) 58 | } 59 | } 60 | 61 | func setAudio(_ url: URL?){ 62 | guard let url else { 63 | isSetAudio = false 64 | return 65 | } 66 | audioPlayer = .init(url: url) 67 | isSetAudio = true 68 | } 69 | 70 | private func onSubsUrl(){ 71 | $loadState 72 | .dropFirst() 73 | .receive(on: DispatchQueue.main) 74 | 75 | .sink {[weak self] returnLoadState in 76 | guard let self = self else {return} 77 | 78 | switch returnLoadState { 79 | case .loaded(let url): 80 | self.pause() 81 | self.videoPlayer = AVPlayer(url: url) 82 | self.startStatusSubscriptions() 83 | print("AVPlayer set url:", url.absoluteString) 84 | case .failed, .loading, .unknown: 85 | break 86 | } 87 | } 88 | .store(in: &cancellable) 89 | } 90 | 91 | 92 | private func startStatusSubscriptions(){ 93 | videoPlayer.publisher(for: \.timeControlStatus) 94 | .sink { [weak self] status in 95 | guard let self = self else {return} 96 | switch status { 97 | case .playing: 98 | self.isPlaying = true 99 | self.startTimer() 100 | case .paused: 101 | self.isPlaying = false 102 | case .waitingToPlayAtSpecifiedRate: 103 | break 104 | @unknown default: 105 | break 106 | } 107 | } 108 | .store(in: &cancellable) 109 | } 110 | 111 | 112 | func pause(){ 113 | if isPlaying{ 114 | videoPlayer.pause() 115 | if isSetAudio{ 116 | audioPlayer.pause() 117 | } 118 | } 119 | } 120 | 121 | func setVolume(_ isVideo: Bool, value: Float){ 122 | pause() 123 | if isVideo{ 124 | videoPlayer.volume = value 125 | }else{ 126 | audioPlayer.volume = value 127 | } 128 | } 129 | 130 | private func play(_ rate: Float?){ 131 | 132 | AVAudioSession.sharedInstance().configurePlaybackSession() 133 | 134 | if let currentDurationRange{ 135 | if currentTime >= currentDurationRange.upperBound{ 136 | seek(currentDurationRange.lowerBound, player: videoPlayer) 137 | if isSetAudio{ 138 | seek(currentDurationRange.lowerBound, player: audioPlayer) 139 | } 140 | }else{ 141 | seek(videoPlayer.currentTime().seconds, player: videoPlayer) 142 | if isSetAudio{ 143 | seek(audioPlayer.currentTime().seconds, player: audioPlayer) 144 | } 145 | } 146 | } 147 | videoPlayer.play() 148 | if isSetAudio{ 149 | audioPlayer.play() 150 | } 151 | 152 | if let rate{ 153 | videoPlayer.rate = rate 154 | if isSetAudio{ 155 | audioPlayer.play() 156 | } 157 | } 158 | 159 | if let currentDurationRange, videoPlayer.currentItem?.duration.seconds ?? 
0 >= currentDurationRange.upperBound{ 160 | NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: videoPlayer.currentItem, queue: .main) { _ in 161 | self.playerDidFinishPlaying() 162 | } 163 | } 164 | } 165 | 166 | private func seek(_ seconds: Double, player: AVPlayer){ 167 | player.seek(to: CMTime(seconds: seconds, preferredTimescale: 600)) 168 | } 169 | 170 | private func startTimer() { 171 | 172 | let interval = CMTimeMake(value: 1, timescale: 10) 173 | timeObserver = videoPlayer.addPeriodicTimeObserver(forInterval: interval, queue: .main) { [weak self] time in 174 | guard let self = self else { return } 175 | if self.isPlaying{ 176 | let time = time.seconds 177 | 178 | if let currentDurationRange = self.currentDurationRange, time >= currentDurationRange.upperBound{ 179 | self.pause() 180 | } 181 | 182 | switch self.scrubState { 183 | case .reset: 184 | self.currentTime = time 185 | case .scrubEnded: 186 | self.scrubState = .reset 187 | case .scrubStarted: 188 | break 189 | } 190 | } 191 | } 192 | } 193 | 194 | 195 | private func playerDidFinishPlaying() { 196 | self.videoPlayer.seek(to: .zero) 197 | } 198 | 199 | private func removeTimeObserver(){ 200 | if let timeObserver = timeObserver { 201 | videoPlayer.removeTimeObserver(timeObserver) 202 | } 203 | } 204 | 205 | } 206 | 207 | extension VideoPlayerManager{ 208 | 209 | @MainActor 210 | func loadVideoItem(_ selectedItem: PhotosPickerItem?) async{ 211 | do { 212 | loadState = .loading 213 | 214 | if let video = try await selectedItem?.loadTransferable(type: VideoItem.self) { 215 | loadState = .loaded(video.url) 216 | } else { 217 | loadState = .failed 218 | } 219 | } catch { 220 | loadState = .failed 221 | } 222 | } 223 | } 224 | 225 | 226 | extension VideoPlayerManager{ 227 | 228 | 229 | func setFilters(mainFilter: CIFilter?, colorCorrection: ColorCorrection?){ 230 | 231 | let filters = Helpers.createFilters(mainFilter: mainFilter, colorCorrection) 232 | 233 | if filters.isEmpty{ 234 | return 235 | } 236 | self.pause() 237 | DispatchQueue.global(qos: .userInteractive).async { 238 | let composition = self.videoPlayer.currentItem?.asset.setFilters(filters) 239 | self.videoPlayer.currentItem?.videoComposition = composition 240 | } 241 | } 242 | 243 | func removeFilter(){ 244 | pause() 245 | videoPlayer.currentItem?.videoComposition = nil 246 | } 247 | } 248 | 249 | enum LoadState: Identifiable, Equatable { 250 | case unknown, loading, loaded(URL), failed 251 | 252 | var id: Int{ 253 | switch self { 254 | case .unknown: return 0 255 | case .loading: return 1 256 | case .loaded: return 2 257 | case .failed: return 3 258 | } 259 | } 260 | } 261 | 262 | 263 | enum PlayerScrubState{ 264 | case reset 265 | case scrubStarted 266 | case scrubEnded(Double) 267 | } 268 | 269 | 270 | extension AVAsset{ 271 | 272 | func setFilter(_ filter: CIFilter) -> AVVideoComposition{ 273 | let composition = AVVideoComposition(asset: self, applyingCIFiltersWithHandler: { request in 274 | filter.setValue(request.sourceImage, forKey: kCIInputImageKey) 275 | 276 | guard let output = filter.outputImage else {return} 277 | 278 | request.finish(with: output, context: nil) 279 | }) 280 | 281 | return composition 282 | } 283 | 284 | func setFilters(_ filters: [CIFilter]) -> AVVideoComposition{ 285 | let composition = AVVideoComposition(asset: self, applyingCIFiltersWithHandler: { request in 286 | 287 | let source = request.sourceImage 288 | var output = source 289 | 290 | filters.forEach { filter in 291 | 
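// Chain the filters: each filter consumes the previous one's output image.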
filter.setValue(output, forKey: kCIInputImageKey) 292 | if let image = filter.outputImage{ 293 | output = image 294 | } 295 | } 296 | 297 | request.finish(with: output, context: nil) 298 | }) 299 | 300 | return composition 301 | } 302 | 303 | } 304 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Service/Recorder/AudioRecorderManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AudioRecorderManager.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 03.05.2023. 6 | // 7 | 8 | import Foundation 9 | import Combine 10 | import AVFoundation 11 | 12 | 13 | final class AudioRecorderManager: ObservableObject { 14 | 15 | private var audioRecorder: AVAudioRecorder! 16 | 17 | @Published private(set) var recordState: AudioRecordEnum = .empty 18 | @Published private(set) var finishedAudio: Audio? 19 | @Published private(set) var timerCount: Timer? 20 | @Published private(set) var currentRecordTime: TimeInterval = 0 21 | 22 | 23 | func startRecording(recordMaxTime: Double = 10){ 24 | print("DEBUG:", "startRecording") 25 | AVAudioSession.sharedInstance().configureRecordAudioSessionCategory() 26 | 27 | let path = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask)[0] 28 | let audioURL = path.appendingPathComponent("video-record.m4a") 29 | FileManager.default.removefileExists(for: audioURL) 30 | 31 | let settings = [ 32 | AVFormatIDKey: Int(kAudioFormatMPEG4AAC), 33 | AVSampleRateKey: 12000, 34 | AVNumberOfChannelsKey: 1, 35 | AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue 36 | ] 37 | do { 38 | audioRecorder = try AVAudioRecorder(url: audioURL, settings: settings) 39 | audioRecorder.prepareToRecord() 40 | audioRecorder.record() 41 | recordState = .recording 42 | timerCount = Timer.scheduledTimer(withTimeInterval: 0.2, repeats: true) {[weak self] (value) in 43 | guard let self = self else {return} 44 | self.currentRecordTime += 0.2 45 | if self.currentRecordTime >= recordMaxTime{ 46 | self.stopRecording() 47 | } 48 | } 49 | } catch { 50 | recordState = .error 51 | print("Failed to Setup the Recording") 52 | } 53 | } 54 | 55 | 56 | func stopRecording(){ 57 | print("DEBUG:", "stopRecording") 58 | audioRecorder.stop() 59 | recordState = .empty 60 | finishedAudio = .init(url: audioRecorder.url, duration: currentRecordTime) 61 | resetTimer() 62 | } 63 | 64 | func cancel(){ 65 | print("DEBUG:", "cancel") 66 | audioRecorder.stop() 67 | recordState = .empty 68 | resetTimer() 69 | removeRecordedAudio() 70 | } 71 | 72 | 73 | private func resetTimer(){ 74 | timerCount!.invalidate() 75 | self.currentRecordTime = 0 76 | } 77 | 78 | private func removeRecordedAudio(){ 79 | FileManager.default.removefileExists(for: audioRecorder.url) 80 | } 81 | 82 | enum AudioRecordEnum: Int{ 83 | case recording, empty, error 84 | } 85 | } 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Extensions/AVAssets+Ext.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AVAssets+Ext.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 16.04.2023. 6 | // 7 | 8 | import Foundation 9 | import AVKit 10 | import SwiftUI 11 | 12 | extension AVAsset { 13 | 14 | struct TrimError: Error { 15 | let description: String 16 | let underlyingError: Error? 17 | 18 | init(_ description: String, underlyingError: Error? 
= nil) { 19 | self.description = "TrimVideo: " + description 20 | self.underlyingError = underlyingError 21 | } 22 | } 23 | 24 | func getImage(_ second: Int, compressionQuality: Double = 0.05) -> UIImage?{ 25 | let imgGenerator = AVAssetImageGenerator(asset: self) 26 | guard let cgImage = try? imgGenerator.copyCGImage(at: .init(seconds: Double(second), preferredTimescale: 1), actualTime: nil) else { return nil} 27 | let uiImage = UIImage(cgImage: cgImage) 28 | guard let imageData = uiImage.jpegData(compressionQuality: compressionQuality), let compressedUIImage = UIImage(data: imageData) else { return nil } 29 | return compressedUIImage 30 | } 31 | 32 | 33 | func videoDuration() -> Double{ 34 | 35 | self.duration.seconds 36 | 37 | } 38 | 39 | // guard let duration = try? await self.load(.duration) else { return nil } 40 | // 41 | // return duration.seconds 42 | 43 | func naturalSize() async -> CGSize? { 44 | guard let tracks = try? await loadTracks(withMediaType: .video) else { return nil } 45 | guard let track = tracks.first else { return nil } 46 | guard let size = try? await track.load(.naturalSize) else { return nil } 47 | return size 48 | } 49 | 50 | 51 | func adjustVideoSize(to viewSize: CGSize) async -> CGSize? { 52 | 53 | 54 | guard let assetSize = await self.naturalSize() else { return nil } 55 | 56 | let videoRatio = assetSize.width / assetSize.height 57 | let isPortrait = assetSize.height > assetSize.width 58 | var videoSize = viewSize 59 | if isPortrait { 60 | videoSize = CGSize(width: videoSize.height * videoRatio, height: videoSize.height) 61 | } else { 62 | videoSize = CGSize(width: videoSize.width, height: videoSize.width / videoRatio) 63 | } 64 | return videoSize 65 | } 66 | 67 | } 68 | 69 | 70 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Extensions/AVAudioSession+Ext.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AVAudioSession+Ext.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 04.05.2023. 
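// Audio-session helpers. Usage sketch: call AVAudioSession.sharedInstance().configurePlaybackSession() before starting playback (see VideoPlayerManager.play).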
6 | // 7 | 8 | import AVFoundation 9 | 10 | extension AVAudioSession{ 11 | 12 | 13 | func playAndRecord(){ 14 | print("Configuring playAndRecord session") 15 | do { 16 | try self.setCategory(.playAndRecord, mode: .default) 17 | try self.overrideOutputAudioPort(AVAudioSession.PortOverride.none) 18 | print("AVAudio Session out options: ", self.currentRoute) 19 | print("Successfully configured audio session.") 20 | } catch (let error) { 21 | print("Error while configuring audio session: \(error)") 22 | } 23 | } 24 | 25 | func configureRecordAudioSessionCategory() { 26 | print("Configuring record session") 27 | do { 28 | try self.setCategory(.record, mode: .default) 29 | try self.overrideOutputAudioPort(AVAudioSession.PortOverride.none) 30 | print("AVAudio Session out options: ", self.currentRoute) 31 | print("Successfully configured audio session.") 32 | } catch (let error) { 33 | print("Error while configuring audio session: \(error)") 34 | } 35 | } 36 | 37 | func configurePlaybackSession(){ 38 | print("Configuring playback session") 39 | do { 40 | try self.setCategory(.playback, mode: .default) 41 | try self.overrideOutputAudioPort(.none) 42 | try self.setActive(true) 43 | print("Current audio route: ", self.currentRoute.outputs) 44 | } catch let error as NSError { 45 | print("#configureAudioSessionToSpeaker Error \(error.localizedDescription)") 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Extensions/Color.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Color.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 27.04.2023. 6 | // 7 | 8 | import Foundation 9 | import SwiftUI 10 | 11 | extension Color { 12 | 13 | 14 | init(hex: String) { 15 | let hex = hex.trimmingCharacters(in: CharacterSet.alphanumerics.inverted) 16 | var int: UInt64 = 0 17 | Scanner(string: hex).scanHexInt64(&int) 18 | let a, r, g, b: UInt64 19 | switch hex.count { 20 | case 3: // RGB (12-bit) 21 | (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17) 22 | case 6: // RGB (24-bit) 23 | (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF) 24 | case 8: // ARGB (32-bit) 25 | (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF) 26 | default: 27 | (a, r, g, b) = (1, 1, 1, 0) 28 | } 29 | 30 | self.init( 31 | .sRGB, 32 | red: Double(r) / 255, 33 | green: Double(g) / 255, 34 | blue: Double(b) / 255, 35 | opacity: Double(a) / 255 36 | ) 37 | } 38 | 39 | func toHex() -> String? { 40 | let uic = UIColor(self) 41 | guard let components = uic.cgColor.components, components.count >= 3 else { 42 | return nil 43 | } 44 | let r = Float(components[0]) 45 | let g = Float(components[1]) 46 | let b = Float(components[2]) 47 | var a = Float(1.0) 48 | 49 | if components.count >= 4 { 50 | a = Float(components[3]) 51 | } 52 | 53 | if a != Float(1.0) { 54 | return String(format: "%02lX%02lX%02lX%02lX", lroundf(r * 255), lroundf(g * 255), lroundf(b * 255), lroundf(a * 255)) 55 | } else { 56 | return String(format: "%02lX%02lX%02lX", lroundf(r * 255), lroundf(g * 255), lroundf(b * 255)) 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Extensions/FileManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FileManager.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 
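// Helpers for saving, loading and deleting project thumbnails ("<id>.jpg") and copied videos in the Documents directory.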
6 | // 7 | 8 | import Foundation 9 | import UIKit 10 | 11 | extension FileManager{ 12 | 13 | 14 | func createImagePath(with id: String) -> URL?{ 15 | guard let url = self.urls(for: .documentDirectory, in: .userDomainMask).first?.appendingPathComponent("\(id).jpg") else { return nil} 16 | return url 17 | } 18 | 19 | func createVideoPath(with name: String) -> URL?{ 20 | guard let url = self.urls(for: .documentDirectory, in: .userDomainMask).first?.appendingPathComponent(name) else { return nil} 21 | return url 22 | } 23 | 24 | func retrieveImage(with id: String) -> UIImage?{ 25 | guard let url = createImagePath(with: id) else { return nil } 26 | do{ 27 | let imageData = try Data(contentsOf: url) 28 | return UIImage(data: imageData) 29 | }catch{ 30 | print("Error retrieve image", error.localizedDescription) 31 | return nil 32 | } 33 | } 34 | 35 | func saveImage(with id: String, image: UIImage){ 36 | guard let url = createImagePath(with: id) else { return } 37 | if let data = image.jpegData(compressionQuality: 0.9){ 38 | do{ 39 | try data.write(to: url) 40 | print("success saved \(url)") 41 | }catch{ 42 | print("Error to save image", error.localizedDescription) 43 | } 44 | } 45 | } 46 | 47 | func deleteImage(with id: String){ 48 | guard let url = createImagePath(with: id) else { return } 49 | removefileExists(for: url) 50 | } 51 | 52 | func deleteVideo(with name: String){ 53 | guard let url = createVideoPath(with: name) else { return } 54 | if fileExists(atPath: url.path){ 55 | do{ 56 | try removeItem(at: url) 57 | }catch{ 58 | print("Error to remove item", error.localizedDescription) 59 | } 60 | } 61 | } 62 | 63 | func removefileExists(for url: URL){ 64 | if fileExists(atPath: url.path){ 65 | do{ 66 | try removeItem(at: url) 67 | }catch{ 68 | print("Error to remove item", error.localizedDescription) 69 | } 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Extensions/Preview.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Preview.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 
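// Preview helpers: an in-memory Core Data stack (DeveloperPreview) with a sample ProjectEntity for SwiftUI previews.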
6 | // 7 | 8 | import CoreData 9 | import SwiftUI 10 | 11 | extension PreviewProvider { 12 | 13 | 14 | static var dev: DeveloperPreview { 15 | return DeveloperPreview.instance 16 | } 17 | 18 | 19 | 20 | 21 | } 22 | 23 | class DeveloperPreview { 24 | 25 | static let instance = DeveloperPreview() 26 | private init() { } 27 | 28 | 29 | let contreller = PersistenceController(inMemory: true) 30 | 31 | var viewContext: NSManagedObjectContext { 32 | 33 | 34 | _ = projects 35 | // 36 | // _ = accounts 37 | // 38 | return contreller.viewContext 39 | } 40 | 41 | // var transactions: [TransactionEntity]{ 42 | // let context = contreller.viewContext 43 | // let trans1 = TransactionEntity(context: context) 44 | // trans1.id = UUID().uuidString 45 | // trans1.createAt = Date.now 46 | // trans1.amount = 1300 47 | // trans1.currencyCode = "RUB" 48 | // trans1.type = TransactionType.income.rawValue 49 | // trans1.category = category[1] 50 | 51 | 52 | var projects: [ProjectEntity]{ 53 | let context = contreller.viewContext 54 | let project1 = ProjectEntity(context: context) 55 | project1.id = UUID().uuidString 56 | project1.createAt = Date.now 57 | project1.url = "file:///Users/bogdanzykov/Library/Developer/CoreSimulator/Devices/86D65E8C-7D49-47AF-A511-BFA631289CB1/data/Containers/Data/Application/52E5EF3C-9E78-4676-B3EA-03BD22CCD09A/Documents/video_copy.mp4" 58 | 59 | return [project1] 60 | } 61 | 62 | } 63 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Extensions/TimeInterval+Ext.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TimeInterval+Ext.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 04.05.2023. 6 | // 7 | 8 | import Foundation 9 | 10 | 11 | extension TimeInterval { 12 | var minutesSecondsMilliseconds: String { 13 | String(format: "%02.0f:%02.0f:%02.0f", 14 | (self / 60).truncatingRemainder(dividingBy: 60), 15 | truncatingRemainder(dividingBy: 60), 16 | (self * 100).truncatingRemainder(dividingBy: 100).rounded(.down)) 17 | } 18 | 19 | 20 | var minuteSeconds: String { 21 | guard self > 0 && self < Double.infinity else { 22 | return "unknown" 23 | } 24 | let time = NSInteger(self) 25 | 26 | let seconds = time % 60 27 | let minutes = (time / 60) % 60 28 | 29 | return String(format: "%0.2d:%0.2d", minutes, seconds) 30 | 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Extensions/UIImage+Ext.swift: -------------------------------------------------------------------------------- 1 | // 2 | // UIImage+Ext.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 05.05.2023. 6 | // 7 | 8 | import Foundation 9 | import SwiftUI 10 | 11 | extension UIImage{ 12 | 13 | /// Resize image 14 | /// Return new UIImage with needed size and scale 15 | func resize(to size: CGSize, scale: CGFloat = 1.0) -> UIImage{ 16 | let format = UIGraphicsImageRendererFormat.default() 17 | format.scale = scale 18 | let renderer = UIGraphicsImageRenderer(size: size, format: format) 19 | return renderer.image { _ in draw(in: CGRect(origin: .zero, size: size))} 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Extensions/View+Ext.swift: -------------------------------------------------------------------------------- 1 | // 2 | // View+Ext.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 17.04.2023. 
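// Layout shorthands (vCenter, hLeading, allFrame, ...) and small helpers shared by the views.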
6 | // 7 | 8 | import SwiftUI 9 | 10 | extension View{ 11 | 12 | func getRect() -> CGRect{ 13 | return UIScreen.main.bounds 14 | } 15 | 16 | //MARK: Vertical Center 17 | func vCenter() -> some View{ 18 | self 19 | .frame(maxHeight: .infinity, alignment: .center) 20 | } 21 | //MARK: Vertical Top 22 | func vTop() -> some View{ 23 | self 24 | .frame(maxHeight: .infinity, alignment: .top) 25 | } 26 | //MARK: Vertical Bottom 27 | func vBottom() -> some View{ 28 | self 29 | .frame(maxHeight: .infinity, alignment: .bottom) 30 | } 31 | //MARK: Horizontal Center 32 | func hCenter() -> some View{ 33 | self 34 | .frame(maxWidth: .infinity, alignment: .center) 35 | } 36 | //MARK: Horizontal Leading 37 | func hLeading() -> some View{ 38 | self 39 | .frame(maxWidth: .infinity, alignment: .leading) 40 | } 41 | //MARK: Horizontal Trailing 42 | func hTrailing() -> some View{ 43 | self 44 | .frame(maxWidth: .infinity, alignment: .trailing) 45 | } 46 | 47 | //MARK: - All frame 48 | func allFrame() -> some View{ 49 | self 50 | .frame(maxWidth: .infinity, maxHeight: .infinity) 51 | } 52 | 53 | func withoutAnimation() -> some View { 54 | self.animation(nil, value: UUID()) 55 | } 56 | 57 | var isSmallScreen: Bool{ 58 | getRect().height < 700 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Utils/Helpers/Helpers.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Helpers.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 27.04.2023. 6 | // 7 | 8 | import Foundation 9 | import CoreImage 10 | 11 | final class Helpers{ 12 | 13 | 14 | static func createColorFilter(_ colorCorrection: ColorCorrection?) -> CIFilter?{ 15 | guard let colorCorrection else { return nil } 16 | let colorCorrectionFilter = CIFilter(name: "CIColorControls") 17 | colorCorrectionFilter?.setValue(colorCorrection.brightness, forKey: CorrectionType.brightness.key) 18 | colorCorrectionFilter?.setValue(colorCorrection.contrast + 1, forKey: CorrectionType.contrast.key) 19 | colorCorrectionFilter?.setValue(colorCorrection.saturation + 1, forKey: CorrectionType.saturation.key) 20 | return colorCorrectionFilter 21 | } 22 | 23 | 24 | static func createFilters(mainFilter: CIFilter?, _ colorCorrection: ColorCorrection?) -> [CIFilter]{ 25 | var filters = [CIFilter]() 26 | 27 | if let mainFilter{ 28 | filters.append(mainFilter) 29 | } 30 | 31 | if let colorFilter = createColorFilter(colorCorrection){ 32 | filters.append(colorFilter) 33 | } 34 | 35 | return filters 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/VideoEditorSwiftUIApp.swift: -------------------------------------------------------------------------------- 1 | // 2 | // VideoEditorSwiftUIApp.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 
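// App entry point: builds RootViewModel with the shared Core Data view context and shows RootView in dark mode.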
6 | // 7 | 8 | import SwiftUI 9 | 10 | @main 11 | struct VideoEditorSwiftUIApp: App { 12 | @StateObject var rootVM = RootViewModel(mainContext: PersistenceController.shared.viewContext) 13 | var body: some Scene { 14 | WindowGroup { 15 | RootView(rootVM: rootVM) 16 | .preferredColorScheme(.dark) 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/ViewModels/EditorViewModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // EditorViewModel.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 6 | // 7 | 8 | import Foundation 9 | import AVKit 10 | import SwiftUI 11 | import Photos 12 | import Combine 13 | 14 | class EditorViewModel: ObservableObject{ 15 | 16 | @Published var currentVideo: Video? 17 | @Published var selectedTools: ToolEnum? 18 | @Published var frames = VideoFrames() 19 | @Published var isSelectVideo: Bool = true 20 | 21 | private var projectEntity: ProjectEntity? 22 | 23 | 24 | func setNewVideo(_ url: URL, geo: GeometryProxy){ 25 | currentVideo = .init(url: url) 26 | currentVideo?.updateThumbnails(geo) 27 | createProject() 28 | } 29 | 30 | func setProject(_ project: ProjectEntity, geo: GeometryProxy){ 31 | projectEntity = project 32 | 33 | guard let url = project.videoURL else {return} 34 | 35 | currentVideo = .init(url: url, rangeDuration: project.lowerBound...project.upperBound, rate: Float(project.rate), rotation: project.rotation) 36 | currentVideo?.toolsApplied = project.wrappedTools 37 | currentVideo?.filterName = project.filterName 38 | currentVideo?.colorCorrection = .init(brightness: project.brightness, contrast: project.contrast, saturation: project.saturation) 39 | let frame = VideoFrames(scaleValue: project.frameScale, frameColor: project.wrappedColor) 40 | currentVideo?.videoFrames = frame 41 | self.frames = frame 42 | currentVideo?.updateThumbnails(geo) 43 | currentVideo?.textBoxes = project.wrappedTextBoxes 44 | if let audio = project.audio?.audioModel{ 45 | currentVideo?.audio = audio 46 | } 47 | } 48 | 49 | } 50 | 51 | //MARK: - Core data logic 52 | extension EditorViewModel{ 53 | 54 | private func createProject(){ 55 | guard let currentVideo else { return } 56 | let context = PersistenceController.shared.viewContext 57 | ProjectEntity.create(video: currentVideo, context: context) 58 | } 59 | 60 | func updateProject(){ 61 | guard let projectEntity, let currentVideo else { return } 62 | ProjectEntity.update(for: currentVideo, project: projectEntity) 63 | } 64 | } 65 | 66 | //MARK: - Tools logic 67 | extension EditorViewModel{ 68 | 69 | 70 | func setFilter(_ filter: String?){ 71 | currentVideo?.setFilter(filter) 72 | if filter != nil{ 73 | setTools() 74 | }else{ 75 | removeTool() 76 | } 77 | } 78 | 79 | 80 | func setText(_ textBox: [TextBox]){ 81 | currentVideo?.textBoxes = textBox 82 | setTools() 83 | } 84 | 85 | func setFrames(){ 86 | currentVideo?.videoFrames = frames 87 | setTools() 88 | } 89 | 90 | func setCorrections(_ correction: ColorCorrection){ 91 | currentVideo?.colorCorrection = correction 92 | setTools() 93 | } 94 | 95 | func updateRate(rate: Float){ 96 | currentVideo?.updateRate(rate) 97 | setTools() 98 | } 99 | 100 | func rotate(){ 101 | currentVideo?.rotate() 102 | setTools() 103 | } 104 | 105 | func toggleMirror(){ 106 | currentVideo?.isMirror.toggle() 107 | setTools() 108 | } 109 | 110 | func setAudio(_ audio: Audio){ 111 | currentVideo?.audio = audio 112 | setTools() 113 | } 114 | 115 | 
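// Usage sketch (added for illustration only; not part of the original sources): a view that owns
// this view model would typically select a tool and then apply a value, for example:
//
//     editorVM.selectedTools = .filters
//     editorVM.setFilter("CIPhotoEffectMono")   // marks the filters tool as applied
//     editorVM.updateProject()                  // persists the change to Core Data
//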
func setTools(){ 115 | guard let selectedTools else { return } 116 | currentVideo?.appliedTool(for: selectedTools) 117 | } 118 | 119 | func removeTool(){ 120 | guard let selectedTools else { return } 121 | self.currentVideo?.removeTool(for: selectedTools) 122 | } 123 | 124 | func removeAudio(){ 125 | guard let url = currentVideo?.audio?.url else {return} 126 | FileManager.default.removefileExists(for: url) 127 | currentVideo?.audio = nil 128 | isSelectVideo = true 129 | removeTool() 130 | updateProject() 131 | } 132 | 133 | func reset(){ 134 | guard let selectedTools else {return} 135 | 136 | switch selectedTools{ 137 | 138 | case .cut: 139 | currentVideo?.resetRangeDuration() 140 | case .speed: 141 | currentVideo?.resetRate() 142 | case .text, .audio, .crop: 143 | break 144 | case .filters: 145 | currentVideo?.setFilter(nil) 146 | case .corrections: 147 | currentVideo?.colorCorrection = ColorCorrection() 148 | case .frames: 149 | frames.reset() 150 | currentVideo?.videoFrames = nil 151 | } 152 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.1){ 153 | self.removeTool() 154 | } 155 | } 156 | } 157 | 158 | 159 | 160 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/ViewModels/ExporterViewModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ExporterViewModel.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 24.04.2023. 6 | // 7 | 8 | import Foundation 9 | import Combine 10 | import Photos 11 | import UIKit 12 | import SwiftUI 13 | 14 | 15 | class ExporterViewModel: ObservableObject{ 16 | 17 | let video: Video 18 | 19 | @Published var renderState: ExportState = .unknown 20 | @Published var showAlert: Bool = false 21 | @Published var progressTimer: TimeInterval = .zero 22 | @Published var selectedQuality: VideoQuality = .medium 23 | private var cancellable = Set<AnyCancellable>() 24 | private var action: ActionEnum = .save 25 | private let editorHelper = VideoEditor() 26 | private var timer: Timer?
27 | 28 | init(video: Video){ 29 | self.video = video 30 | startRenderStateSubs() 31 | } 32 | 33 | 34 | deinit{ 35 | cancellable.forEach({$0.cancel()}) 36 | resetTimer() 37 | } 38 | 39 | 40 | @MainActor 41 | private func renderVideo() async{ 42 | renderState = .loading 43 | do{ 44 | let url = try await editorHelper.startRender(video: video, videoQuality: selectedQuality) 45 | renderState = .loaded(url) 46 | }catch{ 47 | renderState = .failed(error) 48 | } 49 | } 50 | 51 | 52 | 53 | func action(_ action: ActionEnum) async{ 54 | self.action = action 55 | await renderVideo() 56 | } 57 | 58 | private func startRenderStateSubs(){ 59 | $renderState 60 | .sink {[weak self] state in 61 | guard let self = self else {return} 62 | switch state { 63 | case .loading: 64 | self.timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { time in 65 | self.progressTimer += 1 66 | } 67 | case .loaded(let url): 68 | if self.action == .save{ 69 | self.saveVideoInLib(url) 70 | }else{ 71 | self.showShareSheet(data: url) 72 | } 73 | self.resetTimer() 74 | default: 75 | break 76 | } 77 | } 78 | .store(in: &cancellable) 79 | } 80 | 81 | 82 | private func resetTimer(){ 83 | timer?.invalidate() 84 | timer = nil 85 | progressTimer = .zero 86 | } 87 | 88 | private func showShareSheet(data: Any){ 89 | DispatchQueue.main.async { 90 | self.renderState = .unknown 91 | } 92 | UIActivityViewController(activityItems: [data], applicationActivities: nil).presentInKeyWindow() 93 | } 94 | 95 | private func saveVideoInLib(_ url: URL){ 96 | PHPhotoLibrary.shared().performChanges({ 97 | PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url) 98 | }) {[weak self] saved, error in 99 | guard let self = self else {return} 100 | if saved { 101 | DispatchQueue.main.async { 102 | self.renderState = .saved 103 | } 104 | } 105 | } 106 | } 107 | 108 | enum ActionEnum: Int{ 109 | case save, share 110 | } 111 | 112 | 113 | 114 | enum ExportState: Identifiable, Equatable { 115 | 116 | case unknown, loading, loaded(URL), failed(Error), saved 117 | 118 | var id: Int{ 119 | switch self { 120 | case .unknown: return 0 121 | case .loading: return 1 122 | case .loaded: return 2 123 | case .failed: return 3 124 | case .saved: return 4 125 | } 126 | } 127 | 128 | static func == (lhs: ExporterViewModel.ExportState, rhs: ExporterViewModel.ExportState) -> Bool { 129 | lhs.id == rhs.id 130 | } 131 | } 132 | 133 | } 134 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/ViewModels/FiltersViewModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FiltersViewModel.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 26.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | import CoreImage 10 | import CoreImage.CIFilterBuiltins 11 | 12 | class FiltersViewModel: ObservableObject{ 13 | 14 | @Published var images = [FilteredImage]() 15 | @Published var colorCorrection = ColorCorrection() 16 | @Published var value: Double = 1.0 17 | 18 | var image: UIImage? 
19 | 20 | 21 | private let filters: [CIFilter] = [ 22 | 23 | .photoEffectChrome(), 24 | .photoEffectFade(), 25 | .photoEffectInstant(), 26 | .photoEffectMono(), 27 | .photoEffectNoir(), 28 | .photoEffectProcess(), 29 | .photoEffectTonal(), 30 | .photoEffectTransfer(), 31 | .sepiaTone(), 32 | .thermal(), 33 | .vignette(), 34 | .vignetteEffect(), 35 | .xRay(), 36 | .gaussianBlur() 37 | 38 | ] 39 | 40 | func loadFilters(for image: UIImage){ 41 | self.image = image 42 | let context = CIContext() 43 | filters.forEach { filter in 44 | DispatchQueue.global(qos: .userInteractive).async { 45 | 46 | guard let CiImage = CIImage(image: image) else {return} 47 | filter.setValue(CiImage, forKey: kCIInputImageKey) 48 | 49 | guard let newImage = filter.outputImage, let cgImage = context.createCGImage(newImage, from: CiImage.extent) else {return} 50 | 51 | let filterImage = FilteredImage(image: UIImage(cgImage: cgImage), filter: filter) 52 | 53 | DispatchQueue.main.async { 54 | self.images.append(filterImage) 55 | } 56 | } 57 | } 58 | } 59 | } 60 | 61 | 62 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/ViewModels/RootViewModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // RootViewModel.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 6 | // 7 | 8 | import Foundation 9 | import CoreData 10 | import PhotosUI 11 | import SwiftUI 12 | 13 | final class RootViewModel: ObservableObject{ 14 | 15 | @Published var projects = [ProjectEntity]() 16 | private let dataManager: CoreDataManager 17 | 18 | 19 | init(mainContext: NSManagedObjectContext){ 20 | self.dataManager = CoreDataManager(mainContext: mainContext) 21 | } 22 | 23 | func fetch(){ 24 | projects = dataManager.fetchProjects() 25 | } 26 | 27 | func removeProject(_ project: ProjectEntity){ 28 | ProjectEntity.remove(project) 29 | fetch() 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/ViewModels/TextEditorViewModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TextEditorViewModel.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 02.05.2023. 6 | // 7 | 8 | import Foundation 9 | import SwiftUI 10 | 11 | class TextEditorViewModel: ObservableObject{ 12 | 13 | @Published var textBoxes: [TextBox] = [] 14 | @Published var showEditor: Bool = false 15 | @Published var currentTextBox: TextBox = TextBox() 16 | @Published var selectedTextBox: TextBox? 17 | private var isEditMode: Bool = false 18 | 19 | func cancelTextEditor(){ 20 | showEditor = false 21 | } 22 | 23 | func selectTextBox(_ texBox: TextBox){ 24 | selectedTextBox = texBox 25 | } 26 | 27 | func isSelected(_ id: UUID) -> Bool{ 28 | selectedTextBox?.id == id 29 | } 30 | 31 | func setTime(_ time: ClosedRange){ 32 | guard let selectedTextBox else {return} 33 | if let index = textBoxes.firstIndex(where: {$0.id == selectedTextBox.id}){ 34 | textBoxes[index].timeRange = time 35 | } 36 | } 37 | 38 | func removeTextBox(){ 39 | guard let selectedTextBox else {return} 40 | textBoxes.removeAll(where: {$0.id == selectedTextBox.id}) 41 | } 42 | 43 | func copy(_ textBox: TextBox){ 44 | var new = textBox 45 | new.id = UUID() 46 | new.offset = .init(width: new.offset.width + 10, height: new.offset.height + 10) 47 | textBoxes.append(new) 48 | } 49 | 50 | func openTextEditor(isEdit: Bool, _ textBox: TextBox? 
= nil, timeRange: ClosedRange? = nil){ 51 | if let textBox, isEdit{ 52 | isEditMode = true 53 | currentTextBox = textBox 54 | }else{ 55 | currentTextBox = TextBox(timeRange: timeRange ?? (1...5)) 56 | isEditMode = false 57 | } 58 | showEditor = true 59 | } 60 | 61 | func saveTapped(){ 62 | if isEditMode{ 63 | if let index = textBoxes.firstIndex(where: {$0.id == currentTextBox.id}){ 64 | textBoxes[index] = currentTextBox 65 | } 66 | }else{ 67 | textBoxes.append(currentTextBox) 68 | } 69 | selectedTextBox = currentTextBox 70 | cancelTextEditor() 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/Camera/CameraPreviewView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CameraPreviewView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | import AVKit 10 | 11 | class CameraPreviewView: UIView{ 12 | 13 | private var captureSession: AVCaptureSession 14 | 15 | 16 | init(captureSession: AVCaptureSession) { 17 | self.captureSession = captureSession 18 | super.init(frame: .zero) 19 | } 20 | 21 | required init?(coder: NSCoder) { 22 | fatalError("init(coder:) has not been implemented") 23 | } 24 | 25 | override class var layerClass: AnyClass{ 26 | AVCaptureVideoPreviewLayer.self 27 | } 28 | 29 | var videoPreviewLayer: AVCaptureVideoPreviewLayer{ 30 | return layer as! AVCaptureVideoPreviewLayer 31 | } 32 | 33 | override func didMoveToSuperview() { 34 | super.didMoveToSuperview() 35 | 36 | if nil != self.superview{ 37 | self.videoPreviewLayer.session = self.captureSession 38 | self.videoPreviewLayer.videoGravity = .resizeAspectFill 39 | }else{ 40 | self.videoPreviewLayer.session = nil 41 | self.videoPreviewLayer.removeFromSuperlayer() 42 | } 43 | } 44 | } 45 | 46 | 47 | struct CameraPreviewHolder: UIViewRepresentable{ 48 | 49 | typealias UIViewType = CameraPreviewView 50 | 51 | var captureSession: AVCaptureSession 52 | 53 | 54 | func makeUIView(context: Context) -> CameraPreviewView { 55 | CameraPreviewView(captureSession: captureSession) 56 | } 57 | 58 | func updateUIView(_ uiView: CameraPreviewView, context: Context) { 59 | 60 | } 61 | } 62 | 63 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/Camera/RecordVideoView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // RecordVideoView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct RecordVideoView: View { 11 | @StateObject var cameraManager = CameraManager() 12 | @Environment(\.dismiss) private var dismiss 13 | let onFinishRecord: (URL) -> Void 14 | var body: some View { 15 | ZStack{ 16 | CameraPreviewHolder(captureSession: cameraManager.session) 17 | VStack(spacing: 0) { 18 | Text(cameraManager.recordedDuration.formatterTimeString()) 19 | .foregroundColor(.white) 20 | Spacer() 21 | Button { 22 | if cameraManager.isRecording{ 23 | cameraManager.stopRecord() 24 | }else{ 25 | cameraManager.startRecording() 26 | } 27 | 28 | } label: { 29 | Circle() 30 | .fill(cameraManager.isRecording ? 
.white : .red) 31 | .frame(width: 55, height: 55) 32 | } 33 | } 34 | .padding() 35 | } 36 | .overlay(alignment: .topLeading) { 37 | Button { 38 | dismiss() 39 | } label: { 40 | Image(systemName: "xmark") 41 | .padding() 42 | } 43 | } 44 | .onChange(of: cameraManager.finalURL) { newValue in 45 | if let url = newValue{ 46 | onFinishRecord(url) 47 | dismiss() 48 | } 49 | } 50 | } 51 | } 52 | 53 | struct RecordVideoView_Previews: PreviewProvider { 54 | static var previews: some View { 55 | RecordVideoView( onFinishRecord: {_ in }) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/EditorView/MainEditorView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // MainEditorView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 14.04.2023. 6 | // 7 | import AVKit 8 | import SwiftUI 9 | import PhotosUI 10 | 11 | struct MainEditorView: View { 12 | @Environment(\.scenePhase) private var scenePhase 13 | @Environment(\.dismiss) private var dismiss 14 | var project: ProjectEntity? 15 | var selectedVideoURl: URL? 16 | @State var isFullScreen: Bool = false 17 | @State var showVideoQualitySheet: Bool = false 18 | @State var showRecordView: Bool = false 19 | @StateObject var editorVM = EditorViewModel() 20 | @StateObject var audioRecorder = AudioRecorderManager() 21 | @StateObject var videoPlayer = VideoPlayerManager() 22 | @StateObject var textEditor = TextEditorViewModel() 23 | var body: some View { 24 | ZStack{ 25 | GeometryReader { proxy in 26 | VStack(spacing: 0){ 27 | headerView 28 | PlayerHolderView(isFullScreen: $isFullScreen, editorVM: editorVM, videoPlayer: videoPlayer, textEditor: textEditor) 29 | .frame(height: proxy.size.height / (isFullScreen ? 1.25 : 1.8)) 30 | PlayerControl(isFullScreen: $isFullScreen, recorderManager: audioRecorder, editorVM: editorVM, videoPlayer: videoPlayer, textEditor: textEditor) 31 | ToolsSectionView(videoPlayer: videoPlayer, editorVM: editorVM, textEditor: textEditor) 32 | .opacity(isFullScreen ? 0 : 1) 33 | .padding(.top, 5) 34 | } 35 | .onAppear{ 36 | setVideo(proxy) 37 | } 38 | } 39 | 40 | if showVideoQualitySheet, let video = editorVM.currentVideo{ 41 | VideoExporterBottomSheetView(isPresented: $showVideoQualitySheet, video: video) 42 | } 43 | } 44 | .background(Color.black) 45 | .navigationBarHidden(true) 46 | .navigationBarBackButtonHidden(true) 47 | .ignoresSafeArea(.all, edges: .top) 48 | .fullScreenCover(isPresented: $showRecordView) { 49 | RecordVideoView{ url in 50 | videoPlayer.loadState = .loaded(url) 51 | } 52 | } 53 | .statusBar(hidden: true) 54 | .onChange(of: scenePhase) { phase in 55 | saveProject(phase) 56 | } 57 | .blur(radius: textEditor.showEditor ? 
10 : 0) 58 | .ignoresSafeArea(.keyboard, edges: .bottom) 59 | .overlay { 60 | if textEditor.showEditor{ 61 | TextEditorView(viewModel: textEditor, onSave: editorVM.setText) 62 | } 63 | } 64 | } 65 | } 66 | 67 | struct RootView_Previews: PreviewProvider { 68 | static var previews: some View { 69 | MainEditorView(selectedVideoURl: URL(string: "file:///Users/bogdanzykov/Library/Developer/CoreSimulator/Devices/86D65E8C-7D49-47AF-A511-BFA631289CB1/data/Containers/Data/Application/52E5EF3C-9E78-4676-B3EA-03BD22CCD09A/Documents/video_copy.mp4")) 70 | } 71 | } 72 | 73 | extension MainEditorView{ 74 | private var headerView: some View{ 75 | HStack{ 76 | Button { 77 | editorVM.updateProject() 78 | dismiss() 79 | } label: { 80 | Image(systemName: "folder.fill") 81 | } 82 | 83 | Spacer() 84 | 85 | Button { 86 | editorVM.selectedTools = nil 87 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.2){ 88 | showVideoQualitySheet.toggle() 89 | } 90 | } label: { 91 | Image(systemName: "square.and.arrow.up.fill") 92 | } 93 | } 94 | .foregroundColor(.white) 95 | .padding(.horizontal, 20) 96 | .frame(height: 50) 97 | .padding(.bottom) 98 | } 99 | 100 | private func saveProject(_ phase: ScenePhase){ 101 | switch phase{ 102 | case .background, .inactive: 103 | editorVM.updateProject() 104 | default: 105 | break 106 | } 107 | } 108 | 109 | private func setVideo(_ proxy: GeometryProxy){ 110 | if let selectedVideoURl{ 111 | videoPlayer.loadState = .loaded(selectedVideoURl) 112 | editorVM.setNewVideo(selectedVideoURl, geo: proxy) 113 | } 114 | 115 | if let project, let url = project.videoURL{ 116 | videoPlayer.loadState = .loaded(url) 117 | editorVM.setProject(project, geo: proxy) 118 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.1){ 119 | videoPlayer.setFilters(mainFilter: CIFilter(name: project.filterName ?? ""), colorCorrection: editorVM.currentVideo?.colorCorrection) 120 | } 121 | } 122 | } 123 | } 124 | 125 | 126 | 127 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/EditorView/PlayerHolderView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // PlayerHolderView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 18.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct PlayerHolderView: View{ 11 | @Binding var isFullScreen: Bool 12 | @ObservedObject var editorVM: EditorViewModel 13 | @ObservedObject var videoPlayer: VideoPlayerManager 14 | @ObservedObject var textEditor: TextEditorViewModel 15 | var scale: CGFloat{ 16 | isFullScreen ? 1.4 : 1 17 | } 18 | 19 | var body: some View{ 20 | VStack(spacing: 6) { 21 | ZStack(alignment: .bottom){ 22 | switch videoPlayer.loadState{ 23 | case .loading: 24 | ProgressView() 25 | case .unknown: 26 | Text("Add new video") 27 | case .failed: 28 | Text("Failed to open video") 29 | case .loaded: 30 | playerCropView 31 | } 32 | } 33 | .allFrame() 34 | } 35 | } 36 | } 37 | 38 | struct PlayerHolderView_Previews: PreviewProvider { 39 | static var previews: some View { 40 | MainEditorView() 41 | .preferredColorScheme(.dark) 42 | } 43 | } 44 | 45 | extension PlayerHolderView{ 46 | 47 | private var playerCropView: some View{ 48 | Group{ 49 | if let video = editorVM.currentVideo{ 50 | GeometryReader { proxy in 51 | CropView( 52 | originalSize: .init(width: video.frameSize.width * scale, height: video.frameSize.height * scale), 53 | rotation: editorVM.currentVideo?.rotation, 54 | isMirror: editorVM.currentVideo?.isMirror ?? 
false, 55 | isActiveCrop: editorVM.selectedTools == .crop) { 56 | ZStack{ 57 | editorVM.frames.frameColor 58 | ZStack{ 59 | PlayerView(player: videoPlayer.videoPlayer) 60 | TextOverlayView(currentTime: videoPlayer.currentTime, viewModel: textEditor, disabledMagnification: isFullScreen) 61 | .scaleEffect(scale) 62 | .disabled(isFullScreen) 63 | } 64 | .scaleEffect(editorVM.frames.scale) 65 | } 66 | } 67 | .allFrame() 68 | .onAppear{ 69 | Task{ 70 | guard let size = await editorVM.currentVideo?.asset.adjustVideoSize(to: proxy.size) else {return} 71 | editorVM.currentVideo?.frameSize = size 72 | editorVM.currentVideo?.geometrySize = proxy.size 73 | } 74 | } 75 | } 76 | } 77 | timelineLabel 78 | } 79 | } 80 | } 81 | 82 | extension PlayerHolderView{ 83 | 84 | @ViewBuilder 85 | private var timelineLabel: some View{ 86 | if let video = editorVM.currentVideo{ 87 | HStack{ 88 | Text((videoPlayer.currentTime - video.rangeDuration.lowerBound) .formatterTimeString()) + 89 | Text(" / ") + 90 | Text(Int(video.totalDuration).secondsToTime()) 91 | } 92 | .font(.caption2) 93 | .foregroundColor(.white) 94 | .frame(width: 80) 95 | .padding(5) 96 | .background(Color(.black).opacity(0.5), in: RoundedRectangle(cornerRadius: 10)) 97 | .padding() 98 | } 99 | } 100 | } 101 | 102 | 103 | struct PlayerControl: View{ 104 | @Binding var isFullScreen: Bool 105 | @ObservedObject var recorderManager: AudioRecorderManager 106 | @ObservedObject var editorVM: EditorViewModel 107 | @ObservedObject var videoPlayer: VideoPlayerManager 108 | @ObservedObject var textEditor: TextEditorViewModel 109 | var body: some View{ 110 | VStack(spacing: 6) { 111 | playSection 112 | timeLineControlSection 113 | } 114 | } 115 | 116 | 117 | @ViewBuilder 118 | private var timeLineControlSection: some View{ 119 | if let video = editorVM.currentVideo{ 120 | TimeLineView( 121 | recorderManager: recorderManager, 122 | currentTime: $videoPlayer.currentTime, 123 | isSelectedTrack: $editorVM.isSelectVideo, 124 | viewState: editorVM.selectedTools?.timeState ?? .empty, 125 | video: video, textInterval: textEditor.selectedTextBox?.timeRange) { 126 | videoPlayer.scrubState = .scrubEnded(videoPlayer.currentTime) 127 | } onChangeTextTime: { textTime in 128 | textEditor.setTime(textTime) 129 | } onSetAudio: { audio in 130 | editorVM.setAudio(audio) 131 | videoPlayer.setAudio(audio.url) 132 | } 133 | } 134 | } 135 | 136 | private var playSection: some View{ 137 | 138 | Button { 139 | if let video = editorVM.currentVideo{ 140 | videoPlayer.action(video) 141 | } 142 | } label: { 143 | Image(systemName: videoPlayer.isPlaying ? "pause.fill" : "play.fill") 144 | .imageScale(.medium) 145 | } 146 | .buttonStyle(.plain) 147 | .hCenter() 148 | .frame(height: 30) 149 | .overlay(alignment: .trailing) { 150 | Button { 151 | videoPlayer.pause() 152 | withAnimation { 153 | isFullScreen.toggle() 154 | } 155 | } label: { 156 | Image(systemName: isFullScreen ? "arrow.down.right.and.arrow.up.left" : "arrow.up.left.and.arrow.down.right") 157 | .imageScale(.medium) 158 | } 159 | .buttonStyle(.plain) 160 | } 161 | .padding(.horizontal) 162 | } 163 | } 164 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/EditorView/Swipe.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Swipe.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 02.05.2023. 
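// Custom swipe-to-delete helpers: the Swipe and Delete view modifiers, plus a small TestView demo list.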
6 | // 7 | 8 | import SwiftUI 9 | 10 | struct TestView: View { 11 | @State var rows = [1, 2, 3, 4, 5, 6] 12 | var body: some View { 13 | 14 | ScrollView(.vertical, showsIndicators: false) { 15 | LazyVStack(spacing: 0){ 16 | ForEach(rows, id: \.self) { index in 17 | VStack{ 18 | HStack{ 19 | Text("\(index)") 20 | Spacer() 21 | } 22 | .padding() 23 | Divider() 24 | } 25 | .swipeAction{ 26 | withAnimation { 27 | rows.removeAll(where: {$0 == index}) 28 | } 29 | } 30 | } 31 | } 32 | .padding() 33 | } 34 | } 35 | } 36 | 37 | struct TestView_Previews: PreviewProvider { 38 | static var previews: some View { 39 | TestView() 40 | } 41 | } 42 | 43 | extension View { 44 | 45 | func onDelete(perform action: @escaping () -> Void) -> some View { 46 | self.modifier(Delete(action: action)) 47 | } 48 | 49 | func swipeAction(perform action: @escaping () -> Void) -> some View{ 50 | self.modifier(Swipe(action: action)) 51 | } 52 | } 53 | 54 | struct Swipe: ViewModifier{ 55 | let halfDeletionDistance: CGFloat = 70 56 | @State private var isSwiped: Bool = false 57 | @State private var offset: CGFloat = .zero 58 | let action: () -> Void 59 | 60 | func body(content: Content) -> some View { 61 | 62 | ZStack{ 63 | Color.red 64 | 65 | HStack{ 66 | Spacer() 67 | Button { 68 | delete() 69 | } label: { 70 | Image(systemName: "trash") 71 | .font(.title2) 72 | .foregroundColor(.white) 73 | .padding(.trailing) 74 | } 75 | } 76 | content 77 | .background(.white) 78 | .contentShape(Rectangle()) 79 | .offset(x: offset) 80 | .gesture(DragGesture().onChanged(onChange).onEnded(onEnded)) 81 | .animation(.easeIn, value: offset) 82 | 83 | } 84 | } 85 | 86 | private func onChange(_ value: DragGesture.Value){ 87 | 88 | 89 | if value.translation.width < 0{ 90 | if isSwiped{ 91 | offset = value.translation.width - halfDeletionDistance 92 | } 93 | else{ 94 | offset = value.translation.width 95 | } 96 | } 97 | 98 | } 99 | 100 | private func onEnded(_ value: DragGesture.Value){ 101 | if value.translation.width < 0{ 102 | if -offset > 50{ 103 | isSwiped = true 104 | offset = -halfDeletionDistance 105 | }else{ 106 | isSwiped = false 107 | offset = .zero 108 | } 109 | }else{ 110 | isSwiped = false 111 | offset = .zero 112 | } 113 | } 114 | 115 | private func delete(){ 116 | offset = -1000 117 | action() 118 | } 119 | } 120 | 121 | 122 | 123 | struct Delete: ViewModifier { 124 | 125 | let action: () -> Void 126 | 127 | @State var offset: CGSize = .zero 128 | @State var initialOffset: CGSize = .zero 129 | @State var contentWidth: CGFloat = 0.0 130 | @State var willDeleteIfReleased = false 131 | 132 | func body(content: Content) -> some View { 133 | content 134 | .background( 135 | GeometryReader { geometry in 136 | ZStack { 137 | Rectangle() 138 | // .clipShape(CustomCorners(corners: [.topLeft, .bottomLeft], radius: 7)) 139 | .foregroundColor(.red) 140 | Image(systemName: "trash") 141 | .foregroundColor(.white) 142 | .font(.title2.bold()) 143 | .layoutPriority(-1) 144 | } 145 | .frame(width: -offset.width) 146 | .clipShape(Rectangle() ) 147 | .offset(x: geometry.size.width) 148 | .onAppear { 149 | withAnimation(.easeIn(duration: 0.2)){ 150 | contentWidth = geometry.size.width 151 | } 152 | } 153 | .gesture( 154 | TapGesture() 155 | .onEnded { 156 | delete() 157 | } 158 | ) 159 | } 160 | ) 161 | .offset(x: offset.width, y: 0) 162 | .gesture ( 163 | DragGesture() 164 | .onChanged { gesture in 165 | if gesture.translation.width + initialOffset.width <= 0 { 166 | self.offset.width = gesture.translation.width + initialOffset.width 167 | } 168 | if 
self.offset.width < -deletionDistance && !willDeleteIfReleased { 169 | hapticFeedback() 170 | willDeleteIfReleased.toggle() 171 | } else if offset.width > -deletionDistance && willDeleteIfReleased { 172 | hapticFeedback() 173 | willDeleteIfReleased.toggle() 174 | } 175 | } 176 | .onEnded { _ in 177 | if offset.width < -deletionDistance { 178 | delete() 179 | } else if offset.width < -halfDeletionDistance { 180 | offset.width = -tappableDeletionWidth 181 | initialOffset.width = -tappableDeletionWidth 182 | } else { 183 | offset = .zero 184 | initialOffset = .zero 185 | } 186 | } 187 | ) 188 | .animation(.interactiveSpring(), value: offset) 189 | .animation(.interactiveSpring(), value: initialOffset) 190 | .animation(.interactiveSpring(), value: willDeleteIfReleased) 191 | } 192 | 193 | private func delete() { 194 | 195 | //offset.width = -contentWidth 196 | offset = .zero 197 | initialOffset = .zero 198 | action() 199 | } 200 | 201 | private func hapticFeedback() { 202 | let generator = UIImpactFeedbackGenerator(style: .medium) 203 | generator.impactOccurred() 204 | } 205 | 206 | //MARK: Constants 207 | 208 | let deletionDistance = CGFloat(100) 209 | let halfDeletionDistance = CGFloat(50) 210 | let tappableDeletionWidth = CGFloat(100) 211 | 212 | 213 | } 214 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/EditorView/TimeLineView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TimeLineView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 18.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct TimeLineView: View { 11 | @ObservedObject var recorderManager: AudioRecorderManager 12 | @State private var isActiveTextRangeSlider: Bool = false 13 | @State private var textTimeInterval: ClosedRange<Double> = 0...1 14 | @Binding var currentTime: Double 15 | @Binding var isSelectedTrack: Bool 16 | var viewState: TimeLineViewState = .empty 17 | var video: Video 18 | var textInterval: ClosedRange<Double>?
19 | let onChangeTimeValue: () -> Void 20 | let onChangeTextTime: (ClosedRange) -> Void 21 | let onSetAudio: (Audio) -> Void 22 | private let frameWight: CGFloat = 55 23 | 24 | private var calcWight: CGFloat{ 25 | frameWight * CGFloat(viewState.countImages) + 10 26 | } 27 | var body: some View { 28 | ZStack{ 29 | if !video.thumbnailsImages.isEmpty{ 30 | TimelineSlider(bounds: video.rangeDuration, disableOffset: isActiveTextRangeSlider, value: $currentTime, frameWight: calcWight) { 31 | VStack(alignment: .leading, spacing: 5) { 32 | ZStack { 33 | tubneilsImages(video.thumbnailsImages) 34 | textRangeTimeLayer 35 | } 36 | audioLayerSection 37 | } 38 | } actionView: { 39 | recordButton 40 | } 41 | onChange: { 42 | onChangeTimeValue() 43 | } 44 | } 45 | } 46 | .frame(height: viewState.height) 47 | .onChange(of: textTimeInterval.lowerBound) { newValue in 48 | isActiveTextRangeSlider = true 49 | currentTime = newValue 50 | onChangeTimeValue() 51 | onChangeTextTime(textTimeInterval) 52 | } 53 | .onChange(of: textTimeInterval.upperBound) { newValue in 54 | isActiveTextRangeSlider = true 55 | currentTime = newValue 56 | onChangeTimeValue() 57 | onChangeTextTime(textTimeInterval) 58 | } 59 | .onChange(of: textInterval) { newValue in 60 | if let newValue{ 61 | textTimeInterval = newValue 62 | } 63 | } 64 | .onChange(of: viewState) { newValue in 65 | if newValue == .empty{ 66 | currentTime = 0 67 | onChangeTimeValue() 68 | } 69 | } 70 | } 71 | } 72 | 73 | struct TimeLineView_Previews: PreviewProvider { 74 | static var video: Video { 75 | var video = Video.mock 76 | video.thumbnailsImages = [.init(image: UIImage(systemName: "person")!)] 77 | return video 78 | } 79 | static var previews: some View { 80 | ZStack{ 81 | Color.secondary 82 | TimeLineView(recorderManager: AudioRecorderManager(), currentTime: .constant(0), isSelectedTrack: .constant(true), viewState: .audio, video: video, onChangeTimeValue: {}, onChangeTextTime: {_ in}, onSetAudio: {_ in}) 83 | } 84 | } 85 | } 86 | 87 | 88 | 89 | extension TimeLineView{ 90 | 91 | private func tubneilsImages(_ images: [ThumbnailImage]) -> some View{ 92 | let images = firstAndAverageImage(images) 93 | return HStack(spacing: 0){ 94 | ForEach(images) { image in 95 | if let image = image.image{ 96 | Image(uiImage: image) 97 | .resizable() 98 | .aspectRatio(contentMode: .fill) 99 | .frame(height: frameWight) 100 | .clipped() 101 | } 102 | } 103 | } 104 | .overlay { 105 | if viewState == .audio{ 106 | if isSelectedTrack{ 107 | RoundedRectangle(cornerRadius: 5) 108 | .strokeBorder(lineWidth: 2) 109 | .foregroundColor(.white) 110 | } 111 | HStack(spacing: 1){ 112 | if video.volume > 0{ 113 | Image(systemName: "speaker.wave.2.fill") 114 | Text(verbatim: String(Int(video.volume * 100))) 115 | }else{ 116 | Image(systemName: "speaker.slash.fill") 117 | } 118 | } 119 | .font(.system(size: 9)) 120 | .foregroundColor(.white) 121 | .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .bottomLeading) 122 | .padding(5) 123 | } 124 | } 125 | .onTapGesture { 126 | if viewState == .audio, !isSelectedTrack{ 127 | isSelectedTrack.toggle() 128 | currentTime = 0 129 | onChangeTimeValue() 130 | } 131 | } 132 | } 133 | 134 | private func firstAndAverageImage(_ images: [ThumbnailImage]) -> [ThumbnailImage]{ 135 | guard let first = images.first else {return []} 136 | 137 | var newArray = [first] 138 | 139 | if viewState == .audio || viewState == .text{ 140 | let averageIndex = Int(images.count / 2) 141 | newArray.append(images[averageIndex]) 142 | } 143 | return newArray 144 | } 
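// Note: the timeline shows a reduced thumbnail strip on purpose: only the first frame by default,
// plus the middle frame when the audio or text track is visible (see TimeLineViewState.countImages).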
145 | 146 | private var textRangeTimeLayer: some View{ 147 | Group{ 148 | if let textInterval, viewState == .text{ 149 | RangedSliderView(value: $textTimeInterval, bounds: 0...video.originalDuration, onEndChange: { 150 | isActiveTextRangeSlider = false 151 | }) { 152 | Rectangle().blendMode(.destinationOut) 153 | } 154 | .frame(width: calcWight) 155 | .onAppear{ 156 | textTimeInterval = textInterval 157 | } 158 | .onDisappear{ 159 | isActiveTextRangeSlider = false 160 | } 161 | } 162 | } 163 | } 164 | 165 | private var recordButton: some View{ 166 | Group{ 167 | if viewState == .audio{ 168 | RecorderButtonView(video: video, recorderManager: recorderManager, onRecorded: onSetAudio) { time in 169 | currentTime = time 170 | onChangeTimeValue() 171 | } 172 | .vBottom() 173 | .padding(.bottom, viewState.height / 6) 174 | }else{ 175 | Rectangle() 176 | .opacity(0) 177 | } 178 | } 179 | } 180 | 181 | private var audioLayerSection: some View{ 182 | Group{ 183 | if viewState == .audio{ 184 | AudioButtonView( 185 | video: video, 186 | isSelectedTrack: $isSelectedTrack, 187 | recorderManager: recorderManager) 188 | } 189 | } 190 | } 191 | } 192 | 193 | enum TimeLineViewState: Int{ 194 | case text, audio, empty 195 | 196 | var wight: CGFloat{ 197 | switch self { 198 | case .text, .audio: return 40 199 | case .empty: return 10 200 | } 201 | } 202 | 203 | var height: CGFloat{ 204 | switch self { 205 | case .audio: return 110 206 | case .empty, .text: return 60 207 | } 208 | } 209 | var countImages: Int{ 210 | switch self { 211 | case .audio, .text: return 2 212 | case .empty: return 1 213 | } 214 | } 215 | } 216 | 217 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/EditorView/VideoExporterBottomSheetView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // VideoExporterBottomSheetView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 24.04.2023. 
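// Bottom sheet for picking an export quality and then saving the rendered video to the photo library or sharing it.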
6 | // 7 | 8 | import SwiftUI 9 | 10 | struct VideoExporterBottomSheetView: View { 11 | @Binding var isPresented: Bool 12 | @StateObject private var viewModel: ExporterViewModel 13 | 14 | init(isPresented: Binding, video: Video) { 15 | self._isPresented = isPresented 16 | self._viewModel = StateObject(wrappedValue: ExporterViewModel(video: video)) 17 | } 18 | var body: some View { 19 | SheetView(isPresented: $isPresented, bgOpacity: 0.1) { 20 | VStack(alignment: .leading){ 21 | 22 | switch viewModel.renderState{ 23 | case .unknown: 24 | list 25 | case .failed: 26 | Text("Failed") 27 | case .loading, .loaded: 28 | loadingView 29 | case .saved: 30 | saveView 31 | } 32 | } 33 | .hCenter() 34 | .frame(height: getRect().height / 2.8) 35 | } 36 | .ignoresSafeArea() 37 | .alert("Save video", isPresented: $viewModel.showAlert) {} 38 | .disabled(viewModel.renderState == .loading) 39 | .animation(.easeInOut, value: viewModel.renderState) 40 | } 41 | } 42 | 43 | struct VideoQualityPopapView2_Previews: PreviewProvider { 44 | static var previews: some View { 45 | ZStack(alignment: .bottom){ 46 | Color.secondary.opacity(0.5) 47 | VideoExporterBottomSheetView(isPresented: .constant(true), video: Video.mock) 48 | } 49 | } 50 | } 51 | 52 | extension VideoExporterBottomSheetView{ 53 | 54 | 55 | private var list: some View{ 56 | Group{ 57 | qualityListSection 58 | 59 | HStack { 60 | saveButton 61 | shareButton 62 | } 63 | .padding(.top, 10) 64 | } 65 | } 66 | 67 | private var loadingView: some View{ 68 | VStack(spacing: 30){ 69 | ProgressView() 70 | .scaleEffect(2) 71 | Text(viewModel.progressTimer.formatted()) 72 | Text("Video export in progress") 73 | .font(.headline) 74 | Text("Do not close the app or lock the screen") 75 | .font(.subheadline) 76 | .foregroundColor(.secondary) 77 | } 78 | } 79 | 80 | 81 | private var saveView: some View{ 82 | VStack(spacing: 30){ 83 | Image(systemName: "checkmark.circle") 84 | .font(.system(size: 40, weight: .light)) 85 | Text("Video saved") 86 | .font(.title2.bold()) 87 | } 88 | .onAppear{ 89 | DispatchQueue.main.asyncAfter(deadline: .now() + 1.5){ 90 | viewModel.renderState = .unknown 91 | } 92 | } 93 | } 94 | 95 | private var qualityListSection: some View{ 96 | ForEach(VideoQuality.allCases.reversed(), id: \.self) { type in 97 | 98 | HStack{ 99 | VStack(alignment: .leading) { 100 | Text(type.title) 101 | .font(.headline) 102 | Text(type.subtitle) 103 | .font(.subheadline) 104 | .foregroundColor(.secondary) 105 | } 106 | Spacer() 107 | if let value = type.calculateVideoSize(duration: viewModel.video.totalDuration){ 108 | Text(String(format: "%.1fMb", value)) 109 | } 110 | } 111 | .padding(10) 112 | .hLeading() 113 | .background{ 114 | if viewModel.selectedQuality == type{ 115 | RoundedRectangle(cornerRadius: 10) 116 | .fill( Color(.systemGray5)) 117 | } 118 | } 119 | .contentShape(Rectangle()) 120 | .onTapGesture { 121 | viewModel.selectedQuality = type 122 | } 123 | } 124 | } 125 | 126 | 127 | private var saveButton: some View{ 128 | Button { 129 | mainAction(.save) 130 | } label: { 131 | buttonLabel("Save", icon: "square.and.arrow.down") 132 | } 133 | .hCenter() 134 | } 135 | 136 | private var shareButton: some View{ 137 | Button { 138 | mainAction(.share) 139 | } label: { 140 | buttonLabel("Share", icon: "square.and.arrow.up") 141 | } 142 | .hCenter() 143 | } 144 | 145 | private func buttonLabel(_ label: String, icon: String) -> some View{ 146 | 147 | VStack{ 148 | Image(systemName: icon) 149 | .imageScale(.large) 150 | .padding(10) 151 | 
.background(Color(.systemGray), in: Circle()) 152 | Text(label) 153 | } 154 | .foregroundColor(.white) 155 | } 156 | 157 | 158 | private func mainAction(_ action: ExporterViewModel.ActionEnum){ 159 | Task{ 160 | await viewModel.action(action) 161 | } 162 | } 163 | 164 | } 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | extension UIViewController { 173 | 174 | func presentInKeyWindow(animated: Bool = true, completion: (() -> Void)? = nil) { 175 | UIApplication.shared.windows.last { $0.isKeyWindow }?.rootViewController? 176 | .present(self, animated: animated, completion: completion) 177 | } 178 | } 179 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/RootView/RootView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // RootView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | import PhotosUI 10 | 11 | struct RootView: View { 12 | @ObservedObject var rootVM: RootViewModel 13 | @State private var item: PhotosPickerItem? 14 | @State private var selectedVideoURL: URL? 15 | @State private var showLoader: Bool = false 16 | @State private var showEditor: Bool = false 17 | let columns = [ 18 | GridItem(.adaptive(minimum: 150)), 19 | GridItem(.adaptive(minimum: 150)), 20 | ] 21 | var body: some View { 22 | NavigationStack { 23 | ZStack { 24 | ScrollView(.vertical, showsIndicators: false) { 25 | VStack(alignment: .leading) { 26 | Text("My projects") 27 | .font(.headline) 28 | LazyVGrid(columns: columns, alignment: .center, spacing: 10) { 29 | newProjectButton 30 | 31 | ForEach(rootVM.projects) { project in 32 | 33 | NavigationLink { 34 | MainEditorView(project: project) 35 | } label: { 36 | cellView(project) 37 | } 38 | } 39 | } 40 | } 41 | .padding() 42 | } 43 | } 44 | .navigationDestination(isPresented: $showEditor){ 45 | MainEditorView(selectedVideoURl: selectedVideoURL) 46 | } 47 | .toolbar { 48 | ToolbarItem(placement: .navigationBarLeading) { 49 | Text("Video editor") 50 | .font(.title2.bold()) 51 | } 52 | } 53 | .onChange(of: item) { newItem in 54 | loadPhotosItem(newItem) 55 | } 56 | .onAppear{ 57 | rootVM.fetch() 58 | } 59 | .overlay { 60 | if showLoader{ 61 | Color.secondary.opacity(0.2).ignoresSafeArea() 62 | VStack(spacing: 10){ 63 | Text("Loading video") 64 | ProgressView() 65 | } 66 | .padding() 67 | .frame(height: 100) 68 | .background(Color(.systemGray6), in: RoundedRectangle(cornerRadius: 12)) 69 | } 70 | } 71 | } 72 | } 73 | } 74 | 75 | struct RootView_Previews2: PreviewProvider { 76 | static var previews: some View { 77 | RootView(rootVM: RootViewModel(mainContext: dev.viewContext)) 78 | } 79 | } 80 | 81 | extension RootView{ 82 | 83 | 84 | private var newProjectButton: some View{ 85 | 86 | PhotosPicker(selection: $item, matching: .videos) { 87 | VStack(spacing: 10) { 88 | Image(systemName: "plus") 89 | Text("New project") 90 | } 91 | .hCenter() 92 | .frame(height: 150) 93 | .background(Color(.systemGray6), in: RoundedRectangle(cornerRadius: 5)) 94 | .foregroundColor(.white) 95 | } 96 | } 97 | 98 | private func cellView(_ project: ProjectEntity) -> some View{ 99 | ZStack { 100 | Color.white 101 | Image(uiImage: project.uiImage) 102 | .resizable() 103 | .aspectRatio(contentMode: .fill) 104 | LinearGradient(colors: [.black.opacity(0.35), .black.opacity(0.2), .black.opacity(0.1)], startPoint: .bottom, endPoint: .top) 105 | } 106 | .hCenter() 107 | .frame(height: 150) 108 | .cornerRadius(5) 109 | .clipped() 
110 | .overlay { 111 | VStack{ 112 | Button { 113 | rootVM.removeProject(project) 114 | } label: { 115 | Image(systemName: "trash.fill") 116 | .foregroundColor(.white) 117 | .shadow(color: .black.opacity(0.3), radius: 5) 118 | } 119 | .hTrailing() 120 | Spacer() 121 | Text(project.createAt?.formatted(date: .abbreviated, time: .omitted) ?? "") 122 | .foregroundColor(.white) 123 | .hLeading() 124 | } 125 | .font(.footnote.weight(.medium)) 126 | .padding(10) 127 | } 128 | } 129 | 130 | 131 | private func loadPhotosItem(_ newItem: PhotosPickerItem?){ 132 | Task { 133 | self.showLoader = true 134 | if let video = try await newItem?.loadTransferable(type: VideoItem.self) { 135 | selectedVideoURL = video.url 136 | try await Task.sleep(for: .milliseconds(50)) 137 | self.showLoader = false 138 | self.showEditor.toggle() 139 | 140 | } else { 141 | print("Failed load video") 142 | self.showLoader = false 143 | } 144 | } 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Audio/AudioSheetView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AudioSheetView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 04.05.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct AudioSheetView: View { 11 | @State private var videoVolume: Float = 1.0 12 | @State private var audioVolume: Float = 1.0 13 | @ObservedObject var videoPlayer: VideoPlayerManager 14 | @ObservedObject var editorVM: EditorViewModel 15 | 16 | var value: Binding{ 17 | editorVM.isSelectVideo ? $videoVolume : $audioVolume 18 | } 19 | 20 | var body: some View { 21 | HStack { 22 | Image(systemName: value.wrappedValue > 0 ? "speaker.wave.2.fill" : "speaker.slash.fill") 23 | Slider(value: value, in: 0...1) { change in 24 | onChange() 25 | } 26 | .tint(.white) 27 | Text("\(Int(value.wrappedValue * 100))") 28 | } 29 | .font(.caption) 30 | .onAppear{ 31 | setValue() 32 | } 33 | } 34 | } 35 | 36 | struct AudioSheetView_Previews: PreviewProvider { 37 | static var previews: some View { 38 | AudioSheetView(videoPlayer: VideoPlayerManager(), editorVM: EditorViewModel()) 39 | } 40 | } 41 | 42 | extension AudioSheetView{ 43 | 44 | 45 | private func setValue(){ 46 | guard let video = editorVM.currentVideo else {return} 47 | if editorVM.isSelectVideo{ 48 | videoVolume = video.volume 49 | }else if let audio = video.audio{ 50 | audioVolume = audio.volume 51 | } 52 | } 53 | 54 | private func onChange(){ 55 | if editorVM.isSelectVideo{ 56 | editorVM.currentVideo?.setVolume(videoVolume) 57 | }else { 58 | editorVM.currentVideo?.audio?.setVolume(audioVolume) 59 | } 60 | videoPlayer.setVolume(editorVM.isSelectVideo, value: value.wrappedValue) 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Corrections/CorrectionsToolView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CorrectionsToolView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 26.04.2023. 
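// Brightness, contrast and saturation sliders; the edited ColorCorrection is reported back through onChange when a slider gesture ends.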
6 | // 7 | 8 | import SwiftUI 9 | 10 | struct CorrectionsToolView: View { 11 | @State var currentTab: CorrectionType = .brightness 12 | @Binding var correction: ColorCorrection 13 | let onChange: (ColorCorrection) -> Void 14 | var body: some View { 15 | VStack(spacing: 20){ 16 | 17 | HStack{ 18 | ForEach(CorrectionType.allCases, id: \.self) { type in 19 | Text(type.rawValue) 20 | .font(.subheadline) 21 | .hCenter() 22 | .foregroundColor(currentTab == type ? .white : .secondary) 23 | .onTapGesture { 24 | currentTab = type 25 | } 26 | } 27 | } 28 | slider 29 | } 30 | } 31 | } 32 | 33 | struct CorrectionsToolView_Previews: PreviewProvider { 34 | static var previews: some View { 35 | CorrectionsToolView(correction: .constant(Video.mock.colorCorrection), onChange: {_ in}) 36 | } 37 | } 38 | 39 | 40 | extension CorrectionsToolView{ 41 | 42 | 43 | 44 | private var slider: some View{ 45 | 46 | let value = getValue(currentTab) 47 | 48 | return VStack { 49 | Text(String(format: "%.1f", value.wrappedValue)) 50 | .font(.subheadline) 51 | Slider(value: value, in: -1...1) { change in 52 | if !change{ 53 | onChange(correction) 54 | } 55 | } 56 | .tint(Color.white) 57 | } 58 | } 59 | 60 | func getValue(_ type: CorrectionType) -> Binding{ 61 | switch type { 62 | case .brightness: 63 | return $correction.brightness 64 | case .contrast: 65 | return $correction.contrast 66 | case .saturation: 67 | return $correction.saturation 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Crop/CropSheetView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CropSheetView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct CropSheetView: View { 11 | @State var rotateValue: Double = 0 12 | @ObservedObject var editorVM: EditorViewModel 13 | @State private var currentTab: Tab = .rotate 14 | var body: some View { 15 | VStack(spacing: 40){ 16 | tabButtons 17 | Group{ 18 | switch currentTab{ 19 | case .format: 20 | EmptyView() 21 | case .rotate: 22 | rotateSection 23 | } 24 | } 25 | } 26 | .onAppear{ 27 | rotateValue = editorVM.currentVideo?.rotation ?? 0 28 | } 29 | .onChange(of: editorVM.currentVideo?.rotation) { newValue in 30 | rotateValue = newValue ?? 0 31 | } 32 | } 33 | } 34 | 35 | struct CropSheetView_Previews: PreviewProvider { 36 | static var previews: some View { 37 | CropSheetView(editorVM: EditorViewModel()) 38 | } 39 | } 40 | 41 | extension CropSheetView{ 42 | 43 | 44 | 45 | private var rotateSection: some View{ 46 | 47 | 48 | HStack(spacing: 30){ 49 | 50 | CustomSlider(value: $rotateValue, 51 | in: 0...360, 52 | step: 90, 53 | onEditingChanged: { started in 54 | if !started{ 55 | editorVM.currentVideo?.rotation = rotateValue 56 | editorVM.setTools() 57 | } 58 | }, track: { 59 | Capsule() 60 | .foregroundColor(.secondary) 61 | .frame(width: 200, height: 5) 62 | }, thumb: { 63 | Circle() 64 | .foregroundColor(.white) 65 | .shadow(radius: 20 / 1) 66 | }, thumbSize: CGSize(width: 20, height: 20)) 67 | 68 | 69 | Button { 70 | editorVM.rotate() 71 | } label: { 72 | Image(systemName: "arrow.triangle.2.circlepath") 73 | } 74 | .buttonStyle(.plain) 75 | 76 | Button { 77 | editorVM.toggleMirror() 78 | } label: { 79 | Image(systemName: "arrow.left.and.right.righttriangle.left.righttriangle.right.fill") 80 | .foregroundColor((editorVM.currentVideo?.isMirror ?? false) ? 
.secondary : .white) 81 | } 82 | .buttonStyle(.plain) 83 | } 84 | } 85 | 86 | 87 | private var tabButtons: some View{ 88 | HStack{ 89 | ForEach(Tab.allCases, id: \.self){tab in 90 | VStack(spacing: 0) { 91 | Text(tab.rawValue.capitalized) 92 | .font(.subheadline) 93 | .padding(.bottom, 5) 94 | if currentTab == tab{ 95 | Rectangle() 96 | .frame(height: 1) 97 | } 98 | } 99 | .foregroundColor(tab == currentTab ? .white : .secondary) 100 | .hCenter() 101 | .onTapGesture { 102 | currentTab = tab 103 | } 104 | } 105 | } 106 | } 107 | 108 | enum Tab: String, CaseIterable{ 109 | case format, rotate 110 | } 111 | 112 | } 113 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Crop/CropView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CropView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct CropView<T: View>: View{ 11 | @State private var position: CGPoint = .zero 12 | @State var size: CGSize = .zero 13 | @State var clipped: Bool = false 14 | let originalSize: CGSize 15 | var rotation: Double? 16 | var isMirror: Bool 17 | var isActiveCrop: Bool 18 | var setFrameScale: Bool = false 19 | var frameScale: CGFloat = 1 20 | 21 | @ViewBuilder 22 | var frameView: () -> T 23 | private let lineWidth: CGFloat = 2 24 | 25 | var body: some View { 26 | ZStack{ 27 | // VStack { 28 | // Text("\(currentPosition.width)") 29 | // Text("\(currentPosition.height)") 30 | // } 31 | frameView() 32 | 33 | if isActiveCrop{ 34 | ZStack{ 35 | Color.black.opacity(0.3) 36 | Rectangle() 37 | .fill(Color.black.opacity(0.1)) 38 | .frame(width: size.width , height: size.height) 39 | .overlay(Rectangle().stroke(Color.white, lineWidth: lineWidth)) 40 | .position(position) 41 | .gesture( 42 | DragGesture() 43 | .onChanged { value in 44 | 45 | let sizeWithBorder: CGSize = .init(width: size.width + lineWidth, height: size.height + lineWidth) 46 | 47 | // limit movement to min and max value 48 | let limitedX = max(min(value.location.x, originalSize.width - sizeWithBorder.width / 2), sizeWithBorder.width / 2) 49 | let limitedY = max(min(value.location.y, originalSize.height - (sizeWithBorder.height) / 2), sizeWithBorder.height / 2) 50 | 51 | self.position = CGPoint(x: limitedX, 52 | y: limitedY) 53 | } 54 | ) 55 | .onTapGesture { 56 | clipped.toggle() 57 | } 58 | } 59 | .onAppear{ 60 | position = .init(x: originalSize.width / 2, y: originalSize.height / 2) 61 | size = .init(width: originalSize.width - 100, height: originalSize.height - 100) 62 | } 63 | } 64 | } 65 | .frame(width: originalSize.width, height: originalSize.height) 66 | .border(isActiveCrop ? Color.white : .clear) 67 | // .clipShape( 68 | // CropFrame(isActive: clipped, currentPosition: position, size: size) 69 | // ) 70 | //.scaleEffect(scaleEffect) 71 | .rotationEffect(.degrees(rotation ?? 0)) 72 | .rotation3DEffect(.degrees(isMirror ?
180 : 0), axis: (x: 0, y: 1, z: 0)) 73 | } 74 | } 75 | 76 | struct CropView_Previews: PreviewProvider { 77 | @State static var size: CGSize = .init(width: 250, height: 450) 78 | static let originalSize: CGSize = .init(width: 300, height: 600) 79 | static var previews: some View { 80 | GeometryReader { proxy in 81 | CropView(originalSize: originalSize, rotation: 0, isMirror: false, isActiveCrop: true){ 82 | //CropImage(originalSize: originalSize, frameSize: $size){ 83 | 84 | Rectangle() 85 | .fill(Color.secondary) 86 | //} 87 | 88 | } 89 | .allFrame() 90 | .frame(height: proxy.size.height / 1.45, alignment: .center) 91 | } 92 | } 93 | } 94 | 95 | 96 | 97 | struct CropFrame: Shape { 98 | let isActive: Bool 99 | let currentPosition: CGSize 100 | let size: CGSize 101 | func path(in rect: CGRect) -> Path { 102 | guard isActive else { return Path(rect) } 103 | 104 | let size = CGSize(width: size.width, height: size.height) 105 | let origin = CGPoint(x: rect.midX - size.width / 2, y: rect.midY - size.height / 2) 106 | return Path(CGRect(origin: origin, size: size).integral) 107 | } 108 | } 109 | 110 | struct CropImage: View{ 111 | let originalSize: CGSize 112 | @Binding var frameSize: CGSize 113 | @State private var currentPosition: CGSize = .zero 114 | @State private var newPosition: CGSize = .zero 115 | @State private var clipped = false 116 | 117 | @ViewBuilder 118 | var frameView: () -> T 119 | 120 | var body: some View { 121 | VStack { 122 | ZStack { 123 | frameView() 124 | .offset(x: self.currentPosition.width, y: self.currentPosition.height) 125 | Rectangle() 126 | .fill(Color.black.opacity(0.3)) 127 | .frame(width: frameSize.width , height: frameSize.height) 128 | .overlay(Rectangle().stroke(Color.white, lineWidth: 2)) 129 | } 130 | .clipShape( 131 | CropFrame(isActive: clipped, currentPosition: currentPosition, size: frameSize) 132 | ) 133 | .onChange(of: frameSize) { newValue in 134 | currentPosition = .zero 135 | newPosition = .zero 136 | } 137 | // .gesture(DragGesture() 138 | // .onChanged { value in 139 | // 140 | // self.currentPosition = CGSize(width: value.translation.width + self.newPosition.width, height: value.translation.height + self.newPosition.height) 141 | // } 142 | // .onEnded { value in 143 | // self.currentPosition = CGSize(width: value.translation.width + self.newPosition.width, height: value.translation.height + self.newPosition.height) 144 | // 145 | // self.newPosition = self.currentPosition 146 | // }) 147 | 148 | 149 | Button (action : { self.clipped.toggle() }) { 150 | Text("Crop Image") 151 | .padding(.all, 10) 152 | .background(Color.blue) 153 | .foregroundColor(.white) 154 | .shadow(color: .gray, radius: 1) 155 | .padding(.top, 50) 156 | } 157 | } 158 | } 159 | } 160 | 161 | extension Comparable{ 162 | func bounded(lowerBound: Self, uppderBound: Self) -> Self{ 163 | max(lowerBound, min(self, uppderBound)) 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/CropperView/CropperRatio.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CropperRatio.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 20.04.2023. 
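// Preset aspect ratios for the crop tool (1:1, 3:2, 4:3, 16:9, 18:6).
// Illustrative use (an assumption, not wired up in this file): a crop frame of width w
// would get height w * ratio.height / ratio.width, e.g. w * 9 / 16 for r_16_9.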
6 | // 7 | 8 | import Foundation 9 | import CoreGraphics 10 | 11 | struct CropperRatio { 12 | 13 | let width: CGFloat 14 | let height: CGFloat 15 | 16 | init(width: CGFloat, height: CGFloat) { 17 | self.width = width 18 | self.height = height 19 | } 20 | 21 | static var r_1_1: Self { 22 | return .init(width: 1, height: 1) 23 | } 24 | 25 | static var r_3_2: Self { 26 | return .init(width: 3, height: 2) 27 | } 28 | 29 | static var r_4_3: Self { 30 | return .init(width: 4, height: 3) 31 | } 32 | 33 | static var r_16_9: Self { 34 | return .init(width: 16, height: 9) 35 | } 36 | 37 | static var r_18_6: Self { 38 | return .init(width: 18, height: 6) 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/CropperView/TestCroppedView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TestCroppedView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 21.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct TestCroppedView: View { 11 | @State private var position = CGPoint(x: 100, y: 100) 12 | private var rectSize: CGFloat = 350 13 | let size: CGSize = .init(width: 200, height: 400) 14 | let frameSize: CGSize = .init(width: 350, height: 700) 15 | var body: some View { 16 | VStack { 17 | Text("Current position = (x: \(Int(position.x)), y: \(Int(position.y)))") 18 | 19 | Rectangle() 20 | .fill(.gray) 21 | .frame(width: frameSize.width, height: frameSize.height) 22 | .overlay( 23 | Rectangle() 24 | .fill(.clear) 25 | .border(.blue, width: 2.0) 26 | .contentShape(Rectangle()) 27 | .frame(width: size.width, height: size.height) 28 | .position(position) 29 | .gesture( 30 | DragGesture() 31 | .onChanged { value in 32 | // limit movement to min and max value 33 | let limitedX = max(min(value.location.x, frameSize.width - size.width / 2), size.width / 2) 34 | let limitedY = max(min(value.location.y, frameSize.height - size.height / 2), size.height / 2) 35 | 36 | self.position = CGPoint(x: limitedX, 37 | y: limitedY) 38 | } 39 | ) 40 | ) 41 | } 42 | } 43 | } 44 | 45 | struct TestCroppedView_Previews: PreviewProvider { 46 | static var previews: some View { 47 | TestCroppedView() 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/CutView/ThumbnailsSliderView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ThumbnailsSliderView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 17.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | import AVKit 10 | 11 | struct ThumbnailsSliderView: View { 12 | @State var rangeDuration: ClosedRange<Double> = 0...1 13 | @Binding var curretTime: Double 14 | @Binding var video: Video? 15 | var isChangeState: Bool?
16 | let onChangeTimeValue: () -> Void 17 | 18 | 19 | private var totalDuration: Double{ 20 | rangeDuration.upperBound - rangeDuration.lowerBound 21 | } 22 | 23 | var body: some View { 24 | VStack(spacing: 6) { 25 | Text(totalDuration.formatterTimeString()) 26 | .foregroundColor(.white) 27 | .font(.subheadline) 28 | GeometryReader { proxy in 29 | ZStack{ 30 | thumbnailsImagesSection(proxy) 31 | .border(Color.red, width: 2) 32 | if let video{ 33 | RangedSliderView(value: $rangeDuration, bounds: 0...video.originalDuration, onEndChange: { setOnChangeTrim(false)}) { 34 | Rectangle().blendMode(.destinationOut) 35 | } 36 | .onChange(of: self.video?.rangeDuration.upperBound) { upperBound in 37 | if let upperBound{ 38 | curretTime = Double(upperBound) 39 | onChangeTimeValue() 40 | setOnChangeTrim(true) 41 | } 42 | } 43 | .onChange(of: self.video?.rangeDuration.lowerBound) { lowerBound in 44 | if let lowerBound{ 45 | curretTime = Double(lowerBound) 46 | onChangeTimeValue() 47 | setOnChangeTrim(true) 48 | } 49 | } 50 | .onChange(of: rangeDuration) { newValue in 51 | self.video?.rangeDuration = newValue 52 | } 53 | } 54 | } 55 | .frame(width: proxy.size.width, height: proxy.size.height) 56 | .onAppear{ 57 | setVideoRange() 58 | } 59 | } 60 | .frame(width: getRect().width - 64, height: 70) 61 | } 62 | .onChange(of: isChangeState) { isChange in 63 | if !(isChange ?? true){ 64 | setVideoRange() 65 | } 66 | } 67 | } 68 | } 69 | 70 | struct ThumbnailsSliderView_Previews: PreviewProvider { 71 | static var previews: some View { 72 | ThumbnailsSliderView(curretTime: .constant(0), video: .constant(Video.mock), isChangeState: nil, onChangeTimeValue: {}) 73 | } 74 | } 75 | 76 | 77 | extension ThumbnailsSliderView{ 78 | 79 | private func setVideoRange(){ 80 | if let video{ 81 | rangeDuration = video.rangeDuration 82 | } 83 | } 84 | 85 | @ViewBuilder 86 | private func thumbnailsImagesSection(_ proxy: GeometryProxy) -> some View{ 87 | if let video{ 88 | HStack(spacing: 0){ 89 | ForEach(video.thumbnailsImages) { trimData in 90 | if let image = trimData.image{ 91 | Image(uiImage: image) 92 | .resizable() 93 | .aspectRatio(contentMode: .fill) 94 | .frame(width: proxy.size.width / CGFloat(video.thumbnailsImages.count), height: proxy.size.height - 5) 95 | .clipped() 96 | } 97 | } 98 | } 99 | } 100 | } 101 | 102 | private func setOnChangeTrim(_ isChange: Bool){ 103 | if !isChange{ 104 | curretTime = video?.rangeDuration.upperBound ?? 0 105 | onChangeTimeValue() 106 | } 107 | } 108 | } 109 | 110 | 111 | 112 | 113 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Filters/FiltersView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FiltersView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 26.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | 11 | struct FiltersView: View { 12 | @State var selectedFilterName: String? = nil 13 | @ObservedObject var viewModel: FiltersViewModel 14 | let onChangeFilter: (String?) 
-> Void 15 | var body: some View { 16 | ScrollView(.horizontal, showsIndicators: false) { 17 | LazyHStack(alignment: .center, spacing: 5) { 18 | resetButton 19 | ForEach(viewModel.images.sorted(by: {$0.filter.name < $1.filter.name})) { filterImage in 20 | imageView(filterImage.image, isSelected: selectedFilterName == filterImage.filter.name) 21 | .onTapGesture { 22 | selectedFilterName = filterImage.filter.name 23 | } 24 | } 25 | } 26 | .frame(height: 60) 27 | .padding(.horizontal) 28 | } 29 | .onChange(of: selectedFilterName) { newValue in 30 | onChangeFilter(newValue) 31 | } 32 | .padding(.horizontal, -16) 33 | } 34 | } 35 | 36 | struct FiltersView_Previews: PreviewProvider { 37 | @StateObject static var vm = FiltersViewModel() 38 | static var previews: some View { 39 | FiltersView(selectedFilterName: nil, viewModel: vm, onChangeFilter: {_ in}) 40 | .padding() 41 | .onAppear{ 42 | vm.loadFilters(for: UIImage(named: "simpleImage")!) 43 | } 44 | } 45 | } 46 | 47 | extension FiltersView{ 48 | private func imageView(_ uiImage: UIImage, isSelected: Bool) -> some View{ 49 | Image(uiImage: uiImage) 50 | .resizable() 51 | .aspectRatio(contentMode: .fill) 52 | .frame(width: 55, height: 55) 53 | .clipped() 54 | .border(.white, width: isSelected ? 2 : 0) 55 | } 56 | 57 | 58 | 59 | private var resetButton: some View{ 60 | Group{ 61 | if let image = viewModel.image{ 62 | imageView(image, isSelected: selectedFilterName == nil) 63 | .onTapGesture { 64 | selectedFilterName = nil 65 | } 66 | .padding(.trailing, 30) 67 | } 68 | } 69 | } 70 | } 71 | 72 | 73 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Frames/FramesToolView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FramesToolView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 27.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct FramesToolView: View { 11 | @Binding var selectedColor: Color 12 | @Binding var scaleValue: Double 13 | let colors: [Color] = [.white, .black, .blue, .brown, .cyan, .green, .orange] 14 | let onChange: () -> Void 15 | var body: some View { 16 | VStack(spacing: 20){ 17 | ScrollView(.horizontal){ 18 | HStack{ 19 | ForEach(colors, id: \.self) { color in 20 | color 21 | .frame(width: 30, height: 30) 22 | .clipShape(Circle()) 23 | .onTapGesture { 24 | selectedColor = color 25 | onChange() 26 | } 27 | } 28 | } 29 | } 30 | Slider(value: $scaleValue, in: 0...0.5) { change in 31 | if !change{ 32 | onChange() 33 | } 34 | } 35 | } 36 | } 37 | } 38 | 39 | struct FramesToolView_Previews: PreviewProvider { 40 | static var previews: some View { 41 | FramesToolView(selectedColor: .constant(.white), scaleValue: .constant(0.3)){} 42 | .frame(height: 300) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Speed/VideoSpeedSlider.swift: -------------------------------------------------------------------------------- 1 | // 2 | // VideoSpeedSlider.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 19.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct VideoSpeedSlider: View { 11 | @State var value: Double = 1 12 | var isChangeState: Bool? 
13 | let onEditingChanged: (Float) -> Void 14 | private let rateRange = 0.1...8 15 | var body: some View { 16 | VStack { 17 | Text(String(format: "%.1fx", value)) 18 | CustomSlider(value: $value, 19 | in: rateRange, 20 | step: 0.2, 21 | onEditingChanged: { started in 22 | if !started{ 23 | onEditingChanged(Float(value)) 24 | } 25 | }, track: { 26 | Capsule() 27 | .foregroundColor(.secondary) 28 | .frame(width: 250, height: 5) 29 | }, thumb: { 30 | Circle() 31 | .foregroundColor(.white) 32 | .shadow(radius: 20 / 1) 33 | }, thumbSize: CGSize(width: 20, height: 20)) 34 | } 35 | .onChange(of: isChangeState) { isChange in 36 | if !(isChange ?? true){ 37 | value = 1 38 | } 39 | } 40 | } 41 | } 42 | 43 | struct VideoSpeedSlider_Previews: PreviewProvider { 44 | static var previews: some View { 45 | VideoSpeedSlider(isChangeState: false){_ in} 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Text/TextEditorView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TextEditorView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 02.05.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct TextEditorView: View{ 11 | @ObservedObject var viewModel: TextEditorViewModel 12 | @State private var textHeight: CGFloat = 100 13 | @State private var isFocused: Bool = true 14 | let onSave: ([TextBox]) -> Void 15 | var body: some View{ 16 | Color.black.opacity(0.35) 17 | .ignoresSafeArea() 18 | VStack{ 19 | Spacer() 20 | TextView(textBox: $viewModel.currentTextBox, isFirstResponder: $isFocused, minHeight: textHeight, calculatedHeight: $textHeight) 21 | .frame(maxHeight: textHeight) 22 | Spacer() 23 | 24 | Button { 25 | closeKeyboard() 26 | viewModel.saveTapped() 27 | onSave(viewModel.textBoxes) 28 | } label: { 29 | Text("Save") 30 | .padding(.horizontal, 20) 31 | .padding(.vertical, 12) 32 | .foregroundColor(.black) 33 | .background(Color.white, in: RoundedRectangle(cornerRadius: 20)) 34 | .opacity(viewModel.currentTextBox.text.isEmpty ? 
0.5 : 1) 35 | .disabled(viewModel.currentTextBox.text.isEmpty) 36 | } 37 | .hCenter() 38 | .overlay(alignment: .leading) { 39 | HStack { 40 | Button{ 41 | closeKeyboard() 42 | viewModel.cancelTextEditor() 43 | } label: { 44 | Image(systemName: "xmark") 45 | .padding(12) 46 | .foregroundColor(.white) 47 | .background(Color.secondary, in: Circle()) 48 | } 49 | 50 | Spacer() 51 | HStack(spacing: 20){ 52 | ColorPicker(selection: $viewModel.currentTextBox.fontColor, supportsOpacity: true) { 53 | }.labelsHidden() 54 | 55 | ColorPicker(selection: $viewModel.currentTextBox.bgColor, supportsOpacity: true) { 56 | }.labelsHidden() 57 | } 58 | } 59 | } 60 | } 61 | .padding(.bottom) 62 | .padding(.horizontal) 63 | } 64 | 65 | 66 | private func closeKeyboard(){ 67 | isFocused = false 68 | } 69 | } 70 | struct TextEditorView_Previews: PreviewProvider { 71 | static var previews: some View { 72 | TextEditorView(viewModel: TextEditorViewModel(), onSave: {_ in}) 73 | } 74 | } 75 | 76 | 77 | 78 | struct TextView: UIViewRepresentable { 79 | 80 | @Binding var isFirstResponder: Bool 81 | @Binding var textBox: TextBox 82 | 83 | var minHeight: CGFloat 84 | @Binding var calculatedHeight: CGFloat 85 | 86 | init(textBox: Binding, isFirstResponder: Binding, minHeight: CGFloat, calculatedHeight: Binding) { 87 | self._textBox = textBox 88 | self._isFirstResponder = isFirstResponder 89 | self.minHeight = minHeight 90 | self._calculatedHeight = calculatedHeight 91 | } 92 | 93 | func makeCoordinator() -> Coordinator { 94 | Coordinator(self) 95 | } 96 | 97 | func makeUIView(context: Context) -> UITextView { 98 | let textView = UITextView() 99 | textView.delegate = context.coordinator 100 | 101 | // Decrease priority of content resistance, so content would not push external layout set in SwiftUI 102 | textView.setContentCompressionResistancePriority(.defaultLow, for: .horizontal) 103 | textView.text = self.textBox.text 104 | textView.isScrollEnabled = true 105 | textView.isEditable = true 106 | textView.textAlignment = .center 107 | textView.isUserInteractionEnabled = true 108 | textView.backgroundColor = UIColor.clear 109 | 110 | return textView 111 | } 112 | 113 | func updateUIView(_ textView: UITextView, context: Context) { 114 | 115 | focused(textView) 116 | recalculateHeight(view: textView) 117 | setTextAttrs(textView) 118 | 119 | } 120 | 121 | private func setTextAttrs(_ textView: UITextView){ 122 | 123 | let attrStr = NSMutableAttributedString(string: textView.text) 124 | let range = NSRange(location: 0, length: attrStr.length) 125 | 126 | attrStr.addAttribute(NSAttributedString.Key.backgroundColor, value: UIColor(textBox.bgColor), range: range) 127 | attrStr.addAttribute(NSAttributedString.Key.font, value: UIFont.systemFont(ofSize: textBox.fontSize, weight: .medium), range: range) 128 | attrStr.addAttribute(NSAttributedString.Key.foregroundColor, value: UIColor(textBox.fontColor), range: range) 129 | 130 | textView.attributedText = attrStr 131 | textView.textAlignment = .center 132 | } 133 | 134 | private func recalculateHeight(view: UIView) { 135 | let newSize = view.sizeThatFits(CGSize(width: view.frame.size.width, height: CGFloat.greatestFiniteMagnitude)) 136 | if minHeight < newSize.height && $calculatedHeight.wrappedValue != newSize.height { 137 | DispatchQueue.main.async { 138 | self.$calculatedHeight.wrappedValue = newSize.height // !! 
must be called asynchronously 139 | } 140 | } else if minHeight >= newSize.height && $calculatedHeight.wrappedValue != minHeight { 141 | DispatchQueue.main.async { 142 | self.$calculatedHeight.wrappedValue = self.minHeight // !! must be called asynchronously 143 | } 144 | } 145 | } 146 | 147 | private func focused(_ textView: UITextView){ 148 | DispatchQueue.main.async { 149 | switch isFirstResponder { 150 | case true: textView.becomeFirstResponder() 151 | case false: textView.resignFirstResponder() 152 | } 153 | } 154 | } 155 | 156 | class Coordinator : NSObject, UITextViewDelegate { 157 | 158 | var parent: TextView 159 | 160 | init(_ uiTextView: TextView) { 161 | self.parent = uiTextView 162 | } 163 | 164 | func textViewDidChange(_ textView: UITextView) { 165 | if textView.markedTextRange == nil { 166 | parent.textBox.text = textView.text ?? String() 167 | parent.recalculateHeight(view: textView) 168 | } 169 | } 170 | 171 | // func textViewDidBeginEditing(_ textView: UITextView) { 172 | // parent.isFirstResponder = true 173 | // } 174 | } 175 | } 176 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Text/TextOverlayView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TextOverlayView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 01.05.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct TextOverlayView: View { 11 | var currentTime: Double 12 | @ObservedObject var viewModel: TextEditorViewModel 13 | var disabledMagnification: Bool = false 14 | var body: some View { 15 | ZStack{ 16 | if !disabledMagnification{ 17 | Color.secondary.opacity(0.001) 18 | .simultaneousGesture(MagnificationGesture() 19 | .onChanged({ value in 20 | if let box = viewModel.selectedTextBox{ 21 | let lastFontSize = viewModel.textBoxes[getIndex(box.id)].lastFontSize 22 | viewModel.textBoxes[getIndex(box.id)].fontSize = (value * 10) + lastFontSize 23 | } 24 | }).onEnded({ value in 25 | if let box = viewModel.selectedTextBox{ 26 | viewModel.textBoxes[getIndex(box.id)].lastFontSize = value * 10 27 | } 28 | })) 29 | } 30 | 31 | ForEach(viewModel.textBoxes) { textBox in 32 | let isSelected = viewModel.isSelected(textBox.id) 33 | 34 | if textBox.timeRange.contains(currentTime){ 35 | 36 | VStack(alignment: .leading, spacing: 2) { 37 | if isSelected{ 38 | textBoxButtons(textBox) 39 | } 40 | 41 | Text(createAttr(textBox)) 42 | .padding(.horizontal, 14) 43 | .padding(.vertical, 8) 44 | .overlay { 45 | if isSelected{ 46 | RoundedRectangle(cornerRadius: 6) 47 | .stroke(lineWidth: 1) 48 | .foregroundColor(.cyan) 49 | } 50 | } 51 | .onTapGesture { 52 | editOrSelectTextBox(textBox, isSelected) 53 | } 54 | 55 | } 56 | .offset(textBox.offset) 57 | .simultaneousGesture(DragGesture(minimumDistance: 1).onChanged({ value in 58 | guard isSelected else {return} 59 | let current = value.translation 60 | let lastOffset = textBox.lastOffset 61 | let newTranslation: CGSize = .init(width: current.width + lastOffset.width, height: current.height + lastOffset.height) 62 | 63 | DispatchQueue.main.async { 64 | viewModel.textBoxes[getIndex(textBox.id)].offset = newTranslation 65 | } 66 | 67 | }).onEnded({ value in 68 | guard isSelected else {return} 69 | DispatchQueue.main.async { 70 | viewModel.textBoxes[getIndex(textBox.id)].lastOffset = value.translation 71 | } 72 | })) 73 | } 74 | } 75 | } 76 | .allFrame() 77 | } 78 | 79 | private func createAttr(_ textBox: TextBox) -> AttributedString{ 80 | var 
result = AttributedString(textBox.text) 81 | result.font = .systemFont(ofSize: textBox.fontSize, weight: .medium) 82 | result.foregroundColor = UIColor(textBox.fontColor) 83 | result.backgroundColor = UIColor(textBox.bgColor) 84 | return result 85 | } 86 | } 87 | 88 | struct TextOverlayView_Previews: PreviewProvider { 89 | static var previews: some View { 90 | MainEditorView(selectedVideoURl: Video.mock.url) 91 | } 92 | } 93 | 94 | 95 | extension TextOverlayView{ 96 | 97 | private func textBoxButtons(_ textBox: TextBox) -> some View{ 98 | HStack(spacing: 10){ 99 | Button { 100 | viewModel.removeTextBox() 101 | } label: { 102 | Image(systemName: "xmark") 103 | .padding(5) 104 | .background(Color(.systemGray2), in: Circle()) 105 | } 106 | Button { 107 | viewModel.copy(textBox) 108 | } label: { 109 | Image(systemName: "doc.on.doc") 110 | .imageScale(.small) 111 | .padding(5) 112 | .background(Color(.systemGray2), in: Circle()) 113 | } 114 | } 115 | .foregroundColor(.white) 116 | } 117 | 118 | private func editOrSelectTextBox(_ textBox: TextBox, _ isSelected: Bool){ 119 | if isSelected{ 120 | viewModel.openTextEditor(isEdit: true, textBox) 121 | }else{ 122 | viewModel.selectTextBox(textBox) 123 | } 124 | } 125 | 126 | private func getIndex(_ id: UUID) -> Int{ 127 | let index = viewModel.textBoxes.firstIndex(where: {$0.id == id}) 128 | return index ?? 0 129 | } 130 | } 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/Text/TextToolsView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TextToolsView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 02.05.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct TextToolsView: View { 11 | var video: Video 12 | @ObservedObject var editor: TextEditorViewModel 13 | var body: some View { 14 | ScrollView(.horizontal, showsIndicators: false) { 15 | HStack(spacing: 15){ 16 | ForEach(editor.textBoxes) { box in 17 | cellView(box) 18 | } 19 | addTextButton 20 | } 21 | } 22 | .animation(.easeIn(duration: 0.2), value: editor.textBoxes) 23 | .onAppear{ 24 | editor.selectedTextBox = editor.textBoxes.first 25 | } 26 | .onDisappear{ 27 | editor.selectedTextBox = nil 28 | } 29 | } 30 | } 31 | 32 | struct TextToolsView_Previews: PreviewProvider { 33 | static var previews: some View { 34 | TextToolsView(video: Video.mock, editor: TextEditorViewModel()) 35 | } 36 | } 37 | 38 | extension TextToolsView{ 39 | 40 | private func cellView(_ textBox: TextBox) -> some View{ 41 | let isSelected = editor.isSelected(textBox.id) 42 | return ZStack{ 43 | RoundedRectangle(cornerRadius: 12) 44 | .fill(Color(isSelected ? 
.systemGray : .systemGray4)) 45 | Text(textBox.text) 46 | .lineLimit(1) 47 | .font(.caption) 48 | } 49 | .frame(width: 80, height: 80) 50 | .overlay(alignment: .topLeading) { 51 | if isSelected{ 52 | Button { 53 | editor.removeTextBox() 54 | } label: { 55 | Image(systemName: "xmark") 56 | .imageScale(.small) 57 | .foregroundColor(Color(.systemGray2)) 58 | .padding(5) 59 | .background(Color.black, in: Circle()) 60 | } 61 | .padding(5) 62 | } 63 | } 64 | .onTapGesture { 65 | if isSelected{ 66 | editor.openTextEditor(isEdit: true, textBox) 67 | }else{ 68 | editor.selectTextBox(textBox) 69 | } 70 | } 71 | } 72 | 73 | private var addTextButton: some View{ 74 | ZStack{ 75 | RoundedRectangle(cornerRadius: 12) 76 | .fill(Color(.systemGray4)) 77 | Text("+T") 78 | .font(.title2.weight(.light)) 79 | } 80 | .frame(width: 80, height: 80) 81 | .onTapGesture { 82 | editor.openTextEditor(isEdit: false, timeRange: video.rangeDuration) 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ToolsView/ToolsSectionView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ToolsSectionView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 18.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | import AVKit 10 | 11 | struct ToolsSectionView: View { 12 | @StateObject var filtersVM = FiltersViewModel() 13 | @ObservedObject var videoPlayer: VideoPlayerManager 14 | @ObservedObject var editorVM: EditorViewModel 15 | @ObservedObject var textEditor: TextEditorViewModel 16 | private let columns = Array(repeating: GridItem(.flexible()), count: 4) 17 | var body: some View { 18 | ZStack{ 19 | LazyVGrid(columns: columns, alignment: .center, spacing: 8) { 20 | ForEach(ToolEnum.allCases, id: \.self) { tool in 21 | ToolButtonView(label: tool.title, image: tool.image, isChange: editorVM.currentVideo?.isAppliedTool(for: tool) ?? false) { 22 | editorVM.selectedTools = tool 23 | } 24 | } 25 | } 26 | .padding() 27 | .opacity(editorVM.selectedTools != nil ? 
0 : 1) 28 | if let toolState = editorVM.selectedTools, let video = editorVM.currentVideo{ 29 | bottomSheet(toolState, video) 30 | .transition(.move(edge: .bottom).combined(with: .opacity)) 31 | } 32 | } 33 | .animation(.easeIn(duration: 0.15), value: editorVM.selectedTools) 34 | .onChange(of: editorVM.currentVideo){ newValue in 35 | if let video = newValue, let image = video.thumbnailsImages.first?.image{ 36 | filtersVM.loadFilters(for: image) 37 | filtersVM.colorCorrection = video.colorCorrection 38 | textEditor.textBoxes = video.textBoxes 39 | } 40 | } 41 | .onChange(of: textEditor.selectedTextBox) { box in 42 | if box != nil{ 43 | if editorVM.selectedTools != .text{ 44 | editorVM.selectedTools = .text 45 | } 46 | }else{ 47 | editorVM.selectedTools = nil 48 | } 49 | } 50 | .onChange(of: editorVM.selectedTools) { newValue in 51 | 52 | if newValue == .text, textEditor.textBoxes.isEmpty{ 53 | textEditor.openTextEditor(isEdit: false, timeRange: editorVM.currentVideo?.rangeDuration) 54 | } 55 | 56 | if newValue == nil{ 57 | editorVM.setText(textEditor.textBoxes) 58 | } 59 | } 60 | } 61 | } 62 | 63 | struct ToolsSectionView_Previews: PreviewProvider { 64 | static var previews: some View { 65 | MainEditorView(selectedVideoURl: Video.mock.url) 66 | } 67 | } 68 | 69 | 70 | extension ToolsSectionView{ 71 | 72 | @ViewBuilder 73 | private func bottomSheet(_ tool: ToolEnum, _ video: Video) -> some View{ 74 | 75 | let isAppliedTool = video.isAppliedTool(for: tool) 76 | 77 | VStack(spacing: 16){ 78 | 79 | sheetHeader(tool) 80 | switch tool { 81 | case .cut: 82 | ThumbnailsSliderView(curretTime: $videoPlayer.currentTime, video: $editorVM.currentVideo, isChangeState: isAppliedTool) { 83 | videoPlayer.scrubState = .scrubEnded(videoPlayer.currentTime) 84 | editorVM.setTools() 85 | } 86 | case .speed: 87 | VideoSpeedSlider(value: Double(video.rate), isChangeState: isAppliedTool) {rate in 88 | videoPlayer.pause() 89 | editorVM.updateRate(rate: rate) 90 | } 91 | case .crop: 92 | CropSheetView(editorVM: editorVM) 93 | case .audio: 94 | AudioSheetView(videoPlayer: videoPlayer, editorVM: editorVM) 95 | case .text: 96 | TextToolsView(video: video, editor: textEditor) 97 | case .filters: 98 | FiltersView(selectedFilterName: video.filterName, viewModel: filtersVM) { filterName in 99 | if let filterName{ 100 | videoPlayer.setFilters(mainFilter: CIFilter(name: filterName), colorCorrection: filtersVM.colorCorrection) 101 | }else{ 102 | videoPlayer.removeFilter() 103 | } 104 | editorVM.setFilter(filterName) 105 | } 106 | case .corrections: 107 | CorrectionsToolView(correction: $filtersVM.colorCorrection) { corrections in 108 | videoPlayer.setFilters(mainFilter: CIFilter(name: video.filterName ?? 
""), colorCorrection: corrections) 109 | editorVM.setCorrections(corrections) 110 | } 111 | case .frames: 112 | FramesToolView(selectedColor: $editorVM.frames.frameColor, scaleValue: $editorVM.frames.scaleValue, onChange: editorVM.setFrames) 113 | } 114 | Spacer() 115 | } 116 | .padding([.horizontal, .top]) 117 | .background(Color(.systemGray6)) 118 | } 119 | } 120 | 121 | extension ToolsSectionView{ 122 | 123 | private func sheetHeader(_ tool: ToolEnum) -> some View{ 124 | HStack { 125 | Button { 126 | editorVM.selectedTools = nil 127 | } label: { 128 | Image(systemName: "chevron.down") 129 | .imageScale(.small) 130 | .foregroundColor(.white) 131 | .padding(10) 132 | .background(Color(.systemGray5), in: RoundedRectangle(cornerRadius: 5)) 133 | } 134 | Spacer() 135 | if tool != .filters, tool != .audio, tool != .text{ 136 | Button { 137 | editorVM.reset() 138 | } label: { 139 | Text("Reset") 140 | .font(.subheadline) 141 | } 142 | .buttonStyle(.plain) 143 | }else if !editorVM.isSelectVideo{ 144 | Button { 145 | videoPlayer.pause() 146 | editorVM.removeAudio() 147 | } label: { 148 | Image(systemName: "trash.fill") 149 | .foregroundColor(.white) 150 | } 151 | } 152 | } 153 | .overlay { 154 | Text(tool.title) 155 | .font(.headline) 156 | } 157 | } 158 | 159 | } 160 | 161 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Buttons/AudioButtonView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AudioButtonView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 04.05.2023. 6 | // 7 | 8 | import SwiftUI 9 | import AVKit 10 | 11 | struct AudioButtonView: View { 12 | var video: Video 13 | @Binding var isSelectedTrack: Bool 14 | @ObservedObject var recorderManager: AudioRecorderManager 15 | @State private var audioSimples = [Audio.AudioSimple]() 16 | var body: some View { 17 | GeometryReader { proxy in 18 | ZStack{ 19 | Color(.systemGray5) 20 | if let audio = video.audio{ 21 | audioButton(proxy, audio) 22 | }else if recorderManager.recordState == .recording{ 23 | recordRectangle(proxy) 24 | } 25 | } 26 | } 27 | .frame(height: 40) 28 | } 29 | } 30 | 31 | struct AudioButtonView_Previews: PreviewProvider { 32 | static var previews: some View { 33 | AudioButtonView(video: Video.mock, isSelectedTrack: .constant(false), recorderManager: AudioRecorderManager()) 34 | } 35 | } 36 | 37 | 38 | extension AudioButtonView{ 39 | 40 | 41 | 42 | 43 | private func recordRectangle(_ proxy: GeometryProxy) -> some View{ 44 | let width = getWidthFromDuration(allWight: proxy.size.width, currentDuration: recorderManager.currentRecordTime, totalDuration: video.totalDuration) 45 | return RoundedRectangle(cornerRadius: 8) 46 | .fill(Color.red.opacity(0.5)) 47 | .frame(width: width) 48 | .hLeading() 49 | .animation(.easeIn, value: recorderManager.currentRecordTime) 50 | } 51 | 52 | private func audioButton(_ proxy: GeometryProxy, _ audio: Audio) -> some View{ 53 | let width = getWidthFromDuration(allWight: proxy.size.width, currentDuration: audio.duration, totalDuration: video.totalDuration) 54 | return RoundedRectangle(cornerRadius: 8) 55 | .fill(Color.red.opacity(0.5)) 56 | .overlay { 57 | ZStack{ 58 | if !isSelectedTrack{ 59 | RoundedRectangle(cornerRadius: 8) 60 | .strokeBorder(lineWidth: 2) 61 | } 62 | HStack(spacing: 1){ 63 | ForEach(audioSimples) { simple in 64 | Capsule() 65 | .fill(.white) 66 | .frame(width: 2, height: simple.size) 67 | } 68 | } 69 | } 70 | } 71 | 
.frame(width: width) 72 | .hLeading() 73 | .onAppear{ 74 | audioSimples = audio.createSimples(width) 75 | } 76 | .onTapGesture { 77 | isSelectedTrack.toggle() 78 | } 79 | } 80 | 81 | private func getWidthFromDuration(allWight: CGFloat, currentDuration: Double, totalDuration: Double) -> CGFloat{ 82 | return (allWight / totalDuration) * currentDuration 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Buttons/RecorderButtonView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // RecorderButtonView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 05.05.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct RecorderButtonView: View { 11 | var video: Video 12 | @ObservedObject var recorderManager: AudioRecorderManager 13 | @State private var timeRemaining = 3 14 | @State private var timer: Timer? = nil 15 | @State private var state: StateEnum = .empty 16 | let onRecorded: (Audio) -> Void 17 | let onRecordTime: (Double) -> Void 18 | 19 | private var isSetAudio: Bool{ 20 | video.audio != nil 21 | } 22 | 23 | var body: some View { 24 | ZStack{ 25 | switch state { 26 | case .empty: 27 | if isSetAudio{} 28 | recordButton 29 | case .timer: 30 | timerButton 31 | case .record: 32 | stopButton 33 | } 34 | } 35 | .opacity(isSetAudio ? 0 : 1) 36 | .disabled(isSetAudio) 37 | .onChange(of: recorderManager.finishedAudio) { newValue in 38 | guard let newValue else { return } 39 | onRecorded(newValue) 40 | state = .empty 41 | } 42 | .onChange(of: recorderManager.currentRecordTime) { newValue in 43 | if newValue > 0{ 44 | onRecordTime(newValue) 45 | } 46 | } 47 | } 48 | } 49 | 50 | struct RecorderButtonView_Previews: PreviewProvider { 51 | static var previews: some View { 52 | RecorderButtonView(video: Video.mock, recorderManager: AudioRecorderManager(), onRecorded: {_ in}, onRecordTime: {_ in}) 53 | } 54 | } 55 | 56 | extension RecorderButtonView{ 57 | 58 | 59 | enum StateEnum: Int{ 60 | case empty, timer, record 61 | } 62 | 63 | 64 | private var recordButton: some View{ 65 | Button { 66 | state = .timer 67 | startTimer() 68 | } label: { 69 | Image(systemName: "mic.fill") 70 | .foregroundColor(.white) 71 | } 72 | } 73 | 74 | private var timerButton: some View{ 75 | Text("\(timeRemaining)") 76 | .font(.subheadline.bold()) 77 | .foregroundColor(.red) 78 | .onTapGesture { 79 | state = .empty 80 | stopTimer() 81 | } 82 | } 83 | 84 | private var stopButton: some View{ 85 | Image(systemName: "stop.fill") 86 | .foregroundColor(.red) 87 | .onTapGesture { 88 | state = .empty 89 | recorderManager.stopRecording() 90 | } 91 | } 92 | 93 | 94 | private func startTimer(){ 95 | timer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true){ _ in 96 | timeRemaining -= 1 97 | if timeRemaining == 0{ 98 | state = .record 99 | stopTimer() 100 | recorderManager.startRecording(recordMaxTime: video.totalDuration) 101 | } 102 | } 103 | } 104 | 105 | private func stopTimer(){ 106 | timeRemaining = 3 107 | timer?.invalidate() 108 | timer = nil 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Buttons/ToolButtonView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ToolButtonView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 18.04.2023. 
6 | // 7 | 8 | import SwiftUI 9 | 10 | struct ToolButtonView: View { 11 | let label: String 12 | let image: String 13 | let isChange: Bool 14 | let action: () -> Void 15 | 16 | 17 | private var bgColor: Color{ 18 | Color(isChange ? .systemGray5 : .systemGray6) 19 | } 20 | var body: some View { 21 | Button { 22 | action() 23 | } label: { 24 | VStack(spacing: 4) { 25 | Image(systemName: image) 26 | .imageScale(.medium) 27 | Text(label) 28 | .font(.caption) 29 | } 30 | .frame(height: 85) 31 | .hCenter() 32 | .background(bgColor, in: RoundedRectangle(cornerRadius: 12, style: .continuous)) 33 | } 34 | .buttonStyle(.plain) 35 | } 36 | } 37 | 38 | struct ToolButtonView_Previews: PreviewProvider { 39 | static var previews: some View { 40 | VStack { 41 | ToolButtonView(label: "Cut", image: "scissors", isChange: false){} 42 | ToolButtonView(label: "Cut", image: "scissors", isChange: true){} 43 | 44 | } 45 | .frame(width: 100) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Player/PlayerView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // PlayerView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 18.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | import AVKit 10 | 11 | struct PlayerView: UIViewControllerRepresentable { 12 | 13 | var player: AVPlayer 14 | 15 | typealias UIViewControllerType = AVPlayerViewController 16 | 17 | func makeUIViewController(context: Context) -> AVPlayerViewController { 18 | let view = AVPlayerViewController() 19 | view.player = player 20 | view.showsPlaybackControls = false 21 | view.videoGravity = .resizeAspectFill 22 | return view 23 | } 24 | 25 | func updateUIViewController(_ uiViewController: AVPlayerViewController, context: Context) { 26 | uiViewController.player = player 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/SheetView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SheetView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 24.04.2023. 
6 | // 7 | 8 | import SwiftUI 9 | 10 | struct SheetView<Content: View>: View { 11 | @Binding var isPresented: Bool 12 | @State private var showSheet: Bool = false 13 | @State private var slideGesture: CGSize 14 | var bgOpacity: CGFloat 15 | let content: Content 16 | init(isPresented: Binding<Bool>, bgOpacity: CGFloat = 0.01, @ViewBuilder content: () -> Content){ 17 | self._isPresented = isPresented 18 | self.bgOpacity = bgOpacity 19 | self._slideGesture = State(initialValue: CGSize.zero) 20 | self.content = content() 21 | 22 | } 23 | var body: some View { 24 | ZStack(alignment: .bottom){ 25 | Color.black.opacity(bgOpacity) 26 | .onTapGesture { 27 | closeSheet() 28 | } 29 | .onAppear{ 30 | withAnimation(.spring().delay(0.1)){ 31 | showSheet = true 32 | } 33 | } 34 | if showSheet{ 35 | sheetLayer 36 | .transition(.move(edge: .bottom)) 37 | .onDisappear{ 38 | withAnimation(.easeIn(duration: 0.1)){ 39 | isPresented = false 40 | } 41 | } 42 | } 43 | } 44 | } 45 | } 46 | 47 | 48 | extension SheetView{ 49 | private var sheetLayer: some View{ 50 | VStack(spacing: 0){ 51 | HStack(alignment: .top, spacing: -20){ 52 | Spacer() 53 | Capsule() 54 | .fill(Color(.systemGray4)) 55 | .frame(width: 80, height: 6) 56 | Spacer() 57 | Button { 58 | closeSheet() 59 | } label: { 60 | Image(systemName: "xmark") 61 | .imageScale(.medium) 62 | .foregroundColor(.white) 63 | } 64 | } 65 | .padding(.top, 10) 66 | .padding(.horizontal) 67 | content 68 | .padding(.horizontal) 69 | .padding(.top, 10) 70 | .padding(.bottom, 40) 71 | } 72 | .frame(maxWidth: .infinity) 73 | .background(Color(.systemGray6)) 74 | .clipShape(CustomCorners(corners: [.topLeft, .topRight], radius: 12)) 75 | .shadow(color: .black.opacity(0.1), radius: 5, x: 0, y: -5) 76 | .gesture(DragGesture().onChanged{ value in 77 | self.slideGesture = value.translation 78 | } 79 | .onEnded{ value in 80 | if self.slideGesture.height > -10 { 81 | closeSheet() 82 | } 83 | self.slideGesture = .zero 84 | }) 85 | } 86 | 87 | private func closeSheet(){ 88 | withAnimation(.easeIn(duration: 0.2)){ 89 | showSheet = false 90 | } 91 | } 92 | } 93 | 94 | 95 | struct CustomCorners: Shape { 96 | 97 | var corners: UIRectCorner 98 | var radius: CGFloat 99 | 100 | func path(in rect: CGRect) -> Path { 101 | let path = UIBezierPath(roundedRect: rect, byRoundingCorners: corners, cornerRadii: CGSize(width: radius, height: radius)) 102 | return Path(path.cgPath) 103 | } 104 | } 105 | 106 | 107 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Sliders/CustomSlider.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CustomSlider.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 17.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct CustomSlider<Value, Track, Thumb>: View 11 | where Value: BinaryFloatingPoint, Value.Stride: BinaryFloatingPoint, Track: View, Thumb: View { 12 | // the value of the slider, inside `bounds` 13 | @Binding var value: Value 14 | // range to which the thumb offset is mapped 15 | let bounds: ClosedRange<Value> 16 | // tells how discretely the value changes 17 | let step: Value 18 | // left-hand label 19 | let minimumValueLabel: Text? 20 | // right-hand label 21 | let maximumValueLabel: Text? 22 | // called with `true` when sliding starts and with `false` when it stops 23 | let onEditingChanged: ((Bool) -> Void)? 24 | let onChanged: (() -> Void)?
25 | // the track view 26 | let track: () -> Track 27 | // the thumb view 28 | let thumb: () -> Thumb 29 | // tells how big the thumb is. This is here because there's no good 30 | // way in SwiftUI to get the thumb size at runtime, and it's important 31 | // to know it in order to compute its insets in the track overlay. 32 | let thumbSize: CGSize 33 | 34 | // x offset of the thumb from the track left-hand side 35 | @State private var xOffset: CGFloat = 0 36 | // last moved offset, used to decide if sliding has started 37 | @State private var lastOffset: CGFloat = 0 38 | // the size of the track view. This can be obtained at runtime. 39 | @State private var trackSize: CGSize = .zero 40 | @State private var isOnChange: Bool = false 41 | 42 | // initializer allows us to set default values for some view params 43 | init(value: Binding<Value>, 44 | in bounds: ClosedRange<Value> = 0...1, 45 | step: Value = 0.001, 46 | minimumValueLabel: Text? = nil, 47 | maximumValueLabel: Text? = nil, 48 | onEditingChanged: ((Bool) -> Void)? = nil, 49 | onChanged: (() -> Void)? = nil, 50 | track: @escaping () -> Track, 51 | thumb: @escaping () -> Thumb, 52 | thumbSize: CGSize) { 53 | _value = value 54 | self.bounds = bounds 55 | self.step = step 56 | self.minimumValueLabel = minimumValueLabel 57 | self.maximumValueLabel = maximumValueLabel 58 | self.onEditingChanged = onEditingChanged 59 | self.onChanged = onChanged 60 | self.track = track 61 | self.thumb = thumb 62 | self.thumbSize = thumbSize 63 | } 64 | 65 | // where does the current value sit, percentage-wise, in the provided bounds 66 | private var percentage: Value { 67 | 1 - (bounds.upperBound - value) / (bounds.upperBound - bounds.lowerBound) 68 | } 69 | 70 | // how wide the fill view should be 71 | private var fillWidth: CGFloat { 72 | trackSize.width * CGFloat(percentage) 73 | } 74 | 75 | var body: some View { 76 | // the HStack orders minimumValueLabel, the slider and maximumValueLabel horizontally 77 | HStack { 78 | minimumValueLabel 79 | 80 | // Represents the custom slider. ZStack overlays `fill` on top of `track`, 81 | // while the `thumb` is in their `overlay`.
82 | ZStack { 83 | track() 84 | // get the size of the track at runtime as it 85 | // defines all the other functionality 86 | .measureSize { 87 | // if this is the first time trackSize is computed, 88 | // update the offset to reflect the current `value` 89 | let firstInit = (trackSize == .zero) 90 | trackSize = $0 91 | if firstInit { 92 | xOffset = (trackSize.width - thumbSize.width) * CGFloat(percentage) 93 | lastOffset = xOffset 94 | } 95 | } 96 | .onChange(of: value) { _ in 97 | if !isOnChange{ 98 | xOffset = (trackSize.width - thumbSize.width) * CGFloat(percentage) 99 | lastOffset = xOffset 100 | } 101 | } 102 | } 103 | // make sure the entire ZStack is the same size as `track` 104 | .frame(width: trackSize.width, height: trackSize.height) 105 | // the thumb lives in the ZStack overlay 106 | .overlay(thumb() 107 | // adjust the insets so that `thumb` doesn't sit outside the `track` 108 | .position(x: thumbSize.width / 2, 109 | y: thumbSize.height / 2) 110 | // set the size here to make sure it's really the same as the 111 | // provided `thumbSize` parameter 112 | .frame(width: thumbSize.width, height: thumbSize.height) 113 | // set the offset to, well, the stored xOffset 114 | .offset(x: xOffset) 115 | // use the DragGesture to move the `thumb` around as adjust xOffset 116 | .gesture(DragGesture(minimumDistance: 0).onChanged({ gestureValue in 117 | // make sure at least some dragging was done to trigger `onEditingChanged` 118 | if abs(gestureValue.translation.width) < 0.1 { 119 | lastOffset = xOffset 120 | onEditingChanged?(true) 121 | isOnChange = true 122 | } 123 | // update xOffset by the gesture translation, making sure it's within the view's bounds 124 | let availableWidth = trackSize.width - thumbSize.width 125 | xOffset = max(0, min(lastOffset + gestureValue.translation.width, availableWidth)) 126 | // update the value by mapping xOffset to the track width and then to the provided bounds 127 | // also make sure that the value changes discretely based on the `step` para 128 | let newValue = (bounds.upperBound - bounds.lowerBound) * Value(xOffset / availableWidth) + bounds.lowerBound 129 | let steppedNewValue = (round(newValue / step) * step) 130 | value = min(bounds.upperBound, max(bounds.lowerBound, steppedNewValue)) 131 | onChanged?() 132 | }).onEnded({ _ in 133 | // once the gesture ends, trigger `onEditingChanged` again 134 | onEditingChanged?(false) 135 | isOnChange = false 136 | })), 137 | alignment: .leading) 138 | 139 | maximumValueLabel 140 | } 141 | // manually set the height of the entire view to account for thumb height 142 | .frame(height: max(trackSize.height, thumbSize.height)) 143 | } 144 | } 145 | 146 | struct CustomSlider_Previews: PreviewProvider { 147 | static var previews: some View { 148 | CustomSlider(value: .constant(10), 149 | in: 10...255, 150 | step: 90, 151 | minimumValueLabel: Text("Min"), 152 | maximumValueLabel: Text("Max"), 153 | onEditingChanged: { started in 154 | print("started custom slider: \(started)") 155 | }, track: { 156 | Capsule() 157 | .foregroundColor(.init(red: 0.9, green: 0.9, blue: 0.9)) 158 | .frame(width: 200, height: 5) 159 | }, thumb: { 160 | Circle() 161 | .foregroundColor(.white) 162 | .shadow(radius: 20 / 1) 163 | }, thumbSize: CGSize(width: 20, height: 20)) 164 | } 165 | } 166 | 167 | 168 | 169 | 170 | struct SizePreferenceKey: PreferenceKey { 171 | static var defaultValue: CGSize = .zero 172 | 173 | static func reduce(value: inout CGSize, nextValue: () -> CGSize) { 174 | value = nextValue() 175 | } 176 | } 177 | 178 | 
struct MeasureSizeModifier: ViewModifier { 179 | func body(content: Content) -> some View { 180 | content.background(GeometryReader { geometry in 181 | Color.clear.preference(key: SizePreferenceKey.self, 182 | value: geometry.size) 183 | }) 184 | } 185 | } 186 | 187 | extension View { 188 | func measureSize(perform action: @escaping (CGSize) -> Void) -> some View { 189 | self.modifier(MeasureSizeModifier()) 190 | .onPreferenceChange(SizePreferenceKey.self, perform: action) 191 | } 192 | } 193 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Sliders/NewTimelineSlider.swift: -------------------------------------------------------------------------------- 1 | // 2 | // NewTimelineSlider.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 19.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct TimelineSlider<T: View, A: View>: View { 11 | @State private var lastOffset: CGFloat = 0 12 | var bounds: ClosedRange<Double> 13 | var disableOffset: Bool 14 | @Binding var value: Double 15 | @State var isChange: Bool = false 16 | @State var offset: CGFloat = 0 17 | @State var gestureW: CGFloat = 0 18 | var frameWight: CGFloat = 65 19 | let actionWidth: CGFloat = 30 20 | @ViewBuilder 21 | var frameView: () -> T 22 | @ViewBuilder 23 | var actionView: () -> A 24 | let onChange: () -> Void 25 | 26 | var body: some View { 27 | GeometryReader { proxy in 28 | let sliderViewYCenter = proxy.size.height / 2 29 | let sliderPositionX = proxy.size.width / 2 + frameWight / 2 + (disableOffset ? 0 : offset) 30 | ZStack{ 31 | frameView() 32 | .frame(width: frameWight, height: proxy.size.height - 5) 33 | .position(x: sliderPositionX - actionWidth/2, y: sliderViewYCenter) 34 | HStack(spacing: 0) { 35 | Capsule() 36 | .fill(Color.white) 37 | .frame(width: 4, height: proxy.size.height) 38 | actionView() 39 | .frame(width: actionWidth) 40 | } 41 | .shadow(color: .black.opacity(0.3), radius: 3, x: 0, y: 0) 42 | .opacity(disableOffset ?
0 : 1) 43 | } 44 | .frame(width: proxy.size.width, height: proxy.size.height) 45 | .contentShape(Rectangle()) 46 | 47 | .gesture( 48 | DragGesture(minimumDistance: 1) 49 | .onChanged { gesture in 50 | isChange = true 51 | 52 | let translationWidth = gesture.translation.width * 0.5 53 | 54 | 55 | offset = min(0, max(translationWidth, -frameWight)) 56 | 57 | let newValue = (bounds.upperBound - bounds.lowerBound) * (offset / frameWight) - bounds.lowerBound 58 | 59 | value = abs(newValue) 60 | 61 | onChange() 62 | 63 | } 64 | .onEnded { _ in 65 | isChange = false 66 | } 67 | ) 68 | .animation(.easeIn, value: offset) 69 | .onChange(of: value) { _ in 70 | if !disableOffset{ 71 | setOffset() 72 | } 73 | } 74 | } 75 | } 76 | } 77 | 78 | struct NewTimelineSlider_Previews: PreviewProvider { 79 | @State static var curretTime = 0.0 80 | static var previews: some View { 81 | TimelineSlider(bounds: 5...34, disableOffset: false, value: $curretTime, frameView: { 82 | Rectangle() 83 | .fill(Color.secondary) 84 | }, actionView: {EmptyView()}, onChange: {}) 85 | .frame(height: 80) 86 | } 87 | } 88 | 89 | extension TimelineSlider{ 90 | 91 | private func setOffset(){ 92 | if !isChange{ 93 | offset = ((-value + bounds.lowerBound) / (bounds.upperBound - bounds.lowerBound)) * frameWight 94 | } 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Sliders/RangeSliderView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // RangeSliderView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 17.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct RangedSliderView<T: View>: View { 11 | let currentValue: Binding<ClosedRange<Double>>? 12 | let sliderBounds: ClosedRange<Double> 13 | let step: Double 14 | let onEndChange: () -> Void 15 | var thumbView: T 16 | 17 | init(value: Binding<ClosedRange<Double>>?, bounds: ClosedRange<Double>, step: Double = 1, onEndChange: @escaping () -> Void, @ViewBuilder thumbView: () -> T) { 18 | self.onEndChange = onEndChange 19 | self.step = step 20 | self.currentValue = value 21 | self.sliderBounds = bounds 22 | self.thumbView = thumbView() 23 | } 24 | 25 | var body: some View { 26 | GeometryReader { geometry in 27 | sliderView(sliderSize: geometry.size) 28 | } 29 | } 30 | 31 | 32 | @ViewBuilder private func sliderView(sliderSize: CGSize) -> some View { 33 | let sliderViewYCenter = sliderSize.height / 2 34 | ZStack { 35 | Rectangle() 36 | .fill(Color(.systemGray5).opacity(0.75)) 37 | .frame(height: sliderSize.height) 38 | ZStack { 39 | let sliderBoundDifference = sliderBounds.upperBound / step 40 | let stepWidthInPixel = CGFloat(sliderSize.width) / CGFloat(sliderBoundDifference) 41 | 42 | // Calculate Left Thumb initial position 43 | let leftThumbLocation: CGFloat = currentValue?.wrappedValue.lowerBound == sliderBounds.lowerBound 44 | ? 0 45 | : CGFloat((currentValue?.wrappedValue.lowerBound ?? 0) - sliderBounds.lowerBound) * stepWidthInPixel 46 | 47 | // Calculate right thumb initial position 48 | let rightThumbLocation = CGFloat(currentValue?.wrappedValue.upperBound ??
1) * stepWidthInPixel 49 | let height = rightThumbLocation - leftThumbLocation 50 | // Path between both handles 51 | 52 | 53 | thumbView 54 | .frame(width: height, height: sliderSize.height) 55 | .position(x: sliderSize.width - (sliderSize.width - leftThumbLocation - height / 2) , y: sliderViewYCenter) 56 | 57 | 58 | // Left Thumb Handle 59 | let leftThumbPoint = CGPoint(x: leftThumbLocation, y: sliderViewYCenter) 60 | thumbView(height: sliderSize.height, position: leftThumbPoint, isLeftThumb: true) 61 | .highPriorityGesture(DragGesture().onChanged { dragValue in 62 | 63 | let dragLocation = dragValue.location 64 | let xThumbOffset = min(max(0, dragLocation.x), sliderSize.width) 65 | 66 | let newValue = (sliderBounds.lowerBound) + (xThumbOffset / stepWidthInPixel) 67 | 68 | // Stop the range thumbs from colliding each other 69 | if newValue < currentValue?.wrappedValue.upperBound ?? 1 { 70 | currentValue?.wrappedValue = newValue...(currentValue?.wrappedValue.upperBound ?? 1) 71 | } 72 | }.onEnded({ _ in 73 | onEndChange() 74 | })) 75 | 76 | // Right Thumb Handle 77 | thumbView(height: sliderSize.height, position: CGPoint(x: rightThumbLocation, y: sliderViewYCenter), isLeftThumb: false) 78 | .highPriorityGesture(DragGesture().onChanged { dragValue in 79 | let dragLocation = dragValue.location 80 | let xThumbOffset = min(max(CGFloat(leftThumbLocation), dragLocation.x), sliderSize.width) 81 | 82 | var newValue = xThumbOffset / stepWidthInPixel // convert back the value bound 83 | newValue = min(newValue, sliderBounds.upperBound) 84 | 85 | // Stop the range thumbs from colliding each other 86 | if newValue > currentValue?.wrappedValue.lowerBound ?? 0 { 87 | currentValue?.wrappedValue = (currentValue?.wrappedValue.lowerBound ?? 0)...newValue 88 | } 89 | }.onEnded({ _ in 90 | onEndChange() 91 | })) 92 | } 93 | } 94 | .compositingGroup() 95 | } 96 | 97 | @ViewBuilder func lineBetweenThumbs(height: CGFloat, from: CGPoint, to: CGPoint) -> some View { 98 | Path { path in 99 | path.move(to: from) 100 | path.addLine(to: to) 101 | }.stroke(Color.white, lineWidth: height + 5) 102 | } 103 | 104 | @ViewBuilder func thumbView(height: CGFloat, position: CGPoint, isLeftThumb: Bool) -> some View { 105 | let width = 14 106 | Rectangle() 107 | .frame(width: 14, height: height) 108 | .foregroundColor(.red) 109 | .shadow(color: Color.black.opacity(0.16), radius: 8, x: 0, y: 2) 110 | .contentShape(Rectangle()) 111 | .overlay(alignment: .center) { 112 | Image(systemName: isLeftThumb ? "chevron.left" : "chevron.right") 113 | .imageScale(.small) 114 | 115 | } 116 | .position(x: position.x + CGFloat((isLeftThumb ? -(width/2) : width/2)), y: position.y) 117 | 118 | } 119 | } 120 | 121 | struct RangeSliderView_Previews: PreviewProvider { 122 | static var previews: some View { 123 | RangedSliderView(value: .constant(16...60), bounds: 1...100, onEndChange: {}, thumbView: {Rectangle().blendMode(.destinationOut)}) 124 | .frame(height: 60) 125 | .padding() 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Sliders/SliderView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // SliderView.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 17.04.2023. 
6 | // 7 | 8 | import SwiftUI 9 | 10 | 11 | struct SliderView<V>: View where V : BinaryFloatingPoint, V.Stride : BinaryFloatingPoint { 12 | 13 | // MARK: - Value 14 | // MARK: Private 15 | @Binding private var value: V 16 | private let bounds: ClosedRange<V> 17 | private let step: V.Stride 18 | 19 | private let length: CGFloat = 8 20 | private let lineWidth: CGFloat = 2 21 | let height: CGFloat 22 | 23 | @State private var ratio: CGFloat = 0 24 | @State private var startX: CGFloat? = nil 25 | 26 | let onChange: () -> Void 27 | 28 | // MARK: - Initializer 29 | init(value: Binding<V>, in bounds: ClosedRange<V>, height: CGFloat = 60, step: V.Stride = 1, onChange: @escaping () -> Void) { 30 | _value = value 31 | self.onChange = onChange 32 | self.bounds = bounds 33 | self.height = height 34 | self.step = step 35 | } 36 | 37 | 38 | // MARK: - View 39 | // MARK: Public 40 | var body: some View { 41 | GeometryReader { proxy in 42 | ZStack(alignment: .center) { 43 | // Thumb 44 | Capsule() 45 | .foregroundColor(.orange) 46 | .frame(width: length, height: height) 47 | .offset(x: (proxy.size.width - length) * ratio) 48 | .gesture(DragGesture(minimumDistance: 0) 49 | .onChanged({ 50 | updateStatus(value: $0, proxy: proxy) 51 | onChange() 52 | }) 53 | .onEnded {_ in startX = nil}) 54 | } 55 | .frame(height: height, alignment: .center) 56 | .simultaneousGesture(DragGesture(minimumDistance: 0) 57 | .onChanged({ update(value: $0, proxy: proxy) })) 58 | .onAppear { 59 | ratio = min(1, max(0,CGFloat(value / bounds.upperBound))) 60 | } 61 | 62 | .onChange(of: value) { newValue in 63 | withAnimation(.easeIn(duration: 0.1)){ 64 | ratio = min(1, max(0,CGFloat(newValue / bounds.upperBound))) 65 | } 66 | } 67 | } 68 | } 69 | 70 | 71 | // MARK: - Function 72 | // MARK: Private 73 | private func updateStatus(value: DragGesture.Value, proxy: GeometryProxy) { 74 | guard startX == nil else { return } 75 | 76 | let delta = value.startLocation.x - (proxy.size.width - length) * ratio 77 | startX = (length < value.startLocation.x && 0 < delta) ?
delta : value.startLocation.x 78 | } 79 | 80 | private func update(value: DragGesture.Value, proxy: GeometryProxy) { 81 | guard let x = startX else { return } 82 | startX = min(length, max(0, x)) 83 | 84 | var point = value.location.x - x 85 | let delta = proxy.size.width - length 86 | 87 | // Check the boundary 88 | if point < 0 { 89 | startX = value.location.x 90 | point = 0 91 | 92 | } else if delta < point { 93 | startX = value.location.x - delta 94 | point = delta 95 | } 96 | 97 | // Ratio 98 | var ratio = point / delta 99 | 100 | 101 | // Step 102 | if step != 1 { 103 | let unit = CGFloat(step) / CGFloat(bounds.upperBound - bounds.lowerBound) 104 | 105 | let remainder = ratio.remainder(dividingBy: unit) 106 | if remainder != 0 { 107 | ratio = ratio - CGFloat(remainder) 108 | } 109 | } 110 | 111 | self.ratio = ratio 112 | self.value = V(bounds.upperBound - bounds.lowerBound) * V(ratio) 113 | print( self.value) 114 | } 115 | } 116 | struct StickerSliderView_Previews: PreviewProvider { 117 | static var previews: some View { 118 | VStack{ 119 | SliderView(value: .constant(40), in: 10...100) { } 120 | .frame(height: 60) 121 | .background(Color.secondary) 122 | .padding() 123 | } 124 | } 125 | } 126 | 127 | 128 | 129 | -------------------------------------------------------------------------------- /VideoEditorSwiftUI/Views/ViewComponents/Sliders/TimeLineSlider.swift: -------------------------------------------------------------------------------- 1 | // 2 | // LineSlider.swift 3 | // VideoEditorSwiftUI 4 | // 5 | // Created by Bogdan Zykov on 17.04.2023. 6 | // 7 | 8 | import SwiftUI 9 | 10 | struct LineSlider: View { 11 | @Binding var value: Double 12 | var range: ClosedRange 13 | let onEditingChanged: () -> Void 14 | var body: some View { 15 | 16 | GeometryReader { proxy in 17 | CustomSlider(value: $value, 18 | in: range, 19 | onChanged: { 20 | 21 | onEditingChanged() 22 | 23 | }, track: { 24 | Rectangle() 25 | .fill(.clear) 26 | .frame(width: proxy.size.width, height: proxy.size.height) 27 | }, thumb: { 28 | Capsule() 29 | .foregroundColor(.orange) 30 | }, thumbSize: CGSize(width: 10, height: proxy.size.height)) 31 | } 32 | } 33 | } 34 | 35 | struct TimeLineSlider_Previews: PreviewProvider { 36 | static var previews: some View { 37 | LineSlider(value: .constant(100), range: 14...100){} 38 | .frame(width: 250, height: 60) 39 | .background(Color.secondary) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /screenshots/editor_screen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/editor_screen.png -------------------------------------------------------------------------------- /screenshots/export_screen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/export_screen.png -------------------------------------------------------------------------------- /screenshots/fullscreen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/fullscreen.png -------------------------------------------------------------------------------- /screenshots/mainScreen.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/mainScreen.png -------------------------------------------------------------------------------- /screenshots/tool_audio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/tool_audio.png -------------------------------------------------------------------------------- /screenshots/tool_corrections.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/tool_corrections.png -------------------------------------------------------------------------------- /screenshots/tool_crop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/tool_crop.png -------------------------------------------------------------------------------- /screenshots/tool_cut.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/tool_cut.png -------------------------------------------------------------------------------- /screenshots/tool_filters.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/tool_filters.png -------------------------------------------------------------------------------- /screenshots/tool_frame.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/tool_frame.png -------------------------------------------------------------------------------- /screenshots/tool_speed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/tool_speed.png -------------------------------------------------------------------------------- /screenshots/tool_text.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BogdanZyk/VideoEditorSwiftUI/c38d3d43f34f5f34d5cd1222803699cbde0e2921/screenshots/tool_text.png --------------------------------------------------------------------------------
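The project's own previews cover most components, but the `measureSize(perform:)` helper declared in CustomSlider.swift has no usage example of its own. The sketch below is illustrative only: `SizeReadingDemo` is a hypothetical view name, not part of the project, and it simply shows how the SizePreferenceKey / MeasureSizeModifier pair reports a child view's rendered size back to the caller.

```swift
import SwiftUI

// Illustrative sketch only: SizeReadingDemo is a hypothetical view, not part of the project.
// It relies on the measureSize(perform:) extension from CustomSlider.swift, which publishes
// a child view's size through SizePreferenceKey and delivers it via onPreferenceChange.
struct SizeReadingDemo: View {
    @State private var trackSize: CGSize = .zero

    var body: some View {
        VStack(spacing: 12) {
            Capsule()
                .fill(Color.secondary)
                .frame(height: 5)
                .padding(.horizontal)
                // Reports the capsule's laid-out size whenever SwiftUI recomputes it.
                .measureSize { trackSize = $0 }

            Text("track size: \(Int(trackSize.width)) x \(Int(trackSize.height))")
                .font(.caption)
        }
    }
}
```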