├── .gitignore
├── .gitmodules
├── .swift-version
├── .swiftpm
└── xcode
│ ├── package.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ │ └── IDEWorkspaceChecks.plist
│ └── xcshareddata
│ └── xcschemes
│ └── iOSClientPlayer.xcscheme
├── CHANGELOG.md
├── Documentation
├── Bitrates-Framerates-Resolutions.md
├── analytics-how-to.md
├── custom-playback-controls.md
├── enabling-airplay.md
├── error-handling.md
├── getting-started.md
├── modular-playback-technology.md
├── responding-to-playback-events.md
└── subtitles-and-multi-audio.md
├── LICENSE
├── Package.resolved
├── Package.swift
├── README.md
├── Sources
├── iOSClientPlayer
│ ├── Analytics
│ │ ├── AnalyticsConnector.swift
│ │ ├── AnalyticsProvider.swift
│ │ ├── Connectors
│ │ │ └── PassThroughConnector.swift
│ │ ├── Providers
│ │ │ └── AnalyticsLogger.swift
│ │ ├── SourceAbandonedEventProvider.swift
│ │ ├── TechDeallocationEventProvider.swift
│ │ ├── TimedMetadataProvider.swift
│ │ └── TraceProvider.swift
│ ├── Components
│ │ ├── MediaPlayback.swift
│ │ ├── MediaRendering.swift
│ │ ├── NetworkBehavior.swift
│ │ ├── StartTime.swift
│ │ └── TrackSelectable.swift
│ ├── Context
│ │ ├── Manifest
│ │ │ ├── Components
│ │ │ │ ├── HLSNative+ManifestContext+Airplay.swift
│ │ │ │ └── Player+ManifestContext.swift
│ │ │ ├── Manifest.swift
│ │ │ └── ManifestContext.swift
│ │ ├── MediaContext.swift
│ │ ├── MediaSource.swift
│ │ └── MediaSourceRequestHeaders.swift
│ ├── Events
│ │ ├── EventDispatcher.swift
│ │ └── EventResponder.swift
│ ├── Extensions
│ │ ├── Date+Extensions.swift
│ │ ├── Error+Extensions.swift
│ │ ├── Int64+Extensions.swift
│ │ └── Sequence+Extensions.swift
│ ├── Info.plist
│ ├── Player.swift
│ ├── PlayerError.swift
│ ├── PrivacyInfo.xcprivacy
│ ├── Tech
│ │ ├── HLS
│ │ │ ├── AirplayHandler.swift
│ │ │ ├── Components
│ │ │ │ ├── HLSNative+MediaPlayback.swift
│ │ │ │ ├── HLSNative+MediaRendering.swift
│ │ │ │ ├── HLSNative+NetworkBehavior.swift
│ │ │ │ ├── HLSNative+StartTime.swift
│ │ │ │ └── HLSNative+TrackSelectable.swift
│ │ │ ├── Extensions
│ │ │ │ ├── AVAsset+LoadableKeys.swift
│ │ │ │ ├── AVMediaSelectionGroup+Extensions.swift
│ │ │ │ ├── AVPlayer+KeyValueObservable.swift
│ │ │ │ ├── AVPlayerItem+Extensions.swift
│ │ │ │ ├── AVPlayerItemAccessLogEvent+Extensions.swift
│ │ │ │ └── AVPlayerItemErrorLogEvent+Extensions.swift
│ │ │ ├── FairplayRequester.swift
│ │ │ ├── HLSNative.swift
│ │ │ ├── HLSNativeConfiguration.swift
│ │ │ ├── HLSNativeError.swift
│ │ │ ├── HLSNativeWarning.swift
│ │ │ ├── Observation
│ │ │ │ ├── DateRangeMetadataCollector.swift
│ │ │ │ ├── ItemObserver.swift
│ │ │ │ ├── KVOChange.swift
│ │ │ │ ├── KeyValueObservable.swift
│ │ │ │ ├── KeyValueObserver.swift
│ │ │ │ ├── NotificationObserver.swift
│ │ │ │ ├── NotificationToken.swift
│ │ │ │ ├── Observer.swift
│ │ │ │ ├── PlayerObserver.swift
│ │ │ │ ├── RateObserver.swift
│ │ │ │ └── UnmanagedPlayerObserver.swift
│ │ │ └── Tracks
│ │ │ │ ├── MediaGroup.swift
│ │ │ │ └── MediaTrack.swift
│ │ └── Tech.swift
│ ├── Version.swift
│ ├── Views
│ │ └── PlayerView.swift
│ └── Warning.swift
└── iOSClientPlayerObjc
│ └── Player.h
├── Tests
└── iOSClientPlayerTests
│ ├── HLSNativeNetworkBehavior.swift
│ ├── HLSNativeTrackSelectableSpec.swift
│ ├── Info.plist
│ ├── InvalidStartTimeSpec.swift
│ ├── MockedAVPlayer.swift
│ ├── PlayerErrorSpec.swift
│ ├── PlayerTests.swift
│ └── TestEnv.swift
├── UPGRADE_GUIDE.md
├── fastlane
├── .env
├── Appfile
├── Fastfile
└── actions
│ └── update_dependency_graph.rb
├── iOSClientPlayer.podspec
└── iOSClientPlayer.xcodeproj
├── project.pbxproj
├── project.xcworkspace
├── contents.xcworkspacedata
└── xcshareddata
│ ├── IDEWorkspaceChecks.plist
│ └── Player.xcscmblueprint
├── xcshareddata
└── xcschemes
│ ├── iOSClientPlayer-tvOS.xcscheme
│ └── iOSClientPlayer.xcscheme
└── xcuserdata
└── udaya.xcuserdatad
└── xcschemes
└── xcschememanagement.plist
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## Build generated
6 | build/
7 | DerivedData/
8 |
9 | ## Various settings
10 | *.pbxuser
11 | !default.pbxuser
12 | *.mode1v3
13 | !default.mode1v3
14 | *.mode2v3
15 | !default.mode2v3
16 | *.perspectivev3
17 | !default.perspectivev3
18 | xcuserdata/
19 |
20 | ## Other
21 | *.moved-aside
22 | *.xcuserstate
23 |
24 | ## Obj-C/Swift specific
25 | *.hmap
26 | *.ipa
27 | *.dSYM.zip
28 | *.dSYM
29 |
30 | ## Playgrounds
31 | timeline.xctimeline
32 | playground.xcworkspace
33 |
34 | # Swift Package Manager
35 | #
36 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
37 | # Packages/
38 | .build/
39 |
40 | # CocoaPods
41 | #
42 | # We recommend against adding the Pods directory to your .gitignore. However
43 | # you should judge for yourself, the pros and cons are mentioned at:
44 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
45 | #
46 | # Pods/
47 |
48 | # Carthage
49 | #
50 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
51 |
52 | Carthage/Checkouts
53 | Carthage/Build
54 |
55 | # fastlane
56 | #
57 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
58 | # screenshots whenever they are needed.
59 | # For more information about the recommended setup visit:
60 | # https://github.com/fastlane/fastlane/blob/master/fastlane/docs/Gitignore.md
61 |
62 | fastlane/report.xml
63 | fastlane/Preview.html
64 | fastlane/screenshots
65 | fastlane/test_output
66 | fastlane/README.md
67 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EricssonBroadcastServices/iOSClientPlayer/713761df527cf295994d9164fc7bc3549b5f5328/.gitmodules
--------------------------------------------------------------------------------
/.swift-version:
--------------------------------------------------------------------------------
1 | 5.0
2 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/.swiftpm/xcode/xcshareddata/xcschemes/iOSClientPlayer.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
33 |
39 |
40 |
41 |
42 |
43 |
53 |
54 |
60 |
61 |
67 |
68 |
69 |
70 |
72 |
73 |
76 |
77 |
78 |
--------------------------------------------------------------------------------
/Documentation/Bitrates-Framerates-Resolutions.md:
--------------------------------------------------------------------------------
1 | ## Bitrates / Framerates & Resolutions
2 |
3 | Client applications using a `Tech` which adopts the `TrackSelectable` *api*, such as `HLSNative`, have access to the `AVAssetVariant`s of the current player item through the *api* below.
4 |
5 | ```Swift
6 | let availableVariants = player.variants
7 | ```
8 |
9 | The *api* returns an array of `AVAssetVariant`s. It is available on **iOS 15.0 / tvOS 15.0** and above only.
10 |
11 | ```Swift
12 | if #available(iOS 15.0, tvOS 15.0, *) {
13 |     self.player.variants?.forEach { variant in
14 |         print(variant.peakBitRate)
15 |         print(variant.videoAttributes?.nominalFrameRate)
16 |         print(variant.videoAttributes?.presentationSize)
17 |     }
18 | } else {
19 |     // Fallback on earlier versions
20 | }
21 | ```
22 |
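23 | Bitrate changes during playback can be observed through the `onBitrateChanged` callback described in [Responding to Playback Events](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/responding-to-playback-events.md). A minimal sketch, assuming the reported `bitrate` is in *bits per second* as advertised by the server:
24 | 
25 | ```Swift
26 | player.onBitrateChanged { player, source, bitrate in
27 |     // React to quality switches, for example by updating a quality indicator
28 |     print("Current bitrate: \(bitrate) bps")
29 | }
30 | ```
31 | 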
--------------------------------------------------------------------------------
/Documentation/analytics-how-to.md:
--------------------------------------------------------------------------------
1 | ## Analytics How-To
2 |
3 | Each `PlaybackTech` is responsible for continuously broadcasting a set of analytics related events throughout an active playback session. These events are processed per session by an associated `AnalyticsConnector` which can modulate, filter and modify this data before delivery to a set of `AnalyticsProvider`s. *Client applications* are encouraged to implement their own `AnalyticsProvider`s suitable to their infrastructure.
4 |
5 | *EMP* provides a complete, out of the box Analytics module through [ExposurePlayback](https://github.com/EricssonBroadcastServices/iOSClientExposurePlayback) which integrates seamlessly with the rest of the platform.
6 |
7 | ```Swift
8 | class PlayerViewController: UIViewController {
9 | fileprivate let player: Player<HLSNative<ManifestContext>>!
10 |
11 | @IBOutlet weak var playerView: UIView!
12 | @IBOutlet weak var overlayView: UIView!
13 | ...
14 |
15 | override func viewWillAppear(_ animated: Bool) {
16 | super.viewWillAppear(animated)
17 | player.configure(playerView: playerView)
18 | }
19 | }
20 | ```
21 |
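22 | The framework also ships with two ready-made pieces: `AnalyticsLogger`, a simple `AnalyticsProvider` that prints every event it receives to the console, and `PassThroughConnector`, an `AnalyticsConnector` that forwards events unmodified to its providers. A minimal sketch of combining them is shown below; how the connector is handed over to a playback session depends on your `PlaybackTech`/`MediaContext` integration and is not covered here.
23 | 
24 | ```Swift
25 | import iOSClientPlayer
26 | 
27 | // Forward all playback events, unfiltered, to a console logger.
28 | // Custom providers conform to `AnalyticsProvider` (a typealias for `EventResponder`).
29 | let connector = PassThroughConnector(providers: [AnalyticsLogger()])
30 | ```
31 | 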
--------------------------------------------------------------------------------
/Documentation/custom-playback-controls.md:
--------------------------------------------------------------------------------
1 | ## Custom Playback Controls
2 |
3 | *Client applications* using a `PlaybackTech` which features the `MediaRendering` *component* can build their own *view hierarchy* on top of a simple `UIView` allowing for extensive customization.
4 |
5 | * PlayerViewController
6 | * View
7 | * PlayerView (supplied to player)
8 | * OverlayView
9 |
10 | Configuring a rendering view using the built in `HLSNative` `PlaybackTech` is handled automatically by calling `configure(playerView:)`. This will insert a rendering layer as a subview of the supplied view while also setting up *Autolayout Constraints*.
11 |
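12 | A minimal sketch of the hierarchy above, assuming a `player` of type `Player<HLSNative<ManifestContext>>` created as in [Getting Started](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/getting-started.md); the outlet names are illustrative:
13 | 
14 | ```Swift
15 | import UIKit
16 | import iOSClientPlayer
17 | 
18 | class PlayerViewController: UIViewController {
19 |     // Player instance created as in Getting Started
20 |     fileprivate var player: Player<HLSNative<ManifestContext>>!
21 | 
22 |     @IBOutlet weak var playerView: UIView!   // rendering target handed to the player
23 |     @IBOutlet weak var overlayView: UIView!  // custom controls rendered on top
24 | 
25 |     override func viewWillAppear(_ animated: Bool) {
26 |         super.viewWillAppear(animated)
27 |         player.configure(playerView: playerView)
28 |     }
29 | }
30 | ```
31 | 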
--------------------------------------------------------------------------------
/Documentation/enabling-airplay.md:
--------------------------------------------------------------------------------
1 | ## Enabling Airplay
2 | Airplay controls are built into the iOS *Control Center*. Client applications may optionally add an *Airplay button* to their UI by creating an `MPVolumeView` with `showsVolumeSlider` set to `false`.
3 |
4 | ```Swift
5 | let airplayButton = MPVolumeView()
6 | airplayButton.showsVolumeSlider = false
7 | view.addSubview(airplayButton)
8 | ```
9 |
10 | **Note: From `iOS 13.0+`, developers need to use `AVAudioSessionRouteSharingPolicyLongFormVideo` to play to the same output as other long-form video apps, such as the built-in TV app. These apps should also set the `AVInitialRouteSharingPolicy` key in their Info.plist to `LongFormVideo`.**
11 |
12 | ```
13 | AirPlay optimization policy : Long Form Video
14 | ```
15 |
16 | For applications targeting `iOS 11.0+` there is an additional option in the form of `AVRoutePickerView`; see the sketch at the end of this document.
17 |
18 |
19 |
20 | #### Background Modes
21 | Client applications that wish to continue *Airplay* playback once a user locks their screen or navigates away from the app need to set the relevant `Capabilities` in their *Xcode* project.
22 |
23 | 1. Select the relevant `Target` for your app in your *Xcode* project
24 | 2. Under `Capabilities`, locate `Background Modes`
25 | 3. Make sure `Audio, AirPlay, and Picture in Picture` is selected
26 | 4. Set the relevant `Categories` on the shared `AVAudioSession` object and make sure the session is active before playback starts
27 |
28 | ```Swift
29 | if #available(iOS 11.0, *) {
30 | try audioSession.setCategory(AVAudioSessionCategoryPlayback, mode: AVAudioSessionModeMoviePlayback, routeSharingPolicy: .longForm)
31 | }
32 | else if #available(iOS 10.0, *) {
33 | try audioSession.setCategory(AVAudioSessionCategoryPlayback, mode: AVAudioSessionModeMoviePlayback)
34 | }
35 | else {
36 | try audioSession.setCategory(AVAudioSessionCategoryPlayback)
37 | }
38 | try audioSession.setActive(true)
39 | ```
40 |
41 | #### Airplay Best Practices
42 | For the best possible user experience, client applications should reuse the `Player` object between playback calls instead of recreating it each time. This is especially important when using *Airplay mode*.
43 |
44 | An application that supports content browsing in *Airplay mode* may experience rendering discontinuities on the external (Airplay) screen if a new `Player` object is created for each subsequent play request. While airplaying to an *Apple TV*, this manifests itself as the *tvOS springboard* briefly becoming visible between the two playback sessions. Client applications are therefore recommended to keep the `Player` object alive during content switching; new `Source` objects are easily loaded into the existing player while the current playback is still underway.
45 |
46 | #### Known Limitations
47 | Continuous playback in the event of an incoming phone call is only supported when using the `.longForm` `Route Sharing Policy`. For more information about adopting *Airplay 2* please see [Getting Airplay 2 Into Your App](https://developer.apple.com/documentation/avfoundation/airplay_2/getting_airplay_2_into_your_app).
48 |
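49 | As referenced above, a minimal sketch of adding an `AVRoutePickerView` on `iOS 11.0+`, placed inside a `UIViewController`; the frame, tint and subview placement are illustrative:
50 | 
51 | ```Swift
52 | import AVKit
53 | import UIKit
54 | 
55 | if #available(iOS 11.0, *) {
56 |     // System-provided button that presents the Airplay route picker
57 |     let routePicker = AVRoutePickerView(frame: CGRect(x: 0, y: 0, width: 44, height: 44))
58 |     routePicker.activeTintColor = .white
59 |     if #available(iOS 13.0, *) {
60 |         // Prefer video capable devices, in line with the long-form video policy above
61 |         routePicker.prioritizesVideoDevices = true
62 |     }
63 |     view.addSubview(routePicker)
64 | }
65 | ```
66 | 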
--------------------------------------------------------------------------------
/Documentation/error-handling.md:
--------------------------------------------------------------------------------
1 | ## Error Handling
2 |
3 | `PlayerError` is the error type returned by the *Player Framework*. It contains both `MediaContext` and `PlaybackTech` related errors.
4 |
5 | Effective error handling thus requires a deeper understanding of the overall architecture, taking *tech*, *context* and possibly *drm* errors into consideration.
6 |
7 | *Client applications* should register to receive errors through the `Player` method `onError(callback:)`
8 |
9 | ```Swift
10 | myPlayer.onError{ player, source, error in
11 | // Handle the error
12 | }
13 | ```
14 |
15 | Errors associated with `PlayerError` mandate three properties, as illustrated in the sketch below:
16 | 
17 | * `domain` The domain of errors this specific error belongs to, for example `HLSNativeErrorDomain`
18 | * `code` An error code specific to the `domain`
19 | * `message` A description of what the error entails
20 |
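21 | A minimal sketch of branching on these properties inside the callback; the comparison against `"HLSNativeErrorDomain"` is illustrative and assumes `domain` is exposed as a string, so match whatever domains your chosen `PlaybackTech` and `MediaContext` actually report:
22 | 
23 | ```Swift
24 | myPlayer.onError{ player, source, error in
25 |     switch error.domain {
26 |     case "HLSNativeErrorDomain":
27 |         // Tech related error, for example a failed asset preparation
28 |         print("HLSNative error", error.code, error.message)
29 |     default:
30 |         // Context or DRM related error
31 |         print("Playback error", error.code, error.message)
32 |     }
33 | }
34 | ```
35 | 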
--------------------------------------------------------------------------------
/Documentation/getting-started.md:
--------------------------------------------------------------------------------
1 | ## Getting Started
2 |
3 | `Player` has been designed with a minimalistic but extendable approach in mind. It is a *stand-alone* playback protocol designed to use modular playback technologies and context sensitive playback sources. *Features as components* allow `PlaybackTech` or `MediaContext` specific functionality when so desired. This flexible yet powerful model allows targeted behavior tailored for client specific needs.
4 | The framework also contains a `PlaybackTech` implementation, `HLSNative`, supporting playback using the built in `AVPlayer`.
5 |
6 | The `Player` class acts as an *api provider* granting *client applications* access to a tailored, self-contained playback experience. Instantiation is done by defining the `PlaybackTech` and `MediaContext` to use. The following examples use `HLSNative` to demonstrate the procedure.
7 |
8 | ```Swift
9 | class PlayerViewController: UIViewController {
10 | fileprivate let context = ManifestContext()
11 | fileprivate let tech = HLSNative<ManifestContext>()
12 | fileprivate var player: Player<HLSNative<ManifestContext>>!
13 |
14 | override func viewDidLoad() {
15 | player = Player(tech: tech, context: context)
16 | }
17 | }
18 | ```
19 |
20 | Media rendering can be done using `UIView` as defined by a *Component* called `MediaRendering`. It allows *client applications* to supply a `view` in which the media will be rendered under custom overlay controls.
21 |
22 | ```Swift
23 | player.configure(playerView: customPlayerView)
24 | ```
25 |
26 | Loading and preparation of a stream using the built in `HLSNative` `Tech` takes place in a multi-step process.
27 | First, the `ManifestContext` supplied to `Player` on initialisation defines the context in which source media exists. This `MediaContext` is responsible for producing a `MediaSource` when asked to do so. Our example case only relies on a valid media `URL` but more complex contexts likely involve fetching assets from a remote location or processing data on device.
28 |
29 | ```Swift
30 | let manifest = context.manifest(from: someUrl)
31 | ```
32 |
33 | The next step involves *loading* this context generated `MediaSource` into the selected `PlaybackTech`. In general, the `Tech` in question is completely agnostic when it comes to the media source loaded. This means the source is responsible for producing the `Tech`-specific `Configuration` type that encapsulates the information required for configuration.
34 |
35 | ```Swift
36 | extension Player where Tech == HLSNative<ManifestContext> {
37 | func stream(url: URL) {
38 | let manifest = context.manifest(from: url)
39 | let configuration = HLSNativeConfiguration(drm: manifest.fairplayRequester)
40 | tech.load(source: manifest, configuration: configuration)
41 | }
42 | }
43 | ```
44 |
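45 | With the extension above in place, starting playback is a matter of streaming a `URL` and reacting to the ready event (see [Responding to Playback Events](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/responding-to-playback-events.md)). A minimal sketch, where `someUrl` stands in for a valid media `URL`:
46 | 
47 | ```Swift
48 | player.onPlaybackReady{ player, source in
49 |     // Playback can safely be started at this point
50 |     player.play()
51 | }
52 | 
53 | player.stream(url: someUrl)
54 | ```
55 | 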
--------------------------------------------------------------------------------
/Documentation/modular-playback-technology.md:
--------------------------------------------------------------------------------
1 | ## Modular Playback Technology
2 | One major goal when developing `Player` has been to decouple the playback *api* from the underlying playback technology and context. A tech independent architecture allows *client applications* to select their playback environment of choice or develop their own.
3 |
4 | Restrictions on `PlaybackTech` have been kept loose by design. The contract between `Player`, the `PlaybackTech`, the `MediaContext` and associated features should largely be defined by their interaction as a complete package. As such, *tech developers* are free to make choices that feel relevant to their platform.
5 |
6 | The `PlaybackTech` protocol should be considered a *container* for features related to rendering the media on screen. `HLSNative` provides a baseline implementation which may serve as a guide for this approach.
7 |
8 | #### Context Sensitive Playback
9 | A second cornerstone is *Context Sensitive* playback. `MediaContext` should encapsulate everything related to the playback context in question, such as the source url, content restrictions, `Drm Agent`s and related meta data. This kind of information is often very platform dependent and specialized.
10 |
11 | Contexts should define a `MediaSource`, usually fetched from some content managed remote source. It typically includes a media locator, content restrictions and possibly meta data regarding the source in question.
12 |
13 | #### Features as Components
14 | The final cornerstone is *Features as Components*. `PlaybackTech` and `MediaContext` tied together in a constrained fashion deliver a focused *api* definition in which certain functionality may only be available in a specific context using a specific tech.
15 |
16 | Features may be anything the platform defines, for example convenience methods for starting playback of specific assets by identifier, or contract restrictions applied by module injection; see the sketch at the end of this document.
17 |
18 | [ExposurePlayback module](https://github.com/EricssonBroadcastServices/iOSClientExposurePlayback) has a rich set of *Features* constrained to an `ExposureContext` related playback.
19 |
20 | #### Drm Agents and FairPlay
21 | Streaming `DRM` protected media assets will require *client applications* to implement their own platform specific `DrmAgent`s. In the case of *FairPlay*, this most likely involves interaction with the *Apple* supplied `AVAssetResourceLoaderDelegate` protocol.
22 |
23 | **EMP** provides an out of the box implementation for *FairPlay* protection through the [ExposurePlayback module](https://github.com/EricssonBroadcastServices/iOSClientExposurePlayback) which integrates seamlessly with the rest of the platform.
24 |
25 | ### HLSNative Technology
26 | `HLSNative` provides a base implementation for playback of media using the native `AVPlayer`.
27 | The following features are supported out of the box. Please keep in mind that playback of *FairPlay* protected assets requires a working *FairPlay* license server.
28 |
29 | - [x] VoD, live and catchup streaming
30 | - [x] FairPlay DRM protection as a plugin
31 | - [x] Customizable playback overlay
32 | - [x] Multi-device session shift
33 |
34 | Under the hood, `HLSNative` is a wrapper around `KVO`, `notifications` and state management. It implements `MediaRendering`, `MediaPlayback` and `StartTime`.
35 |
36 | #### Loading and Preparation
37 | Loading and preparation of playback using `HLSNative` is an asynchronous process.
38 |
39 | ```Swift
40 | avUrlAsset.loadValuesAsynchronously(forKeys: keys) {
41 | ...
42 |     keys.forEach{
43 |         var error: NSError?
44 |         let status = avUrlAsset.statusOfValue(forKey: $0, error: &error) // Handle status failed and/or errors
45 | }
46 | }
47 | ```
48 |
49 | For more information regarding the *async loading process* of `properties` on `AVURLAsset`s, please consult Apple's documentation on `AVAsynchronousKeyValueLoading`.
50 | Once the loading process has run its course, the asset is either ready for playback or an `HLSNativeError.failedToReady(error: underlyingError)` is thrown.
51 |
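52 | As referenced in the *Features as Components* section above, a minimal sketch of a feature constrained to a specific `PlaybackTech` and `MediaContext` pairing, reusing `HLSNative<ManifestContext>` from [Getting Started](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/getting-started.md); the `restartFromBeginning()` convenience itself is purely illustrative:
53 | 
54 | ```Swift
55 | import iOSClientPlayer
56 | 
57 | extension Player where Tech == HLSNative<ManifestContext> {
58 |     /// Illustrative feature: seek back to the start of the buffer and resume playback.
59 |     func restartFromBeginning() {
60 |         seek(toPosition: 0)
61 |         play()
62 |     }
63 | }
64 | ```
65 | 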
--------------------------------------------------------------------------------
/Documentation/responding-to-playback-events.md:
--------------------------------------------------------------------------------
1 | ## Responding to Playback Events
2 | Streaming media is an inherently asynchronous process. Preparation and initialisation of a *playback session* is subject to a host of outside factors, such as network availability, content hosting and possibly `DRM` validation. An active session must respond to environmental changes, report on playback progress and optionally deliver event specific [analytics](#analytics-how-to) data. Additionally, user interaction must be handled in a reliable and responsive way.
3 |
4 | Finally, [error handling](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/error-handling.md) needs to be robust.
5 |
6 | `Player` exposes functionality allowing an interested party to register callbacks that fire when the events occur.
7 |
8 | #### Initialisation and Preparation of playback
9 | During the preparation, loading and finalization of a `MediaContext`, the associated `PlaybackTech` is responsible for publishing events detailing the process.
10 |
11 | ```Swift
12 | myPlayer
13 | .onPlaybackCreated{ player, source in
14 | // Fires once the associated MediaSource has been created.
15 | // Playback is not ready to start at this point.
16 | }
17 | .onPlaybackPrepared{ player, source in
18 | // Published when the associated MediaSource completed asynchronous loading of relevant properties.
19 | // Playback is not ready to start at this point.
20 | }
21 | .onPlaybackReady{ player, source in
22 | // When this event fires starting playback is possible
23 | player.play()
24 | }
25 | ```
26 |
27 | #### Playback events
28 | Once playback is in progress the `Player` continuously publishes *events* related to media status and user interaction.
29 |
30 | ```Swift
31 | myPlayer
32 | .onPlaybackStarted{ player, source in
33 | // Published once the playback starts for the first time.
34 | // This is a one-time event.
35 | }
36 | .onPlaybackPaused{ [weak self] player, source in
37 | // Fires when the playback pauses for some reason
38 | self?.pausePlayButton.toggle(paused: true)
39 | }
40 | .onPlaybackResumed{ [weak self] player, source in
41 | // Fires when the playback resumes from a paused state
42 | self?.pausePlayButton.toggle(paused: false)
43 | }
44 | .onPlaybackAborted{ player, source in
45 | // Published once the player.stop() method is called.
46 | // This is considered a user action
47 | }
48 | .onPlaybackCompleted{ player, source in
49 | // Published when playback reached the end of the current media.
50 | }
51 | ```
52 | Besides playback control events, `Player` also publishes several status-related events.
53 |
54 | ```Swift
55 | myPlayer
56 | .onBitrateChanged{ [weak self] player, source, bitrate in
57 | // Published whenever the current bitrate changes
58 | self?.updateQualityIndicator(with: bitrate)
59 | }
60 | .onBufferingStarted{ player, source in
61 | // Fires whenever the buffer is unable to keep up with playback
62 | }
63 | .onBufferingStopped{ player, source in
64 | // Fires when buffering is no longer needed
65 | }
66 | .onDurationChanged{ player, source in
67 | // Published when the active media received an update to its duration property
68 | }
69 | ```
70 |
71 | #### Error forwarding
72 | Errors encountered throughout the lifecycle of `Player` are published through `onError(callback:)`. For more information, please see [Error Handling](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/error-handling.md).
73 |
--------------------------------------------------------------------------------
/Documentation/subtitles-and-multi-audio.md:
--------------------------------------------------------------------------------
1 | ## Subtitles and Multi-Audio
2 |
3 | Client applications using a `Tech` which adopts the `TrackSelectable` *api*, such as `HLSNative`, have access to a set of methods and properties enabling selection of *subtitles* and *audio tracks*.
4 |
5 | The *api* allows for easy selection of tracks by supplying an *RFC 4646* compliant language tag:
6 |
7 | ```Swift
8 | player.selectText(language: "fr")
9 | player.selectAudio(language: "en")
10 | ```
11 |
12 | Alternatively, developers can pass the `mediaTrackId` or the `title` of the track to select subtitles or audio tracks.
13 |
14 | ```Swift
15 | // Selecting Audio
16 | let availableAudioTracks = player.audioTracks
17 | let firstAudioTrack = availableAudioTracks.first
18 | 
19 | if let mediaTrackId = firstAudioTrack?.mediaTrackId {
20 |     self.player.selectAudio(mediaTrackId: mediaTrackId)
21 | }
22 | 
23 | if let title = firstAudioTrack?.title { self.player.selectAudio(title: title) }
24 |
25 | // Selecting Subtitles
26 | let availableSubtitleTracks = player.textTracks
27 | let firstSubTrack = availableSubtitleTracks.first
28 | 
29 | if let mediaTrackId = firstSubTrack?.mediaTrackId {
30 |     self.player.selectText(mediaTrackId: mediaTrackId)
31 | }
32 | 
33 | if let title = firstSubTrack?.title { self.player.selectText(title: title) }
34 |
35 | ```
36 |
37 | In addition, the protocol defines a set of inspection properties through which client applications can gain insight into the available, selected and default tracks.
38 |
39 | `HLSNative` expresses this through the `MediaGroup` and `MediaTrack` `struct`s.
40 |
41 | `MediaGroup` encapsulates a certain aspect of track selection, such as *audio* or *subtitles*. Each group can be queried for information regarding the following properties:
42 |
43 | * default track
44 | * all available tracks
45 | * currently selected track
46 | * and if the group allows empty selection
47 |
48 | What constitutes a default track is normally encoded in the stream manifest.
49 |
50 | `MediaTrack`s themselves contain a `name`, which is a string suitable for display purposes, a `type` such as *subtitle*, a `title` which is equivalent to the *NAME* tag for the track in the hls playlist, a `mediaTrackId` which is a unique id used to differentiate the tracks, and finally the `extendedLanguageTag` which is an *RFC 4646* compliant language tag.
51 |
52 | ```Swift
53 | let availableAudioTracks = player.audioTracks
54 | let selectedAudioTrack = player.selectedAudioTrack
55 |
56 | let title = selectedAudioTrack.title
57 | let trackId = selectedAudioTrack.mediaTrackId
58 |
59 | ```
60 |
61 | Turning off a track is as simple as specifying a `nil` selection
62 |
63 | ```Swift
64 | player.selectText(track: nil)
65 | ```
66 |
--------------------------------------------------------------------------------
/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "object": {
3 | "pins": [
4 | {
5 | "package": "CwlCatchException",
6 | "repositoryURL": "https://github.com/mattgallagher/CwlCatchException.git",
7 | "state": {
8 | "branch": null,
9 | "revision": "3b123999de19bf04905bc1dfdb76f817b0f2cc00",
10 | "version": "2.1.2"
11 | }
12 | },
13 | {
14 | "package": "CwlPreconditionTesting",
15 | "repositoryURL": "https://github.com/mattgallagher/CwlPreconditionTesting.git",
16 | "state": {
17 | "branch": null,
18 | "revision": "a23ded2c91df9156628a6996ab4f347526f17b6b",
19 | "version": "2.1.2"
20 | }
21 | },
22 | {
23 | "package": "Nimble",
24 | "repositoryURL": "https://github.com/Quick/Nimble.git",
25 | "state": {
26 | "branch": null,
27 | "revision": "c93f16c25af5770f0d3e6af27c9634640946b068",
28 | "version": "9.2.1"
29 | }
30 | },
31 | {
32 | "package": "Quick",
33 | "repositoryURL": "https://github.com/Quick/Quick.git",
34 | "state": {
35 | "branch": null,
36 | "revision": "bd86ca0141e3cfb333546de5a11ede63f0c4a0e6",
37 | "version": "4.0.0"
38 | }
39 | }
40 | ]
41 | },
42 | "version": 1
43 | }
44 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version:5.5
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "iOSClientPlayer",
8 | platforms: [.iOS(.v12),
9 | .tvOS(.v12)],
10 | products: [
11 | // Products define the executables and libraries a package produces, and make them visible to other packages.
12 | .library(
13 | name: "iOSClientPlayer",
14 | targets: ["iOSClientPlayer"]
15 | ),
16 | ],
17 | dependencies: [
18 | // Dependencies declare other packages that this package depends on.
19 | // .package(url: /* package url */, from: "1.0.0"),
20 | .package(url: "https://github.com/Quick/Quick.git", from: "4.0.0"),
21 | .package(url: "https://github.com/Quick/Nimble.git", from: "9.1.0"),
22 | ],
23 | targets: [
24 | // Targets are the basic building blocks of a package. A target can define a module or a test suite.
25 | // Targets can depend on other targets in this package, and on products in packages this package depends on.
26 | .target(
27 | name: "iOSClientPlayer",
28 | dependencies: [],exclude: ["Info.plist"],
29 | resources: [.copy("PrivacyInfo.xcprivacy")]
30 | ),
31 | .target(
32 | name: "iOSClientPlayerObjc",
33 | dependencies: []
34 | ),
35 | .testTarget(
36 | name: "iOSClientPlayerTests",
37 | dependencies: ["iOSClientPlayer", "Quick", "Nimble"],
38 | exclude: ["Info.plist"]
39 | ),
40 | ]
41 | )
42 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ![Swift](https://img.shields.io/badge/Swift-5.3_5.4_5.5-Orange?style=flat-square)
2 | ![Platforms](https://img.shields.io/badge/Platforms-macOS_iOS_tvOS_watchOS_Linux_Windows-Green?style=flat-square)
3 | ![CocoaPods](https://img.shields.io/cocoapods/v/Alamofire.svg)
4 | [Carthage compatible](https://github.com/Carthage/Carthage)
5 | ![Swift Package Manager](https://img.shields.io/badge/Swift_Package_Manager-compatible-orange?style=flat-square)
6 |
7 |
8 | # Player
9 |
10 | * [Features](#features)
11 | * [License](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/LICENSE)
12 | * [Requirements](#requirements)
13 | * [Installation](#installation)
14 | * Documentation
15 | - [Getting Started](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/getting-started.md)
16 | - [Modular Playback Technology](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/modular-playback-technology.md)
17 | - [Responding to Playback Events](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/responding-to-playback-events.md)
18 | - [Enabling Airplay](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/enabling-airplay.md)
19 | - [Analytics How-To](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/analytics-how-to.md)
20 | - [Custom Playback Controls](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/custom-playback-controls.md)
21 | - [Error Handling](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/error-handling.md)
22 | - [Subtitles and Multi-Audio](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/Documentation/subtitles-and-multi-audio.md)
23 | * [Release Notes](#release-notes)
24 | * [Upgrade Guides](#upgrade-guides)
25 |
26 |
27 | ## Features
28 |
29 | - [x] Modular `PlaybackTech`
30 | - [x] Context sensitive playback
31 | - [x] Features as components
32 | - [x] Customizable `DrmAgent`s
33 | - [x] Pluggable analytics
34 | - [x] Playback event publishing
35 | - [x] Custom playback controls
36 | - [x] Airplay
37 | - [x] Track selection
38 | - [x] Preferred bitrate limitation
39 |
40 |
41 | ## Requirements
42 |
43 | * `iOS` 12.0+
44 | * `tvOS` 12.0+
45 | * `Swift` 5.0+
46 | * `Xcode` 13.0+
47 |
48 | ## Installation
49 |
50 | ### Swift Package Manager
51 |
52 | The Swift Package Manager is a tool for automating the distribution of Swift code and is integrated into the Swift compiler.
53 | Once you have your Swift package set up, adding `iOSClientPlayer` as a dependency is as easy as adding it to the `dependencies` value of your `Package.swift`.
54 |
55 | ```Swift
56 | dependencies: [
57 | .package(url: "https://github.com/EricssonBroadcastServices/iOSClientPlayer", from: "3.6.1")
58 | ]
59 | ```
60 |
61 | ### Carthage
62 | [Carthage](https://github.com/Carthage/Carthage) is a decentralized dependency manager that builds your dependency graph without interfering with your `Xcode` project setup. `CI` integration through [fastlane](https://github.com/fastlane/fastlane) is also available.
63 |
64 | Install *Carthage* through [Homebrew](https://brew.sh) by performing the following commands:
65 |
66 | ```sh
67 | $ brew update
68 | $ brew install carthage
69 | ```
70 |
71 | Once *Carthage* has been installed, you need to create a `Cartfile` which specifies your dependencies. Please consult the [artifacts](https://github.com/Carthage/Carthage/blob/master/Documentation/Artifacts.md) documentation for in-depth information about `Cartfile`s and the other artifacts created by *Carthage*.
72 |
73 | ```sh
74 | github "EricssonBroadcastServices/iOSClientPlayer"
75 | ```
76 |
77 | Running `carthage update` will fetch your dependencies and place them in `/Carthage/Checkouts`. You can either build the `.framework`s and drag them into your `Xcode` project, or attach the fetched projects to your `Xcode` workspace.
78 | 
79 | Finally, make sure you add the `.framework`s to your target's *General -> Embedded Binaries* section.
80 |
81 | ### CocoaPods
82 | CocoaPods is a dependency manager for Cocoa projects. For usage and installation instructions, visit their website. To integrate `iOSClientPlayer` into your Xcode project using CocoaPods, specify it in your Podfile:
83 |
84 | ```sh
85 | pod 'iOSClientPlayer', '~> 3.6.1'
86 | ```
87 |
88 | ## Release Notes
89 | Release specific changes can be found in the [CHANGELOG](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/CHANGELOG.md).
90 |
91 | ## Upgrade Guides
92 | The procedure to apply when upgrading from one version to another depends on how your client application has chosen to integrate `Player`.
93 |
94 | Major changes between releases will be documented with special [Upgrade Guides](https://github.com/EricssonBroadcastServices/iOSClientPlayer/blob/master/UPGRADE_GUIDE.md).
95 |
96 | ### Carthage
97 | Updating your dependencies is done by running `carthage update` with the relevant *options*, such as `--use-submodules`, depending on your project setup. For more information regarding dependency management with `Carthage` please consult their [documentation](https://github.com/Carthage/Carthage/blob/master/README.md) or run `carthage help`.
98 |
99 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Analytics/AnalyticsConnector.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AnalyticsConnector.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | /// `AnalyticsConnector` is responsible for managing the interaction between raw `PlaybackTech` events, tailored to the need of specific `AnalyticsProvider`s.
13 | public protocol AnalyticsConnector: EventResponder, TraceProvider, TechDeallocationEventProvider, SourceAbandonedEventProvider, TimedMetadataProvider {
14 | /// Analytics connector will manage, filter and possibly forward events to all providers specified here
15 | var providers: [AnalyticsProvider] { get set }
16 | }
17 |
18 | extension AnalyticsConnector {
19 | public func onTrace(tech: Tech?, source: Source?, data: [String : Any]) where Tech : PlaybackTech, Source : MediaSource {
20 | providers.forEach{
21 | if let provider = $0 as? TraceProvider {
22 | provider.onTrace(tech: tech, source: source, data: data)
23 | }
24 | }
25 | }
26 | }
27 |
28 | extension AnalyticsConnector {
29 | public func onTechDeallocated(beforeMediaPreparationFinalizedOf mediaSource: Source) where Source : MediaSource {
30 | providers.forEach{
31 | if let provider = $0 as? TechDeallocationEventProvider {
32 | provider.onTechDeallocated(beforeMediaPreparationFinalizedOf: mediaSource)
33 | }
34 | }
35 | }
36 | }
37 |
38 | extension AnalyticsConnector {
39 | public func onSourcePreparationAbandoned(ofSource mediaSource: Source, byTech tech: Tech) where Tech : PlaybackTech, Source : MediaSource {
40 | providers.forEach{
41 | if let provider = $0 as? SourceAbandonedEventProvider {
42 | provider.onSourcePreparationAbandoned(ofSource: mediaSource, byTech: tech)
43 | }
44 | }
45 | }
46 | }
47 |
48 | extension AnalyticsConnector {
49 | public func onTimedMetadataChanged(source: Source?, tech: Tech, metadata: [AVMetadataItem]?) where Tech : PlaybackTech, Source : MediaSource {
50 | providers.forEach{
51 | if let provider = $0 as? TimedMetadataProvider {
52 | provider.onTimedMetadataChanged(source: source, tech: tech, metadata: metadata)
53 | }
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Analytics/AnalyticsProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AnalyticsProvider.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-07-17.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Typealias for an `EventResponder` associated with analytics.
12 | public typealias AnalyticsProvider = EventResponder
13 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Analytics/Connectors/PassThroughConnector.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PassThroughConnector.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Simple `AnalyticsConnector` that forwards all events to the specified `AnalyticsProvider`s
12 | public class PassThroughConnector: AnalyticsConnector {
13 |
14 |
15 | public init(providers: [AnalyticsProvider] = []) {
16 | self.providers = providers
17 | }
18 |
19 | deinit {
20 | print("PassThroughConnector deinit")
21 | }
22 |
23 | public var providers: [AnalyticsProvider]
24 |
25 | public func onCreated(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
26 | providers.forEach{ $0.onCreated(tech: tech, source: source) }
27 | }
28 |
29 | public func onPrepared(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
30 | providers.forEach{ $0.onPrepared(tech: tech, source: source) }
31 | }
32 |
33 | public func onReady(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
34 | providers.forEach{ $0.onReady(tech: tech, source: source) }
35 | }
36 |
37 | public func onStarted(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
38 | providers.forEach{ $0.onStarted(tech: tech, source: source) }
39 | }
40 |
41 | public func onPaused(tech: Tech, source: Source) where Tech : PlaybackTech, Source: MediaSource {
42 | providers.forEach{ $0.onPaused(tech: tech, source: source) }
43 | }
44 |
45 | public func onResumed(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
46 | providers.forEach{ $0.onResumed(tech: tech, source: source) }
47 | }
48 |
49 | public func onAborted(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
50 | providers.forEach{ $0.onAborted(tech: tech, source: source) }
51 | }
52 |
53 | public func onCompleted(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
54 | providers.forEach{ $0.onCompleted(tech: tech, source: source) }
55 | }
56 |
57 | public func onError(tech: Tech?, source: Source?, error: PlayerError) where Tech : PlaybackTech, Source : MediaSource, Context : MediaContext {
58 | providers.forEach{ $0.onError(tech: tech, source: source, error: error) }
59 | }
60 |
61 | public func onBitrateChanged(tech: Tech, source: Source, bitrate: Double) where Tech : PlaybackTech, Source : MediaSource {
62 | providers.forEach{ $0.onBitrateChanged(tech: tech, source: source, bitrate: bitrate) }
63 | }
64 |
65 | public func onBufferingStarted(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
66 | providers.forEach{ $0.onBufferingStarted(tech: tech, source: source) }
67 | }
68 |
69 | public func onBufferingStopped(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
70 | providers.forEach{ $0.onBufferingStopped(tech: tech, source: source) }
71 | }
72 |
73 | public func onScrubbedTo(tech: Tech, source: Source, offset: Int64) where Tech : PlaybackTech, Source : MediaSource {
74 | providers.forEach{ $0.onScrubbedTo(tech: tech, source: source, offset: offset) }
75 | }
76 |
77 | public func onDurationChanged(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
78 | providers.forEach{ $0.onDurationChanged(tech: tech, source: source)}
79 | }
80 |
81 | public func onWarning(tech: Tech, source: Source?, warning: PlayerWarning) where Tech : PlaybackTech, Source : MediaSource, Context : MediaContext {
82 | providers.forEach{ $0.onWarning(tech: tech, source: source, warning: warning) }
83 | }
84 |
85 | public func onAppDidEnterBackground(tech: Tech, source: Source?) where Tech : PlaybackTech, Source : MediaSource {
86 | providers.forEach{ $0.onAppDidEnterBackground(tech: tech, source: source) }
87 | }
88 |
89 | public func onAppDidEnterForeground(tech: Tech, source: Source?) where Tech : PlaybackTech, Source : MediaSource {
90 | providers.forEach{ $0.onAppDidEnterForeground(tech: tech, source: source) }
91 | }
92 |
93 | public func onGracePeriodStarted(tech: Tech, source: Source?) where Tech : PlaybackTech, Source : MediaSource {
94 | providers.forEach{ $0.onGracePeriodStarted(tech: tech, source: source) }
95 | }
96 |
97 | public func onGracePeriodEnded(tech: Tech, source: Source?) where Tech : PlaybackTech, Source : MediaSource {
98 | providers.forEach{ $0.onGracePeriodEnded(tech: tech, source: source) }
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Analytics/Providers/AnalyticsLogger.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AnalyticsLogger.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Simple `AnalyticsProvider` that logs any events it receives to the console.
12 | public struct AnalyticsLogger: AnalyticsProvider {
13 |
14 |
15 | public init() { }
16 | public func onCreated(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
17 | print("🏷 AnalyticsLogger",type(of: tech),"🏗 onCreated",source.playSessionId)
18 | }
19 |
20 | public func onPrepared(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
21 | print("🏷 AnalyticsLogger",type(of: tech),"🛁 onPrepared",source.playSessionId)
22 | }
23 |
24 | public func onReady(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
25 | print("🏷 AnalyticsLogger",type(of: tech),"👍 onReady",source.playSessionId)
26 | }
27 |
28 | public func onStarted(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
29 | print("🏷 AnalyticsLogger",type(of: tech),"🎬 onStarted",source.playSessionId)
30 | }
31 |
32 | public func onPaused(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
33 | print("🏷 AnalyticsLogger",type(of: tech),"⏸ onPaused",source.playSessionId)
34 | }
35 |
36 | public func onResumed(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
37 | print("🏷 AnalyticsLogger",type(of: tech),"▶️ onResumed",source.playSessionId)
38 | }
39 |
40 | public func onAborted(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
41 | print("🏷 AnalyticsLogger",type(of: tech),"⏹ onAborted",source.playSessionId)
42 | }
43 |
44 | public func onCompleted(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
45 | print("🏷 AnalyticsLogger",type(of: tech),"🏁 onCompleted",source.playSessionId)
46 | }
47 |
48 | public func onError(tech: Tech?, source: Source?, error: PlayerError) where Tech : PlaybackTech, Source : MediaSource, Context : MediaContext {
49 | print("🏷 AnalyticsLogger",type(of: tech),"🚨 onError",error.message,source?.playSessionId ?? "")
50 | }
51 |
52 | public func onBitrateChanged(tech: Tech, source: Source, bitrate: Double) where Tech : PlaybackTech, Source : MediaSource {
53 | print("🏷 AnalyticsLogger",type(of: tech),"📶 onBitrateChanged [\(bitrate)]",source.playSessionId)
54 | }
55 |
56 | public func onBufferingStarted(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
57 | print("🏷 AnalyticsLogger",type(of: tech),"⏳ onBufferingStarted",source.playSessionId)
58 | }
59 |
60 | public func onBufferingStopped(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
61 | print("🏷 AnalyticsLogger",type(of: tech),"⌛ onBufferingStopped",source.playSessionId)
62 | }
63 |
64 | public func onScrubbedTo(tech: Tech, source: Source, offset: Int64) where Tech : PlaybackTech, Source : MediaSource {
65 | print("🏷 AnalyticsLogger",type(of: tech),"🕘 onScrubbedTo [\(offset)]",source.playSessionId)
66 | }
67 |
68 | public func onDurationChanged(tech: Tech, source: Source) where Tech : PlaybackTech, Source : MediaSource {
69 | print("🏷 AnalyticsLogger",type(of: tech),"📅 onDurationChanged",source.playSessionId)
70 | }
71 |
72 | public func onWarning(tech: Tech, source: Source?, warning: PlayerWarning) where Tech : PlaybackTech, Source : MediaSource, Context : MediaContext {
73 | print("🏷 AnalyticsLogger",type(of: tech),"⚠️ onWarning",warning.message,source?.playSessionId ?? "")
74 | }
75 |
76 | public func onAppDidEnterBackground(tech: Tech, source: Source?) where Tech : PlaybackTech, Source : MediaSource {
77 | print("🏷 AnalyticsLogger",type(of: tech),"⍂ onAppDidEnterBackground",source?.playSessionId ?? "" )
78 | }
79 |
80 | public func onAppDidEnterForeground(tech: Tech, source: Source?) where Tech : PlaybackTech, Source : MediaSource {
81 | print("🏷 AnalyticsLogger",type(of: tech),"⍂ onAppDidEnterForeground",source?.playSessionId ?? "" )
82 | }
83 |
84 | public func onGracePeriodStarted(tech: Tech, source: Source?) where Tech : PlaybackTech, Source : MediaSource {
85 | print("🏷 AnalyticsLogger",type(of: tech),"⏳ onGracePeriodStarted",source?.playSessionId ?? "" )
86 | }
87 |
88 | public func onGracePeriodEnded(tech: Tech, source: Source?) where Tech : PlaybackTech, Source : MediaSource {
89 | print("🏷 AnalyticsLogger",type(of: tech),"⌛️ onGracePeriodEnded",source?.playSessionId ?? "" )
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Analytics/SourceAbandonedEventProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourceAbandonedEventProvider.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-08-06.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public protocol SourceAbandonedEventProvider {
12 | /// This method is called whenever a `MediaSource` in preparation was abandoned before it completed loading all properties.
13 | ///
14 | /// Adopters should treat this callback as the last point of interaction with `mediaSource` and take appropriate finalization actions.
15 | ///
16 | /// - parameter mediaSource: The `MediaSource` which was set to load and prepare itself
17 | /// - parameter tech: The `Tech` loading the `mediaSource`
18 | func onSourcePreparationAbandoned(ofSource mediaSource: Source, byTech tech: Tech) where Source: MediaSource, Tech: PlaybackTech
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Analytics/TechDeallocationEventProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TechDeallocationEventProvider.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-05-24.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public protocol TechDeallocationEventProvider {
12 | /// This method is called whenever preparation of a `MediaSource` finishes after the initiating `Tech` has been deallocated.
13 | ///
14 | /// Adopters should treat this callback as the last point of interaction with `mediaSource` and take appropriate finalization actions.
15 | ///
16 | /// - parameter mediaSource: The `MediaSource` which was set to load and prepare itself
17 | func onTechDeallocated(beforeMediaPreparationFinalizedOf mediaSource: Source) where Source: MediaSource
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Analytics/TimedMetadataProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TimedMetadataProvider.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-10-02.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | public protocol TimedMetadataProvider {
13 | /// This method is called whenever new `AVMetadataItem`s are encountered
14 | ///
15 | /// - parameter source: The `MediaSource` which was set to load and prepare itself
16 | /// - parameter tech: The `Tech` loading the `source`
17 | /// - parameter metadata: The metadata encountered
18 | func onTimedMetadataChanged(source: Source?, tech: Tech, metadata: [AVMetadataItem]?) where Source: MediaSource, Tech: PlaybackTech
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Analytics/TraceProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TraceProvider.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-05-21.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Endpoint hook for dealing with *Trace* analytics data.
12 | ///
13 | /// This could be useful for logging custom events.
14 | public protocol TraceProvider {
15 | /// Should process the specified *Trace* `data`
16 | ///
17 | /// - parameter tech: `Tech` broadcasting the event
18 | /// - parameter source: `MediaSource` causing the event
19 | /// - parameter data: Any data describing the event in JSON format.
20 | func onTrace(tech: Tech?, source: Source?, data: [String: Any]) where Tech: PlaybackTech, Source: MediaSource
21 | }
22 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Components/MediaPlayback.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MediaPlayback.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | public protocol MediaPlayback: class {
13 | /// Starts playback
14 | func play()
15 |
16 | /// Pauses playback
17 | func pause()
18 |
19 | /// Stops playback
20 | func stop()
21 |
22 | /// Should return `true` if the playback rate, forward or backwards, is *non-zero*. Ie: Has the player been instructed to proceed.
23 | ///
24 | /// - note: This should not return `false` if playback has stopped due to *buffering* or similar events.
25 | var isPlaying: Bool { get }
26 |
27 | /// Should perform seeking to the specified `position` in the player's buffer.
28 | ///
29 | /// - parameter position: target buffer position in milliseconds
30 | func seek(toPosition position: Int64)
31 |
32 | /// Should return time ranges within which it is possible to seek.
33 | var seekableRanges: [CMTimeRange] { get }
34 |
35 | /// Should return time ranges in unix epoch time within which it is possible to seek.
36 | var seekableTimeRanges: [CMTimeRange] { get }
37 |
38 | /// Should return the playhead position timestamp using the internal buffer time reference in milliseconds
39 | var playheadPosition: Int64 { get }
40 |
41 | /// Should return the playhead position mapped to current time in unix epoch (milliseconds) or `nil` if playback is not mapped to any date.
42 | var playheadTime: Int64? { get }
43 |
44 | /// For streams where playback is associated with a series of dates, should perform seeking to `timeInterval` as specified in relation to the current `wallclock` time.
45 | ///
46 | /// - parameter timeInterval: target timestamp in unix epoch time (milliseconds)
47 | func seek(toTime timeInterval: Int64)
48 |
49 | /// Should return time ranges of the loaded item.
50 | var bufferedRanges: [CMTimeRange] { get }
51 |
52 | /// Should return time ranges in unix epoch time of the loaded item
53 | var bufferedTimeRanges: [CMTimeRange] { get }
54 |
55 | /// Playback duration.
56 | ///
57 | /// - note: If this is a live stream, duration should be `nil`
58 | var duration: Int64? { get }
59 |
60 | /// The throughput required to play the stream, as advertised by the server, in *bits per second*. Should return nil if no bitrate can be reported.
61 | var currentBitrate: Double? { get }
62 |
63 | /// When autoplay is enabled, playback will resume as soon as the stream is loaded and prepared.
64 | var autoplay: Bool { get set }
65 |
66 | /// Playback volume
67 | var volume: Float { get set }
68 |
69 | /// If the playback is muted or not
70 | var isMuted: Bool { get set }
71 |
72 | /// avplayer playerItem
73 | var playerItem: AVPlayerItem? { get }
74 |
75 | var isOfflinePlayable: Bool { get }
76 | }
77 |
78 | extension Player {
79 | /// Starts playback
80 | public func play() {
81 | tech.play()
82 | }
83 |
84 | /// Pauses playback
85 | public func pause() {
86 | tech.pause()
87 | }
88 |
89 | /// Stops playback
90 | public func stop() {
91 | tech.stop()
92 | }
93 |
94 | /// Should return `true` if the playback rate, forward or backwards, is *non-zero*. Ie: Has the player been instructed to proceed.
95 | ///
96 | /// - note: This should not return `false` if playback has stopped due to *buffering* or similar events.
97 | public var isPlaying: Bool {
98 | return tech.isPlaying
99 | }
100 |
101 | /// Should return time ranges within which it is possible to seek.
102 | public var seekableRanges: [CMTimeRange] {
103 | return tech.seekableRanges
104 | }
105 |
106 | /// Should return time ranges in unix epoch time within which it is possible to seek.
107 | public var seekableTimeRanges: [CMTimeRange] {
108 | return tech.seekableTimeRanges
109 | }
110 |
111 | /// Should seek to the specified `position` in the player's buffer.
112 | ///
113 | /// - Parameter position: target buffer position in milliseconds
114 | public func seek(toPosition position: Int64) {
115 | tech.seek(toPosition: position)
116 | }
117 |
118 | /// Should return the playhead position timestamp using the internal buffer time reference in milliseconds
119 | public var playheadPosition: Int64 {
120 | return tech.playheadPosition
121 | }
122 |
123 | /// Should return the playhead position mapped to wallclock time in unix epoch (milliseconds) or `nil` if playback is not mapped to any date.
124 | public var playheadTime: Int64? {
125 | return tech.playheadTime
126 | }
127 |
128 | /// For streams where playback is associated with a series of dates, should perform seeking to `timeInterval` as specified in relation to the current `wallclock` time.
129 | ///
130 | /// - Parameter timeInterval: target timestamp in unix epoch time (milliseconds)
131 | public func seek(toTime timeInterval: Int64) {
132 | tech.seek(toTime: timeInterval)
133 | }
134 |
135 | /// Playback duration.
136 | ///
137 | /// - note: If this is a live stream, duration should be `nil`
138 | public var duration: Int64? {
139 | return tech.duration
140 | }
141 |
142 | /// Should return the time ranges of the item that have been loaded.
143 | public var bufferedRanges: [CMTimeRange] {
144 | return tech.bufferedRanges
145 | }
146 |
147 | /// Should return time ranges in unix epoch time of the loaded item
148 | public var bufferedTimeRanges: [CMTimeRange] {
149 | return tech.bufferedTimeRanges
150 | }
151 |
152 | /// The throughput required to play the stream, as advertised by the server, in *bits per second*. Should return nil if no bitrate can be reported.
153 | public var currentBitrate: Double? {
154 | return tech.currentBitrate
155 | }
156 |
157 | /// When autoplay is enabled, playback will resume as soon as the stream is loaded and prepared.
158 | public var autoplayEnabled: Bool {
159 | get {
160 | return tech.autoplay
161 | }
162 | set {
163 | tech.autoplay = newValue
164 | }
165 | }
166 |
167 | /// This property is used to control the player audio volume relative to the system volume.
168 | ///
169 | /// There is no programmatic way to control the system volume in iOS, but you can use the MediaPlayer framework’s MPVolumeView class to present a standard user interface for controlling system volume.
170 | public var volume: Float {
171 | get {
172 | return tech.volume
173 | }
174 | set {
175 | tech.volume = newValue
176 | }
177 | }
178 |
179 | /// Whether or not the playback is muted
180 | public var isMuted: Bool {
181 | get {
182 | return tech.isMuted
183 | }
184 | set {
185 | tech.isMuted = newValue
186 | }
187 | }
188 |
189 | /// Returns the `AVPlayerItem` associated with the underlying `AVPlayer`
190 | public var playerItem: AVPlayerItem? {
191 | return tech.playerItem
192 | }
193 |
194 | public var isOfflinePlayable: Bool {
195 | get {
196 | return tech.isOfflinePlayable
197 | }
198 | }
199 | }
200 |
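A brief usage sketch of the `MediaPlayback` surface as exposed through `Player`. The module name `iOSClientPlayer`, the concrete `Player<HLSNative<ManifestContext>>` type and the helper function names are assumptions for illustration.

import iOSClientPlayer

// Toggle between play and pause based on the current playback rate.
func togglePlayback(on player: Player<HLSNative<ManifestContext>>) {
    if player.isPlaying {
        player.pause()
    } else {
        player.play()
    }
}

// Seek 30 seconds forward in the buffer; positions are expressed in milliseconds.
func skipForward(on player: Player<HLSNative<ManifestContext>>) {
    player.seek(toPosition: player.playheadPosition + 30_000)
}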
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Components/MediaRendering.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MediaRendering.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-10.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVFoundation
11 | import AVKit
12 |
13 | /// MediaRendering defines how the player configures a *user supplied* view for playback rendering.
14 | public protocol MediaRendering {
15 | /// Configures `playerView` according to specifications supplied by the adopter.
16 | ///
17 | /// - parameter playerView: *User supplied* view to configure for playback rendering.
18 | func configure(playerView: UIView) -> AVPlayerLayer
19 |
20 | func configureWithDefaultSkin(avPlayerViewController: AVPlayerViewController) -> AVPlayerViewController
21 | }
22 |
23 | extension Player where Tech: MediaRendering {
24 | /// Configures `playerView` according to specifications supplied by the adopter.
25 | /// - Parameter playerView: *User supplied* view to configure for playback rendering.
26 | /// - Returns: AVPlayerLayer
27 | public func configure(playerView: UIView) -> AVPlayerLayer {
28 | return tech.configure(playerView: playerView)
29 | }
30 | }
31 |
32 | extension Player where Tech: MediaRendering {
33 | /// Configures `player` with default skin
34 | /// - Parameter avPlayerViewController: avPlayerViewController
35 | /// - Returns: AVPlayerViewController
36 | public func configureWithDefaultSkin(avPlayerViewController: AVPlayerViewController) -> AVPlayerViewController {
37 | return tech.configureWithDefaultSkin(avPlayerViewController: avPlayerViewController)
38 | }
39 | }
40 |
41 |
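A sketch of configuring rendering from a view controller. It assumes the `iOSClientPlayer` module name and a player backed by a tech that adopts `MediaRendering`, such as `HLSNative<ManifestContext>`; the view controller itself is illustrative.

import UIKit
import AVFoundation
import iOSClientPlayer

final class PlayerContainerViewController: UIViewController {
    // Assumed to be injected before the view loads.
    var player: Player<HLSNative<ManifestContext>>!

    override func viewDidLoad() {
        super.viewDidLoad()
        // The tech inserts its rendering view into `view` and returns the backing AVPlayerLayer.
        let playerLayer = player.configure(playerView: view)
        playerLayer.videoGravity = .resizeAspect
    }
}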
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Components/NetworkBehavior.swift:
--------------------------------------------------------------------------------
1 | //
2 | // NetworkLimitation.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-02-21.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public protocol NetworkBehavior {
12 | /// Should specify the desired limit, in bits per second, of network bandwidth consumption allowed during playback or `nil` if no limit is required
13 | var preferredMaxBitrate: Int64? { get }
14 | }
15 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Components/StartTime.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StartTime.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-08-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// StartTime allows playback to start at a specified offset.
12 | public protocol StartTime: class {
13 | /// Returns a target buffer `offset` to start playback if it has been specified, else `nil`. (milliseconds)
14 | var startPosition: Int64? { get }
15 |
16 | /// Returns a target timestamp in wallclock unix epoch time to start playback if it has been specified, else `nil`. (milliseconds)
17 | var startTime: Int64? { get }
18 |
19 | /// Should set the `startPosition` (in milliseconds) to the specified `position` relative to the playback buffer.
20 | ///
21 | /// Specifying `nil` reverts to the default behaviour for startup
22 | func startTime(atPosition position: Int64?)
23 |
24 | /// Should set the `startTime` to the specified `timestamp` in wallclock unix epoch time. (in milliseconds)
25 | ///
26 | /// Specifying `nil` reverts to the default behaviour for startup
27 | func startTime(atTime timestamp: Int64?)
28 | }
29 |
30 | extension Player where Tech: StartTime {
31 |
32 | /// Returns a target buffer `offset` to start playback if it has been specified, else `nil`.
33 | public var startPosition: Int64? {
34 | return tech.startPosition
35 | }
36 |
37 | /// Returns a target timestamp in wallclock unix epoch time to start playback if it has been specified, else `nil`.
38 | public var startTime: Int64? {
39 | return tech.startTime
40 | }
41 |
42 | /// Should set the `startPosition` (in milliseconds) to the specified `position` relative to the playback buffer.
43 | ///
44 | /// Specifying `nil` reverts to the default behaviour for startup
45 | public func startTime(atPosition position: Int64?) {
46 | tech.startTime(atPosition: position)
47 | }
48 |
49 | /// Should set the `startTime` to the specified `timestamp` in wallclock unix epoch time. (in milliseconds)
50 | ///
51 | /// Specifying `nil` reverts to the default behaviour for startup
52 | public func startTime(atTime timestamp: Int64?) {
53 | tech.startTime(atTime: timestamp)
54 | }
55 | }
56 |
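A sketch of applying a start offset before loading media, for example when restoring a bookmark. It assumes a `Player` backed by `HLSNative<ManifestContext>`, which adopts `StartTime`; the function name is illustrative.

import iOSClientPlayer

func applyBookmark(to player: Player<HLSNative<ManifestContext>>, bufferOffset: Int64?) {
    // Passing nil reverts to the default startup behaviour.
    player.startTime(atPosition: bufferOffset)

    // For date-associated streams a wallclock timestamp (unix epoch, milliseconds) could be used instead:
    // player.startTime(atTime: unixEpochMilliseconds)
}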
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Components/TrackSelectable.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MediaTracks.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-02-07.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | public protocol Track {
13 | /// Should return a human readable display name of the track
14 | var name: String { get }
15 |
16 | /// Should return the RFC 4646 language tag associated with the track or `nil` if unavailable
17 | var extendedLanguageTag: String? { get }
18 |
19 | /// Should return the `NAME` tag value associated with the track or `nil` if unavailable
20 | var title: String? { get }
21 |
22 | /// Should return the randomly generated id value for the track
23 | var mediaTrackId: Int? { get }
24 | }
25 |
26 | /// Describes selectable and inspectable tracks
27 | public protocol TrackSelectable {
28 |
29 | // MARK: Audio
30 | /// The audio track type associated with the tech
31 | associatedtype AudioTrack: Track
32 |
33 | /// Should fetch the default audio track, or `nil` if unavailable
34 | var defaultAudioTrack: AudioTrack? { get }
35 |
36 | /// Should fetch all associated audio tracks
37 | var audioTracks: [AudioTrack] { get }
38 |
39 | /// Should fetch all associated `AVAssetVariant`s
40 | @available(iOS 15.0, tvOS 15.0, *)
41 | var variants: [AVAssetVariant]? { get }
42 |
43 | /// Should fetch the selected audio track if available, otherwise `nil`
44 | var selectedAudioTrack: AudioTrack? { get }
45 |
46 | /// Should indicate if it is possible to select no audio track
47 | var allowsEmptyAudioSelection: Bool { get }
48 |
49 | /// Should select the specified audio track or, if `allowsEmptyAudioSelection` == true, select no audio track
50 | ///
51 | /// - parameter track: The audio track to select
52 | func selectAudio(track: AudioTrack?)
53 |
54 | /// Should select the specified audio language if available or, if `allowsEmptyAudioSelection` == true, select no audio track
55 | ///
56 | /// - parameter language: The RFC 4646 language tag identifying the track
57 | func selectAudio(language: String?)
58 |
59 | /// Should select the audio track matching the specified `mediaTrackId` or, if `allowsEmptyAudioSelection` == true, select no audio track
60 | ///
61 | /// - parameter mediaTrackId: unique id of the mediaTrack
62 | func selectAudio(mediaTrackId: Int?)
63 |
64 | /// Should select the audio track matching the specified `title` or, if `allowsEmptyAudioSelection` == true, select no audio track
65 | ///
66 | /// - parameter title: title of the track
67 | func selectAudio(title: String?)
68 |
69 | /// Should set the preferred audio language tag as defined by RFC 4646 standards
70 | var preferredAudioLanguage: String? { get set }
71 |
72 | // MARK: Text
73 | /// The text track type associated with the tech
74 | associatedtype TextTrack: Track
75 |
76 | /// Should fetch the default text track, or `nil` if unavailable
77 | var defaultTextTrack: TextTrack? { get }
78 |
79 | /// Should fetch all associated text tracks
80 | var textTracks: [TextTrack] { get }
81 |
82 | /// Should fetch the selected text track if available, otherwise `nil`
83 | var selectedTextTrack: TextTrack? { get }
84 |
85 | /// Should indicate if it is possible to select no text track
86 | var allowsEmptyTextSelection: Bool { get }
87 |
88 | /// Should select the specified text track or, if `allowsEmptyTextSelection` == true, select no text track
89 | ///
90 | /// - parameter track: The text track to select
91 | func selectText(track: TextTrack?)
92 |
93 | /// Should select the specified text language if available or, if `allowsEmptyTextSelection` == true, select no text track
94 | ///
95 | /// - parameter language: The RFC 4646 language tag identifying the track
96 | func selectText(language: String?)
97 |
98 | /// Should select the text track matching the specified `mediaTrackId` or, if `allowsEmptyTextSelection` == true, select no text track
99 | ///
100 | /// - parameter mediaTrackId: unique id of the track
101 | func selectText(mediaTrackId: Int?)
102 |
103 | /// Should select the text track matching the specified `title` or, if `allowsEmptyTextSelection` == true, select no text track
104 | ///
105 | /// - parameter title: title of the track
106 | func selectText(title: String?)
107 |
108 | /// Should set the preferred text language tag as defined by RFC 4646 standards
109 | var preferredTextLanguage: String? { get set }
110 |
111 | /// Should set the peak bit rate for the current asset
112 | func setBitRate(selectedBitRate: Double)
113 | }
114 |
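A sketch of inspecting and selecting tracks against the `TrackSelectable` protocol above. The language tags and function name are illustrative; the generic constraint keeps the example independent of any concrete tech.

import iOSClientPlayer

func applyLanguagePreferences<T: TrackSelectable>(on tech: T) {
    // Inspect what the stream offers.
    let available = tech.audioTracks.map { $0.name }
    print("Audio tracks: \(available)")

    // Select by RFC 4646 language tag.
    tech.selectAudio(language: "en")
    tech.selectText(language: "sv")

    // Turn subtitles off entirely, if the tech allows an empty selection.
    if tech.allowsEmptyTextSelection {
        tech.selectText(track: nil)
    }
}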
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Context/Manifest/Components/HLSNative+ManifestContext+Airplay.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNative+ManifestContext+Airplay.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-03-15.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | extension Player where Tech == HLSNative<ManifestContext> {
12 | public func onAirplayStatusChanged(callback: @escaping (Player<HLSNative<ManifestContext>>, Manifest?, Bool) -> Void) -> Self {
13 | tech.onAirplayStatusChanged = { [weak self] tech, source, airplaying in
14 | guard let `self` = self else { return }
15 | callback(self, source, airplaying)
16 | }
17 | return self
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Context/Manifest/Components/Player+ManifestContext.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Player+ManifestContext.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | extension Player where Tech == HLSNative<ManifestContext> {
12 | /// Streaming extension to load and play `Manifest` sources defined by `ManifestContext`.
13 | ///
14 | /// - parameter url: the location of the media to play
15 | public func stream(url: URL) {
16 | let manifest = context.manifest(from: url)
17 | let configuration = HLSNativeConfiguration(drm: manifest.fairplayRequester)
18 | tech.load(source: manifest, configuration: configuration)
19 | }
20 | }
21 |
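A sketch of starting playback of an unencrypted HLS manifest through the `ManifestContext` convenience above. The player instance is assumed to be created elsewhere with `HLSNative<ManifestContext>` as its tech; the function name is illustrative.

import UIKit
import iOSClientPlayer

func startPlayback(with player: Player<HLSNative<ManifestContext>>, of url: URL, in view: UIView) {
    // Attach rendering to the supplied view, then load the manifest and let autoplay start it.
    _ = player.configure(playerView: view)
    player.autoplayEnabled = true
    player.stream(url: url)
}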
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Context/Manifest/Manifest.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Manifest.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Basic `MediaSource` that can do simple playback of *unencrypted* media sources.
12 | ///
13 | /// It has an optional drm agent in the form of a `FairplayRequester` that, if implemented, can be used to play *FairPlay* protected media using the `HLSNative` tech.
14 | public class Manifest: MediaSource {
15 | /// Basic connector
16 | public var analyticsConnector: AnalyticsConnector = PassThroughConnector()
17 |
18 | /// Drm agent to play *FairPlay* using the `HLSNative` tech.
19 | public let fairplayRequester: FairplayRequester?
20 |
21 | /// Unique playsession id
22 | public let playSessionId: String
23 |
24 | /// Media locator for the media source.
25 | public let url: URL
26 |
27 | public init(url: URL, playSessionId: String = UUID().uuidString, fairplayRequester: FairplayRequester? = nil) {
28 | self.url = url
29 | self.playSessionId = playSessionId
30 | self.fairplayRequester = fairplayRequester
31 | }
32 | }
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Context/Manifest/ManifestContext.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ManifestContext.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Basic `MediaContext` that allows playback of *unencrypted* media through a specified `URL`
12 | public final class ManifestContext: MediaContext {
13 | /// Simple error
14 | public typealias ContextError = Error
15 |
16 | /// Simple warning message
17 | public typealias ContextWarning = Warning
18 |
19 | /// Source is defined as a `Manifest`
20 | public typealias Source = Manifest
21 |
22 | public init() { }
23 |
24 | /// Creates a `Manifest` from the specified `URL`
25 | ///
26 | /// - parameter url: `URL` to the media source
27 | /// - returns: `Manifest` describing the media source
28 | public func manifest(from url: URL, fairplayRequester: FairplayRequester? = nil) -> Manifest {
29 | let source = Manifest(url: url, fairplayRequester: fairplayRequester)
30 | source.analyticsConnector.providers = analyticsProviders(for: source)
31 | return source
32 | }
33 |
34 | /// Generators used to create `AnalyticsProvider`s for each `Source`. Empty by default.
35 | public var analyticsGenerators: [(Source?) -> AnalyticsProvider] = []
36 |
37 | public struct Error: ExpandedError {
38 | public let message: String
39 | public let code: Int
40 |
41 | public let info: String?
42 |
43 | public init(message: String, code: Int, info: String? = nil) {
44 | self.message = message
45 | self.code = code
46 | self.info = info
47 | }
48 |
49 | public var domain: String { return "ManifestContextErrorDomain" }
50 | }
51 |
52 | public struct Warning: WarningMessage {
53 | public let message: String
54 |
55 | public init(message: String) {
56 | self.message = message
57 | }
58 | }
59 | }
60 |
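A sketch of registering an analytics generator on the context so every `Manifest` it creates gets a provider attached. It assumes the bundled `AnalyticsLogger` exposes a parameterless initializer; otherwise any `AnalyticsProvider` implementation can be returned from the closure.

import iOSClientPlayer

func makeLoggingContext() -> ManifestContext {
    let context = ManifestContext()
    // Each Manifest produced by manifest(from:) receives its own provider instance.
    context.analyticsGenerators.append { _ in AnalyticsLogger() }
    return context
}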
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Context/MediaContext.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MediaContext.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-20.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Core protocol defining a strict context in which playback can take place.
12 | ///
13 | /// This context should be considered a staging ground for defining, managing and preparing playback sessions.
14 | public protocol MediaContext: class {
15 | /// Context related error
16 | associatedtype ContextError: ExpandedError
17 |
18 | /// Warning message associated with the `Context`
19 | associatedtype ContextWarning: WarningMessage
20 |
21 | /// Defines the individual source object used to initiate a distinct playback session.
22 | associatedtype Source: MediaSource
23 |
24 | /// A collection of generator closures which creates `AnalyticsProvider`s per `Source`.
25 | var analyticsGenerators: [(Source?) -> AnalyticsProvider] { get set }
26 | }
27 |
28 | extension MediaContext {
29 | /// Generate all `AnalyticsProvider`s for the specified source
30 | public func analyticsProviders(for source: Source?) -> [AnalyticsProvider] {
31 | return analyticsGenerators.map{ $0(source) }
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Context/MediaSource.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MediaSource.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-20.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public protocol MediaSource {
12 | var analyticsConnector: AnalyticsConnector { get set }
13 |
14 | /// Returns a token string uniquely identifying this playSession.
15 | /// Example: “E621E1F8-C36C-495A-93FC-0C247A3E6E5F”
16 | var playSessionId: String { get }
17 |
18 | /// The location for this media
19 | var url: URL { get }
20 | }
21 |
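A minimal sketch of a custom `MediaSource`. `PassThroughConnector` is used as the connector, mirroring `Manifest`; the type name is illustrative and not part of the library.

import Foundation
import iOSClientPlayer

final class SimpleSource: MediaSource {
    var analyticsConnector: AnalyticsConnector = PassThroughConnector()
    let playSessionId = UUID().uuidString
    let url: URL

    init(url: URL) {
        self.url = url
    }
}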
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Context/MediaSourceRequestHeaders.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNativeMediaSource.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-08-21.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Extends the standard `MediaSource` protocol with functionality to track the HTTP headers set by internal playback when requesting manifest and media segments.
12 | public protocol MediaSourceRequestHeaders: MediaSource {
13 | /// Should store the HTTP headers used when requesting manifest and media segments
14 | var mediaSourceRequestHeaders: [String: String] { get set }
15 | }
16 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Events/EventDispatcher.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EventDispatcher.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | /// Dispatch class used by the player to trigger registered event callbacks.
13 | ///
14 | /// `Tech` implementations should trigger the related events where appropriate.
15 | public class EventDispatcher<Context: MediaContext, Tech: PlaybackTech> {
16 |
17 | /// Should be triggered when the requested media is created, but not yet loaded
18 | ///
19 | /// - parameter tech: `Tech` broadcasting the event
20 | /// - parameter source: `MediaSource` causing the event
21 | internal(set) public var onPlaybackCreated: (Tech, Context.Source) -> Void = { _,_ in }
22 |
23 | /// Should be triggered once the requested media is loaded
24 | ///
25 | /// - parameter tech: `Tech` broadcasting the event
26 | /// - parameter source: `MediaSource` causing the event
27 | internal(set) public var onPlaybackPrepared: (Tech, Context.Source) -> Void = { _,_ in }
28 |
29 | /// Should be triggered when playback is ready to start
30 | ///
31 | /// - parameter tech: `Tech` broadcasting the event
32 | /// - parameter source: `MediaSource` causing the event
33 | internal(set) public var onPlaybackReady: (Tech, Context.Source) -> Void = { _,_ in }
34 |
35 | /// Should be triggered once the playback starts for the first time
36 | ///
37 | /// - parameter tech: `Tech` broadcasting the event
38 | /// - parameter source: `MediaSource` causing the event
39 | internal(set) public var onPlaybackStarted: (Tech, Context.Source) -> Void = { _,_ in }
40 |
41 | /// Should be triggered by the user pausing playback
42 | ///
43 | /// - parameter tech: `Tech` broadcasting the event
44 | /// - parameter source: `MediaSource` causing the event
45 | internal(set) public var onPlaybackPaused: (Tech, Context.Source) -> Void = { _,_ in }
46 |
47 | /// Should be triggered by the user resuming playback
48 | ///
49 | /// - parameter tech: `Tech` broadcasting the event
50 | /// - parameter source: `MediaSource` causing the event
51 | internal(set) public var onPlaybackResumed: (Tech, Context.Source) -> Void = { _,_ in }
52 |
53 | /// Should be triggered by the user aborting playback
54 | ///
55 | /// - parameter tech: `Tech` broadcasting the event
56 | /// - parameter source: `MediaSource` causing the event
57 | internal(set) public var onPlaybackAborted: (Tech, Context.Source) -> Void = { _,_ in }
58 |
59 | /// Should be triggered once playback reaches end of stream
60 | ///
61 | /// - parameter tech: `Tech` broadcasting the event
62 | /// - parameter source: `MediaSource` causing the event
63 | internal(set) public var onPlaybackCompleted: (Tech, Context.Source) -> Void = { _,_ in }
64 |
65 | /// Should be triggered if an error occurs during the playback session's lifetime
66 | ///
67 | /// - parameter tech: `Tech` broadcasting the event
68 | /// - parameter source: `MediaSource` causing the event
69 | /// - parameter error: `Error` encountered
70 | internal(set) public var onError: (Tech?, Context.Source?, PlayerError<Tech, Context>) -> Void = { _,_,_ in }
71 |
72 | /// Should be triggered when the bitrate changes
73 | ///
74 | /// - parameter tech: `Tech` broadcasting the event
75 | /// - parameter source: `MediaSource` causing the event
76 | /// - parameter bitrate: New bitrate
77 | internal(set) public var onBitrateChanged: (Tech, Context.Source, Double) -> Void = { _,_,_ in }
78 |
79 | /// Should be triggered when buffering is required
80 | ///
81 | /// - parameter tech: `Tech` broadcasting the event
82 | /// - parameter source: `MediaSource` causing the event
83 | internal(set) public var onBufferingStarted: (Tech, Context.Source) -> Void = { _,_ in }
84 |
85 | /// Should be triggered when buffering finished
86 | ///
87 | /// - parameter tech: `Tech` broadcasting the event
88 | /// - parameter source: `MediaSource` causing the event
89 | internal(set) public var onBufferingStopped: (Tech, Context.Source) -> Void = { _,_ in }
90 |
91 | /// Should be triggered by the user seeking to time
92 | ///
93 | /// - parameter tech: `Tech` broadcasting the event
94 | /// - parameter source: `MediaSource` causing the event
95 | /// - parameter offset: New offset
96 | internal(set) public var onPlaybackScrubbed: (Tech, Context.Source, Int64) -> Void = { _,_,_ in }
97 |
98 | /// Should be triggered when the duration of `source` changes
99 | ///
100 | /// - parameter tech: `Tech` broadcasting the event
101 | /// - parameter source: `MediaSource` causing the event
102 | internal(set) public var onDurationChanged: (Tech, Context.Source) -> Void = { _,_ in }
103 |
104 | /// Should be triggered when a *warning* for either the `Tech` or the `Context` occurs.
105 | ///
106 | /// - parameter tech: `Tech` broadcasting the event
107 | /// - parameter source: `MediaSource` causing the event
108 | /// - parameter warning: `Warning` encountered
109 | internal(set) public var onWarning: (Tech, Context.Source?, PlayerWarning<Tech, Context>) -> Void = { _,_,_ in }
110 |
111 | /// Should be triggered when the *date range metadata* changes.
112 | internal(set) public var onDateRangeMetadataChanged: (_ metaDataGroup: [AVDateRangeMetadataGroup], _ indexesOfNewGroups: IndexSet, _ indexesOfModifiedGroups: IndexSet ) -> Void = { _, _, _ in }
113 |
114 | /// Should be triggered when a *grace period* starts.
115 | ///
116 | /// - parameter tech: `Tech` broadcasting the event
117 | /// - parameter source: `MediaSource` causing the event
118 | internal(set) public var onGracePeriodStarted: (Tech, Context.Source?) -> Void = { _,_ in }
119 |
120 | /// Should be triggered when a *grace period* ends.
121 | ///
122 | /// - parameter tech: `Tech` broadcasting the event
123 | /// - parameter source: `MediaSource` causing the event
124 | internal(set) public var onGracePeriodEnded: (Tech, Context.Source?) -> Void = { _,_ in }
125 |
126 |
127 | /// Should be triggered when the app enters the background.
128 | ///
129 | /// - parameter tech: `Tech` broadcasting the event
130 | /// - parameter source: `MediaSource` causing the event
131 | internal(set) public var onAppDidEnterBackground: (Tech, Context.Source?) -> Void = { _,_ in }
132 |
133 |
134 | /// Should be triggered when the app enters the foreground.
135 | ///
136 | /// - parameter tech: `Tech` broadcasting the event
137 | /// - parameter source: `MediaSource` causing the event
138 | internal(set) public var onAppDidEnterForeground: (Tech, Context.Source?) -> Void = { _,_ in }
139 | }
140 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Events/EventResponder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EventResponder.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Specifies a set of events that will be listened to.
12 | public protocol EventResponder {
13 | /// Triggered when the requested media is created, but not yet loaded
14 | ///
15 | /// - parameter tech: `Tech` broadcasting the event
16 | /// - parameter source: `MediaSource` causing the event
17 | func onCreated<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
18 |
19 | /// Triggered once the requested media is loaded
20 | ///
21 | /// - parameter tech: `Tech` broadcasting the event
22 | /// - parameter source: `MediaSource` causing the event
23 | func onPrepared<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
24 |
25 | /// Triggered when playback is ready to start
26 | ///
27 | /// - parameter tech: `Tech` broadcasting the event
28 | /// - parameter source: `MediaSource` causing the event
29 | func onReady<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
30 |
31 | /// Triggered once the playback starts for the first time
32 | ///
33 | /// - parameter tech: `Tech` broadcasting the event
34 | /// - parameter source: `MediaSource` causing the event
35 | func onStarted<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
36 |
37 | /// Triggered by the user pausing playback
38 | ///
39 | /// - parameter tech: `Tech` broadcasting the event
40 | /// - parameter source: `MediaSource` causing the event
41 | func onPaused<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
42 |
43 | /// Triggered by the user resuming playback
44 | ///
45 | /// - parameter tech: `Tech` broadcasting the event
46 | /// - parameter source: `MediaSource` causing the event
47 | func onResumed<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
48 |
49 | /// Triggered by the user aborting playback
50 | ///
51 | /// - parameter tech: `Tech` broadcasting the event
52 | /// - parameter source: `MediaSource` causing the event
53 | func onAborted<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
54 |
55 | /// Triggered once playback reaches end of stream
56 | ///
57 | /// - parameter tech: `Tech` broadcasting the event
58 | /// - parameter source: `MediaSource` causing the event
59 | func onCompleted<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
60 |
61 | /// Triggered if the player encounters an error during its lifetime
62 | ///
63 | /// - parameter tech: `Tech` broadcasting the event
64 | /// - parameter source: `MediaSource` causing the event
65 | /// - parameter error: `Error` encountered
66 | func onError<Tech, Source, Context>(tech: Tech?, source: Source?, error: PlayerError<Tech, Context>) where Tech: PlaybackTech, Source: MediaSource, Context: MediaContext
67 |
68 | /// Triggered when the bitrate changes
69 | ///
70 | /// - parameter tech: `Tech` broadcasting the event
71 | /// - parameter source: `MediaSource` causing the event
72 | /// - parameter bitrate: New bitrate
73 | func onBitrateChanged<Tech, Source>(tech: Tech, source: Source, bitrate: Double) where Tech: PlaybackTech, Source: MediaSource
74 |
75 | /// Triggered when buffering is required
76 | ///
77 | /// - parameter tech: `Tech` broadcasting the event
78 | /// - parameter source: `MediaSource` causing the event
79 | func onBufferingStarted<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
80 |
81 | /// Triggered when buffering finished
82 | ///
83 | /// - parameter tech: `Tech` broadcasting the event
84 | /// - parameter source: `MediaSource` causing the event
85 | func onBufferingStopped<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
86 |
87 | /// Triggered by the user seeking to time
88 | ///
89 | /// - parameter tech: `Tech` broadcasting the event
90 | /// - parameter source: `MediaSource` causing the event
91 | /// - parameter offset: New offset
92 | func onScrubbedTo<Tech, Source>(tech: Tech, source: Source, offset: Int64) where Tech: PlaybackTech, Source: MediaSource
93 |
94 | /// Triggered when the duration of `source` changes
95 | ///
96 | /// - parameter tech: `Tech` broadcasting the event
97 | /// - parameter source: `MediaSource` causing the event
98 | func onDurationChanged<Tech, Source>(tech: Tech, source: Source) where Tech: PlaybackTech, Source: MediaSource
99 |
100 | /// Triggered when a *warning* for either the `Tech` or the `Context` occurs.
101 | ///
102 | /// - parameter tech: `Tech` broadcasting the event
103 | /// - parameter source: `MediaSource` causing the event
104 | /// - parameter warning: `Warning` encountered
105 | func onWarning<Tech, Source, Context>(tech: Tech, source: Source?, warning: PlayerWarning<Tech, Context>) where Tech: PlaybackTech, Source: MediaSource, Context: MediaContext
106 |
107 |
108 |
109 | /// Triggered when the app did enter background.
110 | /// - Parameters:
111 | /// - tech: `Tech` broadcasting the event
112 | /// - source: `MediaSource` causing the event
113 | func onAppDidEnterBackground<Tech, Source>(tech: Tech, source: Source?) where Tech: PlaybackTech, Source: MediaSource
114 |
115 |
116 | /// Triggered when the app did enter foreground.
117 | /// - Parameters:
118 | /// - tech: `Tech` broadcasting the event
119 | /// - source: `MediaSource` causing the event
120 | func onAppDidEnterForeground<Tech, Source>(tech: Tech, source: Source?) where Tech: PlaybackTech, Source: MediaSource
121 |
122 | /// Triggered when the grace period started
123 | /// - Parameters:
124 | /// - tech: `Tech` broadcasting the event
125 | /// - source: `MediaSource` causing the event
126 | func onGracePeriodStarted<Tech, Source>(tech: Tech, source: Source?) where Tech: PlaybackTech, Source: MediaSource
127 |
128 | /// Triggered when the grace period ended
129 | /// - Parameters:
130 | /// - tech: `Tech` broadcasting the event
131 | /// - source: `MediaSource` causing the event
132 | func onGracePeriodEnded<Tech, Source>(tech: Tech, source: Source?) where Tech: PlaybackTech, Source: MediaSource
133 |
134 |
135 |
136 |
137 | }
138 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Extensions/Date+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Date+Extensions.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-01-30.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | extension Date {
12 | /// Date formatter for utc.
13 | internal static func utcFormatter() -> DateFormatter {
14 | let formatter = DateFormatter()
15 | formatter.locale = Locale(identifier: "en_GB")
16 | formatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"
17 | return formatter
18 | }
19 |
20 | /// Unix epoch time in milliseconds
21 | internal var millisecondsSince1970: Int64 {
22 | return Int64((timeIntervalSince1970 * 1000.0).rounded())
23 | }
24 |
25 | /// Create a Date from unix epoch time in milliseconds
26 | internal init(milliseconds: Int64) {
27 | self = Date(timeIntervalSince1970: TimeInterval(milliseconds / 1000))
28 | }
29 | }
30 |
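A small sketch of the round trip between `Date` and epoch milliseconds (usable inside the module only, since both helpers are `internal`). Note that `millisecondsSince1970` rounds to the nearest millisecond while `init(milliseconds:)` truncates to whole seconds.

import Foundation

func epochRoundTrip() {
    let now = Date()
    let ms = now.millisecondsSince1970      // unix epoch time in milliseconds
    let restored = Date(milliseconds: ms)   // millisecond remainder is dropped by the integer division
    print(ms, restored)
}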
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Extensions/Error+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Error+Extensions.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-02-19.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 | extension Error {
12 | internal var debugInfoString: String {
13 | if let expandedError = self as? ExpandedError {
14 | var message = "[\(expandedError.code):" + expandedError.domain + "] \n"
15 | if let underlyingError = expandedError.underlyingError {
16 | message += underlyingError.debugInfoString
17 | }
18 | else {
19 | message += "[" + expandedError.message + " " + (expandedError.info ?? "") + "]"
20 | }
21 | return message
22 | }
23 | else if let nsError = self as? NSError {
24 | var message = "[\(nsError.code):\(nsError.domain)] \n "
25 | message += "[\(nsError.debugDescription)] \n "
26 |
27 | if let uError = nsError.userInfo[NSUnderlyingErrorKey] as? NSError {
28 | message += uError.debugInfoString
29 | }
30 | return message
31 | }
32 | return "[\(self.localizedDescription)] \n"
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Extensions/Int64+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Int64+Extensions.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-10-16.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | extension Int64 {
12 | /// If `self` is a buffer position, transform it to a wallclock timestamp in relation to `time` and `position`
13 | public func timestampFrom(referenceTime time: Int64, referencePosition position: Int64) -> Int64 {
14 | return time - position + self
15 | }
16 |
17 | /// If `self` is a wallclock timestamp, transform it to a buffer position in relation to `time` and `position`
18 | public func positionFrom(referenceTime time: Int64, referencePosition position: Int64) -> Int64 {
19 | return position - time + self
20 | }
21 |
22 | /// If `self` is a buffer position, transform it to a wallclock timestamp in relation to `time` and `position`
23 | ///
24 | /// Will return nil if `time` is nil
25 | public func timestampFrom(referenceTime time: Int64?, referencePosition position: Int64) -> Int64? {
26 | guard let time = time else { return nil }
27 | return time - position + self
28 | }
29 |
30 | /// If `self` is a wallclock timestamp, transform it to a buffer position in relation to `time` and `position`
31 | ///
32 | /// Will return nil if `time` is nil
33 | public func positionFrom(referenceTime time: Int64?, referencePosition position: Int64) -> Int64? {
34 | guard let time = time else { return nil }
35 | return position - time + self
36 | }
37 |
38 | }
39 |
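A worked sketch of the position/timestamp mapping above. The reference values are made up for illustration: buffer position 0 ms is taken to correspond to the epoch timestamp 1_700_000_000_000 ms.

import iOSClientPlayer

func demoBufferMapping() {
    let referenceTime: Int64 = 1_700_000_000_000   // wallclock (epoch ms) at the reference position
    let referencePosition: Int64 = 0               // buffer position (ms) at the reference time

    let position: Int64 = 30_000                   // 30 seconds into the buffer
    let timestamp = position.timestampFrom(referenceTime: referenceTime, referencePosition: referencePosition)
    // timestamp == 1_700_000_030_000

    let roundTripped = timestamp.positionFrom(referenceTime: referenceTime, referencePosition: referencePosition)
    // roundTripped == 30_000
    print(timestamp, roundTripped)
}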
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Extensions/Sequence+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Sequence+Extensions.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | extension Sequence where Iterator.Element: RawRepresentable {
12 | /// Convenience property to map an `Array` of `RawRepresentable`s to their *raw form*
13 | public var rawValues: [Iterator.Element.RawValue] {
14 | return self.map{ $0.rawValue }
15 | }
16 | }
17 |
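A short sketch of `rawValues`; the enum is illustrative.

import iOSClientPlayer

enum Quality: String {
    case low, medium, high
}

let labels = [Quality.low, .medium, .high].rawValues
// labels == ["low", "medium", "high"]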
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>en</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>FMWK</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>$(MARKETING_VERSION)</string>
19 | <key>CFBundleVersion</key>
20 | <string>$(CURRENT_PROJECT_VERSION)</string>
21 | <key>NSPrincipalClass</key>
22 | <string></string>
23 | </dict>
24 | </plist>
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Player.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Player.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-04.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 |
13 |
14 | /// Generic class which implements a base set of functionality not specific to actual playback of media sources. This functionality is instead acquired through *Feature Components* directly tied to the underlying `PlaybackTech` and `MediaContext`.
15 | ///
16 | /// In practice, this means `Player`s with different *tech* or *media sources* can express context sensitive methods in a highly configurable way.
17 | public final class Player<Tech: PlaybackTech> {
18 | /// Active `PlaybackTech`
19 | fileprivate(set) public var tech: Tech
20 |
21 | /// Current `MediaContext`
22 | fileprivate(set) public var context: Tech.Context
23 |
24 | public init(tech: Tech, context: Tech.Context) {
25 | self.context = context
26 | self.tech = tech
27 | }
28 | }
29 |
30 | // MARK: - PlayerEventPublisher
31 | extension Player {
32 | /// Sets the callback to fire when the associated media is created but not yet loaded. Playback is not yet ready to start.
33 | ///
34 | /// - parameter callback: callback to fire once the event is fired.
35 | /// - returns: `Self`
36 | @discardableResult
37 | public func onPlaybackCreated(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
38 | tech.eventDispatcher.onPlaybackCreated = { [weak self] tech, source in
39 | guard let `self` = self else { return }
40 | callback(self, source)
41 | }
42 | return self
43 | }
44 |
45 | /// Sets the callback to fire when the associated media has loaded but is not playback ready.
46 | ///
47 | /// - parameter callback: callback to fire once the event is fired.
48 | /// - returns: `Self`
49 | @discardableResult
50 | public func onPlaybackPrepared(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
51 | tech.eventDispatcher.onPlaybackPrepared = { [weak self] tech, source in
52 | guard let `self` = self else { return }
53 | callback(self,source)
54 | }
55 | return self
56 | }
57 |
58 | /// Sets the callback to fire once the associated media has loaded and is ready for playback. At this point, starting playback should be possible.
59 | ///
60 | /// - parameter callback: callback to fire once the event is fired.
61 | /// - returns: `Self`
62 | @discardableResult
63 | public func onPlaybackReady(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
64 | tech.eventDispatcher.onPlaybackReady = { [weak self] tech, source in
65 | guard let `self` = self else { return }
66 | callback(self,source)
67 | }
68 | return self
69 | }
70 |
71 | /// Sets the callback to fire once the playback first starts. This is fired once.
72 | ///
73 | /// - parameter callback: callback to fire once the event is fired.
74 | /// - returns: `Self`
75 | @discardableResult
76 | public func onPlaybackStarted(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
77 | tech.eventDispatcher.onPlaybackStarted = { [weak self] tech, source in
78 | guard let `self` = self else { return }
79 | callback(self,source)
80 | }
81 | return self
82 | }
83 |
84 | /// Sets the callback to fire when the playback rate transitions from *non-zero* to *zero*.
85 | ///
86 | /// - parameter callback: callback to fire once the event is fired.
87 | /// - returns: `Self`
88 | @discardableResult
89 | public func onPlaybackPaused(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
90 | tech.eventDispatcher.onPlaybackPaused = { [weak self] tech, source in
91 | guard let `self` = self else { return }
92 | callback(self,source)
93 | }
94 | return self
95 | }
96 |
97 | /// Sets the callback to fire if playback is resumed from a paused state.
98 | ///
99 | /// This will not fire if the playback has not yet been started, ie `onPlaybackStarted:` has not fired yet.
100 | ///
101 | /// - parameter callback: callback to fire once the event is fired.
102 | /// - returns: `Self`
103 | @discardableResult
104 | public func onPlaybackResumed(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
105 | tech.eventDispatcher.onPlaybackResumed = { [weak self] tech, source in
106 | guard let `self` = self else { return }
107 | callback(self,source)
108 | }
109 | return self
110 | }
111 |
112 | /// Sets the callback to fire once playback is stopped by user action.
113 | ///
114 | /// - parameter callback: callback to fire once the event is fired.
115 | /// - returns: `Self`
116 | @discardableResult
117 | public func onPlaybackAborted(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
118 | tech.eventDispatcher.onPlaybackAborted = { [weak self] tech, source in
119 | guard let `self` = self else { return }
120 | callback(self,source)
121 | }
122 | return self
123 | }
124 |
125 | /// Sets the callback to fire once playback reached the end of the current media, ie when playback reaches `duration`.
126 | ///
127 | /// - parameter callback: callback to fire once the event is fired.
128 | /// - returns: `Self`
129 | @discardableResult
130 | public func onPlaybackCompleted(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
131 | tech.eventDispatcher.onPlaybackCompleted = { [weak self] tech, source in
132 | guard let `self` = self else { return }
133 | callback(self,source)
134 | }
135 | return self
136 | }
137 |
138 | /// Sets the callback to fire whenever an `error` occurs. Errors are thrown from throughout the `player` lifecycle. Make sure to handle them. If appropriate, present valid information to *end users*.
139 | ///
140 | /// - parameter callback: callback to fire once the event is fired.
141 | /// - returns: `Self`
142 | @discardableResult
143 | public func onError(callback: @escaping (Player, Tech.Context.Source?, PlayerError<Tech, Tech.Context>) -> Void) -> Self {
144 | tech.eventDispatcher.onError = { [weak self] tech, source, error in
145 | guard let `self` = self else { return }
146 | callback(self,source,error)
147 | }
148 | return self
149 | }
150 |
151 | /// Sets the callback to fire whenever the current *Bitrate* changes.
152 | ///
153 | /// - parameter callback: callback to fire once the event is fired.
154 | /// - returns: `Self`
155 | @discardableResult
156 | public func onBitrateChanged(callback: @escaping (Player, Tech.Context.Source, Double) -> Void) -> Self {
157 | tech.eventDispatcher.onBitrateChanged = { [weak self] tech, source, bitrate in
158 | guard let `self` = self else { return }
159 | callback(self,source,bitrate)
160 | }
161 | return self
162 | }
163 |
164 | /// Sets the callback to fire once buffering started.
165 | ///
166 | /// - parameter callback: callback to fire once the event is fired.
167 | /// - returns: `Self`
168 | @discardableResult
169 | public func onBufferingStarted(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
170 | tech.eventDispatcher.onBufferingStarted = { [weak self] tech, source in
171 | guard let `self` = self else { return }
172 | callback(self,source)
173 | }
174 | return self
175 | }
176 |
177 | /// Sets the callback to fire once buffering stopped.
178 | ///
179 | /// - parameter callback: callback to fire once the event is fired.
180 | /// - returns: `Self`
181 | @discardableResult
182 | public func onBufferingStopped(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
183 | tech.eventDispatcher.onBufferingStopped = { [weak self] tech, source in
184 | guard let `self` = self else { return }
185 | callback(self,source)
186 | }
187 | return self
188 | }
189 |
190 | /// Sets the callback to fire when the user scrubs in the player
191 | ///
192 | /// - parameter callback: callback to fire once the event is fired.
193 | /// - returns: `Self`
194 | @discardableResult
195 | public func onPlaybackScrubbed(callback: @escaping (Player, Tech.Context.Source, Int64) -> Void) -> Self {
196 | tech.eventDispatcher.onPlaybackScrubbed = { [weak self] tech, source, timestamp in
197 | guard let `self` = self else { return }
198 | callback(self,source,timestamp)
199 | }
200 | return self
201 | }
202 |
203 | /// Sets the callback to fire once the current playback `duration` changes.
204 | ///
205 | /// - parameter callback: callback to fire once the event is fired.
206 | /// - returns: `Self`
207 | @discardableResult
208 | public func onDurationChanged(callback: @escaping (Player, Tech.Context.Source) -> Void) -> Self {
209 | tech.eventDispatcher.onDurationChanged = { [weak self] tech, source in
210 | guard let `self` = self else { return }
211 | callback(self,source)
212 | }
213 | return self
214 | }
215 | /// Sets the callback to fire whenever a *warning* for either the `Tech` or the `Context` occurs.
216 | ///
217 | /// - parameter callback: callback to fire once the event is fired.
218 | /// - returns: `Self`
219 | @discardableResult
220 | public func onWarning(callback: @escaping (Player, Tech.Context.Source?, PlayerWarning<Tech, Tech.Context>) -> Void) -> Self {
221 | tech.eventDispatcher.onWarning = { [weak self] tech, source, warning in
222 | guard let `self` = self else { return }
223 | callback(self,source,warning)
224 | }
225 | return self
226 | }
227 |
228 | /// Sets the callback to fire once the current playback `DateRangeMetadata` changes.
229 | ///
230 | /// #EXT-X-DATERANGE tag will be used to define date range metadata in a media playlist.
231 | /// This tag is useful for defining timed metadata for interstitial regions such as advertisements, but can be used to define any timed metadata needed by your stream.
232 | ///
233 | /// - parameter callback: callback to fire once the event is fired.
234 | /// - returns: `Self`
235 | @discardableResult
236 | public func onDateRangeMetadataChanged(callback: @escaping ([AVDateRangeMetadataGroup], IndexSet, IndexSet) -> Void) -> Self {
237 | tech.eventDispatcher.onDateRangeMetadataChanged = { [weak self] metadata, indexesOfNewGroups, indexesOfModifiedGroups in
238 | guard let `self` = self else { return }
239 | callback(metadata, indexesOfNewGroups, indexesOfModifiedGroups)
240 | }
241 | return self
242 | }
243 | }
244 |
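A sketch of wiring up event callbacks; each registration returns `Self`, so calls can be chained. The concrete `Player<HLSNative<ManifestContext>>` type and the function name are assumptions.

import iOSClientPlayer

func wireEvents(on player: Player<HLSNative<ManifestContext>>) {
    player
        .onPlaybackReady { player, _ in
            // Playback can be started as soon as the source reports ready.
            player.play()
        }
        .onError { _, source, error in
            print("Playback failed for \(source?.url.absoluteString ?? "unknown source"): \(error.message)")
        }
        .onBitrateChanged { _, _, bitrate in
            print("Bitrate changed to \(bitrate) bps")
        }
}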
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/PlayerError.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PlayerError.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-27.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Generic *wrapper* for the underlying `PlaybackTech` and `MediaContext` errors.
12 | public enum PlayerError<Tech: PlaybackTech, Context: MediaContext>: ExpandedError {
13 | /// The related Tech error
14 | public typealias TechError = Tech.TechError
15 |
16 | /// The related Media error
17 | public typealias ContextError = Context.ContextError
18 |
19 | /// Wrapped `PlaybackTech` error
20 | case tech(error: TechError)
21 |
22 | /// Wrapped `MediaContext` error
23 | case context(error: ContextError)
24 | }
25 |
26 | extension PlayerError {
27 | /// Returns a unique message describing the error
28 | public var message: String {
29 | switch self {
30 | case .tech(error: let error): return error.message
31 | case .context(error: let error): return error.message
32 | }
33 | }
34 | }
35 |
36 | extension PlayerError {
37 | /// Returns detailed information about the error
38 | public var info: String? {
39 | switch self {
40 | case .tech(error: let error): return error.info
41 | case .context(error: let error): return error.info
42 | }
43 | }
44 | }
45 |
46 | extension PlayerError {
47 | /// The error code as defined in the error domain represented by the underlying error
48 | public var code: Int {
49 | switch self {
50 | case .tech(error: let error): return error.code
51 | case .context(error: let error): return error.code
52 | }
53 | }
54 | }
55 |
56 | extension PlayerError {
57 | /// The domain the error belongs to
58 | public var domain: String {
59 | switch self {
60 | case .context(error: let error): return error.domain
61 | case .tech(error: let error): return error.domain
62 | }
63 | }
64 | }
65 |
66 | extension PlayerError {
67 | /// The underlying error for this error
68 | public var underlyingError: Error? {
69 | switch self {
70 | case .context(error: let error): return error.underlyingError
71 | case .tech(error: let error): return error.underlyingError
72 | }
73 | }
74 | }
75 |
76 |
77 | /// Extension on the basic `Swift.Error` protocol adding an error code.
78 | public protocol ExpandedError: Error {
79 | /// Should return the error code
80 | var code: Int { get }
81 |
82 | /// Should return a message describing the error
83 | var message: String { get }
84 |
85 | /// Should specify a domain the error belongs to
86 | var domain: String { get }
87 |
88 | /// Should optionally return detailed information describing the error
89 | var info: String? { get }
90 |
91 | /// Should optionally return the underlying error for this error
92 | var underlyingError: Error? { get }
93 | }
94 |
95 | extension ExpandedError {
96 | public var underlyingError: Error? { return nil }
97 | }
98 |
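A sketch of turning a `PlayerError` into a log line using only the forwarding accessors defined above; the function name is illustrative.

import iOSClientPlayer

func describe<Tech: PlaybackTech, Context: MediaContext>(_ error: PlayerError<Tech, Context>) -> String {
    // code, domain, message and info are all forwarded from the wrapped tech or context error.
    var text = "[\(error.domain):\(error.code)] \(error.message)"
    if let info = error.info {
        text += " (\(info))"
    }
    return text
}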
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/PrivacyInfo.xcprivacy:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>NSPrivacyTracking</key>
6 | <false/>
7 | <key>NSPrivacyTrackingDomains</key>
8 | <array/>
9 | <key>NSPrivacyCollectedDataTypes</key>
10 | <array/>
11 | <key>NSPrivacyAccessedAPITypes</key>
12 | <array>
13 | <dict>
14 | <key>NSPrivacyAccessedAPIType</key>
15 | <string>NSPrivacyAccessedAPICategoryUserDefaults</string>
16 | <key>NSPrivacyAccessedAPITypeReasons</key>
17 | <array>
18 | <string>CA92.1</string>
19 | </array>
20 | </dict>
21 | </array>
22 | </dict>
23 | </plist>
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/AirplayHandler.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Airplay.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-03-15.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | public protocol AirplayHandler: class {
12 | /// Defines an interaction point for handling status events related to Airplay
13 | ///
14 | /// - parameter active: If *Airplay* was turned on or off
15 | /// - parameter tech: The tech used to play `source`
16 | /// - parameter source: The `Source` currently under playback for which the airplay event occurred.
17 | func handleAirplayEvent<Tech, Source>(active: Bool, tech: Tech, source: Source?) where Tech: PlaybackTech, Source: MediaSource
18 |
19 | /// Send analytics events related to Airplay
20 | ///
21 | /// - parameter active: If *Airplay* was turned on or off
22 | /// - parameter tech: The tech used to play `source`
23 | /// - parameter source: The `Source` currently under playback for which the airplay event occurred.
24 | func sendAirplayAnalytics<Tech, Source>(active: Bool, tech: Tech, source: Source?) where Tech: PlaybackTech, Source: MediaSource
25 | }
26 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Components/HLSNative+MediaRendering.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNative+MediaRendering.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-24.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 | import AVKit
12 |
13 | /// `HLSNative` adoption of `MediaRendering`
14 | extension HLSNative: MediaRendering {
15 | /// Creates and configures the associated `CALayer` used to render the media output. This view will be added to the *user supplied* `playerView` as a sub view at `index: 0`. A strong reference to `playerView` is also established.
16 | /// - parameter playerView: *User supplied* view to configure for playback rendering.
17 | /// - Returns: AVPlayerLayer
18 | public func configure(playerView: UIView) -> AVPlayerLayer {
19 | configureRendering {
20 | let renderingView = PlayerView(frame: playerView.frame)
21 |
22 | renderingView.avPlayerLayer.videoGravity = AVLayerVideoGravity.resizeAspect
23 | renderingView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
24 | renderingView.translatesAutoresizingMaskIntoConstraints = false
25 | playerView.insertSubview(renderingView, at: 0)
26 |
27 | let leading = renderingView
28 | .leadingAnchor
29 | .constraint(equalTo: playerView.leadingAnchor)
30 | leading.isActive = true
31 | leading.identifier = "PlayerView-RenderingView-Leading"
32 |
33 | let top = renderingView
34 | .topAnchor
35 | .constraint(equalTo: playerView.topAnchor)
36 | top.isActive = true
37 | top.identifier = "PlayerView-RenderingView-Top"
38 |
39 | let trailing = renderingView
40 | .trailingAnchor
41 | .constraint(equalTo: playerView.trailingAnchor)
42 | trailing.isActive = true
43 | trailing.identifier = "PlayerView-RenderingView-Trailing"
44 |
45 | let bottom = renderingView
46 | .bottomAnchor
47 | .constraint(equalTo: playerView.bottomAnchor)
48 | bottom.isActive = true
49 | bottom.identifier = "PlayerView-RenderingView-Bottom"
50 |
51 | return renderingView.avPlayerLayer
52 | }
53 | }
54 |
55 | /// This method allows for advanced configuration of the playback rendering.
56 | ///
57 | /// The caller is responsible for creating, configuring and retaining the related constituents. End by returning an `AVPlayerLayer` in which the rendering should take place.
58 | ///
59 | /// - parameter closure: closure detailing the custom rendering. Must return an `AVPlayerLayer` in which the rendering will take place
60 | ///
61 | /// - Returns: AVPlayerLayer
62 | public func configureRendering(closure: () -> AVPlayerLayer) -> AVPlayerLayer {
63 | let layer = closure()
64 | layer.player = avPlayer
65 | return layer
66 | }
67 |
68 |
69 |     /// Assigns the tech's `AVPlayer` to the supplied `AVPlayerViewController`'s `player` property.
70 |     /// - Parameter avPlayerViewController: The `AVPlayerViewController` that should present playback
71 |     /// - Returns: The configured `AVPlayerViewController`
72 | public func configureWithDefaultSkin(avPlayerViewController: AVPlayerViewController) -> AVPlayerViewController {
73 | avPlayerViewController.player = avPlayer
74 | return avPlayerViewController
75 | }
76 | }
77 |
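Usage sketch (illustrative, not part of the source tree): the helper functions and the assumption that a ready `HLSNative` instance is handed in from elsewhere are the editor's; only `configure(playerView:)` and `configureWithDefaultSkin(avPlayerViewController:)` come from the file above.

import UIKit
import AVKit
import AVFoundation
import iOSClientPlayer

/// Attaches the tech's rendering layer to a host view using the default configuration.
func attachRendering(of tech: HLSNative, in containerView: UIView) {
    // A PlayerView is inserted at index 0 of `containerView` and pinned to its edges;
    // the returned AVPlayerLayer can be tweaked further if needed.
    let playerLayer = tech.configure(playerView: containerView)
    playerLayer.videoGravity = .resizeAspect
}

/// Hands playback over to the system-provided AVPlayerViewController UI instead.
func attachDefaultSkin(of tech: HLSNative, to controller: AVPlayerViewController) {
    _ = tech.configureWithDefaultSkin(avPlayerViewController: controller)
}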
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Components/HLSNative+NetworkBehavior.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNative+NetworkBehavior.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-02-22.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | extension HLSNative: NetworkBehavior {
12 |
13 | /// The desired limit, in bits per second, of network bandwidth consumption for this item.
14 | ///
15 | /// Setting a non-zero value will indicate the player should attempt to limit playback to that bitrate. If network bandwidth consumption cannot be lowered to meet the preferredPeakBitRate, it will be reduced as much as possible while continuing to play the item.
16 | ///
17 | /// `nil` will indicate no restrictions should be applied.
18 | public var preferredMaxBitrate: Int64? {
19 | set {
20 |             currentAsset?.playerItem.preferredPeakBitRate = newValue.map { Double($0) } ?? 0
21 | }
22 | get {
23 | guard let value = currentAsset?.playerItem.preferredPeakBitRate else { return nil }
24 | return Int64(value)
25 | }
26 | }
27 | }
28 |
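Usage sketch (illustrative): capping and uncapping bandwidth via `preferredMaxBitrate`; the helper name and the 3 Mbit/s figure are arbitrary examples.

import iOSClientPlayer

/// Applies or removes a bandwidth cap on the currently loaded item.
func applyBitrateCap(_ bitsPerSecond: Int64?, on tech: HLSNative) {
    // A non-nil value asks the player to stay at or below that bitrate;
    // nil maps to preferredPeakBitRate = 0, i.e. no restriction.
    tech.preferredMaxBitrate = bitsPerSecond
}

// applyBitrateCap(3_000_000, on: tech)  // limit to roughly 3 Mbit/s
// applyBitrateCap(nil, on: tech)        // lift the restriction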
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Components/HLSNative+StartTime.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNative+StartTime.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-23.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Describes where playback should start, for example when restoring a bookmark.
12 | public enum StartOffset {
13 | /// Default behaviour applies
14 | case defaultStartTime
15 |
16 | /// Playback should start from the specified `offset` into the buffer (milliseconds)
17 | case startPosition(position: Int64)
18 |
19 | /// Playback should start from the specified `wallclock timestamp` in unix epoch time (milliseconds)
20 | case startTime(time: Int64)
21 | }
22 |
23 | /// `HLSNative` optionally support setting a `StartTimeDelegate` to handle start time.
24 | ///
25 | /// Classes conforming to `StartTimeDelegate` are expected to provide a valid start time during the initialization process of a new `MediaSource`
26 | public protocol StartTimeDelegate: AnyObject {
27 | /// During the initialization process, `HLSNative` will ask its delegate for a `StartOffset`.
28 | ///
29 |     /// Protocol adopters can use this method to, for example, implement a bookmarking service.
30 | ///
31 | /// - parameter source: The `MediaSource` for which this start time request concerns.
32 | /// - parameter tech: Tech which will apply the start time.
33 | /// - returns: a valid `StartOffset`
34 | func startTime(for source: MediaSource, tech: HLSNative) -> StartOffset
35 | }
36 |
37 | /// `HLSNative` adoption of `StartTime`
38 | extension HLSNative: StartTime {
39 | /// Returns a target buffer `offset` (in milliseconds) to start playback if it has been specified, else `nil`.
40 | ///
41 | /// If a `StartTimeDelegate` has been specified, it will take precedence over deciding the start time
42 | public var startPosition: Int64? {
43 | if let delegate = startTimeConfiguration.startTimeDelegate, let source = currentSource {
44 | let value = delegate.startTime(for: source, tech: self)
45 | switch value {
46 | case let .startPosition(position: result): return result
47 | default: return nil
48 | }
49 | }
50 | else {
51 | switch startTimeConfiguration.startOffset {
52 | case .startPosition(position: let value): return value
53 | default: return nil
54 | }
55 | }
56 | }
57 |
58 | /// Returns a target timestamp in wallclock unix epoch time (in milliseconds) to start playback if it has been specified, else `nil`.
59 | ///
60 | /// If a `StartTimeDelegate` has been specified, it will take precedence over deciding the start time
61 | public var startTime: Int64? {
62 | if let delegate = startTimeConfiguration.startTimeDelegate, let source = currentSource {
63 | let value = delegate.startTime(for: source, tech: self)
64 | switch value {
65 | case let .startTime(time: result): return result
66 | default: return nil
67 | }
68 | }
69 | else {
70 | switch startTimeConfiguration.startOffset {
71 | case .startTime(time: let value): return value
72 | default: return nil
73 | }
74 | }
75 | }
76 |
77 | /// Sets the `startPosition` (in milliseconds) to the specified `position` relative to the playback buffer.
78 | ///
79 | /// Specifying `nil` reverts to the default behaviour for startup but will not remove any `StartTimeDelegate` set.
80 | public func startTime(atPosition position: Int64?) {
81 | startTimeConfiguration.startOffset = position != nil ? .startPosition(position: position!) : .defaultStartTime
82 | }
83 |
84 | /// Sets the `startTime` to the specified `timestamp` in wallclock unix epoch time. (in milliseconds)
85 | ///
86 | /// Specifying `nil` reverts to the default behaviour for startup but will not remove any `StartTimeDelegate` set.
87 | public func startTime(atTime timestamp: Int64?) {
88 | startTimeConfiguration.startOffset = timestamp != nil ? .startTime(time: timestamp!) : .defaultStartTime
89 | }
90 | }
91 |
92 | extension HLSNative {
93 | /// Specifies `startTime` will be handled by a delegate responsible for supplying the correct `StartOffset`.
94 | ///
95 | /// This will take precedence over any static `startOffset` behavior set. Specifying `nil` will remove the current delegate
96 | public func startTime(byDelegate delegate: StartTimeDelegate?) {
97 | startTimeConfiguration.startTimeDelegate = delegate
98 | }
99 |
100 | internal func startOffset(for mediaSource: MediaAsset) -> StartOffset {
101 | if let delegateOffset = startTimeConfiguration.startTimeDelegate?.startTime(for: mediaSource.source, tech: self) {
102 | return delegateOffset
103 | }
104 | return startTimeConfiguration.startOffset
105 | }
106 | }
107 |
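Usage sketch (illustrative): a minimal `StartTimeDelegate` that resumes from a stored buffer offset. The delegate class and its `resumeOffset` property are invented for the example; the protocol requirement and the `startTime(...)` configuration calls come from the file above.

import Foundation
import iOSClientPlayer

/// Resumes playback from a previously stored buffer position, if one exists.
final class ResumeStartTimeDelegate: StartTimeDelegate {
    /// Stored offset into the buffer, in milliseconds (example storage).
    var resumeOffset: Int64?

    func startTime(for source: MediaSource, tech: HLSNative) -> StartOffset {
        guard let offset = resumeOffset else { return .defaultStartTime }
        return .startPosition(position: offset)
    }
}

// Wiring (illustrative):
// tech.startTime(byDelegate: delegate)   // delegate takes precedence over static offsets
// tech.startTime(atPosition: 30_000)     // or: a static 30 s buffer offset
// tech.startTime(atTime: nil)            // revert to the default start behaviour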
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Components/HLSNative+TrackSelectable.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNative+TrackSelectable.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-02-07.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 | extension HLSNative: TrackSelectable {
12 | // MARK: Audio
13 | /// Returns the audio related `MediaGroup`
14 | public var audioGroup: MediaGroup? {
15 | return currentAsset?
16 | .playerItem
17 | .audioGroup
18 | }
19 |
20 |
21 |     /// Returns all the available `AVAssetVariant`s
22 |     @available(iOS 15.0, tvOS 15.0, *)
23 | public var variants: [AVAssetVariant]? {
24 | return currentAsset?.urlAsset.variants
25 | }
26 |
27 |
28 |
29 | /// Returns the default audio track, or `nil` if unavailable
30 | public var defaultAudioTrack: MediaTrack? {
31 | return audioGroup?.defaultTrack
32 | }
33 |
34 | /// Returns all associated audio tracks
35 | public var audioTracks: [MediaTrack] {
36 | return audioGroup?.tracks ?? []
37 | }
38 |
39 | /// Returns the selected audio track if available, otherwise `nil`
40 | public var selectedAudioTrack: MediaTrack? {
41 | return audioGroup?.selectedTrack
42 | }
43 |
44 |     /// Indicates if it is possible to select no audio track
45 | public var allowsEmptyAudioSelection: Bool {
46 | return audioGroup?.mediaGroup.allowsEmptySelection ?? true
47 | }
48 |
49 | /// Selects the specified audio track or, if `allowsEmptyAudioSelection` == true, select no audio track
50 | ///
51 | /// - parameter track: The audio track to select
52 | public func selectAudio(track: MediaTrack?) {
53 | select(track: track, inGroup: audioGroup?.mediaGroup)
54 | UserDefaults.standard.set(track?.extendedLanguageTag, forKey: "lastSelectedAudioTrackLanguageTag")
55 | UserDefaults.standard.set(
56 | track?.mediaOption.hasMediaCharacteristic(.describesVideoForAccessibility),
57 | forKey: "doesLastSelectedAudioTrackDescribeVideo"
58 | )
59 | }
60 |
61 | /// Selects the specified audio language if available or, if `allowsEmptyAudioSelection` == true, select no audio track
62 | ///
63 | /// - parameter language: The RFC 4646 language tag identifying the track
64 | public func selectAudio(language: String?) {
65 | guard let language = language else {
66 | selectAudio(track: nil)
67 | return
68 | }
69 | guard let option = audioGroup?.mediaSelectionOption(forLanguage: language) else { return }
70 | selectAudio(track: MediaTrack(mediaOption: option))
71 | }
72 |
73 |
74 |     /// Selects the audio track with the specified `mediaTrackId` if available or, if `allowsEmptyAudioSelection` == true, select no audio track
75 | ///
76 | /// - parameter mediaTrackId: mediaTrackId of the track
77 | public func selectAudio(mediaTrackId: Int?) {
78 | guard let mediaTrackId = mediaTrackId else {
79 | selectAudio(track: nil)
80 | return
81 | }
82 | guard let option = audioGroup?.mediaSelectionOption(forId: mediaTrackId) else { return }
83 | selectAudio(track: option)
84 | }
85 |
86 |     /// Selects the audio track with the specified `title` if available or, if `allowsEmptyAudioSelection` == true, select no audio track
87 | ///
88 | /// - parameter title: title of the track
89 | public func selectAudio(title: String?) {
90 | guard let title = title else {
91 | selectAudio(track: nil)
92 | return
93 | }
94 |         guard let track = audioGroup?.mediaSelectionOption(forTitle: title) else { return }
95 | selectAudio(track: track)
96 | }
97 |
98 |
99 | // MARK: Text
100 | /// Returns the text related `MediaGroup`
101 | public var textGroup: MediaGroup? {
102 | return currentAsset?
103 | .playerItem
104 | .textGroup
105 | }
106 |
107 | /// Returns the default text track, or `nil` if unavailable
108 | public var defaultTextTrack: MediaTrack? {
109 | return textGroup?.defaultTrack
110 | }
111 |
112 | /// Returns all associated text tracks
113 | public var textTracks: [MediaTrack] {
114 | return textGroup?.tracks ?? []
115 | }
116 |
117 | /// Returns the selected text track if available, otherwise `nil`
118 | public var selectedTextTrack: MediaTrack? {
119 | return textGroup?.selectedTrack
120 | }
121 |
122 | /// Indicates if it is possible to select no text track
123 | public var allowsEmptyTextSelection: Bool {
124 | return textGroup?.mediaGroup.allowsEmptySelection ?? true
125 | }
126 |
127 | /// Selects the specified text track or, if `allowsEmptyTextSelection` == true, select no text track
128 | ///
129 | /// - parameter track: The text track to select
130 | public func selectText(track: MediaTrack?) {
131 | select(track: track, inGroup: textGroup?.mediaGroup)
132 | UserDefaults.standard.set(track?.extendedLanguageTag, forKey: "lastSelectedTextTrackLanguageTag")
133 | UserDefaults.standard.set(track?.mediaOption.mediaType.rawValue, forKey: "lastSelectedTextTrackMediaType")
134 | UserDefaults.standard.set(
135 | track?.mediaOption.hasMediaCharacteristic(.transcribesSpokenDialogForAccessibility),
136 | forKey: "doesLastSelectedTextTrackTranscribeDialog"
137 | )
138 | }
139 |
140 | /// Selects the specified text language if available or, if `allowsEmptyTextSelection` == true, select no text track
141 | ///
142 | /// - parameter language: The RFC 4646 language tag identifying the track
143 | public func selectText(language: String?) {
144 | guard let language = language else {
145 | selectText(track: nil)
146 | return
147 | }
148 | guard let option = textGroup?.mediaSelectionOption(forLanguage: language) else { return }
149 | selectText(track: MediaTrack(mediaOption: option))
150 | }
151 |
152 |     /// Selects the text track with the specified `mediaTrackId` if available or, if `allowsEmptyTextSelection` == true, select no text track
153 | ///
154 | /// - parameter mediaTrackId: mediaTrackId of the track
155 | public func selectText(mediaTrackId: Int?) {
156 | guard let mediaTrackId = mediaTrackId else {
157 | selectText(track: nil)
158 | return
159 | }
160 | guard let option = textGroup?.mediaSelectionOption(forId: mediaTrackId) else { return }
161 | selectText(track: option)
162 | }
163 |
164 |     /// Selects the text track with the specified `title` if available or, if `allowsEmptyTextSelection` == true, select no text track
165 | ///
166 | /// - parameter title: title of the track
167 | public func selectText(title: String?) {
168 | guard let title = title else {
169 | selectText(track: nil)
170 | return
171 | }
172 | guard let track = textGroup?.mediaSelectionOption(forTitle: title) else { return }
173 | selectText(track: track)
174 | }
175 |
176 | // MARK: Private
177 | /// Convenience method selecting a track in a group
178 | private func select(track: MediaTrack?, inGroup group: AVMediaSelectionGroup?) {
179 | guard let group = group else { return }
180 | currentAsset?.playerItem.select(track?.mediaOption, in: group)
181 |
182 |         // Keep the selected subtitle in UserDefaults for downloaded assets.
183 |         // This is required for fast seeking as AVFoundation can sometimes lose the subtitle track.
184 |         if let urlAsset = currentAsset?.urlAsset, let assetCache = urlAsset.assetCache {
185 |             if assetCache.isPlayableOffline {
186 |                 // Store the currently selected subtitle track in UserDefaults
187 |                 UserDefaults.standard.set(track?.extendedLanguageTag, forKey: "prefferedMediaSelection")
188 | } else {
189 | // do nothing
190 | }
191 | }
192 |
193 |
194 | }
195 |
196 |     /// Convenience method for setting the preferred peak bitrate on the current player item
197 |     public func setBitRate(selectedBitRate: Double) {
198 | currentAsset?.playerItem.preferredPeakBitRate = selectedBitRate
199 | }
200 | }
201 |
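Usage sketch (illustrative): listing tracks and selecting by RFC 4646 language tag; the helper function and the language tags are examples only.

import iOSClientPlayer

/// Inspects the available tracks and applies simple language preferences.
func applyLanguagePreferences(on tech: HLSNative) {
    print("audio:", tech.audioTracks.map { $0.extendedLanguageTag })
    print("text:", tech.textTracks.map { $0.extendedLanguageTag })

    // Unknown language tags are silently ignored by the guards in the file above.
    tech.selectAudio(language: "en")
    tech.selectText(language: "sv")

    // Passing nil clears the selection where the group allows an empty selection.
    // tech.selectText(language: nil)
}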
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Extensions/AVAsset+LoadableKeys.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVAsset+LoadableKeys.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | extension AVAsset {
13 | /// Convenience `enum` supplying *typed key paths* for loadable resources on `AVAsset`
14 | enum LoadableKeys: String {
15 | /// Duration
16 | case duration = "duration"
17 |
18 | /// Tracks
19 | case tracks = "tracks"
20 |
21 | /// Playable
22 | case playable = "playable"
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Extensions/AVMediaSelectionGroup+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVMediaSelectionGroup+Extensions.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-02-20.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 | internal extension AVMediaSelectionGroup {
12 |
13 |     // Convenience property returning all the `AVMediaSelectionOption`s for the group
14 | var tracks: [MediaTrack] {
15 |         let tracks = options.enumerated().map { index, option in
16 |             MediaTrack(mediaOption: option, id: index)
17 |         }
18 |
19 | return tracks
20 | }
21 |
22 | // Convenience property returning the default `AVMediaSelectionOption` for the group
23 | var defaultTrack: MediaTrack? {
24 | guard let option = defaultOption else { return nil }
25 |
26 | guard let index = options.firstIndex(where: { $0 == option }) else { return nil }
27 | return MediaTrack(mediaOption: option, id: index)
28 | }
29 |
30 |     /// Convenience method finding the best matching `AVMediaSelectionOption` for a language, optionally filtered by media type and accessibility characteristics
31 | func track(
32 | forLanguage language: String,
33 | andType mediaType: AVMediaType?,
34 | shouldDescribeVideo: Bool?,
35 | shouldTranscribeDialog: Bool?
36 | ) -> AVMediaSelectionOption? {
37 | let filteredTracks = options.filter { $0.extendedLanguageTag == language }
38 | let bestTrack = filteredTracks.first { track in
39 | let isMediaTypeMatched = mediaType.map { $0 == track.mediaType } ?? true
40 | let isTrackDescribingVideo = track.hasMediaCharacteristic(.describesVideoForAccessibility)
41 | let isDescribeVideoMatched = shouldDescribeVideo.map { isTrackDescribingVideo == $0 } ?? true
42 | let isTrackTranscribingDialog = track.hasMediaCharacteristic(.transcribesSpokenDialogForAccessibility)
43 | let isTranscribeDialogMatched = shouldTranscribeDialog.map { isTrackTranscribingDialog == $0 } ?? true
44 | return isMediaTypeMatched && isDescribeVideoMatched && isTranscribeDialogMatched
45 | }
46 | return bestTrack ?? filteredTracks.first
47 | }
48 |
49 |     /// Convenience method finding a track by `mediaTrackId`
50 | func track(forId mediaTrackId: Int) -> MediaTrack? {
51 | if let foundTrack = tracks.filter({ $0.mediaTrackId == mediaTrackId }).first {
52 | return foundTrack
53 | } else {
54 | return nil
55 | }
56 | }
57 |
58 |     /// Convenience method finding a track by `title`
59 |     func track(forTitle title: String) -> MediaTrack? {
60 |         return tracks.first { $0.title == title }
61 | }
62 |
63 |     /// Convenience method wrapping the selected `AVMediaSelectionOption` as a `MediaTrack`
64 | func selectedTrack(media: AVMediaSelectionOption) -> MediaTrack? {
65 | guard let index = options.firstIndex(where: { $0 == media }) else { return nil }
66 | return MediaTrack(mediaOption: media, id: index)
67 | }
68 | }
69 |
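A hypothetical internal call site for the matching helper above, assuming code inside the iOSClientPlayer module holds a legible `AVMediaSelectionGroup`:

import AVFoundation

// Prefer a Swedish subtitle option that transcribes dialog; fall back to any
// Swedish option when no characteristic-matched track exists.
func preferredSwedishSubtitle(in group: AVMediaSelectionGroup) -> AVMediaSelectionOption? {
    return group.track(
        forLanguage: "sv",
        andType: .subtitle,
        shouldDescribeVideo: nil,
        shouldTranscribeDialog: true
    )
}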
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Extensions/AVPlayer+KeyValueObservable.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVPlayer+KeyValueObservable.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | /// Defines typed *Key Value Observable* paths for `AVPlayer`.
13 | extension AVPlayer: KeyValueObservable {
14 | typealias ObservableKeys = ObservableKey
15 |
16 | // MARK: ObservableKeys
17 | enum ObservableKey: String {
18 | /// `avPlayer.status`
19 | case status = "status"
20 |
21 | /// `avPlayer.rate`
22 | case rate = "rate"
23 |
24 | /// `avPlayer.timeControlStatus`
25 | case timeControlStatus = "timeControlStatus"
26 |
27 | /// `avPlayer.reasonForWaitingToPlay`
28 | case reasonForWaitingToPlay = "reasonForWaitingToPlay"
29 |
30 | /// `avPlayer.currentItem`
31 | case currentItem = "currentItem"
32 |
33 |         /// `avPlayer.currentItem.timedMetadata`
34 | case currentItemTimedMetadata = "currentItem.timedMetadata"
35 |
36 | /// `avPlayer.isExternalPlaybackActive`
37 | case isExternalPlaybackActive = "externalPlaybackActive"
38 | }
39 | }
40 |
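The properties behind these keys can also be observed directly with Foundation's block-based KVO, independent of the framework's internal observer types. A standalone sketch; `RateLogger` is an illustrative name:

import AVFoundation

/// Logs rate changes on an AVPlayer, i.e. the property behind the `.rate` key above.
final class RateLogger: NSObject {
    private var observation: NSKeyValueObservation?

    init(player: AVPlayer) {
        super.init()
        observation = player.observe(\.rate, options: [.new]) { _, change in
            print("rate changed to", change.newValue ?? 0)
        }
    }
}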
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Extensions/AVPlayerItem+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVPlayerItem+Extensions.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 | /// Defines typed *Key Value Observable* paths for `AVPlayerItem`.
12 | extension AVPlayerItem: KeyValueObservable {
13 | typealias ObservableKeys = ObservableKey
14 |
15 | // MARK: ObservableKeys
16 | enum ObservableKey: String {
17 | /// `avPlayerItem.status`
18 | case status = "status"
19 |
20 | /// `avPlayerItem.tracks`
21 | case tracks = "tracks"
22 |
23 | /// `avPlayerItem.duration`
24 | case duration = "duration"
25 |
26 | /// `avPlayerItem.presentationSize`
27 | case presentationSize = "presentationSize"
28 |
29 | /// `avPlayerItem.timedMetadata`
30 | case timedMetadata = "timedMetadata"
31 |
32 | /// `avPlayerItem.playbackLikelyToKeepUp`
33 | case isPlaybackLikelyToKeepUp = "playbackLikelyToKeepUp"
34 |
35 | /// `avPlayerItem.playbackBufferFull`
36 | case isPlaybackBufferFull = "playbackBufferFull"
37 |
38 | /// `avPlayerItem.playbackBufferEmpty`
39 | case isPlaybackBufferEmpty = "playbackBufferEmpty"
40 |
41 | case seekableTimeRanges = "seekableTimeRanges"
42 |
43 | case loadedTimeRanges = "loadedTimeRanges"
44 | }
45 | }
46 |
47 | internal extension AVPlayerItem {
48 | // Convenience property returning the `AVMediaCharacteristic.audible`
49 | internal var audioGroup: MediaGroup? {
50 | guard let group = asset.mediaSelectionGroup(forMediaCharacteristic: .audible) else { return nil }
51 | return MediaGroup(mediaGroup: group, selectedMedia: selectedMediaOption(in: group))
52 | }
53 |
54 | // Convenience property returning the `AVMediaCharacteristic.legible`
55 | internal var textGroup: MediaGroup? {
56 | guard let group = asset.mediaSelectionGroup(forMediaCharacteristic: .legible) else { return nil }
57 | return MediaGroup(mediaGroup: group, selectedMedia: selectedMediaOption(in: group))
58 | }
59 | }
60 |
61 | // MARK: - TraceProvider Data
62 | internal extension AVPlayerItem {
63 | /// Gathers TraceProvider data into json format
64 | internal var traceProviderStatusData: [String: Any] {
65 | var json: [String: Any] = [
66 | "Message": "PLAYER_ITEM_STATUS_TRACE_ENTRY",
67 | ]
68 |
69 | var info: String = ""
70 | info += "PlaybackLikelyToKeepUp: \(isPlaybackLikelyToKeepUp) \n"
71 | info += "PlaybackBufferFull: \(isPlaybackBufferFull) \n"
72 | info += "PlaybackBufferEmpty: \(isPlaybackBufferEmpty) \n"
73 | if let urlAsset = asset as? AVURLAsset {
74 | info += "URL: \(urlAsset.url) \n"
75 | }
76 |
77 | switch status {
78 | case .failed:
79 | info += "PlayerItem.Status: .failed \n"
80 | info += "PlayerItem.Error: " + (error?.debugInfoString ?? "nil") + " \n"
81 | case .readyToPlay:
82 | info += "PlayerItem.Status: .readyToPlay \n"
83 | case .unknown:
84 | info += "PlayerItem.Status: .unknown \n"
85 | }
86 |
87 | json["Info"] = info
88 |
89 | return json
90 | }
91 | }
92 |
93 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Extensions/AVPlayerItemAccessLogEvent+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVPlayerItemAccessLogEvent+Extensions.swift
3 | // Player-iOS
4 | //
5 | // Created by Fredrik Sjöberg on 2018-05-23.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 | // MARK: - TraceProvider Data
12 | internal extension AVPlayerItemAccessLogEvent {
13 | /// Gathers TraceProvider data into json format
14 | internal var traceProviderData: [String: Any] {
15 | var json: [String: Any] = [
16 | "Message": "PLAYER_ITEM_ACCESS_LOG_ENTRY",
17 | ]
18 |
19 | var info: String = ""
20 | if let value = uri {
21 | info += "URI: \(value)\n"
22 | }
23 |
24 | if let value = serverAddress {
25 | info += "ServerAddress: \(value)\n"
26 | }
27 |
28 | if let value = playbackType {
29 | info += "PlaybackType: \(value)\n"
30 | }
31 |
32 | if numberOfStalls > 0 {
33 | info += "PlaybackStalls: \(numberOfStalls)\n"
34 | }
35 |
36 | if numberOfBytesTransferred >= 0 {
37 | info += "BytesTransferred: \(numberOfBytesTransferred)\n"
38 | }
39 |
40 | if numberOfDroppedVideoFrames > 0 {
41 | info += "DroppedVideoFrames: \(numberOfDroppedVideoFrames)\n"
42 | }
43 |
44 | if downloadOverdue > 0 {
45 | info += "SegmentDownloadsOverdue: \(downloadOverdue)\n"
46 | }
47 |
48 | let downloadedDuration = Int64(segmentsDownloadedDuration)
49 | if downloadedDuration >= 0 {
50 | info += "DurationOfDownloadedSegments: \(downloadedDuration)\n"
51 | }
52 |
53 | let watched = Int64(durationWatched)
54 |         if watched >= 0 {
55 | info += "DurationWatched: \(watched)\n"
56 | }
57 |
58 | let startTime = Int64(startupTime)
59 | if startTime > 0 {
60 | info += "StartupTime: \(startTime)\n"
61 | }
62 |
63 |
64 | json["Info"] = info
65 | return json
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Extensions/AVPlayerItemErrorLogEvent+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVPlayerItemErrorLogEvent+Extensions.swift
3 | // Player-iOS
4 | //
5 | // Created by Fredrik Sjöberg on 2018-05-23.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 | // MARK: - TraceProvider Data
12 | internal extension AVPlayerItemErrorLogEvent {
13 | /// Gathers TraceProvider data into json format
14 | internal var traceProviderData: [String: Any] {
15 | var json: [String: Any] = [
16 | "Message": "PLAYER_ITEM_ERROR_LOG_ENTRY",
17 | "Domain": errorDomain,
18 | "Code": errorStatusCode
19 | ]
20 |
21 | var info: String = ""
22 | if let comment = errorComment {
23 | info += "ErrorComment: \(comment) \n"
24 | }
25 |
26 | if let serverAddress = serverAddress {
27 | info += "ServerAddress: \(serverAddress) \n"
28 | }
29 |
30 | if let uri = uri {
31 | info += "URI: \(uri) \n"
32 | }
33 |
34 | json["Info"] = info
35 | return json
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/FairplayRequester.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FairplayRequester.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-06-04.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | /// Specialized *named* protocol extending `AVAssetResourceLoaderDelegate` intended to be used for *Fairplay* `DRM` management.
13 | public protocol FairplayRequester: AVAssetResourceLoaderDelegate {
14 | /// Should expose errors encountered during the validation process.
15 | var keyValidationError: Error? { get }
16 | }
17 |
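A trimmed conformance sketch (illustrative; the key-server round trip is only outlined in comments and no real DRM exchange is performed):

import AVFoundation
import iOSClientPlayer

/// Skeleton Fairplay requester; records errors but does not fetch content keys.
final class StubFairplayRequester: NSObject, FairplayRequester {
    private(set) var keyValidationError: Error?

    func resourceLoader(_ resourceLoader: AVAssetResourceLoader,
                        shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
        // A real implementation would build an SPC via
        // loadingRequest.streamingContentKeyRequestData(forApp:contentIdentifier:options:),
        // exchange it with the license server for a CKC, then call
        // loadingRequest.dataRequest?.respond(with: ckc) and loadingRequest.finishLoading(),
        // storing any failure in `keyValidationError`.
        return false
    }
}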
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/HLSNativeConfiguration.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNativeConfiguration.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-02-22.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Playback configuration specific for the `HLSNative` *tech*.
12 | public struct HLSNativeConfiguration {
13 | /// DRM agent used to validate the context source
14 | public let drm: FairplayRequester?
15 |
16 | /// The desired limit, in bits per second, of network bandwidth consumption for this item.
17 | ///
18 | /// Setting a non-zero value will indicate the player should attempt to limit playback to that bitrate. If network bandwidth consumption cannot be lowered to meet the preferredPeakBitRate, it will be reduced as much as possible while continuing to play the item.
19 | ///
20 | /// `nil` will indicate no restrictions should be applied.
21 | public let preferredMaxBitrate: Int64?
22 |
23 | public init(drm: FairplayRequester? = nil, preferredMaxBitrate: Int64? = nil) {
24 | self.drm = drm
25 | self.preferredMaxBitrate = preferredMaxBitrate
26 | }
27 | }
28 |
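Construction examples (illustrative values; `requester` stands for any `FairplayRequester` conformer created elsewhere):

import iOSClientPlayer

// No DRM, no bandwidth restriction.
let plainConfiguration = HLSNativeConfiguration()

// Fairplay-protected playback capped at roughly 5 Mbit/s.
func protectedConfiguration(using requester: FairplayRequester) -> HLSNativeConfiguration {
    return HLSNativeConfiguration(drm: requester, preferredMaxBitrate: 5_000_000)
}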
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/HLSNativeError.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNativeError.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-20.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import MapKit
11 |
12 |
13 | /// `HLSAVPlayerItemErrorLogEventError`: extended Tech error for `AVPlayerItemErrorLogEvent` errors
14 | public struct HLSAVPlayerItemErrorLogEventError: ExpandedError {
15 | public var code: Int
16 | public var message: String
17 | public var domain: String
18 | public var info: String?
19 | public init(code: Int, message: String, domain: String, info: String?) {
20 | self.code = code
21 | self.message = message
22 | self.domain = domain
23 | self.info = info
24 | }
25 | }
26 |
27 | /// `HLSNativeError` is the error type specific to the `HLSNative` `Tech`. It can manifest as both *native errors* to the framework and *nested errors* specific to underlying frameworks.
28 | /// Effective error handling thus requires a deeper understanding of the overall architecture.
29 | public enum HLSNativeError: ExpandedError {
30 | /// Media is missing a valid `URL` to load data from.
31 | case missingMediaUrl
32 |
33 | /// `Player` failed to prepare the media for playback.
34 | ///
35 | /// This occurs when trying to asynchronously load values (eg `properties`) on `AVURLAsset` in preparation for playback. Examples include:
36 | /// * `duration`
37 | /// * `tracks`
38 | /// * `playable`
39 | ///
40 |     /// Internally, `Player` calls `loadValuesAsynchronously(forKeys:)` and then checks the status of each *key* through `statusOfValue(forKey: error:)`. Any key-value pair which returns a `.failed` status will cause the preparation to fail, forwarding the associated error.
41 | ///
42 | /// For more information regarding the *async loading process* of `properties` on `AVAsset`s, please consult Apple's documentation regarding `AVAsynchronousKeyValueLoading`
43 | case failedToPrepare(errors: [Error])
44 |
45 |     /// The *asynchronous loading* of `AVURLAsset` `properties` succeeded but somehow `isPlayable` returned `false`.
46 | case loadedButNotPlayable
47 |
48 |     /// Media could not be readied for playback; the underlying `AVPlayerItem` status changed to `.failed`.
49 | case failedToReady(error: Error?)
50 |
51 |     /// Media could not complete playback.
52 | case failedToCompletePlayback(error: Error)
53 |
54 |     /// Content Key Validation failed with the specified error, or `nil` if the underlying error is expected.
55 | case failedToValdiateContentKey(error: Error?)
56 |
57 | /// Media preparation finished after `Tech` was torn down
58 | @available(*, deprecated: 2.0.85, message: "Deallocation of HLSNative during the media preparation phase is no longer considered an `Error`.")
59 | case techDeallocated
60 | }
61 |
62 | extension HLSNativeError {
63 |
64 | public var message: String {
65 | switch self {
66 | case .missingMediaUrl: return "MISSING_MEDIA_URL"
67 | case .failedToPrepare(errors: _): return "FAILED_TO_PREPARE"
68 | case .loadedButNotPlayable: return "LOADED_BUT_NOT_PLAYABLE"
69 | case .failedToReady(error: _): return "FAILED_TO_READY"
70 | case .failedToCompletePlayback(error: _): return "FAILED_TO_COMPLETE_PLAYBACK"
71 | case .failedToValdiateContentKey(error: _): return "FAILED_TO_VALIDATE_CONTENT_KEY"
72 | case .techDeallocated: return "TECH_DEALLOCATED"
73 | }
74 | }
75 |
76 | /// Returns detailed information about the error
77 | public var info: String? {
78 | switch self {
79 | case .missingMediaUrl: return "Missing media url"
80 | case .failedToPrepare(errors: let errors): return errors.map{ "\($0.debugInfoString)" }.joined(separator: "\n")
81 | case .loadedButNotPlayable: return "Asset loaded but not playable"
82 | case .failedToReady(error: let error): return error != nil ? error!.debugInfoString : "Unknown error"
83 | case .failedToCompletePlayback(error: let error): return error.debugInfoString
84 | case .failedToValdiateContentKey(error: let error): return error != nil ? error!.debugInfoString : "Unknown error"
85 | case .techDeallocated: return "Media preparation finished after Tech was deallocated"
86 | }
87 | }
88 | }
89 |
90 | extension HLSNativeError {
91 | /// Defines the specific code for the underlying error.
92 | public var code: Int {
93 | switch self {
94 | case .missingMediaUrl: return 101
95 | case .failedToPrepare(errors: _): return 102
96 | case .failedToReady(error: _): return 103
97 | case .loadedButNotPlayable: return 104
98 | case .failedToCompletePlayback(error: _): return 105
99 | case .failedToValdiateContentKey(error: _): return 106
100 | case .techDeallocated: return 107
101 | }
102 | }
103 | }
104 |
105 | extension HLSNativeError {
106 | public var domain: String { return String(describing: type(of: self))+"Domain" }
107 | }
108 |
109 | extension HLSNativeError {
110 | public var underlyingError: Error? {
111 | switch self {
112 | case .missingMediaUrl: return nil
113 | case .failedToPrepare(errors: let errors): return errors.first
114 | case .failedToReady(error: let error): return error
115 | case .loadedButNotPlayable: return nil
116 | case .failedToCompletePlayback(error: let error): return error
117 | case .failedToValdiateContentKey(error: let error): return error
118 | case .techDeallocated: return nil
119 | }
120 | }
121 | }
122 |
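Since every case exposes a stable `code`, `message`, `domain` and optional `info`, errors are straightforward to turn into log lines or analytics payloads. An illustrative sink:

import iOSClientPlayer

/// Formats an HLSNativeError for logging.
func report(_ error: HLSNativeError) {
    // e.g. "HLSNativeErrorDomain 104: LOADED_BUT_NOT_PLAYABLE - Asset loaded but not playable"
    var line = "\(error.domain) \(error.code): \(error.message)"
    if let info = error.info {
        line += " - \(info)"
    }
    if let underlying = error.underlyingError {
        line += " (underlying: \(underlying))"
    }
    print(line)
}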
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/HLSNativeWarning.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HLSNativeWarning.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2018-01-30.
6 | // Copyright © 2018 emp. All rights reserved.
7 | //
8 |
9 | import AVFoundation
10 |
11 |
12 | public enum HLSNativeWarning: WarningMessage {
13 |     /// Seekable ranges were empty
14 | case seekableRangesEmpty
15 |
16 | /// Seekable ranges contained a discontinuity
17 | case discontinuousSeekableRanges(seekableRanges: [CMTimeRange])
18 |
19 | /// The requested seek time was beyond the live point
20 | case seekTimeBeyondLivePoint(timestamp: Int64, livePoint: Int64)
21 |
22 | /// The supplied startTime was invalid for the seekable ranges.
23 | case invalidStartTime(startTime: Int64, seekableRanges: [CMTimeRange])
24 |
25 | /// Another media source was loaded before the currently loading source finalized preparation
26 | case mediaPreparationAbandoned(playSessionId: String, url: URL)
27 |
28 |     /// An error in the `CoreMediaErrorDomain` was logged for the player item, wrapping the associated error if available.
29 | case coreMediaErrorDomain(error: Error?)
30 |
31 |
32 | }
33 |
34 | extension HLSNativeWarning {
35 | public var message: String {
36 | switch self {
37 |         case .seekableRangesEmpty: return "Seekable ranges were empty"
38 | case .discontinuousSeekableRanges(seekableRanges: let ranges): return "Seekable ranges contain discontinuity \(ranges)"
39 | case .seekTimeBeyondLivePoint(timestamp: let timestamp, livePoint: let live): return "Requested seek time \(timestamp) was beyond live point \(live)"
40 | case .invalidStartTime(startTime: let time, seekableRanges: let ranges): return "Invalid start time, \(time) set beyond seekable ranges, \(ranges)"
41 | case .mediaPreparationAbandoned(playSessionId: let sessionId, url: let url): return "Preparation of media source with playsessionId: \(sessionId) was abandoned before finalizing. Url: \(url)"
42 |         case .coreMediaErrorDomain(error: let error): if let error = error as? HLSAVPlayerItemErrorLogEventError { return "PLAYER_ITEM_ERROR_LOG_ENTRY : CoreMediaErrorDomain : \n info: \(error.info ?? "nil") \n message : \(error.message) \n code: \(error.code) \n Error : \(error)" } else { return "PLAYER_ITEM_ERROR_LOG_ENTRY : CoreMediaErrorDomain" }
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Observation/DateRangeMetadataCollector.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DateRangeMetadataCollector.swift
3 | // Player
4 | //
5 | // Created by Udaya Sri Senarathne on 2022-08-22.
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 |
12 | /// AVDateRangeMetadataGroup Requester
13 | public protocol DateMetaDataRequester: AVPlayerItemMetadataCollectorPushDelegate {
14 | func setDelegate(_ metadataCollector: AVPlayerItemMetadataCollector)
15 | }
16 |
17 | /// AVDateRangeMetadataGroup Parser
18 | public protocol DateMetaDataParser {
19 | func dateMetaDataDidCollect(dateRangeMetadataGroups: [AVDateRangeMetadataGroup], indexesOfNewGroups: IndexSet, indexesOfModifiedGroups: IndexSet )
20 | }
21 |
22 |
23 | class DateRangeMetadataCollector : NSObject, DateMetaDataRequester {
24 |
25 | weak var metadataCollector: AVPlayerItemMetadataCollector?
26 | var parserDelegate: DateMetaDataParser?
27 |
28 | /// Set the delegate for metadataCollector
29 | /// - Parameter metadataCollector: `metadataCollector` AVPlayerItemMetadataCollector
30 | public func setDelegate(_ metadataCollector: AVPlayerItemMetadataCollector) {
31 | self.metadataCollector = metadataCollector
32 | self.metadataCollector?.setDelegate(self, queue: .main)
33 | }
34 |
35 |
36 | /// Delegate method for collecting AVDateRangeMetadataGroups
37 | /// - Parameters:
38 | /// - metadataCollector: AVPlayerItemMetadataCollector
39 | /// - metadataGroups: AVDateRangeMetadataGroup
40 | /// - indexesOfNewGroups: IndexSet
41 | /// - indexesOfModifiedGroups: IndexSet
42 | internal func metadataCollector(_ metadataCollector: AVPlayerItemMetadataCollector,
43 | didCollect metadataGroups: [AVDateRangeMetadataGroup],
44 | indexesOfNewGroups: IndexSet,
45 | indexesOfModifiedGroups: IndexSet) {
46 |
47 | guard let delegate = self.parserDelegate else { return }
48 | delegate.dateMetaDataDidCollect(dateRangeMetadataGroups: metadataGroups, indexesOfNewGroups: indexesOfNewGroups , indexesOfModifiedGroups: indexesOfModifiedGroups)
49 |
50 | }
51 | }
52 |
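A minimal `DateMetaDataParser` conformer (illustrative) that only logs the start date of newly collected `#EXT-X-DATERANGE` groups:

import Foundation
import AVFoundation
import iOSClientPlayer

/// Logs each newly collected date-range metadata group.
final class DateRangeLogger: DateMetaDataParser {
    func dateMetaDataDidCollect(dateRangeMetadataGroups: [AVDateRangeMetadataGroup],
                                indexesOfNewGroups: IndexSet,
                                indexesOfModifiedGroups: IndexSet) {
        for index in indexesOfNewGroups {
            print("New date range starting at", dateRangeMetadataGroups[index].startDate)
        }
    }
}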
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Observation/ItemObserver.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ItemObserver.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-11-20.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import AVFoundation
11 |
12 | /// Internal class wrapping `KVO` and `Notifications` related to `AVPlayerItem`
13 | internal class PlayerItemObserver: NotificationObserver, KeyValueObserver {
14 | internal typealias Object = AVPlayerItem
15 |
16 | internal var observers: [Observer] = []
17 | internal var tokens: [NotificationToken] = []
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Observation/KVOChange.swift:
--------------------------------------------------------------------------------
1 | //
2 | // KVOChange.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | internal struct KVOChange {
12 | /// The kind of the change.
13 | ///
14 | /// See also `NSKeyValueChangeKindKey`
15 | internal var kind: NSKeyValueChange? {
16 | return (self.rawDict?[.kindKey] as? UInt).flatMap(NSKeyValueChange.init)
17 | }
18 |
19 | /// The old value from the change.
20 | ///
21 | /// See also `NSKeyValueChangeOldKey`
22 | internal var old: Any? {
23 | return self.rawDict?[.oldKey]
24 | }
25 |
26 | /// The new value from the change.
27 | ///
28 | /// See also `NSKeyValueChangeNewKey`
29 | internal var new: Any? {
30 | return self.rawDict?[.newKey]
31 | }
32 |
33 | /// Whether this callback is being sent prior to the change.
34 | ///
35 | /// See also `NSKeyValueChangeNotificationIsPriorKey`
36 | internal var isPrior: Bool {
37 | return self.rawDict?[.notificationIsPriorKey] as? Bool ?? false
38 | }
39 |
40 | /// The indexes of the inserted, removed, or replaced objects when relevant.
41 | ///
42 | /// See also `NSKeyValueChangeIndexesKey`
43 | internal var indexes: IndexSet? {
44 | return self.rawDict?[.indexesKey] as? IndexSet
45 | }
46 |
47 | /// The raw change dictionary passed to `observeValueForKeyPath(_:ofObject:change:context:)`.
48 | internal let rawDict: [NSKeyValueChangeKey: Any]?
49 |
50 | internal init(rawDict: [NSKeyValueChangeKey: Any]?) {
51 | self.rawDict = rawDict
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Observation/KeyValueObservable.swift:
--------------------------------------------------------------------------------
1 | //
2 | // KeyValueObservable.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | /// Defines an object which has `ObservableKeys` to use with `KVO`
12 | internal protocol KeyValueObservable {
13 | /// Specifies observable keys
14 | associatedtype ObservableKeys
15 | }
16 |
--------------------------------------------------------------------------------
/Sources/iOSClientPlayer/Tech/HLS/Observation/KeyValueObserver.swift:
--------------------------------------------------------------------------------
1 | //
2 | // KeyValueObserver.swift
3 | // Player
4 | //
5 | // Created by Fredrik Sjöberg on 2017-04-07.
6 | // Copyright © 2017 emp. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 |
12 | /// `KVO` wrapper for convenient access to *key value observation*.
13 | ///
14 | /// For more information regarding *Key Value Observation*, please see Apple's documentation
15 | internal protocol KeyValueObserver {
16 | /// Observed object type.
17 | associatedtype Object: NSObject
18 |
19 | /// Storage for the *observables* used to track registered `KVO`.
20 | var observers: [Observer