├── .editorconfig
├── .gitignore
├── .husky
└── pre-commit
├── LICENSE
├── README.md
├── build
├── webpack.config.build.js
├── webpack.config.dev.js
└── webpack.config.test.js
├── cypress.config.cjs
├── cypress
├── e2e
│ ├── actions.cy.ts
│ └── utils.cy.ts
├── support
│ ├── audio-fixtures.ts
│ ├── audio-loader.ts
│ ├── commands.ts
│ └── e2e.ts
└── tsconfig.json
├── example
├── audio-connect.ts
├── audio-hls.ts
├── audio.ts
├── live-hls.ts
├── live.ts
├── src
│ ├── actions.ts
│ ├── console.ts
│ ├── dom.ts
│ ├── events.ts
│ ├── filters.ts
│ └── inputs.ts
├── video-hls.ts
└── video.ts
├── index.html
├── package.json
├── pnpm-lock.yaml
├── public
└── audio-files
│ ├── example.m4a
│ ├── example.mp3
│ ├── example.ogg
│ ├── mono-example.mp3
│ └── stereo-example.mp3
├── src
├── actions.ts
├── audio.ts
├── connect.ts
├── events.ts
├── filters.ts
├── hls.ts
├── index.ts
├── media-context.ts
├── media.ts
├── polyfills.ts
├── props.ts
├── types.ts
├── utils.ts
└── video.ts
├── tsconfig.json
└── vite.config.js
/.editorconfig:
--------------------------------------------------------------------------------
1 | # editorconfig.org
2 | root = true
3 |
4 | [*]
5 | indent_style = space
6 | indent_size = 2
7 | end_of_line = lf
8 | charset = utf-8
9 | trim_trailing_whitespace = true
10 | insert_final_newline = true
11 |
12 | [*.json]
13 | indent_size = 2
14 | indent_style = space
15 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## System and generated files
2 | .idea
3 | .DS_Store
4 | .sass-cache
5 |
6 | ## Directories
7 | log/
8 | dist/
9 | tmp/
10 | node_modules/
11 | bower_components/
12 |
13 | ## Files
14 | result.xml
15 | npm-debug.log
16 | cypress/videos/*
17 |
--------------------------------------------------------------------------------
/.husky/pre-commit:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | . "$(dirname -- "$0")/_/husky.sh"
3 |
4 | npx pretty-quick --staged
5 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2019 Podlove Project
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Pure HTML5 Audio Driver
2 | [](https://badge.fury.io/js/%40podlove%2Fhtml5-audio-driver.svg)
3 | [](https://github.com/feross/standard)
4 | [](http://commitizen.github.io/cz-cli/)
5 | [](https://app.fossa.io/projects/git%2Bgithub.com%2Fpodlove%2Fhtml5-audio-driver?ref=badge_shield)
6 |
7 | Opinionated low level functional bindings to control html5 audio
8 |
9 | ## Constraints (or what you won't find here)
10 | - A fully functional audio player with controls and all the fuss, instead you should use [mediaelement](https://github.com/mediaelement/mediaelement)
11 | - Support for multiple sounds and ambient control, instead you should use [howler.js](https://github.com/goldfire/howler.js)
12 | - No WebAudio, instead you should use [pizzicato](https://github.com/alemangui/pizzicato)
13 |
14 | ## Features (or what you will find here)
15 | - Full control over the audio element
16 | - Functional bindings to all necessary events
17 | - Composability for all audio actions
18 | - Helper functions to get relevant audio element properties
19 | - Written in vanilla es6 with only one dependency to [ramda](https://github.com/ramda/ramda)
20 |
21 | ## Installation
22 |
23 | `npm install html5-audio-driver` or `yarn add html5-audio-driver`
24 |
25 | ## Usage
26 |
27 | ### Creating an AudioElement
28 |
29 | If you already have an audio element defined, good for you: skip this and use the DOM reference. Otherwise you can use this helper, which will create an audio element without controls, preloading or loops:
30 |
31 | ```javascript
32 | import { audio } from '@podlove/html5-audio-driver'
33 |
34 | const myAudioElement = audio([{
35 | url: 'audio-files/example.m4a',
36 | mimeType: 'audio/mp4'
37 | }, {
38 | url: 'audio-files/example.mp3',
39 | mimeType: 'audio/mp3'
40 | }, {
41 | url: 'audio-files/example.ogg',
42 | mimeType: 'audio/ogg'
43 | }])
44 | ```
45 |
46 | `mimeType` is needed so the browser can decide which source is appropriate to use.
47 |
48 | ### Interacting with the audio element
49 |
50 | All audio element actions are curried and accept as their first parameter the audio element. Also each action returns the audio element:
51 |
52 | ```javascript
53 | import { compose } from 'ramda'
54 | import { play, setPlaytime } from '@podlove/html5-audio-driver/actions'
55 |
56 | const setPlaytimeAndPlay = compose(play, setPlaytime)(myAudioElement)
57 | // Sets the playtime to 50 seconds and plays the audio
58 | setPlaytimeAndPlay(50)
59 | ```
60 |
61 | For convenience, an `actions` composer is also available:
62 |
63 | ```javascript
64 | import { actions } from '@podlove/html5-audio-driver'
65 |
66 | const audioActions = actions(myAudioElement)
67 |
68 | audioActions.load()
69 | audioActions.play()
70 | audioActions.pause()
71 | audioActions.setPlaytime(50)
72 | audioActions.setRate(1.5)
73 | audioActions.mute()
74 | audioActions.unmute()
75 | ```
76 |
77 | #### Available Actions:
78 |
79 | | _Function_ | _Action_ | parameters |
80 | |---------------|-----------------------------------------------------------|--------------------------|
81 | | `play` | Safeplays the audio, initiates load if not already loaded | void |
82 | | `pause` | pauses the audio | void |
83 | | `load` | loads the audio | void |
84 | | `mute` | mutes the audio | void |
85 | | `unmute` | unmutes the audio | void |
86 | | `setRate` | sets the play rate | number: [0.5 ... 4] |
87 | | `setPlaytime` | sets the current play time | number: [0 ... duration] |
88 |
89 |
90 | ### Reacting to audio events
91 |
92 | All audio events are curried and accept as their first parameter the audio element. The second parameter is always the callback function. Each event returns a different set of audio properties, depending on the event scope:
93 |
94 | ```javascript
95 | import { onPlay } from '@podlove/html5-audio-driver/events'
96 |
97 | const playEvent = onPlay(myAudioElement)
98 |
99 | playEvent(console.log) // similar to onPlay(myAudioElement, console.log)
100 | /**
101 | * Will log audio properties on audio play:
102 | * {
103 | * duration,
104 | * buffered,
105 | * volume,
106 | * state,
107 | * playtime,
108 | * ended,
109 | * rate,
110 | * muted,
111 | * src,
112 | * paused,
113 | * playing
114 | * }
115 | */
116 | ```
117 |
118 | For convenience, an `events` composer is also available:
119 |
120 | ```javascript
121 | import { events } from '@podlove/html5-audio-driver'
122 |
123 | const audioEvents = events(myAudioElement)
124 |
125 | audioEvents.onLoading(console.log)
126 | audioEvents.onLoaded(console.log)
127 | audioEvents.onReady(console.log)
128 | audioEvents.onPlay(console.log)
129 | audioEvents.onPause(console.log)
130 | audioEvents.onBufferChange(console.log)
131 | audioEvents.onBuffering(console.log)
132 | audioEvents.onPlaytimeUpdate(console.log)
133 | audioEvents.onVolumeChange(console.log)
134 | audioEvents.onError(console.log)
135 | audioEvents.onDurationChange(console.log)
136 | audioEvents.onRateChange(console.log)
137 | audioEvents.onEnd(console.log)
138 | ```
139 |
140 | #### Available Events:
141 |
142 | | _Function_ | _Event_ | _Original_ | _Callback Payload_ | _Once_ |
143 | |---------------------|-------------------------------------------------------|-------------------|-------------------------------------------------------------------------|---------|
144 | | `onLoading` | When browser starts audio loading | `progress` | All props | `true` |
145 | | `onLoaded` | When browser loaded the entire file | `canplaythrough` | All props | `true` |
146 | | `onReady` | When browser has enough data to play | `canplay` | All props | `false` |
147 | | `onPlay` | When browser starts playing audio | `play` | All props | `false` |
148 | | `onPause` | When browser pauses audio | `pause` | All props | `false` |
149 | | `onEnd` | When browser reaches end of audio | `ended` | All props | `false` |
150 | | `onBufferChange` | When browser buffered a new audio segment | `progress` | buffered segments | `false` |
151 | | `onBuffering` | When browser waits for audio segments to play | `waiting` | All props | `false` |
152 | | `onPlaytimeUpdate` | When currentTime of audio changes | `timeupdate` | playtime | `false` |
153 | | `onVolumeChange` | When volume of audio changes | `volumechange` | volume | `false` |
154 | | `onError` | When an error occurred while playing the audio | `error` | `NETWORK_NO_SOURCE`, `NETWORK_EMPTY`, `NETWORK_LOADING`, `MEDIA_ERROR` | `false` |
155 | | `onDurationChange` | When browser has new information on audio duration | `durationchange` | duration | `false` |
156 | | `onRateChange` | When browser detects a change in audio playback rate | `ratechange` | rate | `false` |
157 | | `onFilterUpdate` | When a filter has been changed | `filterUpdated` | All props | `false` |
158 |
159 | ### Audio Element Properties
160 |
161 | Multiple different functions are provided to give you easy access to audio element properties. Initially most of them are undefined:
162 |
163 | ```javascript
164 |
165 | import { playing } from '@podlove/html5-audio-driver/props'
166 |
167 | playing(myAudioElement) // Will return false
168 | ```
169 |
170 | For convenience also a composed version is available giving you all available properties:
171 |
172 | ```javascript
173 | import { props } from '@podlove/html5-audio-driver/props'
174 |
175 | props(myAudioElement)
176 | /**
177 | * {
178 | * duration,
179 | * buffered,
180 | * volume,
181 | * state,
182 | * playtime,
183 | * ended,
184 | * rate,
185 | * muted,
186 | * src,
187 | * paused,
188 | * playing
189 | * }
190 | */
191 | ```
192 |
193 | #### Available Properties:
194 |
195 | | _Function_ | _Description_ | _Return Value_ | _Initial Value_ |
196 | |-------------|---------------------------------------------------|-----------------------------------------------------------------------------------------------|-----------------|
197 | | `duration` | Duration of audio in seconds | number | `undefined` |
198 | | `buffered` | Buffered audio segments start and end in seconds | [[number, number], ...] | `[]` |
199 | | `volume` | Audio volume | number: [0...1] | `undefined` |
200 | | `state` | Network State | `HAVE_NOTHING`, `HAVE_METADATA`, `HAVE_CURRENT_DATA`, `HAVE_FUTURE_DATA`, `HAVE_ENOUGH_DATA` | `undefined` |
201 | | `playtime`  | Current audio playtime position in seconds         | number                                                                                         | `undefined`     |
202 | | `ended` | Indicates if audio has ended | boolean | `undefined` |
203 | | `rate` | Audio playback rate | number: [0.5 ... 4] | `undefined` |
204 | | `muted` | Indicates if audio is muted | boolean | `undefined` |
205 | | `src` | Used audio source | string | `undefined` |
206 | | `paused` | Indicates if audio is paused | boolean | `undefined` |
207 | | `channels` | Available audio channels | number | `undefined` |
208 | | `playing` | Indicates if audio is playing | boolean | `false` |
209 |
210 |
211 | ## Handled HTML5 Quirks and Limitations (the nasty part :/)
212 |
213 | HTML5 audio was a needed addition to get rid of the Flash hell. Although it has been implemented in all the different browsers for multiple years, each implementation has its flaws. If you want to dive deeper into the topic, I recommend the [following article](https://24ways.org/2010/the-state-of-html5-audio).
214 |
215 | ### Play Action
216 |
217 | Using the `play` action will give you a safe function that suppresses most of the errors. One source of errors is that older browsers don't implement `audio.play()` as a promise. Also there is a race condition between `play` and `pause` that needs to be `.catch`ed.
218 |
219 | ### Playtime (CurrentTime)
220 |
221 | In Safari and mobile Safari it isn't possible to set the `currentTime` before loading the audio. You can set it but it won't mutate the audio `currentTime` value. `html5-audio-driver` therefore uses a custom `playtime` attribute that is synced with the `currentTime`.
222 |
223 | ### Mobile Environments
224 |
225 | To `play` audio on mobile devices you have to use a direct user interaction to trigger the audio. Also `volume` is not available on mobile devices.
226 |
227 | ## Legacy Browser Support (IE11)
228 |
229 | In case you need IE11 support you have to provide some polyfills in your application. Have a look at the [test polyfills](test/polyfills.js) to see a working example.
230 |
231 | ## Publishing
232 |
233 | Run `npm publish:prepare`, move to the `dist/` folder and run `npm publish --public`
234 |
235 |
236 | ## License
237 | [](https://app.fossa.io/projects/git%2Bgithub.com%2Fpodlove%2Fhtml5-audio-driver?ref=badge_large)
238 |
--------------------------------------------------------------------------------
/build/webpack.config.build.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 | const file = name => `./src/${name}.js`
3 |
4 | module.exports = {
5 | mode: 'production',
6 | entry: {
7 | index: file('index'),
8 | actions: file('actions'),
9 | audio: file('audio'),
10 | video: file('video'),
11 | events: file('events'),
12 | props: file('props'),
13 | utils: file('utils'),
14 | hls: file('hls'),
15 | filters: file('filters'),
16 | media: file('media'),
17 | connect: file('connect')
18 | },
19 | output: {
20 | path: path.resolve('./dist'),
21 | filename: '[name].js',
22 | libraryTarget: 'umd',
23 | library: '@podlove/html5-audio-driver'
24 | },
25 | module: {
26 | rules: [{
27 | test: /\.(js)$/,
28 | exclude: /node_modules/,
29 | use: {
30 | loader: 'babel-loader'
31 | }
32 | }]
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/build/webpack.config.dev.js:
--------------------------------------------------------------------------------
1 | const path = require("path");
2 | const CopyPlugin = require("copy-webpack-plugin");
3 |
4 | module.exports = {
5 | mode: "development",
6 | devtool: "source-map",
7 | entry: {
8 | audio: "./example/audio.js",
9 | "audio-hls": "./example/audio-hls.js",
10 | "live-hls": "./example/live-hls.js",
11 | live: "./example/live.js",
12 | video: "./example/video.js",
13 | "video-hls": "./example/video-hls.js",
14 | "audio-connect": "./example/audio-connect.js",
15 | },
16 | output: {
17 | filename: "./tmp/[name].js",
18 | },
19 | resolve: {
20 | alias: {
21 | "@podlove/html5-audio-driver": path.resolve(__dirname, "..", "src"),
22 | },
23 | },
24 | devServer: {
25 | host: "0.0.0.0",
26 | disableHostCheck: true,
27 | },
28 | module: {
29 | rules: [
30 | {
31 | test: /\.(js)$/,
32 | exclude: /node_modules/,
33 | use: {
34 | loader: "babel-loader",
35 | },
36 | },
37 | ],
38 | },
39 | plugins: [
40 | new CopyPlugin({
41 | patterns: [
42 | path.resolve(
43 | __dirname,
44 | "..",
45 | "node_modules",
46 | "milligram",
47 | "dist",
48 | "milligram.min.css"
49 | ),
50 | path.resolve(
51 | __dirname,
52 | "..",
53 | "node_modules",
54 | "normalize.css",
55 | "normalize.css"
56 | ),
57 | ],
58 | }),
59 | ],
60 | };
61 |
--------------------------------------------------------------------------------
/build/webpack.config.test.js:
--------------------------------------------------------------------------------
1 | // IO
2 | const path = require('path')
3 | const glob = require('glob')
4 |
5 | const scripts = glob.sync('./test/**/*.test.js').reduce((result, file) =>
6 | Object.assign({}, result, {
7 | [path.parse(file).name]: file
8 | }), {
9 | runtime: './test/runtime.js'
10 | })
11 |
12 | module.exports = {
13 | mode: 'development',
14 | devtool: 'source-map',
15 | entry: scripts,
16 | output: {
17 | path: path.resolve('./tmp'),
18 | filename: '[name].js'
19 | },
20 | resolve: {
21 | alias: {
22 | '@podlove/html5-audio-driver': path.resolve(__dirname, '..', 'src'),
23 | test: path.resolve('./test')
24 | }
25 | },
26 | module: {
27 | rules: [{
28 | test: /\.css$/,
29 | use: ['style-loader', 'css-loader']
30 | },
31 | {
32 | test: /\.(js)$/,
33 | exclude: /node_modules/,
34 | use: {
35 | loader: 'babel-loader'
36 | }
37 | }]
38 | },
39 | devServer: {
40 | contentBase: path.resolve('./tmp'),
41 | overlay: true,
42 | host: '0.0.0.0',
43 | disableHostCheck: true
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/cypress.config.cjs:
--------------------------------------------------------------------------------
1 | const { defineConfig } = require("cypress");
2 |
3 | module.exports = defineConfig({
4 | e2e: {
5 | setupNodeEvents(on, config) {
6 | // implement node event listeners here
7 | },
8 | },
9 | fileServerFolder: "public",
10 | });
11 |
--------------------------------------------------------------------------------
/cypress/e2e/actions.cy.ts:
--------------------------------------------------------------------------------
1 | import { compose } from "ramda";
2 | import { audio } from "../../src";
3 | import { onPlay } from "../../src/events";
4 | import {
5 | actions,
6 | setPlaytime,
7 | play,
8 | pause,
9 | load,
10 | mute,
11 | unmute,
12 | setVolume,
13 | setRate,
14 | } from "../../src/actions";
15 | import { duration, playing, muted, volume, rate } from "../../src/props";
16 | import { onError } from "../../src/events";
17 |
18 | import { audioFixture } from "../support/audio-fixtures";
19 | import { audioLoader } from "../support/audio-loader";
20 | import { MediaElement } from "../../src/types";
21 |
22 | describe("actions", () => {
23 | let audioElement: MediaElement;
24 |
25 | beforeEach(() => {
26 | audioElement = audio(audioFixture());
27 | onError(audioElement, console.log);
28 | });
29 |
30 | afterEach(() => {
31 | audioElement.remove();
32 | });
33 |
34 | describe("setPlaytime", () => {
35 | let playtimeSetter: (input: number) => MediaElement;
36 | let playAction: () => MediaElement;
37 |
38 | beforeEach(() => {
39 | playtimeSetter = setPlaytime(audioElement);
40 | playAction = play(audioElement);
41 | });
42 |
43 | it("should be a function", () => {
44 | expect(typeof setPlaytime).to.equal("function");
45 | });
46 |
47 | it("should return a function", () => {
48 | expect(typeof playtimeSetter).to.equal("function");
49 | });
50 |
51 | it("should set the playtime", () => {
52 | expect(playtimeSetter(9).playtime).to.equal(9);
53 | });
54 |
55 | it("should prevent playtimes less than 0", () => {
56 | expect(playtimeSetter(-10).playtime).to.equal(0);
57 | });
58 |
59 | it("should prevent playtime larger than duration", (done) => {
60 | playAction();
61 | audioLoader(
62 | audioElement,
63 | compose(
64 | done,
65 | (duration) => {
66 | expect(playtimeSetter(duration + 50).playtime).to.equal(duration);
67 | },
68 | duration
69 | )
70 | );
71 | });
72 |
73 | it("should use the playtime on play", (done) => {
74 | audioLoader(
75 | audioElement,
76 | compose(
77 | done,
78 | () => {
79 | playtimeSetter(50);
80 | playAction();
81 | expect(audioElement.playtime).to.be.at.least(50);
82 | expect(audioElement.currentTime).to.be.at.least(50);
83 | },
84 | duration
85 | )
86 | );
87 | });
88 | });
89 |
90 | describe("play", () => {
91 | let playAction: any;
92 | let playEvent: any;
93 |
94 | beforeEach(() => {
95 | playAction = play(audioElement);
96 | playEvent = onPlay(audioElement);
97 | });
98 |
99 | it("should be a function", () => {
100 | expect(typeof play).to.equal("function");
101 | });
102 |
103 | it("should return a function", () => {
104 | expect(typeof playAction).to.equal("function");
105 | });
106 |
107 | // This test will fail on mobile devices because a direct user interaction is required
108 | it("should play the audio", (done) => {
109 | playEvent(
110 | () => {
111 | done();
112 | },
113 | { once: true }
114 | );
115 | playAction();
116 | });
117 | });
118 |
119 | describe("pause", () => {
120 | let pauseAction: any;
121 | let playAction: any;
122 |
123 | beforeEach(() => {
124 | pauseAction = pause(audioElement);
125 | playAction = play(audioElement);
126 | });
127 |
128 | it("should be a function", () => {
129 | expect(typeof pause).to.equal("function");
130 | });
131 |
132 | it("should return a function", () => {
133 | expect(typeof pauseAction).to.equal("function");
134 | });
135 |
136 | it("should pause the audio", (done) => {
137 | audioLoader(audioElement, () => {
138 | playAction();
139 | setTimeout(() => {
140 | pauseAction();
141 | expect(playing(audioElement)).to.equal(false);
142 | done();
143 | }, 1000);
144 | });
145 | });
146 | });
147 |
148 | describe("load", () => {
149 | let loadAction: any;
150 |
151 | beforeEach(() => {
152 | loadAction = load(audioElement);
153 | });
154 |
155 | it("should be a function", () => {
156 | expect(typeof load).to.equal("function");
157 | });
158 |
159 | it("should return a function", () => {
160 | expect(typeof loadAction).to.equal("function");
161 | });
162 |
163 | it("should load the audio", (done) => {
164 | audioElement.addEventListener("canplay", () => done(), { once: true });
165 | loadAction();
166 | });
167 | });
168 |
169 | describe("mute", () => {
170 | let muteAction: any;
171 |
172 | beforeEach(() => {
173 | muteAction = mute(audioElement);
174 | });
175 |
176 | it("should be a function", () => {
177 | expect(typeof mute).to.equal("function");
178 | });
179 |
180 | it("should return a function", () => {
181 | expect(typeof muteAction).to.equal("function");
182 | });
183 |
184 | it("mute the audio", (done) => {
185 | audioLoader(audioElement, () => {
186 | expect(muted(audioElement)).to.equal(false);
187 | muteAction();
188 | expect(muted(audioElement)).to.equal(true);
189 | done();
190 | });
191 | });
192 | });
193 |
194 | describe("unmute", () => {
195 | let muteAction: any;
196 | let unmuteAction: any;
197 |
198 | beforeEach(() => {
199 | muteAction = mute(audioElement);
200 | unmuteAction = unmute(audioElement);
201 | });
202 |
203 | it("should be a function", () => {
204 | expect(typeof mute).to.equal("function");
205 | });
206 |
207 | it("should return a function", () => {
208 | expect(typeof unmuteAction).to.equal("function");
209 | });
210 |
211 | it("unmute the audio", (done) => {
212 | audioLoader(audioElement, () => {
213 | muteAction();
214 | expect(muted(audioElement)).to.equal(true);
215 | unmuteAction();
216 | expect(muted(audioElement)).to.equal(false);
217 | done();
218 | });
219 | });
220 | });
221 |
222 | // Important: setting volume on mobile is not supported!
223 | describe("setVolume", () => {
224 | let setVolumeAction: any;
225 |
226 | beforeEach(() => {
227 | setVolumeAction = setVolume(audioElement);
228 | setVolumeAction(1);
229 | });
230 |
231 | it("should be a function", () => {
232 | expect(typeof setVolume).to.equal("function");
233 | });
234 |
235 | it("should return a function", () => {
236 | expect(typeof setVolumeAction).to.equal("function");
237 | });
238 |
239 | it("sets the audio volume", (done) => {
240 | audioLoader(audioElement, () => {
241 | expect(volume(audioElement)).to.equal(1);
242 | setVolumeAction(0.5);
243 | expect(volume(audioElement)).to.equal(0.5);
244 | done();
245 | });
246 | });
247 |
248 | it("should prevent volume less than 0", (done) => {
249 | audioLoader(audioElement, () => {
250 | expect(volume(audioElement)).to.equal(1);
251 | setVolumeAction(-1);
252 | expect(volume(audioElement)).to.equal(0);
253 | done();
254 | });
255 | });
256 |
257 | it("should prevent volume larger than 1", (done) => {
258 | audioLoader(audioElement, () => {
259 | expect(volume(audioElement)).to.equal(1);
260 | setVolumeAction(2);
261 | expect(volume(audioElement)).to.equal(1);
262 | done();
263 | });
264 | });
265 | });
266 |
267 | describe("setRate", () => {
268 | let setRateAction: any;
269 |
270 | beforeEach(() => {
271 | setRateAction = setRate(audioElement);
272 | });
273 |
274 | it("should be a function", () => {
275 | expect(typeof setRate).to.equal("function");
276 | });
277 |
278 | it("should return a function", () => {
279 | expect(typeof setRateAction).to.equal("function");
280 | });
281 |
282 | it("sets the audio rate", (done) => {
283 | audioLoader(audioElement, () => {
284 | expect(rate(audioElement)).to.equal(1);
285 | setRateAction(0.5);
286 | expect(rate(audioElement)).to.equal(0.5);
287 | done();
288 | });
289 | });
290 |
291 | it("should prevent rate less than 0.5", (done) => {
292 | audioLoader(audioElement, () => {
293 | expect(rate(audioElement)).to.equal(1);
294 | setRateAction(-1);
295 | expect(rate(audioElement)).to.equal(0.5);
296 | done();
297 | });
298 | });
299 |
300 | it("should prevent rate larger than 4", (done) => {
301 | audioLoader(audioElement, () => {
302 | expect(rate(audioElement)).to.equal(1);
303 | setRateAction(5);
304 | expect(rate(audioElement)).to.equal(4);
305 | done();
306 | });
307 | });
308 | });
309 |
310 | describe("actions", () => {
311 | let audioActions: any;
312 | let availableActions: any;
313 |
314 | beforeEach(() => {
315 | audioActions = actions(audioElement);
316 | availableActions = Object.keys(audioActions);
317 | });
318 |
319 | it("should be a function", () => {
320 | expect(typeof actions).to.equal("function");
321 | });
322 |
323 | it("should return a object with actions", () => {
324 | expect(availableActions).to.deep.equal([
325 | "play",
326 | "pause",
327 | "load",
328 | "setPlaytime",
329 | "mute",
330 | "unmute",
331 | "setVolume",
332 | "setRate",
333 | ]);
334 | });
335 |
336 | it("should export an object with functions", () => {
337 | availableActions.map((action: any) => {
338 | expect(typeof audioActions[action]).to.equal("function");
339 | });
340 | });
341 | });
342 | });
343 |
--------------------------------------------------------------------------------
/cypress/e2e/utils.cy.ts:
--------------------------------------------------------------------------------
1 | import { audio } from "../../src";
2 | import { collectProperties, getNodeFromEvent } from "../../src/utils";
3 | import { audioFixture } from "../support/audio-fixtures";
4 | import { audioLoader } from "../support/audio-loader";
5 |
6 | describe("utils", () => {
7 | describe("collectProperties", () => {
8 | it("should be a function", () => {
9 | expect(typeof collectProperties).to.equal("function");
10 | });
11 |
12 | it("should return a function", () => {
13 | expect(typeof collectProperties({ foo: "bar" })).to.equal("function");
14 | });
15 |
16 | it("should call a list of function with a given value", () => {
17 | const propertySpyA = cy.spy();
18 | const propertySpyB = cy.spy();
19 |
20 | collectProperties({ propertySpyA, propertySpyB })("foo");
21 | expect(propertySpyA).to.have.been.calledWith("foo");
22 | expect(propertySpyB).to.have.been.calledWith("foo");
23 | });
24 | });
25 |
26 | describe("getNodeFromEvent", () => {
27 | let audioElement: HTMLAudioElement;
28 |
29 | beforeEach(() => {
30 | audioElement = audio(audioFixture());
31 | });
32 |
33 | afterEach(() => {
34 | audioElement.remove();
35 | });
36 |
37 | it("should be a function", () => {
38 | expect(typeof getNodeFromEvent).to.equal("function");
39 | });
40 |
41 | it("should extract the audio element from event", (done) => {
42 | audioLoader(audioElement, (target, event) => {
43 | expect(getNodeFromEvent(event)).to.equal(audioElement);
44 | done();
45 | });
46 | });
47 | });
48 | });
49 |
--------------------------------------------------------------------------------
/cypress/support/audio-fixtures.ts:
--------------------------------------------------------------------------------
1 | export const audioFixture = () => [
2 | {
3 | src: "/audio-files/example.m4a",
4 | type: "audio/mp4",
5 | },
6 | {
7 | src: "/audio-files/example.mp3",
8 | type: "audio/mp3",
9 | },
10 | {
11 | src: "/audio-files/example.ogg",
12 | type: "audio/ogg",
13 | },
14 | ];
15 |
--------------------------------------------------------------------------------
/cypress/support/audio-loader.ts:
--------------------------------------------------------------------------------
1 | import { MediaElement } from "../../src/types";
2 |
3 | export const audioLoader = (
4 | audio: MediaElement,
5 | cb: (target: any, event: Event) => void
6 | ) => {
7 | audio.addEventListener(
8 | "canplay",
9 | (event) => {
10 | cb(event.target, event);
11 | },
12 | { once: true }
13 | );
14 | audio.load();
15 | };
16 |
--------------------------------------------------------------------------------
/cypress/support/commands.ts:
--------------------------------------------------------------------------------
1 | // ***********************************************
2 | // This example commands.js shows you how to
3 | // create various custom commands and overwrite
4 | // existing commands.
5 | //
6 | // For more comprehensive examples of custom
7 | // commands please read more here:
8 | // https://on.cypress.io/custom-commands
9 | // ***********************************************
10 | //
11 | //
12 | // -- This is a parent command --
13 | // Cypress.Commands.add('login', (email, password) => { ... })
14 | //
15 | //
16 | // -- This is a child command --
17 | // Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... })
18 | //
19 | //
20 | // -- This is a dual command --
21 | // Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... })
22 | //
23 | //
24 | // -- This will overwrite an existing command --
25 | // Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... })
26 |
--------------------------------------------------------------------------------
/cypress/support/e2e.ts:
--------------------------------------------------------------------------------
1 | // ***********************************************************
2 | // This example support/e2e.js is processed and
3 | // loaded automatically before your test files.
4 | //
5 | // This is a great place to put global configuration and
6 | // behavior that modifies Cypress.
7 | //
8 | // You can change the location of this file or turn off
9 | // automatically serving support files with the
10 | // 'supportFile' configuration option.
11 | //
12 | // You can read more here:
13 | // https://on.cypress.io/configuration
14 | // ***********************************************************
15 |
16 | // Import commands.js using ES2015 syntax:
17 | import './commands'
18 |
19 | // Alternatively you can use CommonJS syntax:
20 | // require('./commands')
--------------------------------------------------------------------------------
/cypress/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": ["es5", "dom"],
5 | "types": ["cypress", "node"]
6 | },
7 | "include": ["**/*.ts"]
8 | }
9 |
--------------------------------------------------------------------------------
/example/audio-connect.ts:
--------------------------------------------------------------------------------
1 | import { compose, defaultTo, path, prop } from "ramda";
2 |
3 | import { connect, props } from "../src";
4 | import {
5 | loadButton,
6 | playButton,
7 | pauseButton,
8 | muteButton,
9 | unmuteButton,
10 | restartButton,
11 | } from "./src/actions";
12 | import { volumeInput, rateInput, progressBar } from "./src/inputs";
13 | import { log } from "./src/console";
14 |
15 | export default () => {
16 | const connector = connect.audio();
17 |
18 | const load = () =>
19 | connector.load([
20 | {
21 | src: "/audio-files/example.m4a",
22 | type: "audio/mp4",
23 | },
24 | {
25 | src: "./audio-files/example.mp3",
26 | type: "audio/mp3",
27 | },
28 | {
29 | src: "./audio-files/example.ogg",
30 | type: "audio/pgg",
31 | },
32 | ]);
33 |
34 | // actions
35 | loadButton?.addEventListener("click", () => load());
36 | playButton?.addEventListener("click", () => connector.actions.play());
37 | pauseButton?.addEventListener("click", () => connector.actions.pause());
38 | muteButton?.addEventListener("click", () => connector.actions.mute());
39 | unmuteButton?.addEventListener("click", () => connector.actions.unmute());
40 | restartButton?.addEventListener(
41 | "click",
42 | compose(
43 | () => connector.actions.play(),
44 | () => connector.actions.setPlaytime(0),
45 | () => connector.actions.pause()
46 | )
47 | );
48 |
49 | // inputs
50 | volumeInput?.addEventListener(
51 | "change",
52 | compose(
53 | (val) => connector.actions.setVolume(val),
54 | path(["target", "value"])
55 | )
56 | );
57 | rateInput?.addEventListener(
58 | "change",
59 | compose((val) => connector.actions.setRate(val), path(["target", "value"]))
60 | );
61 | progressBar.addEventListener(
62 | "change",
63 | compose(
64 | (val: any) => connector.actions.setPlaytime(val * 250),
65 | defaultTo(0),
66 | path(["target", "value"])
67 | )
68 | );
69 |
70 | // Props
71 | const renderProps = () => {
72 | const element = document.getElementById("props");
73 | const playerProperties = props(connector.mediaElement);
74 |
75 | while (element?.firstChild) {
76 | element.removeChild(element.firstChild);
77 | }
78 |
79 | Object.keys(playerProperties).map((key: any) => {
80 | const propNode = document.createElement("tr");
81 | propNode.innerHTML = `
${key} | ${prop(
82 | key,
83 | playerProperties
84 | )} | `;
85 | element?.appendChild(propNode);
86 | });
87 | };
88 |
89 | // Events
90 | const onEvent = (event: any) => compose(renderProps, log(event));
91 | connector.events.onLoaded(onEvent("loaded"));
92 | connector.events.onLoading(onEvent("loading"));
93 | connector.events.onBuffering(onEvent("buffering"));
94 | connector.events.onBufferChange(onEvent("buffer changed"));
95 | connector.events.onPause(onEvent("paused"));
96 | connector.events.onPlay(onEvent("playing"));
97 | connector.events.onPlaytimeUpdate(onEvent("playtime"));
98 | connector.events.onError(onEvent("error"));
99 | connector.events.onEnd(onEvent("end"));
100 | connector.events.onRateChange(onEvent("rate changed"));
101 | connector.events.onDurationChange(onEvent("duration changed"));
102 | connector.events.onVolumeChange(onEvent("volume changed"));
103 | connector.events.onFilterUpdate(onEvent("filter updated"));
104 | connector.events.onPlaytimeUpdate((value: number | undefined) => {
105 | if (typeof value !== "undefined" && progressBar) {
106 | progressBar.value = (value / 250).toString();
107 | }
108 | });
109 | };
110 |
--------------------------------------------------------------------------------
/example/audio-hls.ts:
--------------------------------------------------------------------------------
1 | import { audio } from "../src";
2 | import { attatchStream } from "../src/hls";
3 |
4 | import { registerActions } from "./src/actions";
5 | import { registerEvents } from "./src/events";
6 | import { registerInputs } from "./src/inputs";
7 |
// Example episode in several encodings; the last entry is an HLS playlist
// handled by the hls.js driver.
// FIX: every src used the malformed scheme "https:/" (single slash), which
// only works because of lenient URL normalization — use canonical "https://".
const sources = [
  {
    src: "https://freakshow.fm/podlove/file/5377/s/webplayer/c/home/fs218-der-kann-kein-blut-hoeren.m4a",
    size: 84942216,
    title: "MPEG-4 AAC Audio (m4a)",
    type: "audio/mp4",
  },
  {
    src: "https://freakshow.fm/podlove/file/5373/s/webplayer/c/home/fs218-der-kann-kein-blut-hoeren.opus",
    size: 82338432,
    title: "Opus Audio (opus)",
    type: "audio/opus",
  },
  {
    src: "https://freakshow.fm/podlove/file/5372/s/webplayer/c/home/fs218-der-kann-kein-blut-hoeren.oga",
    size: 81611435,
    title: "Ogg Vorbis Audio (oga)",
    type: "audio/ogg",
  },
  {
    src: "https://freakshow.fm/podlove/file/5376/s/webplayer/c/home/fs218-der-kann-kein-blut-hoeren.mp3",
    size: 133433818,
    title: "MP3 Audio (mp3)",
    type: "audio/mpeg",
  },
  {
    src: "https://media.metaebene.me/hls/freakshow/fs218-der-kann-kein-blut-hoeren.m3u8",
    size: 195,
    title: "HLS Stream",
    type: "application/x-mpegURL",
  },
];
40 |
41 | export default () => {
42 | const myAudio = attatchStream(audio(sources));
43 |
44 | registerEvents(myAudio);
45 | registerActions(myAudio);
46 | registerInputs(myAudio);
47 | };
48 |
--------------------------------------------------------------------------------
/example/audio.ts:
--------------------------------------------------------------------------------
1 | import { audio } from "../src";
2 |
3 | import { registerActions } from "./src/actions";
4 | import { registerEvents } from "./src/events";
5 | import { registerInputs } from "./src/inputs";
6 | import { registerFilters } from "./src/filters";
7 |
8 | export default () => {
9 | const myAudio = audio([
10 | {
11 | src: "./audio-files/example.m4a",
12 | type: "audio/mp4",
13 | },
14 | {
15 | src: "./audio-files/example.mp3",
16 | type: "audio/mp3",
17 | },
18 | {
19 | src: "./audio-files/example.ogg",
20 | type: "audio/ogg",
21 | },
22 | ]);
23 |
24 | registerEvents(myAudio);
25 | registerActions(myAudio);
26 | registerInputs(myAudio);
27 | registerFilters(myAudio);
28 | };
29 |
--------------------------------------------------------------------------------
/example/live-hls.ts:
--------------------------------------------------------------------------------
1 | import { audio } from "../src";
2 | import { attatchStream } from "../src/hls";
3 |
4 | import { registerActions } from "./src/actions";
5 | import { registerEvents } from "./src/events";
6 | import { registerInputs } from "./src/inputs";
7 |
8 | const sources = [
9 | {
10 | src: "https://mcdn.br.de/br/hf/b5/master.m3u8",
11 | title: "HLS Stream",
12 | type: "application/x-mpegURL",
13 | },
14 | ];
15 |
16 | export default () => {
17 | const myAudio = attatchStream(audio(sources));
18 |
19 | registerEvents(myAudio);
20 | registerActions(myAudio);
21 | registerInputs(myAudio);
22 | };
23 |
--------------------------------------------------------------------------------
/example/live.ts:
--------------------------------------------------------------------------------
1 | import { audio } from "../src";
2 | import { attatchStream } from "../src/hls";
3 |
4 | import { registerActions } from "./src/actions";
5 | import { registerEvents } from "./src/events";
6 | import { registerInputs } from "./src/inputs";
7 |
8 | const sources = [
9 | {
10 | src: "https://st01.sslstream.dlf.de/dlf/01/128/mp3/stream.mp3?aggregator=web",
11 | type: "audio/mp3",
12 | },
13 | ];
14 |
15 | export default () => {
16 | const myAudio = attatchStream(audio(sources));
17 |
18 | registerEvents(myAudio);
19 | registerActions(myAudio);
20 | registerInputs(myAudio);
21 | };
22 |
--------------------------------------------------------------------------------
/example/src/actions.ts:
--------------------------------------------------------------------------------
1 | import { compose } from "ramda";
2 | import { actions, props } from "../../src";
3 | import { MediaElement } from "../../src/types";
4 |
5 | // actions
6 | export const playButton = document.getElementById("play");
7 | export const pauseButton = document.getElementById("pause");
8 | export const loadButton = document.getElementById("load");
9 | export const restartButton = document.getElementById("restart");
10 | export const muteButton = document.getElementById("mute");
11 | export const unmuteButton = document.getElementById("unmute");
12 | export const backwardButton = document.getElementById("backward");
13 | export const forwardButton = document.getElementById("forward");
14 |
15 | export const registerActions = (node: MediaElement) => {
16 | const mediaActions = actions(node);
17 |
18 | (window as any).actions = mediaActions;
19 |
20 | loadButton?.addEventListener("click", mediaActions.load);
21 | playButton?.addEventListener("click", mediaActions.play);
22 | pauseButton?.addEventListener("click", mediaActions.pause);
23 | muteButton?.addEventListener("click", mediaActions.mute);
24 | unmuteButton?.addEventListener("click", mediaActions.unmute);
25 | backwardButton?.addEventListener(
26 | "click",
27 | compose(
28 | mediaActions.setPlaytime,
29 | ({ playtime }) => (playtime || 0) - 30,
30 | () => props(node)
31 | )
32 | );
33 | forwardButton?.addEventListener(
34 | "click",
35 | compose(
36 | mediaActions.setPlaytime,
37 | ({ playtime }) => (playtime || 0) + 30,
38 | () => props(node)
39 | )
40 | );
41 | restartButton?.addEventListener(
42 | "click",
43 | compose(
44 | mediaActions.play,
45 | () => mediaActions.setPlaytime(0),
46 | mediaActions.pause
47 | )
48 | );
49 | };
50 |
--------------------------------------------------------------------------------
/example/src/console.ts:
--------------------------------------------------------------------------------
1 | import { curry } from "ramda";
2 |
3 | export const log = curry((category, payload) => {
4 | console.group(category);
5 |
6 | switch (typeof payload) {
7 | case "number":
8 | case "string":
9 | console.log("payload: ", payload);
10 | break;
11 | case "object":
12 | Object.keys(payload).map((key) => console.log(`${key}: `, payload[key]));
13 | break;
14 | }
15 |
16 | console.groupEnd();
17 | });
18 |
--------------------------------------------------------------------------------
/example/src/dom.ts:
--------------------------------------------------------------------------------
1 | import { prop } from "ramda";
2 | import { props } from "../../src";
3 | import { MediaElement } from "../../src/types";
4 |
5 | // Props display
// Props display
// Curried helper: renderProps(audio)(input) yields a niladic callback that
// re-renders the current media props into the "#props" table and returns
// `input` unchanged, so it slots into event/compose chains.
export const renderProps =
  (audio: MediaElement) =>
  (input: any): any =>
  () => {
    const element = document.getElementById("props");
    const playerProperties = props(audio);

    // no table on the page — nothing to render
    if (!element) {
      return input;
    }

    // clear previous rows before re-rendering
    while (element.firstChild) {
      element.removeChild(element.firstChild);
    }

    // one row per property (row markup appears trimmed in this snapshot —
    // NOTE(review): verify the cell markup against the page template)
    Object.keys(playerProperties).map((key: any) => {
      const propNode = document.createElement("tr");
      propNode.innerHTML = `${key} | ${prop(
        key,
        playerProperties
      )} | `;
      element.appendChild(propNode);
    });

    return input;
  };
32 |
--------------------------------------------------------------------------------
/example/src/events.ts:
--------------------------------------------------------------------------------
1 | import { events } from "../../src";
2 |
3 | import { progressBar } from "./inputs";
4 | import { renderProps } from "./dom";
5 | import { MediaElement } from "../../src/types";
6 |
// Subscribes the example's prop rendering/logging to every media event and
// keeps the progress bar in sync with playtime updates.
export const registerEvents = (node: MediaElement) => {
  const onEvent = renderProps(node);
  const mediaEvents = events(node);

  // NOTE(review): "ready" is registered via onLoaded, not onReady — looks
  // like a copy/paste slip; confirm whether onReady was intended here.
  mediaEvents.onLoaded(onEvent("ready"));
  mediaEvents.onLoaded(onEvent("loaded"));
  mediaEvents.onLoading(onEvent("loading"));
  mediaEvents.onBuffering(onEvent("buffering"));
  mediaEvents.onBufferChange(onEvent("buffer changed"));
  mediaEvents.onPause(onEvent("paused"));
  mediaEvents.onPlay(onEvent("playing"));
  mediaEvents.onPlaytimeUpdate(onEvent("playtime"));
  mediaEvents.onError(onEvent("error"));
  mediaEvents.onEnd(onEvent("end"));
  mediaEvents.onRateChange(onEvent("rate changed"));
  mediaEvents.onDurationChange(onEvent("duration changed"));
  mediaEvents.onVolumeChange(onEvent("volume changed"));
  mediaEvents.onFilterUpdate(onEvent("filter updated"));
  mediaEvents.onLiveSyncUpdate(onEvent("livesync updated"));
  // mirror playtime into the slider — inverse of the *250 scaling in inputs.ts
  mediaEvents.onPlaytimeUpdate((value: number | undefined) => {
    if (typeof value !== "undefined" && progressBar) {
      progressBar.value = (value / 250).toString();
    }
  });
};
32 |
--------------------------------------------------------------------------------
/example/src/filters.ts:
--------------------------------------------------------------------------------
1 | import { mono, stereo } from "../../src/filters";
2 | import { MediaElement } from "../../src/types";
3 |
4 | // actions
5 | const monoButton = document.getElementById("mono");
6 | const stereoButton = document.getElementById("stereo");
7 |
8 | export const registerFilters = (node: MediaElement) => {
9 | monoButton?.addEventListener("click", () => mono(node));
10 | stereoButton?.addEventListener("click", () => stereo(node));
11 | };
12 |
--------------------------------------------------------------------------------
/example/src/inputs.ts:
--------------------------------------------------------------------------------
1 | import { compose, defaultTo, path } from "ramda";
2 | import { actions } from "../../src";
3 | import { MediaElement } from "../../src/types";
4 |
// Shared example inputs (null when the page omits them).
export const volumeInput = document.getElementById("volume");
export const rateInput = document.getElementById("rate");
// cast gives typed access to `.value` for the progress slider
export const progressBar = document.getElementById(
  "progress"
) as HTMLInputElement;
10 |
11 | export const registerInputs = (node: MediaElement) => {
12 | const mediaActions = actions(node);
13 | const getValue = compose(defaultTo(0), path(["target", "value"]));
14 |
15 | volumeInput?.addEventListener(
16 | "change",
17 | compose(mediaActions.setVolume, getValue)
18 | );
19 | rateInput?.addEventListener(
20 | "change",
21 | compose(mediaActions.setRate, getValue)
22 | );
23 | progressBar?.addEventListener(
24 | "change",
25 | compose(mediaActions.setPlaytime, (val: number) => val * 250, getValue)
26 | );
27 | };
28 |
--------------------------------------------------------------------------------
/example/video-hls.ts:
--------------------------------------------------------------------------------
1 | import { video } from "../src";
2 | import { attatchStream } from "../src/hls";
3 |
4 | import { registerActions } from "./src/actions";
5 | import { registerEvents } from "./src/events";
6 | import { registerInputs } from "./src/inputs";
7 |
8 | export default () => {
9 | const myVideo = attatchStream(
10 | video({
11 | src: "https://demo.unified-streaming.com/k8s/features/stable/video/tears-of-steel/tears-of-steel.ism/.m3u8",
12 | type: "application/x-mpegURL",
13 | })
14 | );
15 |
16 | document.getElementById("media-node")?.appendChild(myVideo);
17 |
18 | registerEvents(myVideo);
19 | registerActions(myVideo);
20 | registerInputs(myVideo);
21 | };
22 |
--------------------------------------------------------------------------------
/example/video.ts:
--------------------------------------------------------------------------------
1 | import { video } from "../src";
2 |
3 | import { registerActions } from "./src/actions";
4 | import { registerEvents } from "./src/events";
5 | import { registerInputs } from "./src/inputs";
6 |
7 | export default () => {
8 | const myVideo = video({
9 | src: "http://download.blender.org/peach/bigbuckbunny_movies/BigBuckBunny_320x180.mp4",
10 | type: "video/mp4",
11 | });
12 |
13 | document.getElementById("media-node")?.appendChild(myVideo);
14 |
15 | registerEvents(myVideo);
16 | registerActions(myVideo);
17 | registerInputs(myVideo);
18 | };
19 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | Podlove Html5 Audio Driver Test Env
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
31 |
32 |
33 |
34 |
40 |
41 |
42 |
Properties
43 |
44 |
45 | property |
46 | value |
47 |
48 |
49 |
50 |
51 |
52 |
Audio Actions
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
66 |
67 |
Progress
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
Audio Filters
76 |
77 |
78 |
79 |
80 |
81 |
132 |
133 |
134 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@podlove/html5-audio-driver",
3 | "version": "2.0.3",
4 | "description": "Pure html5 audio driver",
5 | "author": "Alexander Heimbuch ",
6 | "license": "MIT",
7 | "type": "module",
8 | "files": [
9 | "dist"
10 | ],
11 | "main": "dist/index.js",
12 | "types": "dist/index.d.ts",
13 | "exports": {
14 | ".": {
15 | "import": "./dist/index.js",
16 | "types": "./dist/index.d.ts"
17 | },
18 | "./actions": {
19 | "import": "./dist/actions.js",
20 | "types": "./dist/actions.d.ts"
21 | },
22 | "./audio": {
23 | "import": "./dist/audio.js",
24 | "types": "./dist/audio.d.ts"
25 | },
26 | "./connect": {
27 | "import": "./dist/connect.js",
28 | "types": "./dist/connect.d.ts"
29 | },
30 | "./events": {
31 | "import": "./dist/events.js",
32 | "types": "./dist/events.d.ts"
33 | },
34 | "./filters": {
35 | "import": "./dist/filters.js",
36 | "types": "./dist/filters.d.ts"
37 | },
38 | "./hls": {
39 | "import": "./dist/hls.js",
40 | "types": "./dist/hls.d.ts"
41 | },
42 | "./media": {
43 | "import": "./dist/media.js",
44 | "types": "./dist/media.d.ts"
45 | },
46 | "./props": {
47 | "import": "./dist/props.js",
48 | "types": "./dist/props.d.ts"
49 | },
50 | "./utils": {
51 | "import": "./dist/utils.js",
52 | "types": "./dist/utils.d.ts"
53 | },
54 | "./video": {
55 | "import": "./dist/video.js",
56 | "types": "./dist/video.d.ts"
57 | },
58 | "./types": {
59 | "types": "./dist/types.d.ts"
60 | }
61 | },
62 | "scripts": {
63 | "start": "vite serve",
64 | "clean": "rm -rf dist/* && mkdir -p dist/",
65 | "build": "vite build",
66 | "test": "cypress run --browser chrome",
67 | "test:dev": "cypress open --browser chrome",
68 | "release": "release-it",
69 | "prepare": "husky install",
70 | "format": "prettier -w {src,cypress}/**/*.ts"
71 | },
72 | "repository": {
73 | "type": "git",
74 | "url": "https://github.com/podlove/html5-audio-driver.git"
75 | },
76 | "dependencies": {
77 | "hls.js": "1.2.8",
78 | "ramda": "0.28.0"
79 | },
80 | "devDependencies": {
81 | "cypress": "12.0.1",
82 | "milligram": "1.4.0",
83 | "normalize.css": "8.0.1",
84 | "release-it": "15.5.1",
85 | "typescript": "4.9.3",
86 | "vite": "3.2.5",
87 | "vitest": "0.25.5",
88 | "prettier": "2.8.1",
89 | "pretty-quick": "3.1.3",
90 | "husky": "8.0.0",
91 | "@types/ramda": "0.28.20",
92 | "@types/hls.js": "1.0.0",
93 | "vite-plugin-dts": "1.7.1",
94 | "rollup-plugin-delete": "2.0.0"
95 | },
96 | "release-it": {
97 | "hooks": {
98 | "before:init": [
99 | "npm test"
100 | ],
101 | "after:bump": [
102 | "npm run build"
103 | ]
104 | },
105 | "git": {
106 | "commitMessage": "chore: release v${version}"
107 | },
108 | "github": {
109 | "release": true
110 | }
111 | }
112 | }
113 |
--------------------------------------------------------------------------------
/public/audio-files/example.m4a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/podlove/html5-audio-driver/439e0d5cd1beb0d4cc598d7c1e402558084d71dd/public/audio-files/example.m4a
--------------------------------------------------------------------------------
/public/audio-files/example.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/podlove/html5-audio-driver/439e0d5cd1beb0d4cc598d7c1e402558084d71dd/public/audio-files/example.mp3
--------------------------------------------------------------------------------
/public/audio-files/example.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/podlove/html5-audio-driver/439e0d5cd1beb0d4cc598d7c1e402558084d71dd/public/audio-files/example.ogg
--------------------------------------------------------------------------------
/public/audio-files/mono-example.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/podlove/html5-audio-driver/439e0d5cd1beb0d4cc598d7c1e402558084d71dd/public/audio-files/mono-example.mp3
--------------------------------------------------------------------------------
/public/audio-files/stereo-example.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/podlove/html5-audio-driver/439e0d5cd1beb0d4cc598d7c1e402558084d71dd/public/audio-files/stereo-example.mp3
--------------------------------------------------------------------------------
/src/actions.ts:
--------------------------------------------------------------------------------
1 | import { duration, initialized } from "./props";
2 | import { MediaActions, MediaElement } from "./types";
3 | import { collectProperties, parseFloatInput } from "./utils";
4 |
5 | /**
6 | * ACTIONS
7 | */
8 |
// load :: MediaElement -> () -> MediaElement
// Resets the element and (re)starts source selection/loading.
const load = (media: MediaElement) => () => {
  media.load();
  return media;
};
13 |
// play :: MediaElement -> () -> MediaElement
// "Safe play": normalizes the inconsistent HTMLMediaElement#play() API.
const play = (media: MediaElement) => () => {
  // safe play, fixes inconsistency in media API
  try {
    // Some browsers doesn't implement it as a promise
    const playAction = media.play();

    // some does ~.~
    // When a promise is returned, surface rejections as a synthetic
    // "error-media" event instead of an unhandled rejection.
    if (playAction && typeof playAction.catch !== "undefined") {
      playAction.catch((e) => {
        media.dispatchEvent(new CustomEvent("error-media", { detail: e }));
      });
    }
  } catch (e) {
    // synchronous throw (non-promise implementations): log and emit the
    // same synthetic event so onError handlers still see it
    console.warn(e);
    media.dispatchEvent(new CustomEvent("error-media", { detail: e }));
  }

  return media;
};
33 |
// pause :: MediaElement -> () -> MediaElement
const pause = (media: MediaElement) => () => {
  media.pause();
  return media;
};

// mute :: MediaElement -> () -> MediaElement
const mute = (media: MediaElement) => () => {
  media.muted = true;
  return media;
};

// unmute :: MediaElement -> () -> MediaElement
const unmute = (media: MediaElement) => () => {
  media.muted = false;
  return media;
};
50 |
51 | const setVolume =
52 | (media: MediaElement) =>
53 | (volume: number = 1) => {
54 | volume = parseFloatInput(volume);
55 | volume = volume < 0 ? 0 : volume;
56 | volume = volume > 1 ? 1 : volume;
57 |
58 | media.volume = volume;
59 | return media;
60 | };
61 |
62 | const setRate =
63 | (media: MediaElement) =>
64 | (rate: number = 1) => {
65 | rate = parseFloatInput(rate);
66 | rate = rate > 4 ? 4 : rate;
67 | rate = rate < 0.5 ? 0.5 : rate;
68 | media.playbackRate = rate;
69 |
70 | return media;
71 | };
72 |
73 | const setPlaytime =
74 | (media: MediaElement) =>
75 | (time: number = 0) => {
76 | const mediaDuration = duration(media);
77 | time = parseFloatInput(time);
78 | time = time > mediaDuration ? mediaDuration : time;
79 | time = time < 0 ? 0 : time;
80 |
81 | if (initialized(media)) {
82 | media.playtime = time;
83 | media.currentTime = time;
84 | } else {
85 | media.playtime = time;
86 | }
87 |
88 | return media;
89 | };
90 |
// Bundles all action factories: `actions(media)` returns every action
// pre-bound to the given element (see collectProperties in utils).
const actions: (input: MediaElement) => MediaActions = collectProperties({
  play,
  pause,
  load,
  setPlaytime,
  mute,
  unmute,
  setVolume,
  setRate,
});
101 |
102 | export {
103 | play,
104 | pause,
105 | load,
106 | setPlaytime,
107 | mute,
108 | unmute,
109 | setVolume,
110 | setRate,
111 | actions,
112 | };
113 |
--------------------------------------------------------------------------------
/src/audio.ts:
--------------------------------------------------------------------------------
1 | import { compose } from "ramda";
2 |
3 | import { createSourceNodes, mediaNode } from "./media";
4 | import { MediaElement, MediaSource } from "./types";
5 | import { mountNode, toArray } from "./utils";
6 |
7 | export const audio = compose<
8 | [MediaSource[]],
9 | MediaSource[],
10 | MediaElement,
11 | MediaElement
12 | >(
13 | mountNode,
14 | createSourceNodes(mediaNode("audio")),
15 | toArray
16 | );
17 |
--------------------------------------------------------------------------------
/src/connect.ts:
--------------------------------------------------------------------------------
1 | import { attatchStream } from "./hls";
2 | import { audio as createAudioElement } from "./audio";
3 | import { events as mediaEvents } from "./events";
4 | import { actions as mediaActions } from "./actions";
5 | import { MediaAction, MediaElement, MediaEvent, MediaSource } from "./types";
6 |
// Every media action the facade proxies to the underlying element.
const ACTIONS: MediaAction[] = [
  "play",
  "pause",
  "load",
  "setPlaytime",
  "mute",
  "unmute",
  "setVolume",
  "setRate",
];

// Every media event the facade buffers subscriptions for until an element exists.
const EVENTS: MediaEvent[] = [
  "onLoading",
  "onLoaded",
  "onPause",
  "onBufferChange",
  "onEnd",
  "onPlaytimeUpdate",
  "onLiveSyncUpdate",
  "onVolumeChange",
  "onError",
  "onDurationChange",
  "onRateChange",
  "onPlay",
  "onBuffering",
  "onReady",
  "onFilterUpdate",
];
35 |
/**
 * Creates a lazy audio facade. Actions may be invoked and events subscribed
 * before any media element exists: `load(sources)` arms every action so the
 * first one actually invoked builds the element (with HLS support), replays
 * all buffered event subscriptions and rebinds the actions to it.
 */
export const audio = () => {
  const facade = {
    load,
    // assigned on the first connected action call (see connect below)
    mediaElement: null as unknown as MediaElement,
    // start as no-ops; replaced by `load`, then by the real bound actions
    actions: ACTIONS.reduce(
      (result, action) => ({
        ...result,
        [action]: () => {},
      }),
      {}
    ) as { [key in MediaAction]: Function },
    // subscriptions are buffered in `recievers` until an element exists
    events: EVENTS.reduce(
      (result, event) => ({
        ...result,
        [event]: (handler: Function) => recievers[event].push(handler),
      }),
      {}
    ) as { [key in MediaEvent]: Function },
  };

  // NOTE(review): "recievers" [sic] — buffered event handlers per event name.
  const recievers = EVENTS.reduce(
    (result, event: MediaEvent) => ({
      ...result,
      [event]: [],
    }),
    {}
  ) as { [key in MediaEvent]: Function[] };

  function load(sources: MediaSource[]) {
    // remove media element
    facade.mediaElement &&
      facade.mediaElement.parentNode?.removeChild(facade.mediaElement);

    // arm every action: the first one invoked performs the real connect
    ACTIONS.forEach((action) => {
      facade.actions[action] = connect(sources, action);
    });
  }

  function connect(sources: MediaSource[], action: MediaAction) {
    return (params = []) => {
      // create a new media element
      facade.mediaElement = createAudioElement(sources);

      attatchStream(facade.mediaElement);

      // connect the events to existing recievers
      const eventEmitters = mediaEvents(facade.mediaElement);

      EVENTS.forEach((name) =>
        recievers[name].forEach((receiver) =>
          eventEmitters[name](receiver as any)
        )
      );

      // update actions to new media element
      const actionEmitters = mediaActions(facade.mediaElement);

      ACTIONS.forEach((name) => (facade.actions[name] = actionEmitters[name]));

      // call initial action
      action && facade.actions[action].call(null, params);
    };
  }

  return facade;
};
102 |
103 | export type ConnectInterface = ReturnType;
104 |
--------------------------------------------------------------------------------
/src/events.ts:
--------------------------------------------------------------------------------
1 | import { curry, compose } from "ramda";
2 | import { getNodeFromEvent, collectProperties, toArray } from "./utils";
3 | import {
4 | props,
5 | playtime,
6 | volume,
7 | duration,
8 | rate,
9 | buffered,
10 | state,
11 | initialized,
12 | liveSync,
13 | } from "./props";
14 | import {
15 | MediaElement,
16 | MediaProps,
17 | ErrorState,
18 | NetworkStateEmpty,
19 | NetworkStateNoSource,
20 | MediaError,
21 | } from "./types";
22 |
23 | // events
// eventFactory :: (events, processor, factoryOptions)
//   -> curried (media, callback, runtimeOptions) -> media
// Builds a curried subscription helper: for every DOM event name given it
// registers a listener that extracts the media node from the event, runs it
// through `processor` (defaults to `props`) and hands the result to the
// callback. Per-call runtimeOptions override the factory's listener options.
const eventFactory = (
  events: string | string[],
  processor: (a: any) => any = props,
  factoryOptions = {}
) =>
  curry((media, callback, runtimeOptions = {}) => {
    toArray(events).forEach((event) => {
      media.addEventListener(
        event,
        compose<[EventTarget], any, any, void>(
          callback,
          processor,
          getNodeFromEvent
        ),
        Object.assign({}, factoryOptions, runtimeOptions)
      );
    });

    return media;
  });
44 |
// Fires once, on the first "progress" event.
const onLoading = eventFactory("progress", props, {
  once: true,
});

// Fires once, as soon as playback could start.
const onLoaded = eventFactory(["canplay", "canplaythrough"], props, {
  once: true,
});

// NOTE(review): identical configuration to onLoaded — apparently kept as a
// separate name for API compatibility.
const canPlay = eventFactory(["canplay", "canplaythrough"], props, {
  once: true,
});

// Like onLoaded, but fires on every readiness change (no `once`).
const onReady = eventFactory(["canplay", "canplaythrough"]);
const onPlay = eventFactory("play");
const onPause = eventFactory("pause");
const onEnd = eventFactory("ended");
// "filterUpdated" is a custom event dispatched by the channel filters.
const onFilterUpdate = eventFactory("filterUpdated");
62 |
63 | const onBufferChange = eventFactory("progress", buffered);
64 | const onBuffering = curry(
65 | (media: MediaElement, callback, runtimeOptions = {}) => {
66 | media.addEventListener(
67 | "waiting",
68 | (event) => {
69 | const node = getNodeFromEvent(event);
70 |
71 | if (state(node) !== "HAVE_ENOUGH_DATA") {
72 | callback(props(node));
73 | }
74 | },
75 | Object.assign({}, runtimeOptions)
76 | );
77 |
78 | return media;
79 | }
80 | );
81 |
// Scalar change events: each passes the processed value, not the full props.
const onPlaytimeUpdate = eventFactory("timeupdate", playtime);
const onVolumeChange = eventFactory("volumechange", volume);
// NOTE(review): "livesyncupdate" is not a native media event — presumably a
// custom event emitted elsewhere in this package (verify against hls.ts).
const onLiveSyncUpdate = eventFactory("livesyncupdate", liveSync);
85 |
// onError :: MediaElement -> callback -> MediaElement
// Maps the native "error" event to coarse network error states, and the
// synthetic "error-media" event (dispatched by actions.play) to MediaError.
const onError = curry(
  (
    media: MediaElement,
    callback: (error: ErrorState, detail?: any) => void
  ) => {
    media.addEventListener(
      "error",
      function ({ detail }: any) {
        // prefer networkState from a synthetic event detail, fall back to
        // the element's own networkState (`this` is the media element)
        const networkState = detail && detail.networkState;

        switch (networkState || this.networkState) {
          case HTMLMediaElement.NETWORK_NO_SOURCE:
            return callback(NetworkStateNoSource, {});

          case HTMLMediaElement.NETWORK_EMPTY:
            return callback(NetworkStateEmpty, {});
        }
      },
      true
    );

    media.addEventListener(
      "error-media",
      function ({ detail }: any) {
        // play() rejections caused by the user agent interrupting playback
        // (e.g. pause racing play) are expected and not reported as errors
        const stoppedByUserCodes = [
          0, // safari
          20, // chrome & firefox
        ];

        if (!initialized(media)) {
          return;
        }

        if (stoppedByUserCodes.includes(detail.code)) {
          return;
        }

        callback(MediaError, detail);
      },
      false
    );

    return media;
  }
);
131 |
const onDurationChange = eventFactory("durationchange", duration);
const onRateChange = eventFactory("ratechange", rate);

// Bundles every event helper: `events(media)` returns each subscription
// function pre-bound to the given element (see collectProperties in utils).
const events: (input: MediaElement) => {
  onLoading: (cb: (args: MediaProps) => void) => void;
  onLoaded: (cb: (args: MediaProps) => void) => void;
  onPause: (cb: (args: MediaProps) => void) => void;
  onBufferChange: (cb: (args: [number, number][]) => void) => void;
  onEnd: (cb: (args: MediaProps) => void) => void;
  onPlaytimeUpdate: (cb: (args: number | undefined) => void) => void;
  onVolumeChange: (cb: (args: number | undefined) => void) => void;
  onError: (cb: (error: ErrorState, detail?: any) => void) => void;
  // NOTE(review): declared like onError, but the handler is fed the duration
  // (see the `duration` processor above) — likely a copy/paste in this type.
  onDurationChange: (cb: (error: ErrorState, detail?: any) => void) => void;
  onRateChange: (cb: (args: number | undefined) => void) => void;
  onPlay: (cb: (args: MediaProps) => void) => void;
  onBuffering: (cb: (args: MediaProps) => void) => void;
  onReady: (cb: (args: MediaProps) => void) => void;
  onFilterUpdate: (cb: (args: MediaProps) => void) => void;
  canPlay: (cb: (args: MediaProps) => void) => void;
  onLiveSyncUpdate: (cb: (args: number | undefined) => void) => void;
} = collectProperties({
  onLoading,
  onLoaded,
  onPause,
  onBufferChange,
  onEnd,
  onPlaytimeUpdate,
  onVolumeChange,
  onError,
  onDurationChange,
  onRateChange,
  onPlay,
  onBuffering,
  onReady,
  onFilterUpdate,
  canPlay,
  onLiveSyncUpdate,
});
170 |
// Individual binders are exported for direct use; `events` bundles them all.
export {
  onLoading,
  onLoaded,
  onPause,
  onBufferChange,
  onEnd,
  onPlaytimeUpdate,
  onVolumeChange,
  onError,
  onDurationChange,
  onRateChange,
  onPlay,
  events,
  onBuffering,
  onReady,
  onFilterUpdate,
  canPlay,
  onLiveSyncUpdate,
};
190 |
--------------------------------------------------------------------------------
/src/filters.ts:
--------------------------------------------------------------------------------
1 | import { compose, when, curry, has, and } from "ramda";
2 |
3 | import { dispatchEvent } from "./utils";
4 | import { connectBuffer } from "./media-context";
5 |
6 | const hasAudioContext = and(has("audioContext"), has("audioBuffer"));
7 |
8 | const channelGain = curry((channels, node) => {
9 | const gainNode = node.audioContext.createGain();
10 |
11 | gainNode.channelCount = channels;
12 | gainNode.channelCountMode = "explicit";
13 | gainNode.channelInterpretation = "speakers";
14 | gainNode.gain.value = 1;
15 |
16 | return connectBuffer(gainNode, node);
17 | });
18 |
// Routes playback through a 1-channel gain node (downmix to mono) and emits
// "filterUpdated" on the element. No-op when the node has no Web Audio
// context attached (see hasAudioContext).
const mono = when(
  hasAudioContext,
  compose(dispatchEvent("filterUpdated"), channelGain(1))
);

// Same as mono, but forces a 2-channel (stereo) layout.
const stereo = when(
  hasAudioContext,
  compose(dispatchEvent("filterUpdated"), channelGain(2))
);

export { mono, stereo };
30 |
--------------------------------------------------------------------------------
/src/hls.ts:
--------------------------------------------------------------------------------
1 | /* global HTMLMediaElement */
2 | import Hls from "hls.js";
3 | import { compose } from "ramda";
4 | import { MediaElement, MediaSource } from "./types";
5 |
6 | import { toArray, getMediaSources } from "./utils";
7 |
8 | // See: https://developer.apple.com/library/archive/documentation/NetworkingInternet/Conceptual/StreamingMediaGuide/DeployingHTTPLiveStreaming/DeployingHTTPLiveStreaming.html
9 | const hlsSource = compose<[MediaSource[]], MediaSource[], string | null>(
10 | (sources: MediaSource[]): string | null =>
11 | sources.reduce(
12 | (result: string | null, source) =>
13 | result ||
14 | ~["application/x-mpegurl", "vnd.apple.mpegurl"].indexOf(
15 | source.type.toLowerCase()
16 | )
17 | ? source.src
18 | : null,
19 | null
20 | ),
21 | toArray
22 | );
23 |
24 | export const isHLS = (sources: MediaSource[]) => {
25 | if (!Hls.isSupported()) {
26 | return false;
27 | }
28 |
29 | return !!hlsSource(sources);
30 | };
31 |
// Attaches an hls.js instance to the media element when its <source> children
// include an HLS playlist; otherwise (or when hls.js is unsupported) the
// element is returned untouched.
// NOTE(review): "attatchStream" is misspelled, but it is exported — renaming
// would break consumers.
export const attatchStream = (media: MediaElement) => {
  if (!Hls.isSupported()) {
    return media;
  }

  // report Infinity as the duration of live streams — the livesync polyfill
  // keys on duration === Infinity
  const hls = new Hls({
    liveDurationInfinity: true,
  });

  const sources = getMediaSources(media);

  const hlsStream = hlsSource(sources);

  if (!hlsStream) {
    return media;
  }

  media.hls = hls;

  hls.attachMedia(media);
  hls.loadSource(hlsStream);

  // Finally start loading
  hls.on(Hls.Events.MEDIA_ATTACHED, () => {
    hls.startLoad(-1);
  });

  // Translate errors to native media errors
  hls.on(Hls.Events.ERROR, function (_event, data) {
    switch (data.type) {
      case Hls.ErrorTypes.NETWORK_ERROR:
        // retry loading, but surface the outage as NETWORK_EMPTY
        hls.startLoad();
        media.dispatchEvent(
          new CustomEvent("error", {
            detail: { networkState: HTMLMediaElement.NETWORK_EMPTY },
          })
        );
        break;
      case Hls.ErrorTypes.OTHER_ERROR:
        // unrecoverable: tear down hls.js and report NETWORK_NO_SOURCE
        hls.destroy();
        media.dispatchEvent(
          new CustomEvent("error", {
            detail: { networkState: HTMLMediaElement.NETWORK_NO_SOURCE },
          })
        );
        break;
      default:
        // media errors: let hls.js try to recover in place
        hls.recoverMediaError();
        break;
    }
  });

  return media;
};
86 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
// Package entry point: re-exports the element factories (audio/video) and the
// event, prop, action and connect helpers as the public API surface.
import { audio } from "./audio";
import { video } from "./video";
import { events } from "./events";
import { actions } from "./actions";
import { props } from "./props";
import * as connect from "./connect";

export { audio, video, events, props, actions, connect };
9 |
--------------------------------------------------------------------------------
/src/media-context.ts:
--------------------------------------------------------------------------------
1 | import { curry } from "ramda";
2 |
3 | export const connectBuffer = curry((buffer, node) => {
4 | node.activeBuffer &&
5 | node.activeBuffer.disconnect &&
6 | node.activeBuffer.disconnect();
7 | node.audioBuffer &&
8 | node.audioBuffer.disconnect &&
9 | node.audioBuffer.disconnect();
10 |
11 | node.activeBuffer = buffer;
12 |
13 | node.audioBuffer.connect(buffer);
14 | buffer.connect(node.audioContext.destination);
15 |
16 | return node;
17 | });
18 |
19 | export const audioContext = (node: any): HTMLMediaElement => {
20 | let webAudioContext: AudioContext;
21 |
22 | try {
23 | webAudioContext = new window.AudioContext();
24 | } catch (e) {
25 | console.warn(
26 | `[html-5-audio-driver]: can't create the audio context, seems like the browser is not compatible`
27 | );
28 | return node;
29 | }
30 |
31 | const audioBuffer: MediaElementAudioSourceNode =
32 | webAudioContext.createMediaElementSource(node);
33 |
34 | audioBuffer.connect(webAudioContext.destination);
35 |
36 | node.audioContext = webAudioContext;
37 | node.audioBuffer = audioBuffer;
38 |
39 | return node;
40 | };
41 |
--------------------------------------------------------------------------------
/src/media.ts:
--------------------------------------------------------------------------------
1 | import { compose, curry } from "ramda";
2 |
3 | import { appendNode, setAttributes, createNode } from "./utils";
4 | import { mediaPolyfill } from "./polyfills";
5 | import { MediaElement } from "./types";
6 |
7 | const createSource = (source: MediaSource): MediaElement => {
8 | const node = createNode("source");
9 | return setAttributes(node, source);
10 | };
11 |
12 | const createSourceNodes = curry(
13 | (node: MediaElement, sources: MediaSource[]): MediaElement => {
14 | const sourceNodes = sources.map(createSource);
15 | return appendNode(node, sourceNodes);
16 | }
17 | );
18 |
// Creates a bare <audio>/<video> element and applies the driver polyfills.
const mediaNode = compose<[string], any, MediaElement>(mediaPolyfill, createNode);

export { createSourceNodes, mediaNode };
22 |
--------------------------------------------------------------------------------
/src/polyfills.ts:
--------------------------------------------------------------------------------
1 | import { compose, ifElse, identity } from "ramda";
2 | import { getNodeFromEvent } from "./utils";
3 | import { initialized, duration, props } from "./props";
4 | import { MediaElement } from "./types";
5 | import Hls from "hls.js";
6 |
7 | /**
8 | * Node Defaults
9 | *
10 | * Disables media defaults
11 | */
12 |
13 | const setMediaDefaults = (node: MediaElement) => {
14 | node.autoplay = false;
15 | node.loop = false;
16 | node.preload = "auto"; // if set to 'none' this won't trigger canplay events in IE11 or won't play in Safari
17 | node.controls = false;
18 | node.playtime = 0;
19 | node.liveSync = null;
20 | node.initialized = false;
21 | node.hls = null;
22 |
23 | return node;
24 | };
25 |
26 | /**
27 | * Playtime Polyfill
28 | *
29 | * Adds ability for Safari to set the playtime without the need of loading the full file
30 | */
31 | const updatePlaytimeToCurrentTime = (media: MediaElement) => {
32 | media.playtime = media.currentTime;
33 | return media;
34 | };
35 |
36 | const updateCurrentTimeToPlaytime = (media: MediaElement) => {
37 | if (!media || !initialized(media)) {
38 | return media;
39 | }
40 |
41 | try {
42 | media.currentTime = media.playtime || 0;
43 | } catch (e) {}
44 |
45 | return media;
46 | };
47 |
48 | const readyToPlay = (node: MediaElement) => {
49 | node.initialized = true;
50 |
51 | return node;
52 | };
53 |
54 | // HTML Audio implementation 101 quirks: on Safari and iOS you just can set currentTime after loading
55 | const polyfillPlaytime = (node: MediaElement) => {
56 | node.playtime = 0;
57 |
58 | node.addEventListener(
59 | "timeupdate",
60 | compose(updatePlaytimeToCurrentTime, getNodeFromEvent)
61 | );
62 |
63 | node.addEventListener(
64 | "canplay",
65 | compose(updateCurrentTimeToPlaytime, readyToPlay, getNodeFromEvent),
66 | { once: true }
67 | );
68 |
69 | node.addEventListener(
70 | "play",
71 | compose(updateCurrentTimeToPlaytime, getNodeFromEvent)
72 | );
73 |
74 | return node;
75 | };
76 |
77 | // [livesync] polyfill: adds a pointer to the live position
78 | const isLivestream = (node: MediaElement) => duration(node) === Infinity;
79 |
80 | const liveSyncPosition = ({ playtime, hls }: { playtime: number | undefined; hls: Hls | undefined}): number => {
81 | // not a http live stream
82 | if (!hls) {
83 | return 0;
84 | }
85 |
86 | // syncposition wasn't initialized yet
87 | if (!hls.liveSyncPosition) {
88 | return playtime || 0;
89 | }
90 |
91 | return hls.liveSyncPosition;
92 | };
93 |
94 | const addLiveSync = (node: MediaElement) => {
95 | if (!node) {
96 | return node;
97 | }
98 |
99 | const { playtime, hls } = props(node);
100 |
101 | node.liveSync = 0;
102 |
103 | setInterval(() => {
104 | const sync = liveSyncPosition({ playtime, hls });
105 |
106 | node.liveSync = sync > (node.liveSync || 0) ? sync : node.liveSync;
107 |
108 | node.liveSync = (node.liveSync || 0) + 1;
109 | node.dispatchEvent(new CustomEvent("livesyncupdate"));
110 | }, 1000);
111 |
112 | return node;
113 | };
114 |
115 | const resetToLivesync = (node: MediaElement) => {
116 | if (!node) {
117 | return node;
118 | }
119 |
120 | let { playtime, liveSync } = props(node);
121 |
122 | playtime = playtime || 0;
123 | liveSync = liveSync || 0;
124 |
125 | if (playtime > liveSync) {
126 | return node;
127 | }
128 |
129 | node.currentTime = liveSync;
130 |
131 | return node;
132 | };
133 |
// Wires the [livesync] polyfill: on the first "canplay", livestreams
// (duration === Infinity) start live-edge tracking; on the first "play" the
// playhead jumps to the live edge. Non-livestreams pass through (identity).
// NOTE(review): the name is misspelled ("polifyll") but kept — mediaPolyfill
// below references it.
// NOTE(review): the "play" handler is { once: true }, so only the very first
// play resets to the live edge — confirm later resumes are intentional.
const polifyllLiveSync = (node: MediaElement) => {
  node.addEventListener(
    "canplay",
    compose(ifElse(isLivestream, addLiveSync, identity), getNodeFromEvent),
    { once: true }
  );

  node.addEventListener(
    "play",
    compose(ifElse(isLivestream, resetToLivesync, identity), getNodeFromEvent),
    { once: true }
  );

  return node;
};
149 |
// Applies all polyfills to a freshly created media node. compose runs
// right-to-left: livesync wiring first, then playtime tracking, then the
// defaults.
export const mediaPolyfill = compose(
  setMediaDefaults,
  polyfillPlaytime,
  polifyllLiveSync
);
155 |
--------------------------------------------------------------------------------
/src/props.ts:
--------------------------------------------------------------------------------
1 | import Hls from "hls.js";
2 | import { path, compose, defaultTo } from "ramda";
3 | import {
4 | AudioState,
5 | AudioStateCurrentData,
6 | AudioStateEnoughData,
7 | AudioStateFutureData,
8 | AudioStateMetaData,
9 | AudioStateNothing,
10 | MediaElement,
11 | MediaProps,
12 | } from "./types";
13 | import { collectProperties } from "./utils";
14 |
15 | const transformBuffered = (buffered: TimeRanges | undefined) => {
16 | let result: [number, number][] = [];
17 |
18 | if (!buffered) {
19 | return [];
20 | }
21 |
22 | for (let i = 0; i < buffered.length; i++) {
23 | result = [...result, [buffered.start(i), buffered.end(i)]];
24 | }
25 | return result;
26 | };
27 |
// Media Props
// Null-safe accessors (ramda path returns undefined on missing properties).
// duration defaults to 0 so callers always get a number.
const duration = compose<[MediaElement], number | undefined, number>(
  defaultTo(0),
  path(["duration"])
);
// custom playtime property maintained by the playtime polyfill
const playtime: (input: MediaElement) => number | undefined = path([
  "playtime",
]);
// buffered TimeRanges flattened into [start, end] tuples
const buffered = compose<
  [MediaElement],
  TimeRanges | undefined,
  [number, number][]
>(transformBuffered, path(["buffered"]));
const volume: (input: MediaElement) => number | undefined = path([
  "volume",
]);
const ended: (input: MediaElement) => boolean | undefined = path([
  "ended",
]);
const paused: (input: MediaElement) => boolean | undefined = path([
  "paused",
]);
const rate: (input: MediaElement) => number | undefined = path([
  "playbackRate",
]);
const muted: (input: MediaElement) => boolean | undefined = path([
  "muted",
]);
const src: (input: MediaElement) => string | undefined = path([
  "currentSrc",
]);
// channel count of the active Web Audio filter node (see filters.ts)
const channels: (input: MediaElement) => number | undefined = path([
  "activeBuffer",
  "channelCount",
]);
const buffer: (input: MediaElement) => AudioBuffer | undefined =
  path(["audioBuffer"]);
// set by the playtime polyfill once the first "canplay" fired
const initialized: (input: MediaElement) => boolean | undefined = path(
  ["initialized"]
);
const hls: (input: MediaElement) => Hls | undefined = path(["hls"]);
const liveSync: (input: MediaElement) => number | undefined = path([
  "liveSync",
]);
72 |
73 | const state = compose<
74 | [MediaElement],
75 | number | undefined,
76 | AudioState | undefined
77 | >((state: number | undefined) => {
78 | switch (state) {
79 | case 0: {
80 | return AudioStateNothing;
81 | }
82 | case 1: {
83 | return AudioStateMetaData;
84 | }
85 | case 2: {
86 | return AudioStateCurrentData;
87 | }
88 | case 3: {
89 | return AudioStateFutureData;
90 | }
91 | case 4: {
92 | return AudioStateEnoughData;
93 | }
94 | default:
95 | return undefined;
96 | }
97 | }, path(["readyState"]));
98 |
99 | const playing = (media: MediaElement): boolean =>
100 | media &&
101 | media.currentTime > 0 &&
102 | !media.paused &&
103 | !media.ended &&
104 | media.readyState > 2;
105 |
// Snapshot helper: props(el) evaluates each accessor above against the
// element and bundles the results into one MediaProps object.
const props: (input: MediaElement) => MediaProps = collectProperties({
  duration,
  buffered,
  volume,
  state,
  playtime,
  ended,
  rate,
  muted,
  src,
  paused,
  playing,
  buffer,
  hls,
  liveSync,
});
122 |
// Individual accessors are exported for direct use; `props` bundles them.
export {
  duration,
  playtime,
  buffered,
  volume,
  ended,
  rate,
  muted,
  state,
  playing,
  paused,
  props,
  src,
  channels,
  initialized,
  hls,
  liveSync,
};
141 |
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
1 | import Hls from "hls.js";
2 |
// An HTMLMediaElement extended with the driver's bookkeeping properties
// (attached by mediaPolyfill in polyfills.ts and by the hls helper).
export interface MediaElement extends HTMLMediaElement {
  // mirror of currentTime maintained by the playtime polyfill
  playtime?: number;
  // hls.js instance when the element plays an HLS stream (see hls.ts)
  hls?: Hls | null;
  // live-edge position maintained by the livesync polyfill
  liveSync?: number | null;
  // true once the first "canplay" event fired
  initialized?: boolean;
}

// Symbolic labels for HTMLMediaElement.readyState values 0-4 (see props.ts).
export const AudioStateNothing = "HAVE_NOTHING";
export const AudioStateMetaData = "HAVE_METADATA";
export const AudioStateCurrentData = "HAVE_CURRENT_DATA";
export const AudioStateFutureData = "HAVE_FUTURE_DATA";
export const AudioStateEnoughData = "HAVE_ENOUGH_DATA";

// Error identifiers surfaced through the onError event (see events.ts).
export const NetworkStateNoSource = "NETWORK_NO_SOURCE";
export const NetworkStateEmpty = "NETWORK_EMPTY";
export const MediaError = "MEDIA_ERROR";

export type ErrorState =
  | typeof NetworkStateNoSource
  | typeof NetworkStateEmpty
  | typeof MediaError;

export type AudioState =
  | typeof AudioStateNothing
  | typeof AudioStateMetaData
  | typeof AudioStateCurrentData
  | typeof AudioStateFutureData
  | typeof AudioStateEnoughData;

// A single <source> descriptor: URL plus MIME type.
export interface MediaSource {
  src: string;
  type: string;
  title?: string;
}

// Snapshot of a media element's state as returned by props() in props.ts.
export interface MediaProps {
  duration: number | undefined;
  buffered: [number, number][];
  volume: number | undefined;
  state: AudioState | undefined;
  playtime: number | undefined;
  ended: undefined | boolean;
  rate: undefined | number;
  muted: undefined | boolean;
  src: undefined | string;
  paused: undefined | boolean;
  playing: undefined | boolean;
  buffer: undefined | AudioBuffer;
  hls: undefined | Hls;
  liveSync: number | undefined;
}

export type MediaAction =
  | "play"
  | "pause"
  | "load"
  | "setPlaytime"
  | "mute"
  | "unmute"
  | "setVolume"
  | "setRate";

// Imperative control surface; every action returns the element for chaining.
export interface MediaActions {
  play: () => MediaElement;
  pause: () => MediaElement;
  load: () => MediaElement;
  setPlaytime: (time: number) => MediaElement;
  mute: () => MediaElement;
  unmute: () => MediaElement;
  setVolume: (volume: number) => MediaElement;
  setRate: (rate: number) => MediaElement;
}

export type MediaEvent =
  | "onLoading"
  | "onLoaded"
  | "onPause"
  | "onBufferChange"
  | "onEnd"
  | "onPlaytimeUpdate"
  | "onLiveSyncUpdate"
  | "onVolumeChange"
  | "onError"
  | "onDurationChange"
  | "onRateChange"
  | "onPlay"
  | "onBuffering"
  | "onReady"
  | "onFilterUpdate";
92 |
--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------
1 | import { compose, curry, prop } from "ramda";
2 | import { MediaElement, MediaSource } from "./types";
3 |
4 | // Transformation utils
5 | const collectProperties = curry(
6 | (props: { [key: string]: Function }, val: MediaElement): T =>
7 | Object.keys(props).reduce(
8 | (result, name) =>
9 | Object.assign({}, result, {
10 | [name]: props[name](val),
11 | }),
12 | {}
13 | ) as T
14 | );
15 |
16 | const toArray = (input: T | T[]): T[] =>
17 | Array.isArray(input) ? input : [input];
18 |
// Event Utils
const getNodeFromEvent = prop("target"); // extracts event.target (the media node)
21 |
// Dom Utils
const createNode = (tag: string) => document.createElement(tag); // thin wrapper, kept for composability
24 | const appendNode = curry(
25 | (node: HTMLElement, childs: HTMLElement[]): HTMLElement => {
26 | toArray(childs).forEach((child) => {
27 | node.appendChild(child);
28 | });
29 |
30 | return node;
31 | }
32 | );
33 |
34 | const mountNode = (child: T): T =>
35 | compose(() => child, appendNode(document.body))(child);
36 |
37 | const setAttributes = curry(
38 | (node: HTMLElement, attributes: { [key: string]: any }) =>
39 | Object.keys(attributes).reduce((result, key) => {
40 | result.setAttribute(key, attributes[key]);
41 | return result;
42 | }, node)
43 | );
44 |
45 | const getMediaSources = (media: MediaElement): MediaSource[] =>
46 | Array.from(media.children).map((node) => ({
47 | src: node.getAttribute("src") || "",
48 | type: node.getAttribute("type") || "",
49 | }));
50 |
51 | const dispatchEvent = curry((type, node) => {
52 | const event = new Event(type);
53 |
54 | node.dispatchEvent(event);
55 |
56 | return node;
57 | });
58 |
// Best-effort user-agent sniffing, evaluated once at module load. The first
// matching case wins, so the more specific patterns (edge/opera/chrome) are
// tested before the generic ones.
// NOTE(review): reads `window` at import time — this module is not SSR-safe.
const browser = (() => {
  const test = (regexp: RegExp) => regexp.test(window.navigator.userAgent);

  switch (true) {
    case test(/edg/i):
      return "edge";
    case test(/opr/i) && (!!(window as any).opr || !!(window as any).opera):
      return "opera";
    case test(/chrome/i) && !!(window as any).chrome:
      return "chrome";
    case test(/trident/i):
      return "ie";
    case test(/firefox/i):
      return "firefox";
    case test(/safari/i):
      return "safari";
    default:
      return "other";
  }
})();
79 |
80 | const parseFloatInput = (input: string | number): number =>
81 | typeof input === "string" ? parseFloat(input) : input;
82 |
// Shared helpers used across the driver modules.
export {
  parseFloatInput,
  collectProperties,
  getNodeFromEvent,
  createNode,
  mountNode,
  setAttributes,
  appendNode,
  getMediaSources,
  dispatchEvent,
  browser,
  toArray,
};
96 |
--------------------------------------------------------------------------------
/src/video.ts:
--------------------------------------------------------------------------------
1 | import { compose } from "ramda";
2 |
3 | import { createSourceNodes, mediaNode } from "./media";
4 | import { MediaSource } from "./types";
5 | import { mountNode, toArray } from "./utils";
6 |
// Creates a polyfilled <video> element from one or many sources, appends the
// matching <source> children and mounts it into document.body.
export const video = (sources: MediaSource[] | MediaSource) =>
  compose(mountNode, createSourceNodes(mediaNode("video")), toArray)(sources);
9 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ESNext",
4 | "useDefineForClassFields": true,
5 | "module": "ESNext",
6 | "lib": ["ESNext", "dom"],
7 | "moduleResolution": "Node",
8 | "strict": true,
9 | "resolveJsonModule": true,
10 | "isolatedModules": true,
11 | "esModuleInterop": true,
12 | "noEmit": true,
13 | "noUnusedLocals": true,
14 | "noUnusedParameters": true,
15 | "noImplicitReturns": true,
16 | "skipLibCheck": true
17 | },
18 | "include": ["src/**/*.ts", "cypress/**/*.ts", "example/**/*.ts"]
19 | }
20 |
--------------------------------------------------------------------------------
/vite.config.js:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import { defineConfig } from "vite";
3 | import dts from "vite-plugin-dts";
4 | import del from "rollup-plugin-delete";
5 |
// Build entry points — each maps to src/<name>.ts and is emitted as its own
// dist/<name>.js module (see the lib.entry reduce below).
const entries = [
  "index",
  "actions",
  "audio",
  "video",
  "events",
  "props",
  "utils",
  "hls",
  "filters",
  "media",
  "connect",
];
19 |
// Library build: one ES-module file per entry, with type declarations (dts).
export default ({ command }) =>
  defineConfig({
    plugins: [
      dts(),
      // on build, drop the audio fixtures that vite copies from public/
      ...(command === "build"
        ? [del({ targets: "dist/audio-files", hook: "generateBundle" })]
        : []),
    ],
    build: {
      lib: {
        formats: ["es"],
        // { index: src/index.ts, actions: src/actions.ts, ... }
        entry: entries.reduce(
          (result, entry) => ({
            ...result,
            [entry]: path.resolve(__dirname, "src", `${entry}.ts`),
          }),
          {}
        ),
      },
      rollupOptions: {
        output: {
          entryFileNames: "[name].js",
          chunkFileNames: `[name].[hash].js`,
        },
      },
    },
    resolve: {
      extensions: [".mjs", ".js", ".ts", ".jsx", ".tsx", ".json", ".vue"],
    },
  });
50 |
--------------------------------------------------------------------------------