16 |
17 | {{outlet}}
18 |
--------------------------------------------------------------------------------
/tests/dummy/app/styles/content.styl:
--------------------------------------------------------------------------------
1 | .content
2 | padding: 1em 0
3 |
4 | .note
5 | font-size: 1em
6 |
7 | li
8 | padding: .5em 0
9 |
10 | .track-list
11 | list-style: none
12 |
13 | .item
14 | cursor: pointer
15 | text-transform: capitalize
16 |
17 | .active
18 | text-decoration: underline
19 |
20 | .ember-remodal > .content
21 | padding: 1em
22 |
23 | .legacy-controls
24 | padding: 2em
25 |
26 | .btn
27 | margin: 0 1em
28 |
29 | .actions
30 | .btn
31 | min-width: 6em
32 |
--------------------------------------------------------------------------------
/addon/classes/note.js:
--------------------------------------------------------------------------------
1 | import EmberObject from '@ember/object';
2 | import { MusicalIdentity } from 'ember-audio/mixins';
3 |
4 | /**
5 | * A class that represents a musical note, but does not carry any audio data.
6 | *
7 | * This class only makes sense when used in the context of a collection, as the
8 | * only functionality it provides serves to facilitate identification.
9 | *
10 | * @public
11 | * @class Note
12 | * @uses MusicalIdentity
13 | */
14 | const Note = EmberObject.extend(MusicalIdentity);
15 |
16 | export default Note;
17 |
--------------------------------------------------------------------------------
/tests/unit/utils/array-methods-test.js:
--------------------------------------------------------------------------------
1 | import { arraySwap, flatten } from 'ember-audio/utils';
2 | import { module, test } from 'qunit';
3 |
4 | module('Unit | Utility | array methods', function () {
5 | test('arraySwap works', function (assert) {
6 | let result = arraySwap(['a', 'b', 'c', 'd', 'e'], 2);
7 | assert.deepEqual(['c', 'd', 'e', 'a', 'b'], result);
8 | });
9 |
10 | test('flatten works', function (assert) {
11 | let result = flatten([
12 | [1, 2],
13 | [3, 4],
14 | ]);
15 | assert.deepEqual([1, 2, 3, 4], result);
16 | });
17 | });
18 |
--------------------------------------------------------------------------------
/.ember-cli:
--------------------------------------------------------------------------------
1 | {
2 | /**
3 | Ember CLI sends analytics information by default. The data is completely
4 | anonymous, but there are times when you might want to disable this behavior.
5 |
6 | Setting `disableAnalytics` to true will prevent any data from being sent.
7 | */
8 | "disableAnalytics": false,
9 |
10 | /**
11 | Setting `isTypeScriptProject` to true will force the blueprint generators to generate TypeScript
12 | rather than JavaScript by default, when a TypeScript version of a given blueprint is available.
13 | */
14 | "isTypeScriptProject": false
15 | }
16 |
--------------------------------------------------------------------------------
/tests/dummy/config/ember-cli-update.json:
--------------------------------------------------------------------------------
1 | {
2 | "schemaVersion": "1.0.0",
3 | "packages": [
4 | {
5 | "name": "ember-cli",
6 | "version": "4.10.0",
7 | "blueprints": [
8 | {
9 | "name": "addon",
10 | "outputRepo": "https://github.com/ember-cli/ember-addon-output",
11 | "codemodsSource": "ember-addon-codemods-manifest@1",
12 | "isBaseBlueprint": true,
13 | "options": [
14 | "--welcome",
15 | "--ci-provider=github"
16 | ]
17 | }
18 | ]
19 | }
20 | ]
21 | }
22 |
--------------------------------------------------------------------------------
/tests/dummy/app/styles/main.styl:
--------------------------------------------------------------------------------
1 | html,
2 | body
3 | background-color: $background
4 | font-family: 'Open Sans', sans-serif
5 | min-height: 100%
6 | min-width: 100%
7 |
8 | hr
9 | border-top: 2px solid black
10 |
11 | .main
12 | margin: 0 auto
13 | max-width: 85%
14 | padding-bottom: 4em
15 |
16 | .content
17 | button
18 | margin: 2em 0
19 |
20 | pre
21 | text-align: left
22 |
23 | .ember-notify-show
24 | z-index: 1000
25 |
26 | .remodal-overlay
27 | z-index: 100
28 |
29 | .buy-beer
30 | text-align: center
31 |
32 | #xy-canvas
33 | background-color: black;
34 |
--------------------------------------------------------------------------------
/tests/dummy/app/styles/beat-pads.styl:
--------------------------------------------------------------------------------
1 | .controls
2 | text-align: center
3 |
4 | .beat-machine
5 | display: table
6 | margin: 0 auto
7 |
8 | .beat-lane
9 | margin: 1em 0
10 |
11 | .text
12 | display: block
13 |
14 | .beat-pad
15 | display: inline-block
16 | height: 100px
17 | width: 100px
18 | text-align: center
19 |
20 | .pad
21 | background-color: darken($code-bg, 6)
22 | display: block
23 | height: 100%
24 |
25 | &.highlighted
26 | background-color: $code-bg
27 |
28 | &.active
29 | background-color: red
30 |
31 | &.playing
32 | background-color: black
33 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/ignore-files/ for more about ignoring files.
2 |
3 | # compiled output
4 | /dist/
5 | /tmp/
6 |
7 | # dependencies
8 | /bower_components/
9 | /node_modules/
10 |
11 | # misc
12 | /.env*
13 | /.pnp*
14 | /.sass-cache
15 | /.eslintcache
16 | /connect.lock
17 | /coverage/
18 | /libpeerconnection.log
19 | /npm-debug.log*
20 | /testem.log
21 | /yarn-error.log
22 |
23 | # ember-try
24 | /.node_modules.ember-try/
25 | /bower.json.ember-try
26 | /npm-shrinkwrap.json.ember-try
27 | /package.json.ember-try
28 | /package-lock.json.ember-try
29 | /yarn.lock.ember-try
30 |
31 | # broccoli-debug
32 | /DEBUG/
33 |
--------------------------------------------------------------------------------
/tests/dummy/app/templates/soundfonts/notes.hbs:
--------------------------------------------------------------------------------
1 |
2 |
Note Objects
3 |
4 |
5 | Note objects returned from the asFont method are plain
6 | Ember.Objects. Using the note Ab1 as an example,
7 | here is what getting these attributes will look like:
8 |
43 | Please make suggestions for more features or changes to the API by creating
44 | an issue or pinging me on the ember community discord (@sethbrasile)
45 |
46 |
47 |
48 |
49 |
Buy Seth Brasile a Beer
50 |
51 |
56 |
57 |
--------------------------------------------------------------------------------
/tests/dummy/app/controllers/synthesis/xy-pad.js:
--------------------------------------------------------------------------------
1 | import { action } from '@ember/object';
2 | import { inject as service } from '@ember/service';
3 | import Controller from '@ember/controller';
4 | import { tracked } from '@glimmer/tracking';
5 | import { exponentialRatio } from 'ember-audio/utils';
6 |
7 | export default class XyPadController extends Controller {
8 | @service audio;
9 | @tracked oscillator;
10 | @tracked range;
11 |
12 | padSize = 300;
13 |
14 | constructor() {
15 | super(...arguments);
16 |
17 | this.oscillator = this.audio.createOscillator({ type: 'square' });
18 |
19 | if (!this.range) {
20 | this.range = { min: 100, max: 400 };
21 | }
22 | }
23 |
24 | get frequency() {
25 | const frequency = this.oscillator.frequency;
26 |
27 | if (frequency) {
28 | return frequency.toFixed();
29 | }
30 |
31 | return null;
32 | }
33 |
34 | get gain() {
35 | const gain = this.oscillator.gain;
36 |
37 | if (gain) {
38 | return gain.toFixed(2);
39 | } else {
40 | return 0;
41 | }
42 | }
43 |
44 | @action
45 | play() {
46 | this.oscillator.play();
47 | }
48 |
49 | @action
50 | stop() {
51 | const { oscillator } = this;
52 |
53 | if (oscillator.isPlaying) {
54 | oscillator.stop();
55 | }
56 | }
57 |
58 | @action
59 | adjustSynthParams(x, y) {
60 | const { oscillator, range, padSize } = this;
61 | const frequency = range.min + (range.max - range.min) * (x / padSize);
62 |
63 | // Human senses are not linear.
64 | // http://stackoverflow.com/questions/1165026/what-algorithms-could-i-use-for-audio-volume-level
65 | const gain = exponentialRatio(y / this.padSize);
66 |
67 | oscillator.update('frequency', frequency);
68 | oscillator.update('gain', gain);
69 |
70 | // Must reassign in order for @tracked to pick up changes to complex object
71 | this.oscillator = oscillator;
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/tests/unit/mixins/connectable-test.js:
--------------------------------------------------------------------------------
1 | import { A } from '@ember/array';
2 | import EmberObject from '@ember/object';
3 | import ContextMock from '../../helpers/context-mock';
4 | import ConnectableMixin from 'ember-audio/mixins/connectable';
5 | import { module, test } from 'qunit';
6 |
7 | module('Unit | Mixin | connectable', function () {
8 | // Replace this with your real tests.
9 | test('it works', function (assert) {
10 | let audioContext = ContextMock.create();
11 | let ConnectableObject = EmberObject.extend(ConnectableMixin);
12 | let subject = ConnectableObject.create({ audioContext });
13 | assert.ok(subject);
14 | });
15 |
16 | test('removeConnection removes a connection from the connections array by name', function (assert) {
17 | let audioContext = ContextMock.create();
18 | let ConnectableObject = EmberObject.extend(ConnectableMixin, {
19 | _initConnections() {
20 | // noop
21 | },
22 | wireConnections() {
23 | // noop
24 | },
25 |
26 | connections: A([{ name: 'milo' }, { name: 'otis' }]),
27 | });
28 |
29 | let subject = ConnectableObject.create({ audioContext });
30 | subject.removeConnection('milo');
31 |
32 | assert.strictEqual(subject.get('connections')[0].name, 'otis');
33 | assert.strictEqual(subject.get('connections').length, 1);
34 | });
35 |
36 | test('Connectable _createNode method throws when connection arg is missing props', function (assert) {
37 | let audioContext = ContextMock.create();
38 | let ConnectableObject = EmberObject.extend(ConnectableMixin);
39 | let subject = ConnectableObject.create({ audioContext });
40 |
41 | assert.ok(subject._createNode);
42 |
43 | let createNode = () => {
44 | subject._createNode({
45 | node: false,
46 | createdOnPlay: false,
47 | path: false,
48 | createCommand: false,
49 | source: false,
50 | name: 'test',
51 | });
52 | };
53 |
54 | assert.throws(createNode);
55 | });
56 | });
57 |
--------------------------------------------------------------------------------
/tests/dummy/app/controllers/audio-files/mp3-player.js:
--------------------------------------------------------------------------------
1 | import { action } from '@ember/object';
2 | import { inject as service } from '@ember/service';
3 | import { tracked } from '@glimmer/tracking';
4 | import Controller from '@ember/controller';
5 |
6 | export default class Mp3PlayerController extends Controller {
7 | @service audio;
8 | @tracked selectedTrack;
9 | @tracked trackIsLoading = false;
10 |
11 | tracks = [
12 | {
13 | name: 'barely-there',
14 | trackInstance: null,
15 | description: `I used to play bass and sing ("clean" vocals) in a metalcore
16 | band called "Bringing Down Broadway" and this is one of our songs.
17 | This is from around 2005 (I'm sooooo oooooollldddd).
18 | The album is titled, "It's all Gone South", I recorded and produced it, and it
19 | was a commercial failure. I think it's awesome.`,
20 | },
21 | {
22 | name: 'do-wah-diddy',
23 | trackInstance: null,
24 | description: `My friend David Denison and I recorded this song in a living
25 | room with a laptop and a broken logitech PC mic, for fun. This
26 | is from around 2008 (please see comment on "Barely There" about
27 | being old). David is "rapping" and I'm singing. Please
28 | keep in mind that this is from a time when "autotune" was in it's
29 | infancy so the suckiness was par for the course. Also, "autotune" is for assholes.
30 | When you can't sing, you should just suck it up and sound bad.`,
31 | },
32 | ];
33 |
34 | @action
35 | selectTrack(track) {
36 | const { audio } = this;
37 |
38 | this.selectedTrack = track;
39 | this.trackIsLoading = true;
40 |
41 | audio.pauseAll();
42 |
43 | audio
44 | .load(`/ember-audio/${track.name}.mp3`)
45 | .asTrack(track.name)
46 | .then((trackInstance) => {
47 | const track = this.selectedTrack;
48 |
49 | track.trackInstance = trackInstance;
50 | this.trackIsLoading = false;
51 |
52 | this.selectedTrack = track;
53 | });
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/tests/dummy/app/controllers/synthesis/index.js:
--------------------------------------------------------------------------------
1 | import classic from 'ember-classic-decorator';
2 | import { action } from '@ember/object';
3 | import { inject as service } from '@ember/service';
4 | import Controller from '@ember/controller';
5 | import { tracked } from '@glimmer/tracking';
6 | import { Oscillator } from 'ember-audio';
7 | import { MusicalIdentity } from 'ember-audio/mixins';
8 |
9 | // TODO: xy pad with filters and plugins
10 | // By mixing the MusicalIdentity mixin into the Oscillator class, we get an
11 | // oscillator that is aware of its frequency, letter, accidental, octave, etc...
12 | @classic
13 | class MusicallyAwareOscillator extends Oscillator.extend(MusicalIdentity) {}
14 |
15 | export default class IndexController extends Controller {
16 | @service audio;
17 | @tracked oscillators; // Put oscillators here after they're created
18 |
19 | constructor() {
20 | super(...arguments);
21 |
22 | const { audio } = this;
23 |
24 | // Outputs an array of all the notes on a standard "western" piano
25 | // Could also do `audio.createNoteArray(notes)` where notes is a POJO,
26 | // or `audio.load(URL).asNoteArray().then(...)` providing a URL to a JSON file
27 | const notes = audio.createNoteArray();
28 |
29 | // Slicing so that the keyboard isn't massive
30 | const slicedNotes = notes.slice(48, 60);
31 |
32 | // Create a MusicallyAwareOscillator instance for each note in slicedNotes
33 | const oscillators = slicedNotes.map((note) => {
34 | return MusicallyAwareOscillator.create({
35 | // By setting `frequency`, we get `identifier`, `name`, etc.. for free
36 | frequency: note.get('frequency'),
37 | // Default type is 'sine'
38 | type: 'square',
39 | // Oscillator instances need `audioContext` in order to make sound
40 | audioContext: audio.get('audioContext'),
41 | });
42 | });
43 |
44 | this.oscillators = oscillators;
45 | }
46 |
47 | @action
48 | startNote(note) {
49 | note.play();
50 | }
51 |
52 | @action
53 | stopNote(note) {
54 | if (note.get('isPlaying')) {
55 | note.stop();
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/tests/helpers/context-mock.js:
--------------------------------------------------------------------------------
1 | import EmberObject from '@ember/object';
2 | import { on } from '@ember/object/evented';
3 | import { Promise } from 'rsvp';
4 |
5 | const NodeObject = EmberObject.extend({
6 | connectCalled: false,
7 | startCalled: false,
8 |
9 | startTime: null,
10 | stopTime: null,
11 |
12 | pan: null,
13 | gain: null,
14 |
15 | _init: on('init', function () {
16 | if (!this.pan) {
17 | this.pan = { value: null };
18 | }
19 |
20 | if (!this.gain) {
21 | this.gain = { value: null };
22 | }
23 | }),
24 |
25 | connect(obj) {
26 | this.set('connectCalled', true);
27 | this.set('connectedObject', obj);
28 | },
29 |
30 | start(time) {
31 | this.set('startTime', time);
32 | this.set('startCalled', true);
33 | },
34 |
35 | stop(time) {
36 | this.set('stopTime', time);
37 | this.set('stopCalled', true);
38 | },
39 | });
40 |
41 | const ContextMock = EmberObject.extend({
42 | currentTime: 110,
43 | createBufferSourceCalled: false,
44 | createGainCalled: false,
45 | createAnalyserCalled: false,
46 | createStereoPannerCalled: false,
47 | createBiqaudFilterCalled: false,
48 | createOscillatorCalled: false,
49 |
50 | initDestination: on('init', function () {
51 | this.destination = {};
52 | }),
53 |
54 | createBufferSource() {
55 | this.set('createBufferSourceCalled', true);
56 | return NodeObject.create();
57 | },
58 |
59 | createStereoPanner() {
60 | this.set('createStereoPannerCalled', true);
61 | return NodeObject.create();
62 | },
63 |
64 | createGain() {
65 | this.set('createGainCalled', true);
66 | return NodeObject.create({
67 | gain: { value: 0.4 },
68 | });
69 | },
70 |
71 | createAnalyser() {
72 | this.set('createAnalyserCalled', true);
73 | return NodeObject.create();
74 | },
75 |
76 | createBiqaudFilter() {
77 | this.set('createBiqaudFilterCalled', true);
78 | return NodeObject.create();
79 | },
80 |
81 | createOscillator() {
82 | this.set('createOscillatorCalled', true);
83 | return NodeObject.create();
84 | },
85 |
86 | decodeAudioData(data) {
87 | return new Promise((resolve) => resolve(data));
88 | },
89 | });
90 |
91 | export default ContextMock;
92 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | - master
8 | pull_request: {}
9 |
10 | concurrency:
11 | group: ci-${{ github.head_ref || github.ref }}
12 | cancel-in-progress: true
13 |
14 | jobs:
15 | test:
16 | name: "Tests"
17 | runs-on: ubuntu-latest
18 | timeout-minutes: 10
19 |
20 | steps:
21 | - uses: actions/checkout@v3
22 | - name: Install Node
23 | uses: actions/setup-node@v3
24 | with:
25 | node-version: 14.x
26 | cache: yarn
27 | - name: Install Dependencies
28 | run: yarn
29 | # - name: Lint
30 | # run: npm run lint
31 | - name: Run tests and collect coverage
32 | run: COVERAGE=true npm run test:ember
33 | - name: Upload Coverage to Codecov
34 | uses: codecov/codecov-action@v3
35 |
36 | floating:
37 | name: "Floating Dependencies"
38 | runs-on: ubuntu-latest
39 | timeout-minutes: 10
40 |
41 | steps:
42 | - uses: actions/checkout@v3
43 | - uses: actions/setup-node@v3
44 | with:
45 | node-version: 14.x
46 | cache: yarn
47 | - name: Install Dependencies
48 | run: yarn
49 | - name: Run Tests
50 | run: npm run test:ember
51 |
52 | try-scenarios:
53 | name: ${{ matrix.try-scenario }}
54 | runs-on: ubuntu-latest
55 | needs: "test"
56 | timeout-minutes: 10
57 |
58 | strategy:
59 | fail-fast: false
60 | matrix:
61 | try-scenario:
62 | - ember-lts-3.8
63 | - ember-lts-3.12
64 | - ember-lts-3.16
65 | - ember-lts-3.20
66 | - ember-lts-3.24
67 | - ember-lts-3.28
68 | - ember-lts-4.4
69 | - ember-lts-4.8
70 | - ember-release
71 | - ember-beta
72 | - ember-canary
73 | - ember-classic
74 |
75 | steps:
76 | - uses: actions/checkout@v3
77 | - name: Install Node
78 | uses: actions/setup-node@v3
79 | with:
80 | node-version: 14.x
81 | cache: yarn
82 | - name: Install Dependencies
83 | run: yarn
84 | - name: Run Tests
85 | run: ./node_modules/.bin/ember try:one ${{ matrix.try-scenario }}
86 |
--------------------------------------------------------------------------------
/tests/unit/classes/track-test.js:
--------------------------------------------------------------------------------
1 | import { Track } from 'ember-audio';
2 | import ContextMock from '../../helpers/context-mock';
3 | import AudioBufferMock from '../../helpers/audio-buffer-mock';
4 | import { module, test } from 'qunit';
5 |
6 | module('Unit | Class | track', function () {
7 | const audioContext = ContextMock.create();
8 |
9 | test('position.raw works', function (assert) {
10 | let result = Track.create({ audioContext, startOffset: 65 });
11 | assert.strictEqual(result.get('position.raw'), 65);
12 | });
13 |
14 | test('position.string works', function (assert) {
15 | let result = Track.create({ audioContext, startOffset: 65 });
16 |
17 | assert.strictEqual(result.get('position.string'), '01:05');
18 |
19 | result.set('startOffset', 40);
20 | assert.strictEqual(result.get('position.string'), '00:40');
21 |
22 | result.set('startOffset', 40.12765);
23 | assert.strictEqual(result.get('position.string'), '00:40');
24 |
25 | result.set('startOffset', 60);
26 | assert.strictEqual(result.get('position.string'), '01:00');
27 |
28 | result.set('startOffset', 600);
29 | assert.strictEqual(result.get('position.string'), '10:00');
30 |
31 | result.set('startOffset', 6001);
32 | assert.strictEqual(result.get('position.string'), '100:01');
33 |
34 | result.set('startOffset', 6012);
35 | assert.strictEqual(result.get('position.string'), '100:12');
36 | });
37 |
38 | test('position.pojo works', function (assert) {
39 | let result = Track.create({ audioContext, startOffset: 65 });
40 | assert.deepEqual(result.get('position.pojo'), {
41 | minutes: 1,
42 | seconds: 5,
43 | });
44 |
45 | result.set('startOffset', 40);
46 | assert.deepEqual(result.get('position.pojo'), {
47 | minutes: 0,
48 | seconds: 40,
49 | });
50 |
51 | result.set('startOffset', 60);
52 | assert.deepEqual(result.get('position.pojo'), {
53 | minutes: 1,
54 | seconds: 0,
55 | });
56 | });
57 |
58 | test('percentPlayed works', function (assert) {
59 | const audioBuffer = AudioBufferMock.create({ duration: 90 });
60 |
61 | let result = Track.create({ audioContext, audioBuffer, startOffset: 63 });
62 | assert.strictEqual(result.get('percentPlayed'), 70);
63 | });
64 | });
65 |
--------------------------------------------------------------------------------
/tests/dummy/app/components/xy-pad.js:
--------------------------------------------------------------------------------
1 | import { schedule } from '@ember/runloop';
2 | import { action } from '@ember/object';
3 | import Component from '@glimmer/component';
4 |
5 | export default class XyPad extends Component {
6 | constructor() {
7 | super(...arguments);
8 |
9 | schedule('afterRender', this, '_drawGrid');
10 | schedule('afterRender', this, '_drawText');
11 | }
12 |
13 | _drawText() {
14 | const canvas = document.getElementById('xy-canvas');
15 | const ctx = canvas.getContext('2d');
16 | const pad = 10;
17 |
18 | ctx.fillStyle = 'white';
19 | ctx.font = '24px serif';
20 | ctx.textAlign = 'left';
21 |
22 | // save orientation
23 | ctx.save();
24 |
25 | // rotate canvas
26 | ctx.translate(0, 0);
27 | ctx.rotate(Math.PI / 2);
28 |
29 | // draw 'Gain'
30 | ctx.fillText('Gain', pad, -pad);
31 |
32 | // restore orientation
33 | ctx.restore();
34 |
35 | // draw 'Frequency'
36 | ctx.fillText('Frequency', pad, canvas.height - pad);
37 | }
38 |
39 | _drawGrid() {
40 | const canvas = document.getElementById('xy-canvas');
41 | const ctx = canvas.getContext('2d');
42 | const { width, height } = canvas;
43 | const gridSize = 30;
44 |
45 | ctx.strokeStyle = 'gray';
46 | ctx.strokeWidth = 1;
47 |
48 | ctx.beginPath();
49 |
50 | for (let i = 1; i <= width / gridSize; i++) {
51 | const x = i * gridSize;
52 | ctx.moveTo(x, 0);
53 | ctx.lineTo(x, height);
54 | ctx.stroke();
55 | }
56 |
57 | for (let i = 1; i <= height / gridSize; i++) {
58 | const y = i * gridSize;
59 | ctx.moveTo(0, y);
60 | ctx.lineTo(width, y);
61 | ctx.stroke();
62 | }
63 |
64 | ctx.closePath();
65 | }
66 |
67 | @action
68 | activate(e) {
69 | this.updateCoordinates(e);
70 | this.args.activate();
71 | }
72 |
73 | @action
74 | updateCoordinates(e) {
75 | const canvasLocation = e.target.getBoundingClientRect();
76 | const xRelToScreen = e.x || e.touches[0].screenX;
77 | const yRelToScreen = e.y || e.touches[0].screenY;
78 | const x = xRelToScreen - canvasLocation.left;
79 |
80 | // 'y' is measured from top, so invert for value from bottom
81 | const y = this.args.padSize + (yRelToScreen - canvasLocation.top) * -1;
82 | this.args.updateCoordinates(x, y);
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/tests/dummy/public/drum-samples/erkan_cymbals_license.txt:
--------------------------------------------------------------------------------
1 | Erkans Samples (e_m_p@live.se)
2 |
3 | Recorded by Erkan Dogantimur at Gävle, Sweden, 14 September 2008 in his home studio.
4 | Cut and finished samples produced 15 September, by Erkan Dogantimur at home.
5 |
6 | Recorded with an AKG Perception 100, through a Mackie Mini Series mixer, into an M-Audio Delta1010LT.
7 |
8 |
9 |
10 | !!ATTENTION!! If you have bought these samples from someone, you have been ripped. These samples are not for sale. Go find him and kick his ass.
11 |
12 |
13 | ----------------------------------------------------------------------------------
14 | IMPORTANT INFORMATION:
15 |
16 | These samples are 100% raw. There is some proximity effect from the microphone (it was 40-50 cm away from the source at all times!), but more importantly, there is a lot of bass resonance coming from the floor and up into the mic.
17 |
18 | When you build up your drumkit with these samples, do not forget to high pass these samples (as usual) to get rid of the unwanted bass. Keep in mind these are 100% natural, dry, unprocessed etc. recordings, so add effects and manipulate them as you wish.
19 |
20 |
21 | Also, the samples are numbered and the lowest number represents the lowest velocity of the hit. For example if you are aiming for metal sound, the higher numbers are what you would want.
22 |
23 | The hihats are struck with the tip of the stick for 2/3 of the sample amount, approximately. This means, if a hihat has 20 velocities recorded, like "Super_Cool_Hihat_Closed-01" to "Super_Cool_Hihat_Closed-20", approximately the sounds from 01 to 12 will be strokes with the tip of the drum stick, and from 13 and up will be the "side" of the stick, the standard way to hit cymbals in high volumes.
24 |
25 |
26 |
27 |
28 | ----------------------------------------------------------------------------------
29 | LICENSE AGREEMENT:
30 |
31 | These samples are free, for private use only. If you want to use the samples commercially, you will have to ask me for permission. Studios/Corporations/Organizations and such are NOT allowed to use these samples, without my given permission. You can not sell/buy these samples. Nobody is supposed to make a dime on these samples, keep that in mind.
32 |
33 |
34 | ----------------------------------------------------------------------------------
35 | 615 Cymbal Samples, brought to you by Erkan Dogantimur. 8 freakin' hours in front of the computer to slice them up, yeah that was fun.
36 |
--------------------------------------------------------------------------------
/tests/unit/classes/beat-track-test.js:
--------------------------------------------------------------------------------
1 | import { BeatTrack, Sound } from 'ember-audio';
2 | import ContextMock from '../../helpers/context-mock';
3 | import AudioBufferMock from '../../helpers/audio-buffer-mock';
4 | import { module, test } from 'qunit';
5 |
6 | module('Unit | Class | beat track', function () {
7 | test('it exists', function (assert) {
8 | let result = BeatTrack.create();
9 | assert.ok(result);
10 | });
11 |
12 | test(`beats' 'active' state is saved when numBeats changes`, function (assert) {
13 | let result = BeatTrack.create();
14 | let [beat1, beat2, beat3] = result.get('beats');
15 |
16 | beat1.set('active', true);
17 | beat3.set('active', true);
18 |
19 | result.set('numBeats', 6);
20 |
21 | [beat1, beat2, beat3] = result.get('beats');
22 |
23 | assert.ok(beat1.get('active'));
24 | assert.notOk(beat2.get('active'));
25 | assert.ok(beat3.get('active'));
26 |
27 | result.set('numBeats', 4);
28 |
29 | [beat1, beat2, beat3] = result.get('beats');
30 |
31 | assert.ok(beat1.get('active'));
32 | assert.notOk(beat2.get('active'));
33 | assert.ok(beat3.get('active'));
34 | });
35 |
36 | test('playActiveBeats method calls _callPlayMethodOnBeats with "playIn" as first param', function (assert) {
37 | assert.expect(1);
38 | let result = BeatTrack.create();
39 | result._callPlayMethodOnBeats = (arg1) =>
40 | assert.strictEqual(arg1, 'playIn');
41 | result.playBeats(0, 0);
42 | });
43 |
44 | test('playActiveBeats method calls _callPlayMethodOnBeats with "ifActivePlayIn" as first param', function (assert) {
45 | assert.expect(1);
46 | let result = BeatTrack.create();
47 | result._callPlayMethodOnBeats = (arg1) =>
48 | assert.strictEqual(arg1, 'ifActivePlayIn');
49 | result.playActiveBeats(0, 0);
50 | });
51 |
52 | test('_callPlayMethodOnBeats method calls "method" arg on all beats in beats array', function (assert) {
53 | let audioContext = ContextMock.create();
54 | let audioBuffer = AudioBufferMock.create();
55 | let counter = 0;
56 | let sound = Sound.create({
57 | audioContext,
58 | audioBuffer,
59 |
60 | playIn() {
61 | counter++;
62 | },
63 | });
64 |
65 | let result = BeatTrack.create();
66 | let sounds = result.get('sounds');
67 |
68 | sounds.add(sound);
69 |
70 | result._callPlayMethodOnBeats('playIn', 120);
71 | assert.strictEqual(counter, 4);
72 | });
73 | });
74 |
--------------------------------------------------------------------------------
/tests/dummy/app/templates/audio-files/drum-kit.hbs:
--------------------------------------------------------------------------------
1 |
2 |
Multisampled Drum Kit
3 |
4 |
5 |
6 |
7 | Go ahead and hit the hihat a few times... Notice how each sample sounds
8 | slightly different? That's multisampling, baby.
9 |
5 | There are two ways to schedule sounds with Ember Audio:
6 |
7 |
8 |
9 |
10 |
1: Manually passing a time to a Sound or a Track
11 |
12 | Using a fixed moment in time with the playAt method, or by
13 | using an amount of time from now with the playIn method.
14 |
15 |
16 |
17 |
18 | this.get('audio').load('some.mp3').asSound('some-sound');
19 |
20 | // some time later...
21 |
22 | const audio = this.get('audio');
23 | const now = audio.get('audioContext.currentTime');
24 |
25 | audio.getSound('some-sound').playAt(now + 1); // plays in 1 second
26 |
27 |
28 |
29 | this.get('audio').load('some.mp3').asSound('some-sound');
30 |
31 | // some time later...
32 |
33 | this.get('audio').getSound('some-sound').playIn(1); // plays in 1 second
34 |
35 |
36 |
37 |
2: By using a BeatTrack
38 |
39 |
40 |
41 | this.get('audio').load('some.mp3').asBeatTrack('some-sound');
42 |
43 | // some time later...
44 |
45 | const beatTrack = this.get('audio').getBeatTrack('some-sound');
46 | const BPM = 120;
47 |
48 | // playBeats() accepts BPM and will play all the beatTrack's beats at that BPM,
49 | beatTrack.playBeats(BPM);
50 |
51 |
52 |
53 |
54 | Instead of using beatTrack.playBeats, you can also call a Beat
55 | instance's playIn method directly, passing an amount of time
56 | from now (in seconds) that the beat should play.
57 |
72 | beatTrack.playActiveBeats and beat.ifActivePlayIn
73 | are also available which will allow you to mark a beat's active
74 | property and the method will play rests (periods with no audio) during any
75 | beat that isn't active. This is to facilitate the way that most
76 | "drum machines" work.
77 |
78 |
79 |
80 | Take a look at the Drum Machine for
81 | examples.
82 |
83 |
84 |
--------------------------------------------------------------------------------
/tests/dummy/app/controllers/timing/drum-machine.js:
--------------------------------------------------------------------------------
1 | import { action } from '@ember/object';
2 | import { inject as service } from '@ember/service';
3 | import { tracked } from '@glimmer/tracking';
4 | import { all } from 'rsvp';
5 | import Controller from '@ember/controller';
6 |
7 | export default class DrumMachineController extends Controller {
8 | @service audio;
9 | @tracked beatTracks;
10 | @tracked isLoading = true;
11 | @tracked bpm = 120;
12 |
13 | constructor() {
14 | super(...arguments);
15 |
16 | all([
17 | this._loadBeatTrackFor('kick'),
18 | this._loadBeatTrackFor('snare'),
19 | this._loadBeatTrackFor('hihat'),
20 | ]).then((beatTracks) => {
21 | beatTracks.map((beatTrack) => {
22 | const name = beatTrack.get('name');
23 |
24 |         // default is 4 beats, but we're going to use 8
25 | beatTrack.set('numBeats', 8);
26 |
27 | // snare and hihat are a little louder than kick, so we'll turn down the gain
28 | if (name === 'snare' || name === 'hihat') {
29 | beatTrack.set('gain', 0.4);
30 | }
31 |
32 | // and let's pan the hihat a little to the left
33 | if (name === 'hihat') {
34 | beatTrack.set('pan', -0.3);
35 | }
36 | });
37 |
38 | this.isLoading = false;
39 | this.beatTracks = beatTracks;
40 | });
41 | }
42 |
43 | _loadBeatTrackFor(name) {
44 | return this.audio
45 | .load([
46 | `/ember-audio/drum-samples/${name}1.wav`,
47 | `/ember-audio/drum-samples/${name}2.wav`,
48 | `/ember-audio/drum-samples/${name}3.wav`,
49 | ])
50 | .asBeatTrack(name);
51 | }
52 |
53 | @action
54 | play() {
55 | this.beatTracks.map((beatTrack) => {
56 | // playActiveBeats() optionally accepts "noteType" which defaults to "1/4"
57 | // notes, but we want to use eighth notes
58 | beatTrack.playActiveBeats(this.bpm, 1 / 8);
59 |
60 | // /* playActiveBeats() is a convenience method. For more control, you could do:
61 | // http://bradthemad.org/guitar/tempo_explanation.php */
62 | // const eighthNoteDuration = (240 * 1/8) / this.get('bpm');
63 | // beatTrack.get('beats').map((beat, beatIndex) => {
64 | // /* whatever else you need to do */
65 | // beat.ifActivePlayIn(beatIndex * eighthNoteDuration);
66 | // });
67 | });
68 | }
69 |
70 | @action
71 | toggleActive(beat) {
72 | if (beat.active) {
73 | beat.set('active', false);
74 | } else {
75 | beat.play();
76 | beat.set('active', true);
77 | }
78 | }
79 |
80 | @action
81 | engageLudicrousMode() {
82 | this.bpm = 1000000;
83 |
84 | this.beatTracks.map((beatTrack) => {
85 | beatTrack.beats.map((beat) => {
86 | beat.set('active', true);
87 | });
88 | });
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/addon/utils/frequency-map.js:
--------------------------------------------------------------------------------
/**
 * Maps note identifiers (letter + optional flat accidental + octave, e.g.
 * `Ab1`) to their frequencies in hertz, spanning C0 through Eb8.
 *
 * Sharps are represented by their enharmonic flat equivalents; each
 * commented-out line records the sharp spelling of the flat key that follows
 * it (e.g. `C#0` === `Db0`).
 *
 * @public
 */
export default {
  C0: 16.35,
  // 'C#0': 17.32,
  Db0: 17.32,
  D0: 18.35,
  // 'D#0': 19.45,
  Eb0: 19.45,
  E0: 20.6,
  F0: 21.83,
  // 'F#0': 23.12,
  Gb0: 23.12,
  G0: 24.5,
  // 'G#0': 25.96,
  Ab0: 25.96,
  A0: 27.5,
  // 'A#0': 29.14,
  Bb0: 29.14,
  B0: 30.87,
  C1: 32.7,
  // 'C#1': 34.65,
  Db1: 34.65,
  D1: 36.71,
  // 'D#1': 38.89,
  Eb1: 38.89,
  E1: 41.2,
  F1: 43.65,
  // 'F#1': 46.25,
  Gb1: 46.25,
  G1: 49,
  // 'G#1': 51.91,
  Ab1: 51.91,
  A1: 55,
  // 'A#1': 58.27,
  Bb1: 58.27,
  B1: 61.74,
  C2: 65.41,
  // 'C#2': 69.3,
  Db2: 69.3,
  D2: 73.42,
  // 'D#2': 77.78,
  Eb2: 77.78,
  E2: 82.41,
  F2: 87.31,
  // 'F#2': 92.5,
  Gb2: 92.5,
  G2: 98,
  // 'G#2': 103.83,
  Ab2: 103.83,
  A2: 110,
  // 'A#2': 116.54,
  Bb2: 116.54,
  B2: 123.47,
  C3: 130.81,
  // 'C#3': 138.59,
  Db3: 138.59,
  D3: 146.83,
  // 'D#3': 155.56,
  Eb3: 155.56,
  E3: 164.81,
  F3: 174.61,
  // 'F#3': 185,
  Gb3: 185,
  G3: 196,
  // 'G#3': 207.65,
  Ab3: 207.65,
  A3: 220,
  // 'A#3': 233.08,
  Bb3: 233.08,
  B3: 246.94,
  C4: 261.63,
  // 'C#4': 277.18,
  Db4: 277.18,
  D4: 293.66,
  // 'D#4': 311.13,
  Eb4: 311.13,
  E4: 329.63,
  F4: 349.23,
  // 'F#4': 369.99,
  Gb4: 369.99,
  G4: 392,
  // 'G#4': 415.3,
  Ab4: 415.3,
  A4: 440,
  // 'A#4': 466.16,
  Bb4: 466.16,
  B4: 493.88,
  C5: 523.25,
  // 'C#5': 554.37,
  Db5: 554.37,
  D5: 587.33,
  // 'D#5': 622.25,
  Eb5: 622.25,
  E5: 659.26,
  F5: 698.46,
  // 'F#5': 739.99,
  Gb5: 739.99,
  G5: 783.99,
  // 'G#5': 830.61,
  Ab5: 830.61,
  A5: 880,
  // 'A#5': 932.33,
  Bb5: 932.33,
  B5: 987.77,
  C6: 1046.5,
  // 'C#6': 1108.73,
  Db6: 1108.73,
  D6: 1174.66,
  // 'D#6': 1244.51,
  Eb6: 1244.51,
  E6: 1318.51,
  F6: 1396.91,
  // 'F#6': 1479.98,
  Gb6: 1479.98,
  G6: 1567.98,
  // 'G#6': 1661.22,
  Ab6: 1661.22,
  A6: 1760,
  // 'A#6': 1864.66,
  Bb6: 1864.66,
  B6: 1975.53,
  C7: 2093,
  // 'C#7': 2217.46,
  Db7: 2217.46,
  D7: 2349.32,
  // 'D#7': 2489.02,
  Eb7: 2489.02,
  E7: 2637.02,
  F7: 2793.83,
  // 'F#7': 2959.96,
  Gb7: 2959.96,
  G7: 3135.96,
  // 'G#7': 3322.44,
  Ab7: 3322.44,
  A7: 3520,
  // 'A#7': 3729.31,
  Bb7: 3729.31,
  B7: 3951.07,
  C8: 4186.01,
  // 'C#8': 4434.92,
  Db8: 4434.92,
  D8: 4698.64,
  // 'D#8': 4978.03,
  Eb8: 4978.03,
};
145 |
--------------------------------------------------------------------------------
/addon/classes/layered-sound.js:
--------------------------------------------------------------------------------
1 | import { A } from '@ember/array';
2 | import { on } from '@ember/object/evented';
3 | import EmberObject from '@ember/object';
4 |
/**
 * Allows multiple instances of anything that uses
 * {{#crossLink "Playable"}}{{/crossLink}} to be loaded up and played at the
 * same time.
 *
 * @public
 * @class LayeredSound
 */
const LayeredSound = EmberObject.extend({
  /**
   * Acts as a register for different types of sounds. Anything that uses
   * {{#crossLink "Playable"}}{{/crossLink}} can be added to this register.
   * If not set on instantiation, set to `A()` via `_initSounds`.
   *
   * @public
   * @property sounds
   * @type {array|Sound|Oscillator}
   */
  sounds: null,

  /**
   * Iterates over the objects in `sounds` and calls `play` on each.
   *
   * @public
   * @method play
   */
  play() {
    // forEach (not map): called purely for the side effect on each sound.
    this.sounds.forEach((sound) => sound.play());
  },

  /**
   * Iterates over the objects in `sounds` and calls `playAt` on each, passing
   * through the `time` param to each sound.
   *
   * @public
   * @method playAt
   *
   * @param {number} time The time to pass to each object's `playAt` method.
   */
  playAt(time) {
    this.sounds.forEach((sound) => sound.playAt(time));
  },

  /**
   * Iterates over the objects in `sounds` and calls `playIn` on each, passing
   * through the `seconds` param to each sound.
   *
   * @public
   * @method playIn
   *
   * @param {number} seconds The seconds to pass to each object's `playIn` method.
   */
  playIn(seconds) {
    this.sounds.forEach((sound) => sound.playIn(seconds));
  },

  /**
   * Iterates over the objects in `sounds` and calls `playFor` on each, passing
   * through the `seconds` param to each sound.
   *
   * @public
   * @method playFor
   *
   * @param {number} seconds The seconds to pass to each object's `playFor` method.
   */
  playFor(seconds) {
    this.sounds.forEach((sound) => sound.playFor(seconds));
  },

  /**
   * Iterates over the objects in `sounds` and calls `playInAndStopAfter` on
   * each, passing through the `playIn` and `stopAfter` params to each sound.
   *
   * @public
   * @method playInAndStopAfter
   *
   * @param {number} playIn Seconds to pass to each object's
   * `playInAndStopAfter` method.
   *
   * @param {number} stopAfter Seconds to pass to each object's
   * `playInAndStopAfter` method.
   */
  playInAndStopAfter(playIn, stopAfter) {
    this.sounds.forEach((sound) => sound.playInAndStopAfter(playIn, stopAfter));
  },

  /**
   * If `sounds` is null on instantiation, sets it to `A()`.
   *
   * @private
   * @method _initSounds
   */
  _initSounds: on('init', function () {
    if (!this.sounds) {
      this.set('sounds', A());
    }
  }),
});

export default LayeredSound;
105 |
--------------------------------------------------------------------------------
/tests/unit/mixins/musical-identity-test.js:
--------------------------------------------------------------------------------
1 | import EmberObject from '@ember/object';
2 | import MusicalIdentityMixin from 'ember-audio/mixins/musical-identity';
3 | import { module, test } from 'qunit';
4 |
// Minimal host object for exercising the MusicalIdentity mixin in isolation.
const Note = EmberObject.extend(MusicalIdentityMixin);

module('Unit | Mixin | musical identity', function () {
  test('it exists', function (assert) {
    const instance = Note.create();
    assert.ok(instance);
  });

  test('identifier is formatted properly', function (assert) {
    assert.expect(1);

    const aFlatOne = Note.create({ frequency: 51.91 });

    assert.strictEqual(aFlatOne.get('identifier'), 'Ab1');
  });

  test('identifier is formatted properly when note has no accidental', function (assert) {
    assert.expect(1);

    const aOne = Note.create({ frequency: 55 });

    assert.strictEqual(aOne.get('identifier'), 'A1');
  });

  test('name is formatted properly', function (assert) {
    assert.expect(1);

    const aFlat = Note.create({ frequency: 51.91 });

    assert.strictEqual(aFlat.get('name'), 'Ab');
  });

  test('setting frequency properly calculates other props', function (assert) {
    assert.expect(5);

    const concertA = Note.create({ frequency: 440 });

    assert.strictEqual(concertA.get('identifier'), 'A4');
    assert.strictEqual(concertA.get('name'), 'A');
    assert.strictEqual(concertA.get('octave'), '4');
    assert.strictEqual(concertA.get('letter'), 'A');
    assert.strictEqual(concertA.get('accidental'), undefined);
  });

  test('setting identifier properly calculates other props', function (assert) {
    assert.expect(5);

    const fromIdentifier = Note.create({ identifier: 'A4' });

    assert.strictEqual(fromIdentifier.get('frequency'), 440);
    assert.strictEqual(fromIdentifier.get('name'), 'A');
    assert.strictEqual(fromIdentifier.get('octave'), '4');
    assert.strictEqual(fromIdentifier.get('letter'), 'A');
    assert.strictEqual(fromIdentifier.get('accidental'), undefined);
  });

  test('setting identifier with accidental properly calculates other props', function (assert) {
    assert.expect(5);

    const withAccidental = Note.create({ identifier: 'Ab4' });

    assert.strictEqual(withAccidental.get('frequency'), 415.3);
    assert.strictEqual(withAccidental.get('name'), 'Ab');
    assert.strictEqual(withAccidental.get('octave'), '4');
    assert.strictEqual(withAccidental.get('letter'), 'A');
    assert.strictEqual(withAccidental.get('accidental'), 'b');
  });

  test('still works if manually set letter, accidental and octave', function (assert) {
    assert.expect(3);

    const assembled = Note.create({ letter: 'A', accidental: 'b', octave: 4 });

    assert.strictEqual(assembled.get('frequency'), 415.3);
    assert.strictEqual(assembled.get('name'), 'Ab');
    assert.strictEqual(assembled.get('identifier'), 'Ab4');
  });

  test('still works if manually set letter and octave (no accidental)', function (assert) {
    assert.expect(3);

    const natural = Note.create({ letter: 'A', octave: 4 });

    assert.strictEqual(natural.get('frequency'), 440);
    assert.strictEqual(natural.get('name'), 'A');
    assert.strictEqual(natural.get('identifier'), 'A4');
  });
});
93 |
--------------------------------------------------------------------------------
/tests/unit/utils/note-methods-test.js:
--------------------------------------------------------------------------------
1 | import { A } from '@ember/array';
2 | import { module, test } from 'qunit';
3 | import noteFactory from '../../helpers/note-factory';
4 | import {
5 | sortNotes,
6 | noteSort,
7 | octaveShift,
8 | octaveSort,
9 | extractOctaves,
10 | stripDuplicateOctaves,
11 | createOctavesWithNotes,
12 | } from 'ember-audio/utils';
13 |
module('Unit | Utility | note methods', function () {
  // Fixture notes spanning the octave 0/1 boundary. noteFactory appears to
  // take (letter, accidental, octave) — confirm against tests/helpers/note-factory.
  const A0 = noteFactory('A', null, 0);
  const Bb0 = noteFactory('B', 'b', 0);
  const B0 = noteFactory('B', null, 0);
  const Ab1 = noteFactory('A', 'b', 1);
  const A1 = noteFactory('A', null, 1);
  const Bb1 = noteFactory('B', 'b', 1);
  const B1 = noteFactory('B', null, 1);
  const C1 = noteFactory('C', null, 1);
  const Db1 = noteFactory('D', 'b', 1);

  // Notes grouped by octave in musical order (within an octave, C comes first).
  const correctOctaves = [
    [A0, Bb0, B0],
    [C1, Db1, Ab1, A1, Bb1, B1],
  ];

  test('sortNotes exists', function (assert) {
    assert.ok(sortNotes);
  });

  test('noteSort exists', function (assert) {
    assert.ok(noteSort);
  });

  test('octaveShift exists', function (assert) {
    assert.ok(octaveShift);
  });

  test('octaveSort exists', function (assert) {
    assert.ok(octaveSort);
  });

  test('extractOctaves exists', function (assert) {
    assert.ok(extractOctaves);
  });

  test('stripDuplicateOctaves exists', function (assert) {
    assert.ok(stripDuplicateOctaves);
  });

  test('createOctavesWithNotes exists', function (assert) {
    assert.ok(createOctavesWithNotes);
  });

  // Scrambled input should come back in musical order, as an Ember array.
  test('sortNotes works', function (assert) {
    assert.expect(1);

    let result = sortNotes([A0, Ab1, A1, Bb1, B1, C1, Db1, Bb0, B0]);

    assert.deepEqual(result, A([A0, Bb0, B0, C1, Db1, Ab1, A1, Bb1, B1]));
  });

  // Alphabetically-grouped octaves in, musically-ordered octaves out.
  test('octaveShift works', function (assert) {
    assert.expect(1);

    let arr1 = [A0, Bb0, B0];
    let arr2 = [Ab1, A1, Bb1, B1, C1, Db1];
    let octaves = [arr1, arr2];
    let result = octaveShift(octaves);

    assert.deepEqual(result, correctOctaves);
  });

  test('octaveSort works', function (assert) {
    const alphabeticalOctaves = [
      [A0, Bb0, B0],
      [Ab1, A1, Bb1, B1, C1, Db1],
    ];

    assert.expect(1);

    let result = octaveSort(correctOctaves);
    assert.deepEqual(result, alphabeticalOctaves);
  });

  // Input is a [notes, octaveNumbers] pair; output groups notes by octave.
  test('createOctavesWithNotes works', function (assert) {
    assert.expect(1);
    let arr = [
      [Ab1, Bb0, B0],
      [0, 1],
    ];
    let result = createOctavesWithNotes(arr);
    assert.deepEqual(result, [[Bb0, B0], [Ab1]]);
  });

  // Comparator contract: -1 when the first note sorts earlier, 1 when later.
  test('noteSort compares two letters correctly', function (assert) {
    assert.expect(2);

    let compA = noteSort(A0, B0);
    let compB = noteSort(B0, A0);

    assert.strictEqual(compA, -1);
    assert.strictEqual(compB, 1);
  });

  test('noteSort compares a natural and an accidental correctly', function (assert) {
    assert.expect(2);

    let compA = noteSort(Ab1, A0);
    let compB = noteSort(A0, Ab1);

    assert.strictEqual(compA, -1);
    assert.strictEqual(compB, 1);
  });
});
119 |
--------------------------------------------------------------------------------
/addon/classes/track.js:
--------------------------------------------------------------------------------
1 | import { computed } from '@ember/object';
2 | import Sound from './sound';
3 | import { createTimeObject } from 'ember-audio/utils';
4 |
/**
 * A class that represents a "track" of music, similar in concept to a track on
 * a CD or an MP3 player. Provides methods for tracking the play position of the
 * underlying {{#crossLink "AudioBuffer"}}{{/crossLink}}, and pausing/resuming.
 *
 * @public
 * @class Track
 * @extends Sound
 * @todo move play override to _play so that all super.play methods work
 */
const Track = Sound.extend({
  /**
   * Computed property. Value is an object containing the current play position
   * of the audioBuffer in three formats. The three formats are `raw`,
   * `string`, and `pojo`.
   *
   * Play position of 6 minutes would be output as:
   *
   *     {
   *       raw: 360, // seconds
   *       string: '06:00',
   *       pojo: {
   *         minutes: 6,
   *         seconds: 0
   *       }
   *     }
   *
   * @public
   * @property position
   * @type {object}
   */
  position: computed('startOffset', function () {
    // startOffset is kept current by _trackPlayPosition while playing.
    const offset = this.startOffset;
    const min = Math.floor(offset / 60);
    const sec = offset - min * 60;
    return createTimeObject(offset, min, sec);
  }),

  /**
   * Computed property. Value is the current play position of the
   * audioBuffer, formatted as a percentage of `duration.raw`.
   *
   * @public
   * @property percentPlayed
   * @type {number}
   */
  percentPlayed: computed('duration.raw', 'startOffset', function () {
    const ratio = this.startOffset / this.get('duration.raw');
    return ratio * 100;
  }),

  /**
   * Plays the audio source immediately.
   *
   * When the buffer finishes on its own, `stop()` resets the position to 0;
   * `pause()` and `stop()` both detach this handler before halting the node so
   * the reset doesn't fire when playback is interrupted deliberately.
   *
   * @public
   * @method play
   */
  play() {
    this._super();
    this.getNodeFrom('audioSource').onended = () => this.stop();
    // Begin polling the play position once per animation frame.
    this._trackPlayPosition();
  },

  /**
   * Pauses the audio source by stopping without
   * setting startOffset back to 0, so a subsequent play resumes from here.
   *
   * @public
   * @method pause
   */
  pause() {
    if (this.isPlaying) {
      const node = this.getNodeFrom('audioSource');

      // Swap in a no-op so stopping the node doesn't trigger the onended
      // reset installed by play().
      node.onended = function () {};
      node.stop();
      this.set('isPlaying', false);
    }
  },

  /**
   * Stops the audio source and sets
   * startOffset to 0 (rewinds to the beginning).
   *
   * @public
   * @method stop
   */
  stop() {
    this.set('startOffset', 0);

    if (this.isPlaying) {
      // Detach the onended reset before Sound's stop halts the source,
      // otherwise stop() would be re-entered by the node's onended callback.
      this.getNodeFrom('audioSource').onended = function () {};
      this._super();
    }
  },

  /**
   * Sets up a `requestAnimationFrame` based loop that updates the
   * startOffset as `audioContext.currentTime` grows.
   * Loop ends when `isPlaying` is false.
   *
   * @method _trackPlayPosition
   * @private
   */
  _trackPlayPosition() {
    const ctx = this.audioContext;
    // Capture the values at loop start; each frame then recomputes position
    // as (offset at start) + (context time elapsed since playback started).
    const startOffset = this.startOffset;
    const startedPlayingAt = this._startedPlayingAt;

    const animate = () => {
      if (this.isPlaying) {
        this.set(
          'startOffset',
          startOffset + ctx.currentTime - startedPlayingAt
        );
        requestAnimationFrame(animate);
      }
    };

    requestAnimationFrame(animate);
  },
});

export default Track;
129 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "ember-audio",
3 | "version": "1.0.0-beta.1",
4 | "description": "An Ember addon that makes working with the Web Audio API super EZ",
5 | "keywords": [
6 | "ember-addon",
7 | "audio",
8 | "sound",
9 | "music",
10 | "synthesis",
11 | "soundfont",
12 | "sampling",
13 | "media",
14 | "mp3",
15 | "wav",
16 | "ogg",
17 | "web",
18 | "audio",
19 | "api"
20 | ],
21 | "repository": "https://github.com/sethbrasile/ember-audio",
22 | "license": "MIT",
23 | "author": "Seth Brasile ",
24 | "directories": {
25 | "doc": "tests/dummy/public/docs",
26 | "test": "tests"
27 | },
28 | "scripts": {
29 | "build": "ember build --environment=production",
30 | "clean:ember-try": "rm -rf .node_modules.ember-try && rm *.ember-try",
31 | "lint": "concurrently \"npm:lint:*(!fix)\" --names \"lint:\"",
32 | "lint:fix": "concurrently \"npm:lint:*:fix\" --names \"fix:\"",
33 | "lint:hbs": "ember-template-lint .",
34 | "lint:hbs:fix": "ember-template-lint . --fix",
35 | "lint:js": "eslint . --cache",
36 | "lint:js:fix": "eslint . --fix",
37 | "start": "ember serve",
38 | "test": "concurrently \"npm:lint\" \"npm:test:*\" --names \"lint,test:\"",
39 | "test:ember": "ember test",
40 | "test:ember-compatibility": "ember try:each && npm run clean:ember-try"
41 | },
42 | "dependencies": {
43 | "ember-cli-babel": "^7.26.11"
44 | },
45 | "devDependencies": {
46 | "@ember/optional-features": "^2.0.0",
47 | "@ember/string": "^3.0.1",
48 | "@ember/test-helpers": "^2.9.3",
49 | "@embroider/test-setup": "^2.0.2",
50 | "@fortawesome/fontawesome-free": "^6.3.0",
51 | "@glimmer/component": "^1.1.2",
52 | "@glimmer/tracking": "^1.1.2",
53 | "babel-eslint": "^10.1.0",
54 | "babel-plugin-prismjs": "^2.1.0",
55 | "bootstrap": "^3.4.0",
56 | "broccoli-asset-rev": "^3.0.0",
57 | "concurrently": "^7.6.0",
58 | "ember-auto-import": "^2.5.0",
59 | "ember-classic-decorator": "^3.0.1",
60 | "ember-cli": "~4.10.0",
61 | "ember-cli-code-coverage": "^2.0.0",
62 | "ember-cli-dependency-checker": "^3.3.1",
63 | "ember-cli-github-pages": "^0.2.2",
64 | "ember-cli-htmlbars": "^6.2.0",
65 | "ember-cli-inject-live-reload": "^2.1.0",
66 | "ember-cli-sri": "^2.1.1",
67 | "ember-cli-stylus": "^1.0.7",
68 | "ember-cli-terser": "^4.0.2",
69 | "ember-decorators": "^6.1.1",
70 | "ember-load-initializers": "^2.1.2",
71 | "ember-page-title": "^7.0.0",
72 | "ember-qunit": "^5.1.5",
73 | "ember-resolver": "^10.0.0",
74 | "ember-source": "~4.10.0",
75 | "ember-source-channel-url": "^3.0.0",
76 | "ember-template-lint": "^5.3.1",
77 | "ember-try": "^2.0.0",
78 | "eslint": "^7.32.0",
79 | "eslint-config-prettier": "^8.6.0",
80 | "eslint-plugin-ember": "^11.4.3",
81 | "eslint-plugin-n": "^15.6.1",
82 | "eslint-plugin-prettier": "^4.2.1",
83 | "eslint-plugin-qunit": "^7.3.4",
84 | "loader.js": "^4.7.0",
85 | "prettier": "^2.8.3",
86 | "prismjs": "^1.29.0",
87 | "qunit": "^2.19.3",
88 | "qunit-dom": "^2.0.0",
89 | "web-audio-mock-api": "^1.0.0",
90 | "webpack": "^5.75.0",
91 | "yuidoc-ember-cli-theme": "^1.0.4"
92 | },
93 | "peerDependencies": {
94 | "ember-source": "^3.8.0 || ^4.0.0"
95 | },
96 | "engines": {
97 | "node": "14.* || 16.* || >= 18"
98 | },
99 | "ember": {
100 | "edition": "octane"
101 | },
102 | "ember-addon": {
103 | "configPath": "tests/dummy/config",
104 | "versionCompatibility": {
105 | "ember": ">=3.8.0 <5.0.0"
106 | }
107 | }
108 | }
109 |
--------------------------------------------------------------------------------
/tests/dummy/config/ember-try.js:
--------------------------------------------------------------------------------
'use strict';

// ember-try scenario matrix: the Ember versions (LTS releases, the current
// release/beta/canary channels, and a "classic" edition run) that
// `ember try:each` installs and tests this addon against.
const getChannelURL = require('ember-source-channel-url');
// const { embroiderSafe, embroiderOptimized } = require('@embroider/test-setup');

module.exports = async function () {
  return {
    useYarn: true,
    scenarios: [
      {
        name: 'ember-lts-3.8',
        npm: {
          devDependencies: {
            'ember-source': '~3.8.0',
            'ember-cli': '~3.8.0',
            'ember-resolver': '^8.0.0',
          },
        },
      },
      {
        name: 'ember-lts-3.12',
        npm: {
          devDependencies: {
            'ember-source': '~3.12.0',
            'ember-cli': '~3.12.0',
            'ember-resolver': '^8.0.0',
          },
        },
      },
      {
        name: 'ember-lts-3.16',
        npm: {
          devDependencies: {
            'ember-source': '~3.16.0',
            'ember-cli': '~3.16.0',
            'ember-resolver': '^8.0.0',
          },
        },
      },
      {
        name: 'ember-lts-3.20',
        npm: {
          devDependencies: {
            'ember-source': '~3.20.5',
            'ember-cli': '~3.20.0',
            'ember-resolver': '^8.0.0',
          },
        },
      },
      {
        name: 'ember-lts-3.24',
        npm: {
          devDependencies: {
            'ember-source': '~3.24.3',
            'ember-cli': '~3.24.0',
            'ember-resolver': '^8.0.0',
          },
        },
      },
      {
        name: 'ember-lts-3.28',
        npm: {
          devDependencies: {
            'ember-source': '~3.28.0',
            'ember-cli': '~3.28.0',
            'ember-resolver': '^8.0.0',
          },
        },
      },
      {
        name: 'ember-lts-4.4',
        npm: {
          devDependencies: {
            'ember-source': '~4.4.0',
            'ember-cli': '~4.4.0',
          },
        },
      },
      {
        name: 'ember-lts-4.8',
        npm: {
          devDependencies: {
            'ember-source': '~4.8.0',
            'ember-cli': '~4.8.0',
          },
        },
      },
      // The channel scenarios resolve a tarball URL for the latest build.
      {
        name: 'ember-release',
        npm: {
          devDependencies: {
            'ember-source': await getChannelURL('release'),
          },
        },
      },
      {
        name: 'ember-beta',
        npm: {
          devDependencies: {
            'ember-source': await getChannelURL('beta'),
          },
        },
      },
      {
        name: 'ember-canary',
        npm: {
          devDependencies: {
            'ember-source': await getChannelURL('canary'),
          },
        },
      },
      {
        name: 'ember-classic',
        // Disables Octane optional features to emulate a pre-Octane app.
        env: {
          EMBER_OPTIONAL_FEATURES: JSON.stringify({
            'application-template-wrapper': false,
            'default-async-observers': false,
            'jquery-integration': false,
            'template-only-glimmer-components': true,
          }),
        },
        npm: {
          devDependencies: {
            'ember-source': '~3.28.0',
            'ember-cli': '~3.28.0',
            'ember-resolver': '^8.0.0',
          },
          ember: {
            edition: 'classic',
          },
        },
      },
      // embroiderSafe(),
      // embroiderOptimized(),
    ],
  };
};
138 |
--------------------------------------------------------------------------------
/addon/mixins/musical-identity.js:
--------------------------------------------------------------------------------
1 | import { computed } from '@ember/object';
2 | import Mixin from '@ember/object/mixin';
3 | import { frequencyMap } from 'ember-audio/utils';
4 |
/**
 * This mixin allows an object to have an awareness of its "musical identity"
 * or "note value" based on western musical standards (a standard piano).
 * If any of the following are provided, all of the remaining properties will be
 * calculated:
 *
 * 1. frequency
 * 2. identifier (i.e. "Ab1")
 * 3. letter, octave, and (optionally) accidental
 *
 * This mixin only makes sense when the consuming object is part of a collection,
 * as the only functionality it provides serves to facilitate identification.
 *
 * @public
 * @class MusicalIdentity
 */
export default Mixin.create({
  /**
   * For note `Ab5`, this would be `A`.
   *
   * @public
   * @property letter
   * @type {string}
   */
  letter: null,

  /**
   * For note `Ab5`, this would be `b`.
   *
   * @public
   * @property accidental
   * @type {string}
   */
  accidental: null,

  /**
   * For note `Ab5`, this would be `5`.
   *
   * @public
   * @property octave
   * @type {string}
   */
  octave: null,

  /**
   * Computed property. Value is `${letter}` or `${letter}${accidental}` if
   * accidental exists.
   *
   * @public
   * @property name
   * @type {string}
   */
  name: computed('letter', 'accidental', function () {
    const accidental = this.accidental;
    const letter = this.letter;

    return accidental ? `${letter}${accidental}` : letter;
  }),

  /**
   * Computed property. The frequency of the note in hertz. Calculated by
   * comparing western musical standards (a standard piano) and the note
   * identifier (i.e. `Ab1`). If this property is set directly, all other
   * properties are updated to reflect the provided frequency.
   *
   * @public
   * @property frequency
   * @type {number}
   */
  frequency: computed('identifier', {
    get() {
      const identifier = this.identifier;

      if (identifier) {
        return frequencyMap[identifier];
      }
    },

    set(key, value) {
      // Reverse lookup: find the identifier whose mapped frequency equals
      // `value` exactly. The loop variable is deliberately NOT named `key`,
      // which previously shadowed the computed-property `key` parameter.
      // Frequencies not present in the map are silently ignored, leaving
      // the property `undefined`.
      for (const noteName in frequencyMap) {
        if (value === frequencyMap[noteName]) {
          this.set('identifier', noteName);
          return value;
        }
      }
    },
  }),

  /**
   * Computed property. Value is `${letter}${octave}` or
   * `${letter}${accidental}${octave}` if accidental exists. If this property
   * is set directly, all other properties are updated to reflect the provided
   * identifier.
   *
   * @public
   * @property identifier
   * @type {string}
   */
  identifier: computed('letter', 'octave', 'accidental', {
    get() {
      const accidental = this.accidental;
      const letter = this.letter;
      const octave = this.octave;

      if (accidental) {
        return `${letter}${accidental}${octave}`;
      }

      return `${letter}${octave}`;
    },

    set(key, value) {
      // Identifiers are two or three characters: a letter, an optional
      // accidental, and a single-digit octave (e.g. "A4" or "Ab4").
      const [letter] = value;
      const octave = value[2] || value[1];
      let accidental;

      if (value[2]) {
        accidental = value[1];
      }

      this.setProperties({ letter, octave, accidental });

      return value;
    },
  }),
});
138 |
--------------------------------------------------------------------------------
/tests/dummy/app/controllers/synthesis/drum-kit.js:
--------------------------------------------------------------------------------
1 | import { action } from '@ember/object';
2 | import { inject as service } from '@ember/service';
3 | import { tracked } from '@glimmer/tracking';
4 | import Controller from '@ember/controller';
5 | import { LayeredSound } from 'ember-audio';
6 |
7 | export default class DrumKitController extends Controller {
8 | @service audio;
9 | @tracked drums = null;
10 |
11 | constructor() {
12 | super(...arguments);
13 | this.drums = [this._createKick(), this._createSnare(), this._createHihat()];
14 | }
15 |
16 | _createKick() {
17 | const { audio } = this;
18 | const kick = audio.createOscillator({ name: 'kick' });
19 | const osc = kick.getConnection('audioSource');
20 | const gain = kick.getConnection('gain');
21 |
22 | osc.onPlayRamp('frequency').from(150).to(0.01).in(0.1);
23 | gain.onPlayRamp('gain').from(1).to(0.01).in(0.1);
24 |
25 | return kick;
26 | }
27 |
28 | _createSnare() {
29 | const noise = this._createSnareNoise();
30 | const oscillator = this._createSnareOscillator();
31 |
32 | return LayeredSound.create({ name: 'snare', sounds: [noise, oscillator] });
33 | }
34 |
35 | _createSnareOscillator() {
36 | const { audio } = this;
37 | const snare = audio.createOscillator({ type: 'triangle' });
38 | const oscillator = snare.getConnection('audioSource');
39 | const gain = snare.getConnection('gain');
40 |
41 | oscillator.onPlayRamp('frequency').from(100).to(60).in(0.1);
42 | gain.onPlayRamp('gain').from(1).to(0.01).in(0.1);
43 |
44 | return snare;
45 | }
46 |
47 | _createSnareNoise() {
48 | const { audio } = this;
49 | const noise = audio.createWhiteNoise({
50 | name: 'snare',
51 | highpassFrequency: 1000,
52 | });
53 | const gain = noise.getConnection('gain');
54 |
55 | gain.onPlayRamp('gain').from(1).to(0.001).in(0.1);
56 |
57 | return noise;
58 | }
59 |
60 | _createHihat() {
61 | // http://joesul.li/van/synthesizing-hi-hats/
62 | const ratios = [2, 3, 4.16, 5.43, 6.79, 8.21];
63 |
64 | const oscillators = ratios
65 | .map(this._createHihatOscillator.bind(this))
66 | .map(this._createHihatEnvelope);
67 |
68 | return LayeredSound.create({ name: 'hihat', sounds: oscillators });
69 | }
70 |
71 | _createHihatOscillator(ratio) {
72 | const fundamental = 40;
73 |
74 | return this.audio.createOscillator({
75 | type: 'square',
76 | highpass: { frequency: 7000 },
77 | bandpass: { frequency: 10000 },
78 | frequency: fundamental * ratio,
79 | });
80 | }
81 |
82 | _createHihatEnvelope(oscillator) {
83 | const gain = oscillator.getConnection('gain');
84 |
85 | gain.onPlayRamp('gain').from(0.00001).to(1).in(0.02);
86 |
87 | gain.onPlaySet('gain').to(0.3).endingAt(0.03);
88 | gain.onPlaySet('gain').to(0.00001).endingAt(0.3);
89 |
90 | return oscillator;
91 | }
92 |
93 | @action
94 | playDrum(drum) {
95 | // Only play for 0.1 seconds so that playing in quick succession doesn't
96 | // result in distortion
97 | drum.playFor(0.1);
98 | }
99 |
100 | @action
101 | playBassDrop() {
102 | const { audio } = this;
103 | const bassDrop = audio.createOscillator();
104 | const osc = bassDrop.getConnection('audioSource');
105 | const gain = bassDrop.getConnection('gain');
106 |
107 | // We can specify 'linear' to get a linear ramp instead of an exponential one
108 | osc.onPlayRamp('frequency', 'linear').from(100).to(0.01).in(10);
109 |
110 | // We automate gain as well, so we don't end up with a loud click when the audio stops
111 | gain.onPlayRamp('gain').from(1).to(0.01).in(10);
112 |
113 | bassDrop.playFor(10);
114 | }
115 |
116 | @action
117 | playSnareMeat() {
118 | this._createSnareOscillator().playFor(0.1);
119 | }
120 |
121 | @action
122 | playSnareCrack() {
123 | this._createSnareNoise().playFor(0.1);
124 | }
125 | }
126 |
--------------------------------------------------------------------------------
/vendor/piano.css:
--------------------------------------------------------------------------------
/* Styling for the demo app's on-screen piano keyboard.
   Fix: removed a dead `padding: 0;` from `#piano li` — it was fully
   overridden by the later `padding: 220px 15px 50px;` shorthand. */

#piano ol {
  position: relative;
  padding: 0;
  margin: 0 0 10px;
  display: inline-block;
  font-size: 0; /* collapse whitespace between inline-block keys */
}

/* White keys. Gradient declarations are ordered fallbacks: SVG data URI,
   then legacy prefixed gradients, then the standard syntax (last wins). */
#piano li {
  font-size: 16px; /* px fallback for browsers without rem support */
  font-size: 1rem;
  background: url('data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz4gPHN2ZyB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PGRlZnM+PGxpbmVhckdyYWRpZW50IGlkPSJncmFkIiBncmFkaWVudFVuaXRzPSJvYmplY3RCb3VuZGluZ0JveCIgeDE9IjAuNSIgeTE9IjAuMCIgeDI9IjAuNSIgeTI9IjEuMCI+PHN0b3Agb2Zmc2V0PSIwJSIgc3RvcC1jb2xvcj0iI2MwYzFiZCIvPjxzdG9wIG9mZnNldD0iMTAlIiBzdG9wLWNvbG9yPSIjZjBmMGYwIi8+PHN0b3Agb2Zmc2V0PSIxMDAlIiBzdG9wLWNvbG9yPSIjZmFmMGU2Ii8+PC9saW5lYXJHcmFkaWVudD48L2RlZnM+PHJlY3QgeD0iMCIgeT0iMCIgd2lkdGg9IjEwMCUiIGhlaWdodD0iMTAwJSIgZmlsbD0idXJsKCNncmFkKSIgLz48L3N2Zz4g');
  background: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(0%, #c0c1bd), color-stop(10%, #f0f0f0), color-stop(100%, #faf0e6));
  background: -moz-linear-gradient(top, #c0c1bd, #f0f0f0 10%, #faf0e6);
  background: -webkit-linear-gradient(top, #c0c1bd, #f0f0f0 10%, #faf0e6);
  background: linear-gradient(to bottom, #c0c1bd, #f0f0f0 10%, #faf0e6);
  transition: all 0.1s ease-in-out;
  margin: 0;
  display: inline-block;
  border: 1px solid #ccc;
  padding: 220px 15px 50px;
  border-radius: 0 0 5px 5px;
  box-shadow: 0 5px 5px #aaa;
  position: relative;
  z-index: 1;
  cursor: pointer;
  color: transparent; /* note names hidden until hover/press */
}

#piano li:hover {
  color: #C0C1BD;
}

/* Pressed white key: drop the shadow and shift down for a "depressed" look. */
#piano li:active {
  box-shadow: none;
  top: 5px;
  color: #333;
}

/* Black keys overlay the white keys (absolute, higher z-index). */
#piano li.black {
  /* build the key shape */
  z-index: 2;
  position: absolute;
  width: 32px;
  top: 0px;
  margin-left: -16px;
  padding: 130px 0px 10px;
  /* Typography */
  text-align: center;
  font-size: 0.66667rem;
  color: transparent;
  border-radius: 0 0 2px 2px;
  border-width: 0 6px 12px;
  border-color: #000;
  border-bottom-color: #424546;
  background: #171C1A;
  background: url('data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz4gPHN2ZyB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PGRlZnM+PGxpbmVhckdyYWRpZW50IGlkPSJncmFkIiBncmFkaWVudFVuaXRzPSJvYmplY3RCb3VuZGluZ0JveCIgeDE9IjAuNSIgeTE9IjAuMCIgeDI9IjAuNSIgeTI9IjEuMCI+PHN0b3Agb2Zmc2V0PSIwJSIgc3RvcC1jb2xvcj0iIzVmNjI2NSIvPjxzdG9wIG9mZnNldD0iMjUlIiBzdG9wLWNvbG9yPSIjMTUxYjE5Ii8+PHN0b3Agb2Zmc2V0PSIxMDAlIiBzdG9wLWNvbG9yPSIjMTcxYzFhIi8+PC9saW5lYXJHcmFkaWVudD48L2RlZnM+PHJlY3QgeD0iMCIgeT0iMCIgd2lkdGg9IjEwMCUiIGhlaWdodD0iMTAwJSIgZmlsbD0idXJsKCNncmFkKSIgLz48L3N2Zz4g');
  background: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(0%, #5f6265), color-stop(25%, #151b19), color-stop(100%, #171c1a));
  background: -moz-linear-gradient(#5f6265, #151b19 25%, #171c1a);
  background: -webkit-linear-gradient(#5f6265, #151b19 25%, #171c1a);
  background: linear-gradient(#5f6265, #151b19 25%, #171c1a);
  box-shadow: 0 3px 5px #ccc, inset 0px 0px 0px 1px rgba(220, 220, 220, 0.2);
}

#piano li.black:hover {
  color: gainsboro;
}

/* Pressed black key: steeper gradient, shorter border, shifted padding. */
#piano li.black:active {
  background: url('data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz4gPHN2ZyB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PGRlZnM+PGxpbmVhckdyYWRpZW50IGlkPSJncmFkIiBncmFkaWVudFVuaXRzPSJvYmplY3RCb3VuZGluZ0JveCIgeDE9IjAuNSIgeTE9IjAuMCIgeDI9IjAuNSIgeTI9IjEuMCI+PHN0b3Agb2Zmc2V0PSIwJSIgc3RvcC1jb2xvcj0iIzVmNjI2NSIvPjxzdG9wIG9mZnNldD0iNTAlIiBzdG9wLWNvbG9yPSIjMTUxYjE5Ii8+PHN0b3Agb2Zmc2V0PSIxMDAlIiBzdG9wLWNvbG9yPSIjMTcxYzFhIi8+PC9saW5lYXJHcmFkaWVudD48L2RlZnM+PHJlY3QgeD0iMCIgeT0iMCIgd2lkdGg9IjEwMCUiIGhlaWdodD0iMTAwJSIgZmlsbD0idXJsKCNncmFkKSIgLz48L3N2Zz4g');
  background: -webkit-gradient(linear, 50% 0%, 50% 100%, color-stop(0%, #5f6265), color-stop(50%, #151b19), color-stop(100%, #171c1a));
  background: -moz-linear-gradient(#5f6265, #151b19 50%, #171c1a);
  background: -webkit-linear-gradient(#5f6265, #151b19 50%, #171c1a);
  background: linear-gradient(#5f6265, #151b19 50%, #171c1a);
  border-bottom-width: 8px;
  padding-top: 135px;
  color: white;
}
80 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Ember-audio
2 |
3 | [](https://github.com/sethbrasile/ember-audio/actions)
4 | [](https://codecov.io/gh/sethbrasile/ember-audio)
5 | [](https://codeclimate.com/github/sethbrasile/ember-audio)
6 | [](https://codeclimate.com/github/sethbrasile/ember-audio)
7 | [](http://emberobserver.com/addons/ember-audio)
8 |
9 | ## Please Note
10 |
11 | This readme refers to the current state of the master branch, which is a WIP. Please see the
12 | [Interactive Demo/Documentation](http://sethbrasile.github.io/ember-audio) and [npmjs](https://www.npmjs.com/package/ember-audio) to see the readme and
13 | documentation as of the current release.
14 |
15 | ## Installation
16 |
17 | `ember install ember-audio`
18 |
19 | ## [Interactive Demo/Documentation](http://sethbrasile.github.io/ember-audio)
20 |
21 | Ember Audio provides an `audio` service and various classes/mixins that make
22 | working with the Web Audio API super EZ.
23 |
24 | Ember Audio aims to simplify sampling, triggering, routing, scheduling,
25 | synthesizing, soundfonts, and working with audio in general.
26 |
27 | ---
28 |
29 | ## Need your help!
30 |
31 | Please give me feedback/suggestions if you have any. Pull requests and issues
32 | are welcome. You can also contact me on the Ember community discord: `@sethbrasile`.
33 |
34 | ## Using Ember Audio?
35 |
36 | Please let me know if you're building something with Ember Audio! It's a fairly
37 | large project, and I would love to know if it's working out for people! You can
38 | contact me via `@sethbrasile` on the ember community discord, or via email.
39 |
40 | ## Known Issues
41 |
42 | - Ember optional feature `default-async-observers` creates a race condition which will interfere with `connections` being available when you need them. We need
43 | to remove the observer that triggers the `_wireConnections` method in order to resolve this. I tried a computed property in the past, but I didn't have enough
44 | control over when the computed property recalculated, so I stayed with an observer. I have some ideas on how to resolve, but this will probably not happen
45 | until I move this library to JS classes and remove the "classic classes."
46 |
47 | ## Compatibility
48 |
49 | - Ember.js v3.28 or above
50 | - Ember CLI v3.8 or above
51 |
52 | ## Run The Demo locally
53 |
54 | - `ember serve`
55 | - Visit http://localhost:4200/ember-audio/.
56 |
57 | ## Roadmap
58 |
59 | 1. ~~Get up to date so it doesn't create issues with new ember apps. The current goal is keeping compatibility all the way back to Ember 3.28 but no
60 | issues or deprecations in Ember 4+~~ - Done
61 | 2. Do some cleanup and testing and ensure compatibility, assess for breaking changes, then cut a new release.
62 | 3. Switch to 100% JS classes, no ember "classic classes" - but I haven't decided on a strategy for the mixins yet. I like the composability of
63 | combining mixins to create new playable/connectable types. Take a look at how most of the classes are built, and take a look at
64 | [MusicallyAwareOscillator](https://sethbrasile.github.io/ember-audio/#/synthesis) to see what I mean. I am open to suggestions if you have an idea on how
65 | to maintain this composability without mixins. I have a feeling decorators could do something similar, but I've not authored one of those yet and I see
66 | similar warnings about them compared to mixins so I'm uncertain.
67 | 4. Cut a 2.0 release (`.get` api will be broken for instance)
68 | 5. Move Ember Audio to Typescript - This might interfere with the mixin concept also. IIRC Typescript doesn't like mixins.. At least provide TS definitions.
69 | 6. Extract most of the important code into a non-ember project and reference that as a dependency here.
70 |
71 | ## Building yuidoc Documentation
72 |
73 | - `ember ember-cli-yuidoc`
74 |
75 | For more information on using ember-cli, visit [http://ember-cli.com/](http://ember-cli.com/).
76 | See the [Interactive Demo/Documentation](http://sethbrasile.github.io/ember-audio) for detailed usage examples.
77 |
78 | ## Contributing
79 |
80 | See the [Contributing](CONTRIBUTING.md) guide for details.
81 |
82 | ## License
83 |
84 | This project is licensed under the [MIT License](LICENSE.md).
85 |
--------------------------------------------------------------------------------
/tests/dummy/app/styles/audio-player.styl:
--------------------------------------------------------------------------------
// Styles for the demo app's audio player widget.
// NOTE: indentation is structural in Stylus (no braces/semicolons).

.track-list
  table
    margin-bottom: 0

  .item
    text-align: center

// Outer shell of the player.
.audioplayer
  height: 2.5em
  color: #fff
  border: 1px solid #337ab7
  position: relative
  z-index: 1
  background: #337ab7
  border-radius: 2px

// Every direct child control is absolutely positioned within the shell.
.audioplayer > div
  position: absolute

// Play/pause button (leftmost control). text-indent hides the link text.
.play-pause
  border-right: 1px solid #555
  border-right-color: rgba( 255, 255, 255, .1 )
  width: 2.5em
  height: 100%
  text-align: left
  text-indent: -9999px
  cursor: pointer
  z-index: 2
  top: 0
  left: 0

// CSS-triangle "play" icon shown when not playing.
.play-pause:not(.playing) a
  width: 0
  height: 0
  border: 0.5em solid transparent
  border-right: none
  border-left-color: #fff
  content: ''
  position: absolute
  top: 50%
  left: 50%
  margin: -0.5em 0 0 -0.25em

// Two-bar "pause" icon (built from :before/:after) shown while playing.
.play-pause.playing a
  width: 0.75em
  height: 0.75em
  position: absolute
  top: 50%
  left: 50%
  margin: -0.375em 0 0 -0.375em

.play-pause.playing a:before,
.play-pause.playing a:after
  width: 40%
  height: 100%
  background-color: #fff
  content: ''
  position: absolute
  top: 0

.play-pause.playing a:before
  left: 0

.play-pause.playing a:after
  right: 0

// Elapsed (.current) and total (.duration) time readouts.
.time
  width: 4.375em
  height: 100%
  line-height: 2.375em
  text-align: center
  z-index: 2
  top: 0

.current
  border-left: 1px solid #111
  border-left-color: rgba( 0, 0, 0, .25 )
  left: 2.5em

.duration
  border-right: 1px solid #555
  border-right-color: rgba( 255, 255, 255, .1 )
  right: 2.5em

.audioplayer-novolume .duration
  border-right: 0
  right: 0

// Seek bar between the time readouts.
.bar
  height: 0.875em
  background-color: #337ab7
  cursor: pointer
  z-index: 1
  top: 50%
  right: 6.875em
  left: 6.875em
  margin-top: -0.438em

.audioplayer-novolume .bar
  right: 4.375em

// Progress fills inside the bar; width is driven by playback position.
.bar div
  width: 0
  height: 100%
  position: absolute
  left: 0
  top: 0

.bar-loaded
  background-color: #337ab7
  z-index: 1

.played
  background: #d9edf7
  z-index: 2

// Volume control (rightmost); the pop-up .adjust slider lives inside it.
.volume
  width: 2.5em
  height: 100%
  border-left: 1px solid #111
  border-left-color: rgba( 0, 0, 0, .25 )
  text-align: left
  text-indent: -9999px
  cursor: pointer
  z-index: 2
  top: 0
  right: 0

.volume:hover,
.volume:focus
  background-color: #337ab7

.button
  width: 100%
  height: 100%

// Speaker icon: a small rectangle plus a triangle :before pseudo-element.
.button a
  width: 0.313em
  height: 0.45em
  background-color: #fff
  display: block
  position: relative
  z-index: 1
  top: 40%
  left: 35%

.button a:before,
.button a:after
  content: ''
  position: absolute

.button a:before
  width: 0
  height: 0
  border: 0.5em solid transparent
  border-left: none
  border-right-color: #fff
  z-index: 2
  top: 50%
  right: -0.25em
  margin-top: -0.5em

// Volume slider panel, parked off-screen until .volume is hovered.
.adjust
  height: 6.25em
  cursor: default
  position: absolute
  left: 0
  right: -1px
  top: -9999px
  background: #337ab7
  border-top-left-radius: 2px
  border-top-right-radius: 2px

.volume:not(:hover) .adjust
  opacity: 0

.volume:hover .adjust
  top: auto
  bottom: 100%

.adjust > div
  width: 40%
  height: 80%
  background-color: #337ab7
  cursor: pointer
  position: relative
  z-index: 1
  margin: 30% auto 0

.adjust div div
  width: 100%
  height: 100%
  position: absolute
  bottom: 0
  left: 0
  background-color: #d9edf7

.audioplayer-novolume .volume
  display: none

// Shared rounding and shadows.
.bar,
.bar div,
.adjust div
  border-radius: 4px

.bar,
.adjust > div
  box-shadow: -1px -1px 0 rgba( 0, 0, 0, .5 ), 1px 1px 0 rgba( 255, 255, 255, .1 )

// Smooth color/opacity changes across the whole widget.
.audioplayer *,
.audioplayer *:before,
.audioplayer *:after
  transition: color .25s ease, background-color .25s ease, opacity .5s ease
--------------------------------------------------------------------------------
/addon/classes/sampler.js:
--------------------------------------------------------------------------------
1 | import { on } from '@ember/object/evented';
2 | import EmberObject from '@ember/object';
3 |
4 | /**
5 | * An instance of the Sampler class behaves just like a Sound, but allows
6 | * many {{#crossLink "AudioBuffer"}}AudioBuffers{{/crossLink}} to exist and
7 | * automatically alternately plays them (round-robin) each time any of the play
8 | * methods are called.
9 | *
10 | * @public
11 | * @class Sampler
12 | *
13 | * @todo humanize gain and time - should be optional and customizable
14 | * @todo loop
15 | */
const Sampler = EmberObject.extend({
  /**
   * Volume ratio applied to every sound this sampler hands out.
   *
   * @public
   * @property gain
   * @type {number}
   * @default 1
   */
  gain: 1,

  /**
   * Stereo pan position applied to every sound this sampler hands out.
   *
   * @public
   * @property pan
   * @type {number}
   * @default 0
   */
  pan: 0,

  /**
   * Holds the live iterator over the `sounds` Set. Whenever the iterator is
   * exhausted it is swapped for a fresh one, so the sampler can hand out
   * Sound instances round-robin, forever.
   *
   * @private
   * @property _soundIterator
   * @type {Iterator}
   *
   */
  _soundIterator: null,

  /**
   * The pool of loaded audio sources that the sampler rotates through.
   * Anything that uses {{#crossLink "Playable"}}{{/crossLink}} works. If not
   * set on instantiation, automatically set to `new Set()` via `_initSounds`.
   *
   * @public
   * @property sounds
   * @type {set}
   */
  sounds: null,

  /**
   * Pulls the next audio source from the rotation and plays it immediately.
   *
   * @public
   * @method play
   */
  play() {
    const sound = this._getNextSound();
    sound.play();
  },

  /**
   * Pulls the next audio source from the rotation and plays it after the
   * specified offset has elapsed.
   *
   * @public
   * @method playIn
   *
   * @param {number} seconds Number of seconds from "now" that the next Sound
   * should be played.
   */
  playIn(seconds) {
    const sound = this._getNextSound();
    sound.playIn(seconds);
  },

  /**
   * Pulls the next audio source from the rotation and plays it at the
   * specified moment in time. A "moment in time" is measured in seconds from
   * the moment that the {{#crossLink "AudioContext"}}{{/crossLink}} was
   * instantiated.
   *
   * @param {number} time The moment in time (in seconds, relative to the
   * {{#crossLink "AudioContext"}}AudioContext's{{/crossLink}} "beginning of
   * time") when the next Sound should be played.
   *
   * @public
   * @method playAt
   */
  playAt(time) {
    const sound = this._getNextSound();
    sound.playAt(time);
  },

  /**
   * Advances `_soundIterator` and returns its next value. When the iterator
   * is exhausted (or has never been created), a fresh iterator is pulled from
   * `sounds` and its first value is used instead.
   *
   * @private
   * @method _getNextSound
   * @return {Sound}
   */
  _getNextSound() {
    let iterator = this._soundIterator;

    if (!iterator) {
      iterator = this.sounds.values();
    }

    let entry = iterator.next();

    // Wrapped around: restart from the top of the Set.
    if (entry.done) {
      iterator = this.sounds.values();
      entry = iterator.next();
    }

    this.set('_soundIterator', iterator);

    return this._setGainAndPan(entry.value);
  },

  /**
   * Copies this sampler's `gain` and `pan` onto the given Sound instance and
   * returns that same instance.
   *
   * @private
   * @method _setGainAndPan
   * @return {Sound} The input sound after having its gain and pan set.
   */
  _setGainAndPan(sound) {
    sound.changeGainTo(this.gain).from('ratio');
    sound.changePanTo(this.pan);

    return sound;
  },

  /**
   * Initializes `sounds` to an empty Set when none was provided at
   * instantiation.
   *
   * @private
   * @method _initSounds
   */
  _initSounds: on('init', function () {
    if (!this.sounds) {
      this.set('sounds', new Set());
    }
  }),
});
155 |
156 | export default Sampler;
157 |
--------------------------------------------------------------------------------
/addon/classes/beat-track.js:
--------------------------------------------------------------------------------
1 | import { computed } from '@ember/object';
2 | import Beat from './beat';
3 | import Sampler from './sampler';
4 |
// Module-private store mapping each BeatTrack instance to its array of Beat
// instances, so beats survive recomputation of the `beats` computed property.
// A WeakMap is used so entries don't prevent garbage collection of tracks.
const beatBank = new WeakMap();
6 |
7 | /**
8 | * An instance of this class has a single "sound" (comprised of one or multiple
9 | * audio sources) but provides methods to play that sound repeatedly, mixed with
10 | * "rests," in a rhythmic way. An instance of this class behaves very similarly
11 | * to a "lane" on a drum machine.
12 | *
13 | * @public
14 | * @class BeatTrack
15 | * @extends Sampler
16 | *
17 | * @todo need a way to stop a BeatTrack once it's started. Maybe by creating
18 | * the times in advance and not calling play until it's the next beat in the
19 | * queue?
20 | */
const BeatTrack = Sampler.extend({
  /**
   * Determines the number of beats (steps) in a BeatTrack instance.
   *
   * @public
   * @property numBeats
   * @type {number}
   * @default 4
   */
  numBeats: 4,

  /**
   * Determines the length of time, in milliseconds, before each beat's
   * `isPlaying` and `currentTimeIsPlaying` are automatically switched back to
   * false after having been switched to true.
   *
   * @public
   * @property duration
   * @type {number}
   * @default 100
   */
  duration: 100,

  /**
   * Computed property. An array of Beat instances whose length always matches
   * `numBeats`. Beats that already exist (cached in the module-level
   * `beatBank`) are reused so they keep their `active` state; only the
   * missing beats are created. Note that reused beats keep the `duration`
   * they were created with — only newly-created beats pick up the current
   * `duration` value.
   *
   * @public
   * @property beats
   * @type {array|Beat}
   */
  beats: computed('duration', 'numBeats', 'play', 'playIn', function () {
    let beats = [];
    // Number of NEW beats to create (reduced below if some already exist).
    let numBeats = this.numBeats;
    let existingBeats;

    if (beatBank.has(this)) {
      existingBeats = beatBank.get(this);
      numBeats = numBeats - existingBeats.length;
    }

    for (let i = 0; i < numBeats; i++) {
      const beat = Beat.create({
        duration: this.duration,
        // Beats delegate actual playback back to this track.
        _parentPlayIn: this.playIn.bind(this),
        _parentPlay: this.play.bind(this),
      });

      beats.push(beat);
    }

    if (existingBeats) {
      beats = existingBeats.concat(beats);
    }

    beatBank.set(this, beats);

    return beats;
  }),

  /**
   * Calls play on all Beat instances in the beats array.
   *
   * @public
   * @method playBeats
   *
   * @param {number} bpm The tempo at which the beats should be played.
   *
   * @param noteType {number} The (rhythmic) length of each beat. Fractions
   * are suggested here so that it's easy to reason about. For example, for
   * eighth notes, pass in `1/8`.
   */
  playBeats(bpm, noteType) {
    this._callPlayMethodOnBeats('playIn', bpm, noteType);
  },

  /**
   * Calls play on `active` Beat instances in the beats array. Any beat that
   * is not marked active is effectively a "rest".
   *
   * @public
   * @method playActiveBeats
   *
   * @param {number} bpm The tempo at which the beats and rests should be played.
   *
   * @param noteType {number} The (rhythmic) length of each beat/rest. Fractions
   * are suggested here so that it's easy to reason about. For example, for
   * eighth notes, pass in `1/8`.
   */
  playActiveBeats(bpm, noteType) {
    this._callPlayMethodOnBeats('ifActivePlayIn', bpm, noteType);
  },

  /**
   * The underlying method behind playBeats and playActiveBeats: schedules
   * `method` on each beat, spacing them `duration` seconds apart.
   *
   * @private
   * @method _callPlayMethodOnBeats
   *
   * @param {string} method The method that should be called on each beat.
   *
   * @param {number} bpm The tempo that should be used to calculate the length
   * of a beat/rest.
   *
   * @param noteType {number} The (rhythmic) length of each beat/rest that should
   * be used to calculate the length of a beat/rest in seconds.
   */
  _callPlayMethodOnBeats(method, bpm, noteType = 1 / 4) {
    // A whole note at `bpm` lasts 240/bpm seconds:
    // http://bradthemad.org/guitar/tempo_explanation.php
    const duration = (240 * noteType) / bpm;

    // forEach, not map: this is called purely for the scheduling side effect,
    // so there is no reason to allocate a throwaway results array.
    this.beats.forEach((beat, idx) => beat[method](idx * duration));
  },
});
136 |
137 | export default BeatTrack;
138 |
--------------------------------------------------------------------------------
/tests/dummy/app/templates/synthesis/xy-pad.hbs:
--------------------------------------------------------------------------------
1 |
23 | This is a great example of the use of some of Ember Audio's classes/mixins.
24 |
25 |
26 |
27 | In order to create oscillators that have a "musical identity" (as in, each
28 | oscillator represents a "note" that can be played), we will add the
29 | MusicalIdentity mixin to the Oscillator class:
30 |
75 | Here is the complete code for the keyboard above:
76 |
77 |
78 |
79 |
80 | <div id="piano">
81 | <ol>
82 | {{#each this.oscillators as |note|}}
83 | <li class={{if note.accidental "black"}}
84 | {{!-- These are to make it play like a "keyboard" on touch devices
85 | and I'm sure there's a more elegant way to handle it ¯\_(ツ)_/¯ --}}
86 | {{action "startNote" note on="touchStart"}}
87 | {{action "startNote" note on="mouseDown"}}
88 | {{action "stopNote" note on="touchEnd"}}
89 | {{action "stopNote" note on="mouseUp"}}
90 | {{action "stopNote" note on="mouseLeave"}}>
91 | {{note.name}}
92 | </li>
93 | {{/each}}
94 | </ol>
95 | </div>
96 |
97 |
98 |
99 | import Ember from 'ember';
100 | import { Oscillator } from 'ember-audio';
101 | import { MusicalIdentity } from 'ember-audio/mixins';
102 |
103 | // By mixing the MusicalIdentity mixin into the Oscillator class, we get an
104 | // oscillator that is aware of its frequency, letter, accidental, octave, etc...
105 | const MusicallyAwareOscillator = Oscillator.extend(MusicalIdentity);
106 |
107 | export default Ember.Whatever.extend({
108 | audio: Ember.inject.service(),
109 | oscillators: null, // Put oscillators here after they're created
110 |
111 | initSynth: Ember.on('init', function() {
112 | const audio = this.get('audio');
113 |
114 | // Outputs an array of all the notes on a standard "western" piano
115 | // Could also do `audio.createNoteArray(notes)` where notes is a POJO,
116 | // or `audio.load(URL).asNoteArray().then(...)` providing a URL to a JSON file
117 | // See `utils/frequencyMap` for an example of what these might look like.
118 | const notes = audio.createNoteArray();
119 |
120 | // Slicing so that the keyboard isn't massive
121 | const slicedNotes = notes.slice(48, 60);
122 |
123 | // Create a MusicallyAwareOscillator instance for each note in slicedNotes
124 | const oscillators = slicedNotes.map((note) => {
125 | return MusicallyAwareOscillator.create({
126 | type: 'square', // the "shape" of the waveform
127 | frequency: note.get('frequency'),
128 | audioContext: audio.get('audioContext')
129 | });
130 | });
131 |
132 | this.set('oscillators', oscillators);
133 | }),
134 |
135 | actions: {
136 | startNote(note) {
137 | note.play();
138 | },
139 |
140 | stopNote(note) {
141 | if (note.get('isPlaying')) {
142 | note.stop();
143 | }
144 | }
145 | }
146 | });
147 |
148 |
--------------------------------------------------------------------------------
/tests/dummy/app/templates/timing/index.hbs:
--------------------------------------------------------------------------------
1 |
2 |
Timing
3 |
4 |
5 | It is not necessary to understand this concept, as Ember Audio has methods
6 | that allow you to ignore it. I encourage you to understand it anyway. It's
7 | easy to grasp, and if you're building a rhythm/timing heavy app, this
8 | knowledge will be very useful to you.
9 |
10 |
11 |
12 | Timing with the Web Audio API can seem tricky at first. It's unlike any other
13 | timing system native to the browser. It's not very complex, and easy to
14 | wrap your brain around once you "get" it.
15 |
16 |
17 |
18 | It's based on the concept of a currentTime that starts at
19 | 0 and counts its way up in seconds (as a high-precision Double). This
20 | currentTime starts the moment that an AudioContext has been created.
21 |
22 |
23 |
24 | If, for instance, you wanted a sound to play exactly 1 second
25 | after a user clicks a button, it could look like this:
26 |
27 |
28 |
29 |
30 | // This is pseudo-code. The goal at this point is to get the concept across,
31 | // not to potentially confuse you with framework-specific stuff.
32 |
33 | // The moment that audioContext is created, audioContext.currentTime starts counting seconds
34 | var audioContext = new AudioContext();
35 |
36 | var sound = // Create or load a sound and hook up audio inputs and outputs.
37 | // Not important right now...
38 | // We'll say that the result is an audio "node" that is ready to play
39 |
40 | function handleClick() {
41 | // Get the current time from audioContext.
42 | var now = audioContext.currentTime;
43 |
44 | // Start the sound we created up there^, adding 1 second to "now"
45 | // The Web Audio API deals in seconds, not milliseconds
46 | sound.start(now + 1);
47 | }
48 |
49 |
50 |
51 |
52 | Now what if we wanted to schedule the sound 5 times, each exactly 1 second apart?
53 |
54 |
55 |
56 |
57 | // Again, I want to mention that this code will not work as-is. It's ignoring
58 | // some other quirks of the Web Audio API. We're only focused on understanding
59 | // timing at the moment.
60 | var audioContext = new AudioContext();
61 |
62 | var sound = // Create or load a sound and hook up audio inputs and outputs.
63 |
64 | function handleClick() {
65 | var now = audioContext.currentTime;
66 |
67 | for (var i = 0; i < 5; i++) {
68 | sound.start(now + i);
69 | }
70 | }
71 |
72 |
73 |
74 |
75 | As you can see, as far as an AudioContext is concerned, the
76 | moment that it is created is "the beginning of time" and scheduling events
77 | is achieved by specifying an exact moment in time.
78 | sound.start(100) would play the sound exactly 100 seconds after
79 | the AudioContext was created, regardless of what time
80 | sound.start(100) was called. If sound.start(100)
81 | is called after 100 seconds has already passed since "the beginning of time,"
82 | the sound will play immediately.
83 |
84 |
85 |
86 |
87 |
88 | Again, this is an important concept to understand, but in many cases (even
89 | more complex cases, such as
90 | building a rhythmically-based instrument)
91 | this is
92 | already handled for you. Check out Beats,
93 | or the very last example on this page.
94 |
95 |
96 |
97 |
98 |
The first example again, but with real code
99 |
100 |
101 | Just in case you're not completely grasping how this relates directly to
102 | Ember Audio. Here is the first example from above, but written so that it
103 | works in Ember and Ember Audio.
104 |
133 | By using a Sound's playIn method,
134 | currentTime is handled for you, so this can be made even simpler.
135 |
136 |
137 |
138 |
139 |
140 |
141 | playInOneSecond() {
142 | this.get('audio').getSound('delayed-note').playIn(1);
143 | }
144 |
145 |
--------------------------------------------------------------------------------
/addon/mixins/playable.js:
--------------------------------------------------------------------------------
1 | import { later } from '@ember/runloop';
2 | import Mixin from '@ember/object/mixin';
3 |
4 | /**
5 | * A mixin that allows an object to start and stop an audio source, now or in
6 | * the future, as well as track whether the audio source is currently playing or
7 | * not.
8 | *
9 | * Consuming object must implement `wireConnections` and `getNodeFrom` methods.
10 | * These methods are included in the {{#crossLink "Connectable"}}{{/crossLink}}
11 | * mixin.
12 | *
13 | * @public
14 | * @class Playable
15 | */
16 | export default Mixin.create({
17 | /**
18 | * Whether an audio source is playing or not.
19 | *
20 | * @public
21 | * @property isPlaying
22 | * @type {boolean}
23 | * @default false
24 | */
25 | isPlaying: false,
26 |
27 | /**
28 | * Plays the audio source immediately.
29 | *
30 | * @public
31 | * @method play
32 | */
33 | play() {
34 | this._play(this.get('audioContext.currentTime'));
35 | },
36 |
  /**
   * Plays the audio source at the specified moment in time. A "moment in time"
   * is measured in seconds from the moment that the
   * {{#crossLink "AudioContext"}}{{/crossLink}} was instantiated.
   *
   * Functionally equivalent to {{#crossLink "Playable/_play:method"}}{{/crossLink}}.
   *
   * @param {number} time The moment in time (in seconds, relative to the
   * {{#crossLink "AudioContext"}}AudioContext's{{/crossLink}} "beginning of
   * time") when the audio source should be played.
   *
   * @public
   * @method playAt
   */
  playAt(time) {
    // Absolute context time: delegate straight through to `_play`.
    this._play(time);
  },
54 |
55 | /**
56 | * Plays the audio source in specified amount of seconds from "now".
57 | *
58 | * @public
59 | * @method playIn
60 | *
61 | * @param {number} seconds Number of seconds from "now" that the audio source
62 | * should be played.
63 | */
64 | playIn(seconds) {
65 | this._play(this.get('audioContext.currentTime') + seconds);
66 | },
67 |
  /**
   * Starts playing the audio source immediately, but stops after specified
   * seconds have elapsed.
   *
   * @public
   * @method playFor
   *
   * @param {number} seconds The amount of time after which the audio source is
   * stopped.
   */
  playFor(seconds) {
    // Start now, and schedule the stop `seconds` from now.
    this.play();
    this.stopIn(seconds);
  },
82 |
  /**
   * Starts playing the audio source after `playIn` seconds have elapsed, then
   * stops the audio source `stopAfter` seconds after it started playing.
   *
   * @public
   * @method playInAndStopAfter
   *
   * @param {number} playIn Number of seconds from "now" that the audio source
   * should play.
   *
   * @param {number} stopAfter Number of seconds from when the audio source
   * started playing that the audio source should be stopped.
   */
  playInAndStopAfter(playIn, stopAfter) {
    this.playIn(playIn);
    // Both offsets are relative to "now", so the stop offset is their sum.
    this.stopIn(playIn + stopAfter);
  },
100 |
101 | /**
102 | * Stops the audio source immediately.
103 | *
104 | * @public
105 | * @method stop
106 | */
107 | stop() {
108 | this._stop(this.get('audioContext.currentTime'));
109 | },
110 |
111 | /**
112 | * Stops the audio source after specified seconds have elapsed.
113 | *
114 | * @public
115 | * @method stopIn
116 | *
117 | * @param {number} seconds Number of seconds from "now" that the audio source
118 | * should be stopped.
119 | */
120 | stopIn(seconds) {
121 | this._stop(this.get('audioContext.currentTime') + seconds);
122 | },
123 |
  /**
   * Stops the audio source at the specified "moment in time" relative to the
   * "beginning of time" according to the `audioContext`.
   *
   * Functionally equivalent to the `_stop` method; this is the public-facing
   * alias of that private method.
   *
   * @public
   * @method stopAt
   *
   * @param {number} time The time that the audio source should be stopped.
   */
  stopAt(time) {
    this._stop(time);
  },
138 |
139 | /**
140 | * The underlying method that backs all of the `stop` methods. Stops sound and
141 | * set `isPlaying` to false at specified time.
142 | *
143 | * Functionally equivalent to the `stopAt` method.
144 | *
145 | * @private
146 | * @method _stop
147 | *
148 | * @param {number} stopAt The moment in time (in seconds, relative to the
149 | * {{#crossLink "AudioContext"}}AudioContext's{{/crossLink}} "beginning of
150 | * time") when the audio source should be stopped.
151 | */
152 | _stop(stopAt) {
153 | const node = this.getNodeFrom('audioSource');
154 | const currentTime = this.get('audioContext.currentTime');
155 |
156 | if (node) {
157 | node.stop(stopAt);
158 | }
159 |
160 | if (stopAt === currentTime) {
161 | this.set('isPlaying', false);
162 | } else {
163 | later(() => this.set('isPlaying', false), (stopAt - currentTime) * 1000);
164 | }
165 | },
166 |
167 | /**
168 | * The underlying method that backs all of the `play` methods. Plays sound and
169 | * sets `isPlaying` to true at specified time.
170 | *
171 | * Functionally equivalent to `playAt`.
172 | *
173 | * @param {number} time The moment in time (in seconds, relative to the
174 | * {{#crossLink "AudioContext"}}AudioContext's{{/crossLink}} "beginning of
175 | * time") when the audio source should be played.
176 | *
177 | * @method _play
178 | * @private
179 | */
180 | _play(playAt) {
181 | const currentTime = this.get('audioContext.currentTime');
182 |
183 | this.wireConnections();
184 |
185 | const node = this.getNodeFrom('audioSource');
186 |
187 | node.start(playAt, this.startOffset);
188 |
189 | this.set('_startedPlayingAt', playAt);
190 |
191 | if (playAt === currentTime) {
192 | this.set('isPlaying', true);
193 | } else {
194 | later(() => this.set('isPlaying', true), (playAt - currentTime) * 1000);
195 | }
196 | },
197 | });
198 |
--------------------------------------------------------------------------------
/addon/utils/note-methods.js:
--------------------------------------------------------------------------------
1 | import { A } from '@ember/array';
2 | import { arraySwap, flatten } from './array-methods';
3 |
4 | /**
5 | * @public
6 | * @class utils
7 | */
8 |
/**
 * Sorts an array of {{#crossLink "Note"}}Notes{{/crossLink}} so that they are
 * in the same order that they would appear on a piano.
 *
 * @param {array} notes An array of notes that should be musically-sorted.
 *
 * @public
 * @method sortNotes
 *
 * @return {array} Array of musically-sorted notes.
 */
export function sortNotes(notes) {
  // Run the notes through each transformation step, in order:
  // 1. extractOctaves        - pair the notes with the octave of each note
  // 2. stripDuplicateOctaves - uniq and alphabetize the octave list
  // 3. createOctavesWithNotes - group the notes into one inner array per octave
  // 4. octaveSort            - sort each octave alphabetically, flats first
  // 5. octaveShift           - rotate each octave so it starts after the first
  //                            octave's last note, instead of at "A"
  const steps = [
    extractOctaves,
    stripDuplicateOctaves,
    createOctavesWithNotes,
    octaveSort,
    octaveShift,
  ];

  const sorted = steps.reduce((result, step) => step(result), notes);

  // Flatten the array of octave arrays into a single flat array
  return A(flatten(sorted));
}
40 |
/**
 * Takes an array of arrays of notes, determines the last note of
 * the first array, then splits the rest of the arrays in the array at the last
 * note of the first array, and moves the beginning of the array to the end
 * so that each array starts at the next note after the last note of the first
 * array, instead of at "A" (alphabetically).
 *
 * NOTE: mutates the input — the first octave is removed from `octaves` via
 * `shiftObject` before the remaining octaves are rotated, then re-attached.
 *
 * @example
 * This is hard to explain. Here's an example.
 * (Simplified, as the real notes are objects)
 *
 * Example input: [['A0', 'B0'], ['A1', 'B1', 'C1', 'D1']]
 * Example output: [['A0', 'B0'], ['C1', 'D1', 'A1', 'B1']]
 *
 * @private
 * @method octaveShift
 *
 * @param {array} octaves An array of octaves, each octave is an array of Notes.
 *
 * @return {array} Input array after having been shifted.
 */
export function octaveShift(octaves) {
  // Pull first octave from beginning of array (mutates `octaves`)
  const firstOctave = A(A(octaves).shiftObject());
  // Get all the note names from the second octave for comparison
  const secondOctaveNames = A(octaves.get('firstObject')).getEach('name');
  // Get the note name of the last note in the first octave
  const lastNote = firstOctave.get('lastObject.name');
  // Get the index of the occurrence of the last note from the first
  // octave, in the second octave (+1 so the split lands AFTER that note)
  const indexToShiftAt = secondOctaveNames.lastIndexOf(lastNote) + 1;
  // Split each remaining octave at that point, and move the first chunk to the end
  return (
    A(octaves.map((octave) => arraySwap(octave, indexToShiftAt)))
      // Put first octave back at the beginning of the array
      .unshiftObjects([firstOctave])
  );
}
79 |
/**
 * Maps through an array of arrays and sorts each inner array with
 * "noteSort".
 *
 * NOTE: each inner array is sorted in place (`Array.prototype.sort` mutates).
 *
 * @private
 * @method octaveSort
 *
 * @param {array} octaves array of arrays to be sorted
 *
 * @return {array} array of sorted arrays
 */
export function octaveSort(octaves) {
  const sorted = [];

  for (const octave of octaves) {
    sorted.push(octave.sort(noteSort));
  }

  return sorted;
}
94 |
/**
 * Accepts an array of Note objects and passes back an array
 * like this: [original array, array of each octave in the original array].
 *
 * @private
 * @method extractOctaves
 *
 * @param {array} notes array of note objects.
 *
 * @return {array} array containing two inner arrays, [0] is the untouched input
 * array, [1] is an array of all the octaves in the original array.
 */
export function extractOctaves(notes) {
  // Collect the `octave` value of every note (duplicates included)
  const octaves = A(notes).getEach('octave');

  return [notes, A(octaves)];
}
110 |
/**
 * Accepts an array of two arrays and returns the same
 * array, but with the array at index [1] uniq'd and sorted alphabetically.
 *
 * @private
 * @method stripDuplicateOctaves
 *
 * @param {array} [ notes, octaves ] the output from extractOctaves.
 *
 * @return {array} The mutated array.
 */
export function stripDuplicateOctaves([notes, octaves]) {
  // Remove duplicate octave values and put them in alphabetical order
  const dedupedOctaves = A(octaves).uniq().sort();

  return [notes, dedupedOctaves];
}
125 |
/**
 * Accepts an array of two arrays, [0] being an array
 * of Note objects, [1] being all the available octaves. Returns a single array
 * made up of arrays of Note objects, organized by octave. Each inner array
 * represents all of the notes in an octave.
 *
 * @private
 * @method createOctavesWithNotes
 *
 * @param {array} data The output of stripDuplicateOctaves.
 *
 * @return {Ember.MutableArray}
 */
export function createOctavesWithNotes([notes, octaves]) {
  // Wrap once so `filterBy` is available inside the loop
  const allNotes = A(notes);

  return A(octaves).map((octave) => allNotes.filterBy('octave', octave));
}
142 |
/**
 * Acts as a comparator function for the
 * {{#crossLink "Array/sort:method"}}Array.prototype.sort{{/crossLink}} method.
 * Sorts two {{#crossLink "Note"}}{{/crossLink}} instances alphabetically, flats
 * before naturals.
 *
 * Fixed to honor the comparator contract: the previous version returned a
 * non-zero value for equal notes (natural vs. natural, or flat vs. flat), and
 * was not antisymmetric, which makes the resulting order engine-dependent.
 *
 * @private
 * @method noteSort
 *
 * @param {Note} a The first Note instance to compare.
 * @param {Note} b The second Note instance to compare.
 *
 * @return {number} -1, 0, or 1, depending on whether the first
 * {{#crossLink "Note"}}{{/crossLink}} instance should be sorted left, right,
 * or considered equal.
 */
export function noteSort(a, b) {
  const aLet = a.get('letter');
  const bLet = b.get('letter');

  // Letters sort alphabetically
  if (aLet < bLet) {
    return -1;
  }

  if (aLet > bLet) {
    return 1;
  }

  // Same letter: a flat ("b" accidental) sorts before a natural
  const aFlat = a.get('accidental') === 'b';
  const bFlat = b.get('accidental') === 'b';

  if (aFlat === bFlat) {
    return 0;
  }

  return aFlat ? -1 : 1;
}
174 |
--------------------------------------------------------------------------------
/addon/classes/beat.js:
--------------------------------------------------------------------------------
1 | import EmberObject from '@ember/object';
2 | import { later } from '@ember/runloop';
3 |
/**
 * This class represents a single "beat" for a rhythmic instrument. An instance of this
 * class can be set to `active` or not to facilitate the way that most drum
 * machines work (when a beat is not `active`, the time that it occupies still
 * exists, but it does not cause audio to play, effectively resulting in a
 * "rest"). It provides properties that track when it is played, and when a "rest"
 * is played in its place.
 *
 * This class does not have the ability to create audio on its own and is
 * expected to be a "child" of one of the Sound classes. See its implementation in
 * {{#crossLink "BeatTrack"}}BeatTrack{{/crossLink}} for an example.
 *
 *     // Cannot play audio on its own.
 *     // Must pass in parentPlay and/or parentPlayIn from a parent class.
 *     Beat.create({
 *       _parentPlayIn: this.playIn.bind(this),
 *       _parentPlay: this.play.bind(this),
 *     });
 *
 * @public
 * @class Beat
 * @todo add playAt
 */
const Beat = EmberObject.extend({
  /**
   * If `active` is `true`, all methods of play will cause this instance to play.
   * If `active` is `false`, the `playIfActive()` and `ifActivePlayIn()`
   * methods will treat this instance as a rest (a timed period of silence).
   *
   * @public
   * @property active
   * @type {boolean}
   * @default false
   */
  active: false,

  /**
   * Whether a Beat instance is currently playing, considering both active and
   * inactive beats (rests). When switched to `true`, is automatically returned
   * to false after the time specified by the duration property.
   *
   * @public
   * @property currentTimeIsPlaying
   * @type {boolean}
   * @default false
   */
  currentTimeIsPlaying: false,

  /**
   * Whether a Beat instance is currently playing, considering only active beats.
   * When switched to `true`, is automatically returned to false after the time
   * specified by the duration property.
   *
   * @public
   * @property isPlaying
   * @type {boolean}
   * @default false
   */
  isPlaying: false,

  /**
   * On Beat instance instantiation, this property should be set to the parent's
   * audioBuffer.duration.
   *
   * @property _audioBufferDuration
   * @type {number|null}
   * @private
   */
  _audioBufferDuration: null,

  /**
   * If specified, determines the length of time, in milliseconds, before isPlaying
   * and currentTimeIsPlaying are automatically switched back to false after
   * having been switched to true. 100ms is used by default.
   *
   * @public
   * @property duration
   * @type {number}
   * @default 100
   */
  duration: 100,

  /**
   * Calls its parent's `playIn()` method directly to play the beat in
   * `${offset}` seconds.
   *
   * isPlaying and currentTimeIsPlaying are both marked true after the provided
   * offset has elapsed.
   *
   * @public
   * @method playIn
   *
   * @param {number} offset Number of seconds from "now" that the audio should
   * play. Defaults to 0 (play immediately).
   */
  playIn(offset = 0) {
    // The run loop's `later` takes milliseconds; the parent takes seconds
    const msOffset = offset * 1000;

    this._parentPlayIn(offset);

    // Schedule both "is playing" flags to flip when the audio actually starts
    later(() => this._markPlaying(), msOffset);
    later(() => this._markCurrentTimePlaying(), msOffset);
  },

  /**
   * If the beat is marked `active`, calls its parent's `playIn()` method
   * directly to play the beat in `${offset}` seconds.
   *
   * If active, isPlaying is marked true after the provided offset has elapsed.
   *
   * currentTimeIsPlaying is marked true after the provided offset has elapsed,
   * even if beat is not active.
   *
   * @public
   * @method ifActivePlayIn
   *
   * @param {number} offset Number of seconds from "now" that the audio should
   * play. Defaults to 0 (play immediately).
   */
  ifActivePlayIn(offset = 0) {
    const msOffset = offset * 1000;

    if (this.active) {
      this._parentPlayIn(offset);
      later(() => this._markPlaying(), msOffset);
    }

    // Tracked regardless of `active` so a "rest" is still observable
    later(() => this._markCurrentTimePlaying(), msOffset);
  },

  /**
   * Calls its parent's `play()` method directly to play the beat immediately.
   *
   * isPlaying and currentTimeIsPlaying are both immediately marked true.
   *
   * @public
   * @method play
   */
  play() {
    this._parentPlay();
    this._markPlaying();
    this._markCurrentTimePlaying();
  },

  /**
   * If `active`, calls its parent's `play()` method directly to play the beat
   * immediately.
   *
   * If `active`, isPlaying is immediately marked true.
   *
   * currentTimeIsPlaying is immediately marked true, even if beat is not active.
   *
   * @public
   * @method playIfActive
   */
  playIfActive() {
    if (this.active) {
      this._parentPlay();
      this._markPlaying();
    }

    // Tracked regardless of `active` so a "rest" is still observable
    this._markCurrentTimePlaying();
  },

  /**
   * Sets `isPlaying` to `true` and sets up a timer that sets `isPlaying` back
   * to false after `duration` has elapsed.
   *
   * @method _markPlaying
   * @private
   */
  _markPlaying() {
    this.set('isPlaying', true);
    later(() => this.set('isPlaying', false), this.duration);
  },

  /**
   * Sets `currentTimeIsPlaying` to `true` and sets up a timer that sets
   * `currentTimeIsPlaying` back to false after `duration` has elapsed.
   *
   * @method _markCurrentTimePlaying
   * @private
   */
  _markCurrentTimePlaying() {
    this.set('currentTimeIsPlaying', true);
    later(() => this.set('currentTimeIsPlaying', false), this.duration);
  },
});

export default Beat;
193 |
--------------------------------------------------------------------------------
/tests/dummy/app/templates/audio-routing.hbs:
--------------------------------------------------------------------------------
1 |
2 |
Audio Routing
3 |
4 |
5 |
8 |
9 |
10 |
11 |
12 | The signal path in the Web Audio API works by allowing one to stitch
13 | together various audio "nodes." An audio node works just like a guitar pedal;
14 | It has an input, it does some stuff to whatever goes into that input, and
15 | it has an output.
16 |
17 |
18 |
19 |
20 |
21 | By default, a Sound instance is routed through 4 audio nodes:
22 |
23 |
24 |
25 |
26 | Source - It's input is some sort of audio source; Sound
27 | loaded from a file, a synthesizer oscillator, or input from a user's
28 | microphone. It's output is digital audio data that the other audio
29 | nodes understand.
30 |
31 |
32 | Gain - This node allows one to adjust the
33 |
34 | gain
35 |
36 | of the audio data that is routed through it.
37 |
38 |
39 | Panner - This node allows one to control the stereo pan
40 | position (left or right) of the audio data that is routed through it.
41 |
42 |
43 | Destination - This node routes any audio data that is routed
44 | through it, to the end user's audio output.
45 |
46 |
47 |
48 |
49 |
50 |
51 | The nodes are connected automatically, in the same order that they exist
52 | in the connections array. For the example above (the default
53 | case) they are connected like: Source -> Gain
54 | -> Panner -> Destination
55 |
56 |
57 |
58 |
59 |
60 | There are many more AudioNode types provided by the Web Audio
61 | API than the ones that are represented here. Take a look at the
62 |
63 | Web Audio API Documentation
64 |
65 | to learn about all of the available AudioNode types.
66 |
67 |
68 |
69 |
70 |
71 | It is possible to customize routing by adding and removing audio nodes from
72 | a Sound instance's connections array.
73 |
74 |
75 |
76 | The connections array is an Ember.MutableArray
77 | so it is easily manipulated using its prototype methods such as
78 | insertAt and removeAt.
79 |
80 |
81 |
82 | A Sound instance also has a convenience method called
83 | removeConnection that allows one to remove a connection by its
84 | name.
85 |
86 |
87 |
88 |
89 |
90 | <button class="btn btn-primary" {{action "playSound"}}>Play Sound</button>
91 | <button class="btn {{if distortionEnabled "btn-warning" "btn-success"}}" {{action "toggleDistortion"}}>
92 | Turn {{if distortionEnabled "Off" "On"}} Distortion
93 | </button>
94 |
95 |
96 |
97 | import Ember from 'ember';
98 | import { Connection } from 'ember-audio';
99 |
100 | export default Ember.Whatever.extend({
101 | audio: Ember.inject.service(),
102 | distortionEnabled: false,
103 |
104 | initAudioFile: Ember.on('init', function() {
105 | // Eb5.mp3 is an mp3 file located in the "public" folder
106 | this.get('audio').load('/ember-audio/Eb5.mp3').asSound('piano-note').then((note) => {
107 |
108 | // Create the connection and insert it into the note's connections array
109 | note.get('connections').insertAt(1, Connection.create({
110 | name: 'distortionNode',
111 | source: 'audioContext',
112 | createCommand: 'createWaveShaper'
113 | }));
114 |
115 | this.set('note', note);
116 | });
117 | }),
118 |
119 | _makeDistortionCurve(amount) {
120 | // I stole this straight from the Mozilla Web Audio API docs site
121 | const k = typeof amount === 'number' ? amount : 50;
122 | const n_samples = 44100;
123 | const curve = new Float32Array(n_samples);
124 | const deg = Math.PI / 180;
125 |
126 | for (let i = 0; i < n_samples; ++i) {
127 | let x = i * 2 / n_samples - 1;
128 | curve[i] = ( 3 + k ) * x * 20 * deg / ( Math.PI + k * Math.abs(x) );
129 | }
130 |
131 | return curve;
132 | },
133 |
134 | _addDistortion() {
135 | const curve = this._makeDistortionCurve(400);
136 | const note = this.get('note');
137 |
138 | this.set('distortionEnabled', true);
139 |
140 | // lower note's gain because distorted signal has much more apparent volume
141 | note.changeGainTo(0.1).from('ratio');
142 |
143 | // Set distortionNode's curve to enable distortion
144 | note.getNodeFrom('distortionNode').curve = curve;
145 | },
146 |
147 | _removeDistortion() {
148 | const note = this.get('note');
149 |
150 | this.set('distortionEnabled', false);
151 |
152 | // raise note's gain because clean signal has much less apparent volume
153 | note.changeGainTo(1).from('ratio');
154 |
155 | // Set distortionNode's curve to an empty Float32Array to disable distortion
156 | note.getNodeFrom('distortionNode').curve = new Float32Array();
157 | },
158 |
159 | actions: {
160 | playSound() {
161 | this.get('note').play();
162 | },
163 |
164 | toggleDistortion() {
165 | if (this.get('distortionEnabled')) {
166 | this._removeDistortion();
167 | } else {
168 | this._addDistortion();
169 | }
170 | }
171 | }
172 | });
173 |
174 |
175 |
--------------------------------------------------------------------------------
/tests/unit/classes/sound-test.js:
--------------------------------------------------------------------------------
1 | import { Sound } from 'ember-audio';
2 | import { get } from '@ember/object';
3 | import ContextMock from '../../helpers/context-mock';
4 | import AudioBufferMock from '../../helpers/audio-buffer-mock';
5 | import { module, test } from 'qunit';
6 |
// Unit tests for the Sound class, using project-local mocks in place of a real
// Web Audio AudioContext and AudioBuffer (the mocks record which create/connect
// calls happened; AudioBufferMock reports a 65-second duration by default).
module('Unit | Class | sound', function () {
  test('it exists', function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });
    assert.ok(result);
  });

  test('on init, a gain, and panner are created', function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });
    let ctx = get(result, 'audioContext');

    // The mock context flips these flags when the corresponding factory is called
    assert.ok(get(ctx, 'createGainCalled'));
    assert.ok(get(ctx, 'createStereoPannerCalled'));
  });

  test('duration.raw works', function (assert) {
    let audioContext = ContextMock.create();
    let audioBuffer = AudioBufferMock.create();

    let result = Sound.create({ audioContext, audioBuffer });
    assert.strictEqual(get(result, 'duration.raw'), 65);
  });

  test('duration.string works', function (assert) {
    let audioContext = ContextMock.create();
    let audioBuffer = AudioBufferMock.create();

    let result = Sound.create({ audioContext, audioBuffer });
    assert.strictEqual(get(result, 'duration.string'), '01:05');

    // Exercise zero-padding and minute rollover
    result.set('audioBuffer.duration', 40);
    assert.strictEqual(get(result, 'duration.string'), '00:40');

    result.set('audioBuffer.duration', 60);
    assert.strictEqual(get(result, 'duration.string'), '01:00');

    result.set('audioBuffer.duration', 600);
    assert.strictEqual(get(result, 'duration.string'), '10:00');

    // Minutes are allowed to exceed two digits
    result.set('audioBuffer.duration', 6001);
    assert.strictEqual(get(result, 'duration.string'), '100:01');

    result.set('audioBuffer.duration', 6012);
    assert.strictEqual(get(result, 'duration.string'), '100:12');
  });

  test('duration.pojo works', function (assert) {
    let audioContext = ContextMock.create();
    let audioBuffer = AudioBufferMock.create();

    let result = Sound.create({ audioContext, audioBuffer });
    assert.deepEqual(get(result, 'duration.pojo'), {
      minutes: 1,
      seconds: 5,
    });

    result.set('audioBuffer.duration', 40);
    assert.deepEqual(get(result, 'duration.pojo'), {
      minutes: 0,
      seconds: 40,
    });

    result.set('audioBuffer.duration', 60);
    assert.deepEqual(get(result, 'duration.pojo'), {
      minutes: 1,
      seconds: 0,
    });
  });

  test('percentGain works', function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });
    assert.strictEqual(get(result, 'percentGain'), 40);
  });

  test('play() calls node.connect(ctx.destination)', function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });

    // Reset the mock's flag so the assertion below observes only play()
    result.getNodeFrom('audioSource').connectCalled = false;

    assert.notOk(result.getNodeFrom('audioSource').connectCalled);
    result.play();
    assert.ok(result.getNodeFrom('audioSource').connectCalled);
  });

  test('play() connects panner', function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });

    result.getNodeFrom('panner').connectCalled = false;

    assert.notOk(result.getNodeFrom('panner').connectCalled);
    result.play();
    assert.ok(result.getNodeFrom('panner').connectCalled);
  });

  test('play() connects gain', function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });

    result.getNodeFrom('gain').connectCalled = false;

    assert.notOk(result.getNodeFrom('gain').connectCalled);
    result.play();
    assert.ok(result.getNodeFrom('gain').connectCalled);
  });

  test(`changePanTo() gets the panner connection and changes it's node's pan value`, function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });
    let panner = result.getNodeFrom('panner');

    result.changePanTo(0.6);
    assert.strictEqual(panner.pan.value, 0.6);

    result.changePanTo(0.4);
    assert.strictEqual(panner.pan.value, 0.4);
  });

  test(`changeGainTo() gets the gain connection and changes it's node's gain value`, function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });
    let gain = result.getNodeFrom('gain');

    result.changeGainTo(0.6).from('ratio');
    assert.strictEqual(gain.gain.value, 0.6);

    // 'inverseRatio' sets gain to 1 - value
    result.changeGainTo(0.3).from('inverseRatio');
    assert.strictEqual(gain.gain.value, 0.7);

    result.changeGainTo(20).from('percent');
    assert.strictEqual(gain.gain.value, 0.2);
  });

  test('startOffset starts at 0', function (assert) {
    let audioContext = ContextMock.create();
    let result = Sound.create({ audioContext });
    assert.strictEqual(get(result, 'startOffset'), 0);
  });

  test('seek() sets startOffset', function (assert) {
    let audioContext = ContextMock.create();
    let audioBuffer = AudioBufferMock.create();
    let result = Sound.create({ audioContext, audioBuffer });

    // Expected values derive from the mock buffer's 65-second duration
    result.seek(2).from('seconds');
    assert.strictEqual(get(result, 'startOffset'), 2);

    result.seek(30).from('percent');
    assert.strictEqual(get(result, 'startOffset'), 19.5);

    result.seek(0.2).from('ratio');
    assert.strictEqual(get(result, 'startOffset'), 13);

    result.seek(0.2).from('inverseRatio');
    assert.strictEqual(get(result, 'startOffset'), 52);
  });

  test('seek() calls stop then play when `isPlaying` is true', function (assert) {
    let audioContext = ContextMock.create();
    let audioBuffer = AudioBufferMock.create();
    let result = Sound.create({ audioContext, audioBuffer, isPlaying: true });

    // Stub stop/play so the test can observe that seek() restarts playback
    result.set('stop', () => result.set('stopCalled', true));
    result.set('play', () => result.set('playCalled', true));

    assert.notOk(get(result, 'stopCalled'));
    assert.notOk(get(result, 'playCalled'));

    result.seek(2).from('seconds');

    assert.ok(get(result, 'stopCalled'));
    assert.ok(get(result, 'playCalled'));
  });
});
184 |
--------------------------------------------------------------------------------
/tests/dummy/app/templates/audio-files/mp3-player.hbs:
--------------------------------------------------------------------------------
1 |
2 |
MP3 Player Example
3 |
4 |
5 |
6 |
7 |
8 | {{#each this.tracks as |track|}}
9 |
10 |
{{undasherize track.name}}
11 |
12 | {{/each}}
13 |
14 |
15 |
16 |
17 |
18 | {{#if this.selectedTrack}}
19 |
{{this.selectedTrack.description}}
20 | {{else}}
21 |
Select a Track...
22 | {{/if}}
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 | Note: Purely aesthetic stuff has been removed. See repo for full code.
32 |
33 |
34 |
Template
35 |
36 |
37 | {{'
38 |
39 |
40 |
41 | {{#each tracks as |track|}}
42 |
43 |
44 |
{{track.name}}
45 |
46 |
47 | {{/each}}
48 |
49 |
50 |
51 |
52 | {{#if selectedTrack}}
53 |
{{selectedTrack.description}}
54 | {{else}}
55 |
Select a Track...
56 | {{/if}}
57 |
58 |
59 |
60 | {{!-- mp3-player accepts a Track instance, and a loading boolean --}}
61 | {{mp3-player track=selectedTrack.trackInstance loading=trackIsLoading}}
62 | '}}
63 |
64 |
65 |
66 |
67 |
Controller
68 |
69 |
70 | audio: Ember.inject.service(),
71 | trackIsLoading: false,
72 | selectedTrack: null,
73 |
74 | // "barely-there.mp3" and "do-wah-diddy.mp3" are mp3 files located in this project's public folder
75 | tracks: [
76 | {
77 | name: 'barely-there',
78 | trackInstance: null, // After it's loaded, we will place the audio data here
79 | description: `description here...`
80 | },
81 | {
82 | name: 'do-wah-diddy',
83 | trackInstance: null,
84 | description: `description here...`
85 | }
86 | ],
87 |
88 | actions: {
89 | selectTrack(track) {
90 | const audio = this.get('audio');
91 |
92 | this.set('selectedTrack', track);
93 | this.set('trackIsLoading', true);
94 |
95 | // Pause any playing track so that user can't play multiple tracks at once
96 | audio.pauseAll();
97 |
98 | // We use ".asTrack('some-name')" to name the sound, and to specify that we want a "Track" instance
99 | audio.load(`${track.name}.mp3`).asTrack(track.name).then((trackInstance) => {
100 | this.set('selectedTrack.trackInstance', trackInstance);
101 | this.set('trackIsLoading', false);
102 | });
103 | }
104 | }
105 |
106 |
118 |
119 | {{!-- track.position and track.duration output a few different formats
120 | as a POJO. We are interested in the "string" variant --}}
121 |
{{track.position.string}}
122 |
123 | {{!-- An action bound to "onclick" is passed a javascript native "MouseEvent" object --}}
124 |
125 |
126 |
127 |
128 |
129 |
{{track.duration.string}}
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 | {{/if}}
142 | '}}
143 |
144 |
145 |
146 |
147 |
mp3-player Component Javascript
148 |
149 |
150 | Note that the exact measurements below will not apply to you unless your
151 | markup is identical; however, the concepts are universal.
152 | newPosition and newGain could be calculated much
153 | more easily using range inputs.
154 |
155 |
156 |
157 | percentPlayed: Ember.computed('track.percentPlayed', function() {
158 | const percentPlayed = this.get('track.percentPlayed');
159 | return Ember.String.htmlSafe(`width: ${percentPlayed}%;`);
160 | }),
161 |
162 | percentGain: Ember.computed('track.percentGain', function() {
163 | const percentGain = this.get('track.percentGain');
164 | return Ember.String.htmlSafe(`height: ${percentGain}%;`);
165 | }),
166 |
167 | actions: {
168 | togglePlay() {
169 | const track = this.get('track');
170 |
171 | if (track.get('isPlaying')) {
172 | track.pause();
173 | } else {
174 | track.play()
175 | }
176 | },
177 |
178 | // "e" is a javascript native "MouseEvent" object
179 | seek(e) {
180 | // Get width of clicked element's parent
181 | const width = e.target.offsetParent.offsetWidth;
182 |
183 | // Divide click position by parent width
184 | const newPosition = e.offsetX / width;
185 |
186 | // Set new position based on ratio
187 | this.get('track').seek(newPosition).from('ratio');
188 | },
189 |
190 | changeVolume(e) {
191 | // Get height of clicked element's parent
192 | const height = e.target.offsetParent.offsetHeight;
193 |
194 | // Get click position
195 | const offset = e.pageY - Ember.$(e.target).parent().offset().top;
196 |
197 | // Adjust height because height of element is 80% of parent's
198 | const adjustedHeight = height * 0.8;
199 |
200 | // Adjust click position because height of element is 80% of parent's,
201 | // and element is centered vertically
202 | const adjustedOffset = offset - ((height - adjustedHeight) / 2);
203 | const newGain = adjustedOffset / adjustedHeight;
204 |
205 | // Set new gain based on inverse ratio because Y coordinate is measured
206 | // from the top, but we want gain to be measured from the bottom
207 | this.get('track').changeGainTo(newGain).from('inverseRatio');
208 | }
209 | }
210 |
211 |