media.pause()
13 | }
14 |
15 | clip.start ||= 0;
16 | media.currentTime = clip.start;
17 |
18 | const toSeekableRange = () => {
19 | if (media.readyState === 0) return;
20 |
21 | // Setting preload to `none` from `auto` was required on iOS to fix a bug
22 | // that caused no `timeupdate` events to fire after seeking ¯\_(ツ)_/¯
23 | const wasAuto = media.preload === 'auto';
24 | if (wasAuto) media.preload = 'none';
25 |
26 | if (media.currentTime < 0) media.currentTime = 0;
27 | if (media.currentTime > clip.end) media.currentTime = clip.end;
28 |
29 | if (wasAuto) media.preload = 'auto';
30 | }
31 |
32 | let preciseInterval
33 | const onTimeupdate = () => {
34 | clearInterval(preciseInterval);
35 |
36 | if (media.currentTime >= clip.end) { // ended
37 | if (media.loop) {
38 | media.currentTime = clip.start;
39 | return;
40 | }
41 | media.pause();
42 | media.dispatchEvent(new Event('ended'));
43 | return;
44 | }
45 |
46 | // When the playhead is 200ms or less from the end, check every 10ms (~512 samples)
47 | // for increased accuracy. timeupdate is only fired every ~150ms or so.
48 | if (media.currentTime + .2 > clip.end) preciseInterval = setInterval(onTimeupdate, 10);
49 | }
50 |
51 | const onPlaying = () => {
52 | if (media.currentTime >= clip.end) media.currentTime = clip.start;
53 | }
54 |
55 | media.addEventListener('durationchange', toSeekableRange);
56 | media.addEventListener('seeking', toSeekableRange);
57 | media.addEventListener('timeupdate', onTimeupdate);
58 |
59 | let timeUpdateInterval = setInterval(onTimeupdate, 50) // safari is too bad
60 | media.addEventListener('playing', onPlaying);
61 |
62 | media.play()
63 |
64 | return () => {
65 | media.removeEventListener('durationchange', toSeekableRange);
66 | media.removeEventListener('seeking', toSeekableRange);
67 | media.removeEventListener('timeupdate', onTimeupdate);
68 | media.removeEventListener('playing', onPlaying);
69 | clearInterval(timeUpdateInterval)
70 | clearInterval(preciseInterval)
71 |
72 | media.pause()
73 | }
74 | }
75 |
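76 | // Usage sketch: the enclosing function isn't shown above, so `playClip` is a hypothetical name
77 | // for whatever exports this logic; it closes over `media` and `clip` and returns a cleanup fn.
78 | // const stop = playClip(media, { start: 1.5, end: 4.2 });
79 | // // later: detach listeners and pause
80 | // stop();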
--------------------------------------------------------------------------------
/src/component/playback.js:
--------------------------------------------------------------------------------
1 | class PlayButton extends HTMLElement {
2 | constructor() {
3 | super();
4 | this.attachShadow({ mode: 'open' });
5 | this._playing = false;
6 | this.tabIndex = -1;
7 | }
8 |
9 | connectedCallback() {
10 | this._playing = this.hasAttribute('playing');
11 | this.render();
12 | this.setupEventListeners();
13 | }
14 |
15 | static get observedAttributes() {
16 | return ['playing', 'tabindex'];
17 | }
18 |
19 | attributeChangedCallback(name, oldValue, newValue) {
20 | if (name === 'playing') {
21 | this._playing = newValue !== null;
22 | this.updateDisplay();
23 | }
24 | }
25 |
26 | get playing() {
27 | return this._playing;
28 | }
29 |
30 | set playing(value) {
31 | if (value) {
32 | this.setAttribute('playing', '');
33 | } else {
34 | this.removeAttribute('playing');
35 | }
36 | }
37 |
38 | render() {
39 | const tabindex = this.getAttribute('tabindex') || '0';
40 |
41 | this.shadowRoot.innerHTML = `
42 |
43 |
48 |
53 | `;
54 |
55 | this.updateDisplay();
56 | }
57 |
58 | updateDisplay() {
59 | const playIcon = this.shadowRoot.getElementById('play-icon');
60 | const pauseIcon = this.shadowRoot.getElementById('pause-icon');
61 |
62 | if (playIcon && pauseIcon) {
63 | playIcon.hidden = this._playing;
64 | pauseIcon.hidden = !this._playing;
65 | }
66 | }
67 |
68 | setupEventListeners() {
69 | const button = this.shadowRoot.querySelector('button');
70 | button.addEventListener('click', () => {
71 | this.toggle();
72 | });
73 | }
74 |
75 | toggle() {
76 | this.playing = !this._playing;
77 | this.dispatchEvent(new CustomEvent('toggle', {
78 | detail: { playing: this._playing },
79 | bubbles: true,
80 | composed: true
81 | }));
82 | }
83 | }
84 |
85 | customElements.define('play-button', PlayButton);
86 |
87 | // Usage example:
88 | //
89 | // <play-button></play-button>
90 | //
91 | // Listen for toggle events:
92 | // document.querySelector('play-button').addEventListener('toggle', (e) => {
93 | // console.log('Playing:', e.detail.playing);
94 | // });
95 |
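96 | // Sketch: wiring the button to a media element (the `#audio` selector here is hypothetical):
97 | // const btn = document.querySelector('play-button');
98 | // const media = document.querySelector('#audio');
99 | // btn.addEventListener('toggle', (e) => e.detail.playing ? media.play() : media.pause());
100 | // media.addEventListener('ended', () => btn.playing = false);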
--------------------------------------------------------------------------------
/experiments/audio-recorder.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Waveedit
5 |
6 |
32 |
51 |
60 |
61 |
62 |
63 |
67 |
68 | 🕉
69 |
--------------------------------------------------------------------------------
/dist/node-wav-6T5M4VPY.js:
--------------------------------------------------------------------------------
1 | import{a as g}from"./chunk-A576P2ZR.js";var F=g((I,A)=>{var M={pcm8:(r,n,p,i,f)=>{let o=new Uint8Array(r,n),a=0;for(let l=0;l{let o=new Int16Array(r,n),a=0;for(let l=0;l{let o=new Uint8Array(r,n),a=0;for(let l=0;l8388608?c-16777216:c;p[t][l]=u<0?u/8388608:u/8388607}},pcm32:(r,n,p,i,f)=>{let o=new Int32Array(r,n),a=0;for(let l=0;l{let o=new Float32Array(r,n),a=0;for(let l=0;l{let o=new Float64Array(r,n),a=0;for(let l=0;l{let o=new Uint8Array(r,n),a=0;for(let l=0;l{let o=new Int16Array(r,n),a=0;for(let l=0;l{let o=new Uint8Array(r,n),a=0;for(let l=0;l>0&255,o[a++]=e>>8&255,o[a++]=e>>16&255}},pcm32:(r,n,p,i,f)=>{let o=new Int32Array(r,n),a=0;for(let l=0;l{let o=new Float32Array(r,n),a=0;for(let l=0;l{let o=new Float64Array(r,n),a=0;for(let l=0;l>3)),t=new DataView(l),e=0;function m(s){t.setUint8(e++,s)}function h(s){t.setUint16(e,s,!0),e+=2}function c(s){t.setUint32(e,s,!0),e+=4}function u(s){for(var w=0;w>3)),h(o*(f>>3)),h(f),u("data"),c(l.byteLength-44),y(U,f,i)(l,e,r,o,a),Buffer(l)}A.exports={decode:v,encode:b}});export default F();
2 | //# sourceMappingURL=node-wav-6T5M4VPY.js.map
3 |
--------------------------------------------------------------------------------
/src/selection.js:
--------------------------------------------------------------------------------
1 |
2 | export const selection = {
3 | // get normalized selection
4 | get() {
5 | let s = window.getSelection()
6 |
7 | // bail out if selection is outside #editarea
8 | if (!s.anchorNode || !s.anchorNode.parentNode.closest('#editarea')) return
9 |
10 | // collect start/end offsets
11 | let start = absOffset(s.anchorNode, s.anchorOffset), end = absOffset(s.focusNode, s.focusOffset)
12 |
13 | // swap selection direction
14 | let startNode = s.anchorNode.parentNode.closest('.segment'), startNodeOffset = s.anchorOffset,
15 | endNode = s.focusNode.parentNode.closest('.segment'), endNodeOffset = s.focusOffset;
16 | if (start > end) {
17 | [end, endNode, endNodeOffset, start, startNode, startNodeOffset] =
18 | [start, startNode, startNodeOffset, end, endNode, endNodeOffset]
19 | }
20 |
21 | return {
22 | start,
23 | startNode,
24 | startNodeOffset,
25 | end,
26 | endNode,
27 | endNodeOffset,
28 | collapsed: s.isCollapsed,
29 | range: s.getRangeAt(0)
30 | }
31 | },
32 |
33 | /**
34 | * Set normalized selection
35 | * @param {number | Array} start – absolute offset (excluding modifier chars) or relative offset [node, offset]
36 | * @param {number | Array} end – absolute offset (excluding modifier chars) or relative offset [node, offset]
37 | * @returns {{start, startNode, startNodeOffset, end, endNode, endNodeOffset, collapsed, range}}
38 | */
39 | set(start, end) {
40 | let s = window.getSelection()
41 |
42 | if (Array.isArray(start)) start = absOffset(...start)
43 | if (Array.isArray(end)) end = absOffset(...end)
44 |
45 | // start/end must be within limits
46 | start = Math.max(0, start)
47 | if (end == null) end = start
48 |
49 | // find start/end nodes
50 | let editarea = document.querySelector('#editarea')
51 | let [startNode, startNodeOffset] = relOffset(editarea, start)
52 | let [endNode, endNodeOffset] = relOffset(editarea, end)
53 |
54 | let currentRange = s.rangeCount ? s.getRangeAt(0) : null
55 | if (
56 | !currentRange || !(currentRange.startContainer === startNode.firstChild && currentRange.startOffset === startNodeOffset) ||
57 | !(currentRange.endContainer === endNode.firstChild && currentRange.endOffset === endNodeOffset)
58 | ) {
59 | // NOTE: Safari doesn't support reusing range
60 | s.removeAllRanges()
61 | let range = new Range()
62 | range.setStart(startNode.firstChild, startNodeOffset)
63 | range.setEnd(endNode.firstChild, endNodeOffset)
64 | s.addRange(range)
65 | }
66 |
67 | return {
68 | start, startNode, end, endNode,
69 | startNodeOffset, endNodeOffset,
70 | collapsed: s.isCollapsed,
71 | range: s.getRangeAt(0)
72 | }
73 | }
74 | }
75 |
76 | // calculate absolute offset from relative pair
77 | function absOffset(node, relOffset) {
78 | let prevNode = node.parentNode.closest('.segment')
79 | let offset = cleanText(prevNode.textContent.slice(0, relOffset)).length
80 | while (prevNode = prevNode.previousSibling) offset += cleanText(prevNode.textContent).length
81 | return offset
82 | }
83 |
84 | // calculate node and relative offset from absolute offset
85 | function relOffset(editarea, offset) {
86 | let node = editarea.firstChild, len
87 | // discount previous nodes
88 | while (offset > (len = cleanText(node.textContent).length)) {
89 | offset -= len, node = node.nextSibling
90 | }
91 | // convert current node to relative offset
92 | let skip = 0
93 | for (let content = node.textContent, i = 0; i < offset; i++) {
94 | while (content[i + skip] >= '\u0300') skip++
95 | }
96 | return [node, offset + skip]
97 | }
98 |
99 | // return text cleaned from modifier chars
100 | export function cleanText(str) {
101 | return str.replace(/\u0300|\u0301/g, '')
102 | }
103 |
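104 | // Usage sketch (assumes an #editarea with .segment children, as used above):
105 | // const sel = selection.get()   // -> { start, end, startNode, ... } or undefined if outside #editarea
106 | // if (sel && !sel.collapsed) console.log('selected range', sel.start, '..', sel.end)
107 | // selection.set(sel.start)      // collapse the caret to the selection start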
--------------------------------------------------------------------------------
/dist/chunk-ZZPGCUWV.js:
--------------------------------------------------------------------------------
1 | var u=class{constructor(t){if(!t)throw TypeError("options argument is required");if(!t.sampleRate)throw TypeError("options.sampleRate is required");if(t.sampleRate<3e3||t.sampleRate>768e3)throw TypeError("options.sampleRate must be within 3000..768000");if(!t.length)throw TypeError("options.length must be more than 0");this.sampleRate=t.sampleRate,this.numberOfChannels=t.numberOfChannels||1,this.length=t.length|0,this.duration=this.length/this.sampleRate,this._data=new Float32Array(this.length*this.numberOfChannels),this._channelData=[];for(let n=0;n=this.numberOfChannels||t<0||t==null)throw Error("Cannot getChannelData: channel number ("+t+") exceeds number of channels ("+this.numberOfChannels+")");return this._channelData[t]}copyFromChannel(t,n,r){r==null&&(r=0);for(var h=this._channelData[n],a=r,l=0;ar&&w(await n.decodeFile(r)))(e)},async mp3(e){let{MPEGDecoder:t}=await import("./mpg123-decoder-UBRSS5IY.js"),n=new t;return await n.ready,(p.mp3=r=>r&&w(n.decode(r)))(e)},async flac(e){let{FLACDecoder:t}=await import("./flac-ZGCWAMZE.js"),n=new t;return await n.ready,(p.mp3=async r=>r&&w(await n.decode(r)))(e)},async opus(e){let{OggOpusDecoder:t}=await import("./ogg-opus-decoder-GNKUO3CE.js"),n=new t;return await n.ready,(p.opus=async r=>r&&w(await n.decodeFile(r)))(e)},async wav(e){let t=await import("./node-wav-6T5M4VPY.js"),{decode:n}=t.default;return(p.wav=r=>r&&w(n(r)))(e)},async qoa(e){let{decode:t}=await import("./qoa-format-5YQ5ZETV.js");return(p.qoa=n=>n&&w(t(n)))(e)}};function w({channelData:e,sampleRate:t}){let n=new L({sampleRate:t,length:e[0].length,numberOfChannels:e.length});for(let r=0;r>3)),l=new DataView(a),c=0,y=o=>l.setUint8(c++,o),s=o=>(l.setUint16(c,o,!0),c+=2),d=o=>(l.setUint32(c,o,!0),c+=4),g=o=>{for(var i=0;i>3)),s(r*(n>>3)),s(n),g("data"),d(a.byteLength-44);let C=new Float32Array(a,c);for(let o of e){let i=o.numberOfChannels,A=Array(i),D=o.length;for(let m=0;m=t.length?0:t[a],y+=s,o=Math.max(o,s),i=Math.min(i,s);g=Math.min(h,Math.ceil(h*(o-i)/2))||0,C=Math.round(h*(o+i)/2),n+=String.fromCharCode(256+g),n+=(C>0?"\u0301":"\u0300").repeat(Math.abs(C)),l+=1024}return e._wf=n,n}var $=e=>new Promise((t,n)=>{let r=new FileReader;r.addEventListener("loadend",h=>{t(h.target.result)}),r.addEventListener("error",n),r.readAsArrayBuffer(e)});export{u as a,O as b,Z as c,H as d,Q as e,$ as f};
2 | //# sourceMappingURL=chunk-ZZPGCUWV.js.map
3 |
--------------------------------------------------------------------------------
/dist/qoa-format-5YQ5ZETV.js:
--------------------------------------------------------------------------------
1 | import"./chunk-A576P2ZR.js";var P=(s,t=e=>e!==void 0?": "+e:"")=>class extends Error{constructor(e){super(s(e)+t(e))}};var it=P(()=>"illegal argument(s)"),y=s=>{throw new it(s)};var rt=P(()=>"illegal state"),J=s=>{throw new rt(s)};var ot=Math.pow(2,32),q=class{constructor(t,e=0,i=t.length<<3){this.buffer=t,this.start=e,this.limit=i,this.seek(e)}*[Symbol.iterator](){let t=this.start,e=t>>>3,i=7-(t&7);for(;t>>i&1,--i<0&&(e++,i=7),t++}get length(){return this.limit}get position(){return this.bitPos}seek(t){return(t=this.limit)&&y(`seek pos out of bounds: ${t}`),this.pos=t>>>3,this.bit=8-(t&7),this.bitPos=t,this}read(t=1){if(t>32)return this.read(t-32)*ot+this.read(32);if(t>8){let e=0,i=t&-8,r=t-i;for(r>0&&(e=this._read(r));i>0;)e=(e<<8|this._read(8))>>>0,i-=8;return e}else return this._read(t)}readFields(t){return t.map(e=>this.read(e))}readWords(t,e=8){let i=[];for(;t-- >0;)i.push(this.read(e));return i}readStruct(t){return t.reduce((e,[i,r])=>(e[i]=this.read(r),e),{})}readBit(){this.checkLimit(1),this.bit--,this.bitPos++;let t=this.buffer[this.pos]>>>this.bit&1;return this.bit===0&&(this.pos++,this.bit=8),t}_read(t){this.checkLimit(t);let e=this.bit-t,i;return e>=0?(this.bit=e,i=this.buffer[this.pos]>>>e&(1<>>this.bit),this.bitPos+=t,i}checkLimit(t){this.bitPos+t>this.limit&&J("can't read past EOF")}};var nt=16,K=Math.pow(2,32),C=class{constructor(t,e=0){this.buffer=typeof t>"u"?new Uint8Array(nt):typeof t=="number"?new Uint8Array(t):t,this.start=e,this.seek(e),this.buffer[this.pos]&=~((1<=this.buffer.length<<3)&&y(`seek pos out of bounds: ${t}`),this.pos=t>>>3,this.bit=8-(t&7),this.bitPos=t,this}bytes(){return this.buffer.slice(0,this.pos+(this.bit&7?1:0))}reader(t=0){return new q(this.buffer,t,this.position)}write(t,e=1){if(e>32){let i=Math.floor(t/K);this.write(i,e-32),this.write(t-i*K,32)}else if(e>8){let i=e&-8,r=e-i;for(r>0&&this._write(t>>>i,r),i-=8;i>=0;)this._write(t>>>i,8),i-=8}else this._write(t,e);return this}writeWords(t,e=8){let i=t[Symbol.iterator](),r;for(;r=i.next(),!r.done;)this.write(r.value,e)}writeBit(t){return this.bit--,this.buffer[this.pos]=this.buffer[this.pos]&~(1<=0?(c|=(1<>>-a&~c,this.ensureSize(),this.buffer[this.pos]=this.buffer[this.pos]&(1<Math.floor(8+16*s+8*t*s);function E(s,t,e){return se?e:s}function S(s,t){let e=new Int16Array(s||4),i=new Int16Array(t||4);return{history:e,weights:i}}function U(s,t){return s[0]*t[0]+s[1]*t[1]+s[2]*t[2]+s[3]*t[3]>>13}function Z(s,t,e,i){let r=i>>4;s[0]+=t[0]<0?-r:r,s[1]+=t[1]<0?-r:r,s[2]+=t[2]<0?-r:r,s[3]+=t[3]<0?-r:r,t[0]=t[1],t[1]=t[2],t[2]=t[3],t[3]=e}var Y=s=>Math.sign(s)*Math.round(Math.abs(s)),$=Array(16).fill().map((s,t)=>Y(Math.pow(t+1,2.75))),lt=[.75,-.75,2.5,-2.5,4.5,-4.5,7,-7],B=$.map(s=>lt.map(t=>Y(t*s)));function at(s){if(s.read(32)!==1903124838)throw new Error("Not a QOA file; expected magic number 'qoaf'");let e={samples:s.read(32),channels:s.read(8),sampleRate:s.read(24)};return s.seek(64),e}function ct(s,t,e,i,r){let o=s.read(8),a=s.read(24),c=s.read(16),w=s.read(16),m=Math.floor(w-8-4*4*o),l=Math.floor(m/8)*20;if(o!=t.channels||a!=t.sampleRate||c*o>l)throw new Error("invalid frame header data");for(let n=0;n0&&s.read(A)}return c}function tt(s){if(s.byteLength<16)throw new Error(`QOA file size must be >= ${16}`);let t=new q(s),e=at(t),i=[],r=[];for(let c=0;cMath.floor(((1<<16)+s-1)/s)),ft=[7,7,7,5,5,3,3,1,0,0,2,2,4,4,6,6,6];function ut(s,t){let e=ht[t],i=s*e+(1<<15)>>16;return i=i+((s>0)-(s<0))-((i>0)-(i<0)),i}function _t(s,t,e,i,r){let 
o=t.channels,a=t.sampleRate,c=t.channelData,w=t.samples,m=Math.floor((r+20-1)/20),M=V(o,m);s.write(o,8),s.write(a,24),s.write(r,16),s.write(M,16);for(let l=0;ld)break;Z(b.weights,b.history,j,X),I.push(W)}A=1?s[0].length:0,r={samples:i,channels:e,channelData:s,sampleRate:t},o=(i+5120-1)/5120,a=(i+20-1)/20,c=8+o*8+o*4*4*r.channels+a*8*r.channels,w=[];for(let l=0;l{$.exports=Worker});var B=(n,p)=>{Object.defineProperty(n,"name",{value:p})};function o(n){let p=Uint8Array,f=Float32Array;o.modules||Object.defineProperties(o,{modules:{value:new WeakMap},setModule:{value(e,s){o.modules.set(e,Promise.resolve(s))}},getModule:{value(e,s){let t=o.modules.get(e);return t||(s?t=WebAssembly.compile(o.decodeDynString(s)):(s=e.wasm,t=o.inflateDynEncodeString(s).then(U=>WebAssembly.compile(U))),o.modules.set(e,t)),t}},concatFloat32:{value(e,s){let t=new f(s),U=0,r=0;for(;U({errors:e,channelData:s,samplesDecoded:t,sampleRate:U,bitDepth:r})},getDecodedAudioMultiChannel:{value(e,s,t,U,r,h){let a=[],u,l;for(u=0;u0?a+U:a-t}return s.subarray(0,h)}},inflateDynEncodeString:{value(e){return e=o.decodeDynString(e),new Promise(s=>{let t=String.raw`dynEncode0014u*ttt$#U¤¤U¤¤3yzzss|yusvuyÚ&4<054<,5T44^T44<(6U~J(44< ~A544U~6J0444545 444J0444J,4U4U
Ò7U454U4Z4U4U^/6545T4T44BU~64CU~O4U54U~5 U5T4B4Z!4U~5U5U5T4U~6U4ZTU5U5T44~4O4U2ZTU5T44Z!4B6T44U~64B6U~O44U~4O4U~54U~5 44~C4~54U~5 44~5454U4B6Ub!444~UO4U~5 U54U4ZTU#44U$464<4~B6^4<444~U~B4U~54U544~544~U5 µUä#UJUè#5TT4U0ZTTUX5U5T4T4Uà#~4OU4U $~C4~54U~5 T44$6U\!TTT4UaT4<6T4<64<Z!44~4N4<U~5 4UZ!4U±_TU#44UU6UÔ~B$544$6U\!4U6U¤#~B44Uä#~B$~64<6_TU#444U~B~6~54<Y!44<_!T4Y!4<64~444~AN44<U~6J4U5 44J4U[!U#44UO4U~54U~5 U54 7U6844J44J 4UJ4UJ04VK(44<J44<J$4U´~54U~5 4U¤~5!TTT4U$5"U5TTTTTTT4U$"4VK,U54<(6U~64<$6_!4< 64~6A54A544U~6#J(U54A4U[!44J(44#~A4U6UUU
[!4464~64_!4<64~54<6T4<4]TU5 T4Y!44~44~AN4U~54U~54U5 44J(44J UÄA!U5U#UôJU"UÔJU#UÔ"JU#U´"JT4U´ZTU5T4UôZTU5T4UDZTU5T4U$[T44~UO4U~5 UÔUô4U~U´$.U5T4UP[T4U~4~UO4U~5 U#<U#<4U~U2$.UÄUN 44 ~UO4U~5 44!~UO4U~5 4U~4~UO4U~5 44J44J(U5 44U¤~J@44Uä~J<44UD~J844U~J44U$54U$5U54U$54U1^4U1^!4U~54U~5U54U~6U4U^/65T4T4U$54U~4BU~4O4U54U~5 UU'464U'_/54UU~5T4T4U~4BU~UO4U54U~5 U54Uä~4U¤~4U~U'$!44~5U5T44\T44U<~$6U\!4U#aT4U~4U~4O4U~5 U5U5U5TTT4U$"4YTU5 4U4~C5U5 U5U5444$4~64~\TU5 4U~4U~5T4Y!44O4U~54U~54U5 4CYTU5 4Uä~4U¤~4U~4$6TU54U\!44Bæ4Bä~[!4U~4UD~4U~4U~4$6TU54U\!44B4B~[!44U<~4U4~$5 4U"U#$544"Y!454U^!44<J44<(J454U~84UN!#%'+/37?GOWgw·×÷Uä;U9$%& !"#`;o.getModule(o,t).then(U=>WebAssembly.instantiate(U,{})).then(({exports:U})=>{let r=new Map(Object.entries(U)),h=r.get("puff"),a=r.get("memory").buffer,u=new p(a),l=new DataView(a),i=r.get("__heap_base"),d=e.length,c=i;i+=4,l.setInt32(c,d,!0);let m=i;i+=d,u.set(e,m);let T=i;i+=4,l.setInt32(T,u.byteLength-i,!0),h(i,T,m,c),s(u.slice(i,i+l.getInt32(T,!0)))})})}}}),Object.defineProperty(this,"wasm",{enumerable:!0,get:()=>this._wasm}),this.getOutputChannels=(e,s,t)=>{let U=[],r=0;for(;r{let U=this._wasm._malloc(s.BYTES_PER_ELEMENT*e);return t&&this._pointers.add(U),{ptr:U,len:e,buf:new s(this._wasm.HEAP,U,e)}},this.free=()=>{this._pointers.forEach(e=>{this._wasm._free(e)}),this._pointers.clear()},this.codeToString=e=>{let s=[],t=new Uint8Array(this._wasm.HEAP);for(let U=t[e];U!==0;U=t[++e])s.push(U);return String.fromCharCode.apply(null,s)},this.addError=(e,s,t)=>{e.push({message:s,frameLength:t,frameNumber:n._frameNumber,inputBytes:n._inputBytes,outputSamples:n._outputSamples})},this.instantiate=()=>{let e=n._module,s=n._EmscriptenWASM,t=n._inputSize,U=n._outputChannels,r=n._outputChannelSize;return e&&o.setModule(s,e),this._wasm=new s(o).instantiate(),this._pointers=new Set,this._wasm.ready.then(()=>(t&&(n._input=this.allocateTypedArray(t,p)),r&&(n._output=this.allocateTypedArray(U*r,f)),n._inputBytes=0,n._outputSamples=0,n._frameNumber=0,this))}}var J=S(b(),1);var C=()=>globalThis.Worker||J.default,g=class extends C(){constructor(p,f,e,s){o.modules||new o;let t=o.modules.get(e);if(!t){let U=`'use strict';(${((h,a,u)=>{let l,i,d=new Promise(c=>{i=c});self.onmessage=({data:{id:c,command:m,data:T}})=>{let _=d,y={id:c},w;m==="init"?(Object.defineProperties(h,{WASMAudioDecoderCommon:{value:a},EmscriptenWASM:{value:u},module:{value:T.module},isWebWorker:{value:!0}}),l=new h(T.options),i()):m==="free"?l.free():m==="ready"?_=_.then(()=>l.ready):m==="reset"?_=_.then(()=>l.reset()):(Object.assign(y,l[m](Array.isArray(T)?T.map(A=>new Uint8Array(A)):new Uint8Array(T))),w=y.channelData?y.channelData.map(A=>A.buffer):[]),_.then(()=>self.postMessage(y,w))}}).toString()})(${e}, ${o}, ${s})`,r="text/javascript";try{t=URL.createObjectURL(new Blob([U],{type:r}))}catch{t=`data:${r};base64,${Buffer.from(U).toString("base64")}`}o.modules.set(e,t)}super(t,{name:f}),this._id=Number.MIN_SAFE_INTEGER,this._enqueuedOperations=new Map,this.onmessage=({data:U})=>{let{id:r,...h}=U;this._enqueuedOperations.get(r)(h),this._enqueuedOperations.delete(r)},new s(o).getModule().then(U=>{this._postToDecoder("init",{module:U,options:p})})}async _postToDecoder(p,f){return new Promise(e=>{this.postMessage({command:p,id:this._id,data:f}),this._enqueuedOperations.set(this._id++,e)})}get ready(){return this._postToDecoder("ready")}async free(){await this._postToDecoder("free").finally(()=>{this.terminate()})}async reset(){await this._postToDecoder("reset")}};export{o as a,g as b,B as c};
2 | //# sourceMappingURL=chunk-SCMJQOGN.js.map
3 |
--------------------------------------------------------------------------------
/dist/worker.js:
--------------------------------------------------------------------------------
1 | import{a as W,b as z,c as G,d as _,e as H,f as J}from"./chunk-ZZPGCUWV.js";import"./chunk-A576P2ZR.js";var te=function(){function e(){}return e.prototype.then=function(t,n){let r=new e,o=this.s;if(o){let i=1&o?t:n;if(i){try{d(r,1,i(this.v))}catch(a){d(r,2,a)}return r}return this}return this.o=function(i){try{let a=i.v;1&i.s?d(r,1,t?t(a):a):n?d(r,1,n(a)):d(r,2,a)}catch(a){d(r,2,a)}},r},e}();function d(e,t,n){if(!e.s){if(n instanceof te){if(!n.s)return void(n.o=d.bind(null,e,t));1&t&&(t=n.s),n=n.v}if(n&&n.then)return void n.then(d.bind(null,e,t),d.bind(null,e,2));e.s=t,e.v=n;let r=e.o;r&&r(e)}}var fe=0,A=typeof WeakMap=="function"?WeakMap:function(){var e=typeof Symbol=="function"?Symbol(0):"__weak$"+ ++fe;this.set=function(t,n){t[e]=n},this.get=function(t){return t[e]}};function Y(e,t){return new Promise(function(n,r){e.onsuccess=function(){var o=e.result;t&&(o=t(o)),n(o)},e.onerror=function(){r(e.error)}})}function ne(e,t){return Y(e.openCursor(t),function(n){return n?[n.key,n.value]:[]})}function Q(e){return new Promise(function(t,n){e.oncomplete=function(){t()},e.onabort=function(){n(e.error)},e.onerror=function(){n(e.error)}})}function K(e){if(!function(t){return!!(typeof t=="number"||typeof t=="string"||typeof t=="object"&&t&&(Array.isArray(t)||"setUTCFullYear"in t||typeof ArrayBuffer=="function"&&ArrayBuffer.isView(t)||"byteLength"in t&&"length"in t))}(e))throw Error("kv-storage: The given value is not allowed as a key")}var re={};function X(e,t){return ne(e,oe(t))}function oe(e){return e===re?IDBKeyRange.lowerBound(-1/0):IDBKeyRange.lowerBound(e,!0)}var ie=new A,T=new A,x=new A,ue=new A,E=function(){};function ee(e,t){return t(function(n,r){try{let v=function(){return T.set(e,i),x.set(e,void 0),{value:c,done:i===void 0}};var o=T.get(e);if(o===void 0)return Promise.resolve({value:void 0,done:!0});var i,a,c,h=function(f,u){var p,l=-1;e:{for(var y=0;y{let{id:t,ops:n}=e.data,r;for(;t{let t=e.map(a=>H(a)),n=e.reduce((a,{duration:c})=>a+c,0),r=await _(...e),o=new Blob([r],{type:"audio/wav"}),i=URL.createObjectURL(o);self.postMessage({id:j.length,url:i,segments:t,duration:n})},j=[],s=[],ce={async src(...e){return j.push(()=>s=[]),s=await Promise.all(e.map(G)),s},async file(e){if(typeof e=="string"){let r=await V.get(Z+":"+e);if(!r)return s;let o=await J(r);return s=[await z(o)]}j.push(()=>s.pop());let t=new W({numberOfChannels:e.numberOfChannels,length:e.length,sampleRate:e.sampleRate});e.channelData.forEach((r,o)=>t.getChannelData(o).set(r)),s.push(t);let n=new Blob([await _(...s)]);return console.log("save",Z+":"+e.name),V.set(Z+":"+e.name,n),s},del(e,t){e=Number(e),t=Number(t);let n=[...s];j.push(()=>{s=n});let r=ae(e),o=ae(t);!o[1]&&o[0]&&(o[0]-=1,o[1]=s[o[0]].length);let i=s[r[0]],a=s[o[0]],c=r[1]+(a.length-o[1]);if(!c)return s=[];let h=new W({length:c,sampleRate:i.sampleRate,numberOfChannels:i.numberOfChannels});for(let f=0;f{let t=e*1024;if(t===0)return[0,0];var n=0,r;for(let o=0;o> 3));
31 | let v = new DataView(buffer);
32 | let pos = 0;
33 | const u8 = (x) => v.setUint8(pos++, x);
34 | const u16 = (x) => (v.setUint16(pos, x, true), pos += 2)
35 | const u32 = (x) => (v.setUint32(pos, x, true), pos += 4)
36 | const string = (s) => { for (var i = 0; i < s.length; ++i) u8(s.charCodeAt(i)); }
37 | string("RIFF");
38 | u32(buffer.byteLength - 8);
39 | string("WAVE");
40 | string("fmt ");
41 | u32(16);
42 | u16(3); // float
43 | u16(channels);
44 | u32(sampleRate);
45 | u32(sampleRate * channels * (bitDepth >> 3));
46 | u16(channels * (bitDepth >> 3));
47 | u16(bitDepth);
48 | string("data");
49 | u32(buffer.byteLength - 44);
50 |
51 | // FIXME: can just copy data for mono case (way faster)
52 | // FIXME: should we instead work directly with the wav buffer rather than audio buffers?
53 | let output = new Float32Array(buffer, pos), sample = 0; // pos is a byte offset into the header, so index samples separately
54 | for (let audioBuffer of audioBuffers) {
55 | let channels = audioBuffer.numberOfChannels,
56 | channelData = Array(channels),
57 | length = audioBuffer.length
58 | for (let ch = 0; ch < channels; ++ch) channelData[ch] = audioBuffer.getChannelData(ch)
59 | for (let i = 0; i < length; ++i)
60 | for (let ch = 0; ch < channels; ++ch) output[sample++] = channelData[ch][i];
61 | }
62 |
63 | console.timeEnd('wav encode')
64 | return buffer;
65 | }
66 |
67 | // convert audio buffer to waveform string
68 | export function drawAudio(audioBuffer) {
69 | if (!audioBuffer) return '';
70 |
71 | // if waveform is rendered already - return cached
72 | if (audioBuffer._wf) return audioBuffer._wf;
73 |
74 | // console.time('draw string')
75 |
76 | // map waveform to wavefont
77 | let channelData = audioBuffer.getChannelData(0), str = ''
78 |
79 | // TODO: weight waveform by audible spectrum
80 |
81 | // create wavefont string
82 | // amp coef brings up value a bit
83 | const VISUAL_AMP = 2
84 | const RANGE = 128, AMP = 2
85 |
86 | for (let i = 0, nextBlock = BLOCK_SIZE; i < channelData.length;) {
87 | let ssum = 0, sum = 0, x, avg, v, shift
88 |
89 | // avg amp method - waveform is too small
90 | // for (; i < nextBlock; i++) {
91 | // x = i >= channelData.length ? 0 : channelData[i]
92 | // sum += Math.abs(x)
93 | // }
94 | // avg = sum / BLOCK_SIZE
95 | // v = Math.ceil(avg * RANGE)
96 | // shift = 0
97 |
98 | // rms method
99 | // drawback: waveform is smaller than needed
100 | // for (; i < nextBlock; i++) {
101 | // x = i >= channelData.length ? 0 : channelData[i]
102 | // sum += x
103 | // ssum += x ** 2
104 | // }
105 | // avg = sum / BLOCK_SIZE
106 | // const rms = Math.sqrt(ssum / BLOCK_SIZE)
107 | // v = Math.min(RANGE, Math.ceil(rms * RANGE * VISUAL_AMP / 2)) || 0
108 | // shift = Math.round(avg * RANGE / 2)
109 |
110 | // signal energy loudness
111 | // ref: https://github.com/MTG/essentia/blob/master/src/algorithms/temporal/loudness.cpp
112 | // same as RMS essentially, different power
113 | // const STEVENS_POW = 0.67
114 | // for (; i < nextBlock; i++) ssum += i >= channelData.length ? 0 : channelData[i] ** 2
115 | // const value = (ssum / BLOCK_SIZE) ** STEVENS_POW
116 | // v = Math.min(RANGE, Math.ceil(value * RANGE * VISUAL_AMP))
117 | // shift = 0
118 |
119 | // peak amplitude
120 | let max = -1, min = 1
121 | for (; i < nextBlock; i++) {
122 | x = i >= channelData.length ? 0 : channelData[i]
123 | sum += x
124 | max = Math.max(max, x)
125 | min = Math.min(min, x)
126 | }
127 | v = Math.min(RANGE, Math.ceil(RANGE * (max - min) / AMP)) || 0
128 | shift = Math.round(RANGE * (max + min) / (2 * AMP))
129 |
130 | str += String.fromCharCode(0x0100 + v)
131 | str += (shift > 0 ? '\u0301' : '\u0300').repeat(Math.abs(shift))
132 |
133 | nextBlock += BLOCK_SIZE
134 | }
135 |
136 | // cache waveform
137 | audioBuffer._wf = str
138 |
139 | // console.timeEnd('draw string')
140 | return str
141 | }
142 |
143 | export function sliceAudio(buffer, start = 0, end = buffer.length) {
144 | let newBuffer = new AudioBuffer({
145 | length: end - start,
146 | numberOfChannels: buffer.numberOfChannels,
147 | sampleRate: buffer.sampleRate
148 | });
149 |
150 | for (var c = 0; c < newBuffer.numberOfChannels; c++) {
151 | newBuffer.copyToChannel(
152 | buffer.getChannelData(c).subarray(start, end),
153 | c, 0
154 | )
155 | }
156 |
157 | return newBuffer
158 | }
159 |
160 | export function joinAudio(a, b) {
161 | let newBuffer = new AudioBuffer({
162 | length: a.length + b.length,
163 | numberOfChannels: Math.max(a.numberOfChannels, b.numberOfChannels),
164 | sampleRate: a.sampleRate
165 | })
166 |
167 | for (let ch = 0; ch < newBuffer.numberOfChannels; ch++) {
168 | newBuffer.copyToChannel(
169 | a.getChannelData(ch),
170 | ch, 0
171 | )
172 | newBuffer.copyToChannel(
173 | b.getChannelData(ch),
174 | ch, a.length
175 | )
176 | }
177 |
178 | return newBuffer
179 | }
180 |
181 | export function deleteAudio(buffer, start = 0, end = buffer.length) {
182 | let newBuffer = new AudioBuffer({
183 | length: buffer.length - Math.abs(end - start),
184 | numberOfChannels: buffer.numberOfChannels,
185 | sampleRate: buffer.sampleRate
186 | });
187 |
188 | for (var c = 0; c < buffer.numberOfChannels; c++) {
189 | var channelData = buffer.getChannelData(c)
190 | var newChannelData = newBuffer.getChannelData(c)
191 | newChannelData.set(channelData.subarray(0, start), 0);
192 | newChannelData.set(channelData.subarray(end), start);
193 | }
194 |
195 | return newBuffer
196 | }
197 |
198 | export function insertAudio(a, offset, b) {
199 | if (offset >= a.length) return joinAudio(a, b)
200 | if (!offset) return joinAudio(b, a)
201 |
202 | let buffer = new AudioBuffer({
203 | length: a.length + b.length,
204 | numberOfChannels: Math.max(a.numberOfChannels, b.numberOfChannels),
205 | sampleRate: a.sampleRate
206 | })
207 |
208 | for (let ch = 0; ch < buffer.numberOfChannels; ch++) {
209 | buffer.copyToChannel(
210 | a.getChannelData(ch).subarray(0, offset),
211 | ch, 0
212 | )
213 | buffer.copyToChannel(
214 | b.getChannelData(ch),
215 | ch, offset
216 | )
217 | buffer.copyToChannel(
218 | a.getChannelData(ch).subarray(offset),
219 | ch, offset + b.length
220 | )
221 | }
222 | return buffer
223 | }
224 |
225 | export function cloneAudio(a) {
226 | let b = new AudioBuffer({ sampleRate: a.sampleRate, numberOfChannels: a.numberOfChannels, length: a.length })
227 | for (let ch = 0; ch < a.numberOfChannels; ch++) b.getChannelData(ch).set(a.getChannelData(ch))
228 | return b
229 | }
230 |
231 | export const fileToArrayBuffer = (file) => {
232 | return new Promise((y, n) => {
233 | const reader = new FileReader();
234 | reader.addEventListener('loadend', (event) => {
235 | y(event.target.result);
236 | });
237 | reader.addEventListener('error', n)
238 | reader.readAsArrayBuffer(file);
239 | })
240 | }
241 |
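242 | // Usage sketch (assuming the truncated encoder above is the exported `encodeAudio`;
243 | // offsets are in samples, e.g. 1s..2s at 44.1 kHz):
244 | // let edited = deleteAudio(buffer, 44100, 88200)
245 | // let wf = drawAudio(edited)      // wavefont string for display
246 | // let wav = encodeAudio(edited)   // ArrayBuffer holding a 32-bit float WAV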
--------------------------------------------------------------------------------
/main.css:
--------------------------------------------------------------------------------
1 | * {
2 | box-sizing: border-box;
3 | }
4 |
5 | html,
6 | body {
7 | margin: 0;
8 | }
9 |
10 | body {
11 | font-family: sans-serif;
12 | }
13 |
14 | [hidden] {
15 | display: none !important;
16 | }
17 |
18 | [disabled] {
19 | opacity: .5;
20 | }
21 |
22 | @font-face {
23 | font-family: wavefont;
24 | font-display: block;
25 | src: url(./asset/wavefont.woff2) format('woff2');
26 | }
27 |
28 | .wavefont {
29 | display: block;
30 | --wght: 25;
31 | font-family: wavefont;
32 | /* letter-spacing: 1.5ch; */
33 | font-size: var(--wavefont-size, 50px);
34 | line-height: var(--wavefont-lh);
35 | font-variation-settings: 'wght' var(--wght), 'ROND' 100, 'YALN' 0;
36 | text-rendering: optimizeSpeed;
37 | font-smooth: grayscale;
38 | -webkit-font-smoothing: grayscale;
39 | -moz-osx-font-smoothing: grayscale;
40 | }
41 |
42 | .container {
43 | display: flex;
44 | position: relative;
45 | width: 100%;
46 | margin: 1rem auto;
47 | padding: 0 0 0;
48 | justify-content: center;
49 | }
50 |
51 | #wavearea {
52 | height: 100%;
53 | margin: 0;
54 | padding: 0 2vw;
55 | display: flex;
56 | flex-direction: column;
57 | /* --wavefont-size: max(4rem, min(10.8vw, 6rem)); */
58 | --wavefont-size: 50px;
59 | /* Value is special: it doesn't break on mobile */
60 | --wavefont-lh: calc(var(--wavefont-size) * 1.4);
61 | --secondary: rgb(0 0 0 / 33%);
62 | --primary: black;
63 |
64 | #loading {
65 | cursor: wait !important;
66 | }
67 |
68 | #waveform {
69 | position: relative;
70 | width: 100%;
71 | /* this 0.5ch fixes inconsistent breaking, seemingly a rounding error */
72 | max-width: calc(4 * 216 * 1ch + 0.5ch);
73 | margin: 1rem;
74 | margin-left: 4rem;
75 | }
76 |
77 | #waveform.dragover {
78 | /* cursor: drop; */
79 | }
80 |
81 | #editarea,
82 | #loader {
83 | outline: none;
84 | width: 100%;
85 | color: var(--primary);
86 | }
87 |
88 | #editarea {
89 | /* background-size: 1px calc(var(--wavefont-size) * 1.4);
90 | background-position: 0% 4.2rem;
91 | background-image: repeating-linear-gradient(0deg, var(--secondary) -0.5px, rgb(255 255 255 / 0%) 0.5px, rgb(255 255 255 / 0%)); */
92 | position: relative;
93 | p::selection {
94 | background-color: var(--secondary);
95 | }
96 | }
97 |
98 |
99 | /* played samples dimmer */
100 | #editarea.playing:before,
101 | #editarea.playing:after {
102 | content: '';
103 | position: absolute;
104 | background: rgba(255, 255, 255, .75);
105 | pointer-events: none;
106 | z-index: 1;
107 | }
108 |
109 | #editarea:before {
110 | bottom: 0;
111 | left: -1px;
112 | right: -1px;
113 | top: calc(var(--carety) + var(--wavefont-lh));
114 | }
115 |
116 | #editarea:after {
117 | top: var(--carety);
118 | right: -1px;
119 | left: var(--caretx);
120 | height: var(--wavefont-lh);
121 | }
122 |
123 | #loader {
124 | top: 0;
125 | display: block;
126 | position: absolute;
127 | z-index: 1;
128 | pointer-events: none;
129 | color: var(--secondary);
130 | }
131 |
132 |
133 | .segment {
134 | position: relative;
135 | margin: 0;
136 | padding: 0;
137 | min-height: var(--wavefont-lh);
138 |
139 | word-break: break-all;
140 | white-space: break-spaces;
141 | line-break: anywhere;
142 | }
143 |
144 | #timecodes,
145 | #status {
146 | position: absolute;
147 | top: 0;
148 | left: -3rem;
149 | font-family: sans-serif;
150 | letter-spacing: 0;
151 | font-size: .75rem;
152 | margin: 0;
153 | word-break: keep-all;
154 | white-space: pre;
155 | color: var(--secondary);
156 | line-height: var(--wavefont-lh);
157 | }
158 |
159 | #timecodes {
160 | display: flex;
161 | flex-direction: column;
162 | }
163 |
164 | #timecodes>* {
165 | margin: 0;
166 | text-decoration: none;
167 | color: var(--secondary);
168 | }
169 |
170 | #status {
171 | left: -4rem;
172 | }
173 |
174 | #caret-line,
175 | #opener {
176 | position: absolute;
177 | margin-left: -3.8rem;
178 | width: 3rem;
179 | height: var(--wavefont-lh);
180 | top: var(--carety);
181 | display: flex;
182 | align-items: center;
183 | justify-content: center;
184 | }
185 |
186 | #opener {
187 | margin-left: -3.2rem;
188 | }
189 |
190 | #caret-line {
191 | pointer-events: none;
192 | margin-left: -4rem;
193 | }
194 |
195 | #file {
196 | width: 0.1px;
197 | height: 0.1px;
198 | opacity: 0;
199 | overflow: hidden;
200 | position: absolute;
201 | z-index: -1;
202 | }
203 |
204 | #file+label {
205 | display: flex;
206 | align-items: center;
207 | font-size: 1rem;
208 | cursor: pointer;
209 | user-select: none;
210 | }
211 |
212 | #record {
213 | align-items: center;
214 | padding: 0;
215 | text-align: center;
216 | display: flex;
217 | appearance: none;
218 | border: none;
219 | background: none;
220 | cursor: pointer;
221 | font-size: 1rem;
222 | user-select: none;
223 | }
224 |
225 | #floater {
226 | position: sticky;
227 | top: 0;
228 | bottom: var(--wavefont-lh);
229 | height: 0;
230 | display: flex;
231 | line-height: 0;
232 | margin-top: calc(-1*var(--wavefont-lh));
233 |
234 | #play {
235 | background: linear-gradient(to bottom, rgb(255 255 255 / 0%) 0%, rgb(255 255 255 / 100%) 25%, rgb(255 255 255 / 100%) 75%, rgb(255 255 255 / 0%) 100%);
236 | height: var(--wavefont-lh );
237 | }
238 | }
239 |
240 | #play {
241 | padding: 0;
242 | text-align: center;
243 | appearance: none;
244 | border: none;
245 | cursor: pointer;
246 | justify-content: center;
247 | align-items: center;
248 | -webkit-tap-highlight-color: transparent;
249 | z-index: 1;
250 | display: flex;
251 | background: none;
252 |
253 | #clickarea {
254 | height: calc(var(--wavefont-lh) * 3);
255 | width: 100%;
256 | position: absolute;
257 | top: -100%;
258 |
259 | height: 4rem;
260 | width: 4rem;
261 | position: absolute;
262 | left: -1rem;
263 | top: 0;
264 | bottom: 0;
265 | margin: auto;
266 | }
267 | }
268 |
269 | #playback {
270 | position: fixed;
271 | bottom: 4rem;
272 | width: auto;
273 | margin: auto;
274 | z-index: 2;
275 | display: flex;
276 | gap: .6rem;
277 | align-items: center;
278 | font-size: .8rem;
279 | padding: 1rem 1.2rem;
280 | border-radius: 50px;
281 | border: 1px solid rgb(0 0 0 / 3%);
282 | box-shadow: rgb(0 0 0 / 22%) 0px 4px 16px -8px, rgb(0 0 0 / 7%) 0px 1px 2px;
283 | background: linear-gradient(to bottom, rgb(255 255 255 / 95%) 0%, rgb(255 255 255 / 100%) 33%, rgb(255 255 255 / 100%) 100%);
284 | /* -webkit-backdrop-filter: blur(2px); */
285 | /* backdrop-filter: blur(2px); */
286 |
287 | .time {
288 | font-variant-numeric: tabular-nums;
289 | }
290 | }
291 |
292 | #krsnzd {
293 | position: fixed;
294 | bottom: 1.08rem;
295 | right: 1.08rem;
296 | margin: 0 auto;
297 | text-decoration: none;
298 | color: var(--primary);
299 | opacity: .108;
300 | transition: .108s ease-out;
301 | }
302 |
303 | #krsnzd:hover {
304 | opacity: .82;
305 | }
306 |
307 | #info-button {
308 | position: fixed;
309 | bottom: .8rem;
310 | right: .8rem;
311 | appearance: none;
312 | background: none;
313 | border: none;
314 | opacity: 0.25;
315 | cursor: pointer;
316 | }
317 |
318 | #info-button:hover {
319 | opacity: 1;
320 | }
321 |
322 | #info-dialog {
323 | padding: 1.2rem;
324 | border: none;
325 | border-radius: .8rem;
326 | box-shadow: rgb(0 0 0 / 22%) 0px 4px 16px -8px, rgb(0 0 0 / 7%) 0px 1px 2px;
327 | background: white;
328 | opacity: 0;
329 | display: none;
330 | transition: opacity .2s ease-out;
331 | inset: 0;
332 | }
333 |
334 | #info-dialog[open] {
335 | opacity: 1;
336 | margin: auto;
337 | display: block;
338 | transition: opacity .2s ease-out;
339 | }
340 |
341 | #info-dialog>* {
342 | margin: 0
343 | }
344 |
345 | #info-dialog::backdrop {
346 | background: rgba(108, 108, 108, .16);
347 | -webkit-backdrop-filter: blur(0px);
348 | backdrop-filter: blur(0px);
349 | transition: opacity .5s ease-out;
350 | }
351 |
352 | #info-dialog[open]::backdrop {
353 | -webkit-backdrop-filter: blur(4px);
354 | backdrop-filter: blur(4px);
355 | }
356 | }
357 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Wavearea
5 |
6 |
7 |
8 |
9 |
10 |
22 |
23 |
29 |
103 |
104 |
107 | ...
108 |
109 |
110 |
113 |
121 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
144 |
145 |
146 |
147 |
163 |
164 |
165 |
166 |
167 |
168 |
--------------------------------------------------------------------------------
/research.md:
--------------------------------------------------------------------------------
1 | ## [x] Name -> wavearea
2 |
3 | * waveedit
4 | * wavearea
5 | + alliteration ea ea
6 | + canonical direct name
7 | * waver
8 | * wavee
9 | * waveplay
10 | + free
11 | + works with waveplayer
12 | * wavely
13 | + wave-ply
14 | + works with wavedy for editor
15 | * waveplayer
16 | * playwave
17 | * waev
18 | + wæv
19 | + anagram
20 | + free
21 | + refers to sprae
22 | + phonetically correct form of "wave"
23 | - can be done without sprae
24 | - registered company name
25 | * wavea
26 | + wavearea
27 | * wavescope
28 | * waveview
29 | * waveplae
30 | + refers to sprae (ostensibly better than waev)
31 | + refers to waveplay - better association than waev
32 | * plae
33 | + player
34 | + refers to waveplae
35 | + refers to sprae
36 | - taken
37 | * wavr
38 | + registered
39 | + short for wave-area
40 | + better fits wav
41 | + refers to plyr
42 |
43 | ## [ ] Random files / demo cases
44 |
45 | * Classics?
46 | * Famous quotes?
47 | * Prabhupada vani?
48 | * Audio books?
49 | * Poetry?
50 | * Mantras/Mahamantra?
51 | * Randomly generated music stream?
52 | * The most popular songs of all time?
53 | * Vedas?
54 | * Random forest sounds!
55 |
56 | ## [ ] Intro screen: ideas?
57 |
58 | * !recent history of files
59 | * !random file?
60 | * !record mic
61 | * !generate speech (some free API)
62 | * !generate signal
63 | * !some AI stuff (generate from prompt)
64 | * !open file(s), drop file(s)
65 | * It must be meaningful & entertaining: each time educative content, like voiced aphorism etc.
66 |
67 | ## [ ] Cases / integrations
68 |
69 | * Drop [Prabhupada] audio (paste by URL, by file, drop file), have multiline waveform with time markers.
70 | * Separate logical sections by pressing enter.
71 | * Delete apparent long pauses.
72 | * Apply normalizer plugin.
73 | * Select start, apply fade-in; select end, apply fade-out.
74 | * Put cursor at any place: record own speech.
75 | * Drop any audio chunk at specific caret location.
76 | * Generate speech at specific location.
77 | * [ ] Sound fragments sharing platform
78 | * [ ] Hosting files via github
79 | * [ ] Sampler player, like te-re-khe-ta from URL will play sampled phrases by dictionary
80 | * [ ] Multiple variations of theming
81 | * [ ] Voice emails integration
82 | * [ ] Multiple various transforms: speed up, skip silence, enhance recording, normalize
83 | * [ ] Famous voices speak famous phrases - chunks to share
84 | * [ ] Dictaphone
85 | * [ ] Customizable waveform player component: loudness variants, rendering complexity variants, themes, backend variants
86 | * [ ] Audio books with paged chapters for playback
87 | * [ ] sound-resource.com
88 | * [ ] Assembly AI transcript player https://www.assemblyai.com/playground/transcript/rfj7ddsp95-7929-4158-8cf7-27d897b47b96
89 |
90 | ## [x] Editing cases: what's the method of identifying changes? -> detect from onbeforeinput inputType
91 |
92 | * Delete part (selection)
93 | * Delete single block
94 | * Paste piece from the other part
95 | * Separate by Enter
96 | * Paste audio file
97 |
98 | 1. 1a2b3a4a5...123a124b125c
99 |
100 | - letter characters are selectable / navigable, unlike combos
101 | + allows identifying parts exactly
102 | - too extended string
103 |
104 | 2. \uff** + bar
105 |
106 | - same as above
107 | + less space taken
108 |
109 | 3. Detect operation from changed input based on current selection
110 |
111 | + no overhead
112 | + smart algo
113 | ~ selection can be unreliable on some devices
114 | + more reliably detects allowed inputs
115 | - no way to paste samples from somewhere else
116 |
117 | ## [x] Paragraphs instead of textarea -> let's try p
118 |
119 | + No hardship detecting line breaks
120 | + Easy way to display time codes
121 | + We anyways display single duplication node
122 | - Not textarea already: textarea can be useful for simple small fragments
123 |
124 | ## [x] CRDT: keep ops in URL -> yes, see readme for latest format details
125 |
126 | + allows undo/redo just from URL
127 | + allows permanent links to edited audio pieces
128 | * allowed URL chars: ;,/?:@&=+$-_.!~*()#
129 | ? ops: `add(0:url(path/to/file)),br(112,5634,12355),del(12:45,123:234)`
130 | * delete: `-(start:amt,23:12,...)`
131 | * add: `+(start:src,23:url(https://path/to/file),...)`
132 | * silence: `_(start:amt,start:amt,...)`
133 | * breaks: `.(offset,23,112,1523)`
134 | * normalize?
135 | * remove-silence?
136 | * enhance-quality-via-external-processor, like `process(adobe-enhancer)`
137 | * Alt: `src=path/to-file&br=112,5634,12355&del=12:45,123:234`
138 | + colon is perfect separator: `#line:col`
139 | + one entry is one history item
140 | + shorturl for audio files
141 |
142 | ## [ ] Store offsets in blocks or samples?
143 |
144 | 1. Blocks
145 | - depend on block size & sample rate
146 | - block size can change transforms
147 | - no precise editing
148 | ~ isn't necessarily needed
149 | + very short notation
150 | + very natural to what you see
151 | ? can define `block=1024&sr=44100` in url
152 | - any zoom change recalculates full url
153 |
154 | 2. Samples
155 | - depend on sample rate
156 | ~ sample rate change recalculates URL
157 | - longer than block
158 | + more precise
159 | + zoom change doesn't change url
160 | - big sample rates make very long URLs
161 |
162 | 3. Time
163 | - too lengthy values
164 | - can be mistakes identifying exact place
165 | + doesn't depend on zoom / sample rate levels
166 | + can be very precise
167 | + can have conventional short notation: br=122.1s,156.432s,
168 |
169 | 4. Mix of 3 and 1: units indicate time, values indicate block
170 | - lazy solution: can be fixed on experimental stage
171 |
172 | ## [ ] From-to vs at-count
173 |
174 | + `del=from-to` is more logical as range indicator
175 | + also easier from code perspective
176 | - `sil=at-count` is more logical to insert silence
177 |
178 |
179 | ## [x] Looping method -> custom UI for audio element: we need better UI anyways
180 |
181 | 0. Same way we observe currentTime via raf, we can loop
182 | - short pieces are not loopable nicely
183 | + solves long pieces
184 |
185 | 1. Create a clone of audio with selected fragment and loop it
186 | - keeping UI in sync
187 | + standard API
188 | + natural extension
189 | - can be costly to immediately create a big slice
190 | ~ there's no difference perf-wise between set & loop
191 | -> yes, create bg wav buffer on selection, and fully intercept audio
192 | * may need alternative UI, since original UI can fail
193 |
194 | 2. -> Custom UI for audio tag
195 | + anyways we were going to do that
196 | + better control over displayed data
197 | + it can allow removing unusable parts
198 | + we have raw data anyways
199 |
200 | 3. Custom UI via AudioSourceNode
201 | + better integration with audio buffers
202 | + no need to constantly (re) encode wav
203 | - requires sending audio buffers to main thread
204 | - not as reliable as just audio
205 |
206 | 4. media-offset
207 | + separates concern nicely
208 | + doesn't require worker slicing delay
209 | + can be messy on small chunks
210 | - defects on small chunks
211 | - rough api yet
212 |
213 |
214 | ## [ ] Inline player vs playback panel
215 |
216 | 1. Inline player
217 | + Inline player is minimalistic
218 | - Inline player is buggy on safari for intersection observer - needs fake scroll container, unless done via scroll
219 | - Inline player is buggy for multiple lines - misses the caret pos
220 | ~ We still may need to track caret-line properly (scroll into caret)
221 | ? unless the area is able to do it itself
222 |
223 | 2. Playback panel
224 | + Always accessible
225 | + Allows displaying any info: time, download, progress, record
226 | + Customizable
227 | + Conventional UX
228 | + Has no scrolling issues
229 |
230 | ## [x] WAA player vs Audio element -> use compensated audio for now. Too many benefits
231 |
232 | 1. Audio
233 | + More universally supported
234 | + Simpler API
235 | + Decoding out of box
236 | - Big delay in iOS for playback
237 | - No built-in loop support
238 | ~ Can be relatively safely implemented
239 | - API quirks / inconsistencies across iOS / desktop, like preloading
240 | - Event order is confusing: seeked, seeking, timeupdate - but we actually just need 'looped' or 'usernavigated'
241 | - Likely impossible to organize precise tests (if at all)
242 | + It opens the file nicely in iOS home screen
243 |
244 | 2. WAA (AudioSourceNode)
245 | + short latency
246 | + no 1.5s playback delay imposed by Safari
247 | - no ready playback API
248 | - may require live audiobuffer manipulations to output sound
249 | + loopStart/loopEnd support out of box
250 | + direct access to AudioBuffer: no need to constantly re-encode audio, ops can be more instantaneous
251 | - context instantiation issues (see web-audio-player)
252 |
253 | 3. web-audio-player https://github.com/Jam3/web-audio-player
254 | + attempt to fix many gotchas
255 | - switches between 2 modes: element / waa
256 |
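257 | ## Sketch: edit detection via beforeinput inputType
258 |
259 | A minimal sketch of the approach chosen in "Editing cases" above (option 3). The `#editarea` id matches src/selection.js; the op mapping in the comments is illustrative only:
260 |
261 | ```js
262 | document.querySelector('#editarea').addEventListener('beforeinput', (e) => {
263 |   switch (e.inputType) {
264 |     case 'deleteContentBackward':
265 |     case 'deleteContentForward':
266 |     case 'deleteByCut': /* -> del op */ break;
267 |     case 'insertParagraph': /* -> br op */ break;
268 |     case 'insertText': /* space -> silence op */ break;
269 |     default: e.preventDefault(); // disallow unsupported edits
270 |   }
271 | });
272 | ```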
--------------------------------------------------------------------------------
/src/worker.js:
--------------------------------------------------------------------------------
1 | // main audio processing API / backend
2 | import { BLOCK_SIZE, SAMPLE_RATE } from "./const.js";
3 | import { fetchAudio, cloneAudio, drawAudio, encodeAudio, sliceAudio, fileToArrayBuffer } from "./audio-util.js";
4 | import decodeAudio from 'audio-decode'
5 | import AudioBuffer from "audio-buffer";
6 | import storage from 'kv-storage-polyfill';
7 |
8 | // shim worker for Safari
9 | if (!globalThis.Worker) {
10 | let { default: Worker } = await import('pseudo-worker')
11 | globalThis.Worker = Worker
12 | }
13 |
14 | // ops worker - schedules message processing with debounced update
15 | self.onmessage = async e => {
16 | console.log('Worker message', e)
17 | let { id, ops } = e.data, resultBuffers
18 |
19 | // revert history if needed
20 | while (id < history.length) history.pop()()
21 |
22 | // apply op
23 | for (let op of ops) {
24 | console.log('Apply op', op)
25 | let [name, ...args] = op
26 | resultBuffers = await Ops[name]?.(...args);
27 | }
28 |
29 | renderAudio(resultBuffers)
30 | };
31 |
32 | // render waveform & audio, post to client
33 | const renderAudio = async (buffers) => {
34 | let segments = buffers.map(buffer => drawAudio(buffer))
35 | let duration = buffers.reduce((total, { duration }) => total + duration, 0)
36 | let wavBuffer = await encodeAudio(...buffers);
37 | let blob = new Blob([wavBuffer], { type: 'audio/wav' });
38 | let url = URL.createObjectURL(blob);
39 | self.postMessage({ id: history.length, url, segments, duration });
40 | }
41 |
42 |
43 | // sequence of buffers states
44 | let history = []
45 |
46 | // current audio data (which segments correspond to)
47 | let buffers = []
48 |
49 | // dict of operations - supposed to update history & current buffers
50 | const Ops = {
51 | // load/decode file from url
52 | async src(...urls) {
53 | history.push(() => buffers = [])
54 | buffers = await Promise.all(urls.map(fetchAudio))
55 | return buffers
56 | },
57 |
58 | // accept decoded audio buffer
59 | async file(data) {
60 | // load file from storage, if exists
61 | if (typeof data === 'string') {
62 | let blob = await storage.get(DB_KEY + ':' + data)
63 | if (!blob) return buffers // TODO: throw error, reset history
64 |
65 | let arrayBuffer = await fileToArrayBuffer(blob)
66 | let audioBuffer = await decodeAudio(arrayBuffer)
67 |
68 | return buffers = [audioBuffer]
69 | }
70 |
71 | history.push(() => buffers.pop())
72 |
73 | // overcome alloc limit by creating multiple buffers
74 | let maxLength = 108 * SAMPLE_RATE // 108 sec
75 | for (let i = 0; i < data.length; i += maxLength) {
76 | let length = Math.min(maxLength, data.length - i)
77 | console.log(length);
78 | let audioBuffer = new AudioBuffer({
79 | numberOfChannels: data.numberOfChannels,
80 | length,
81 | sampleRate: data.sampleRate
82 | });
83 | data.channelData.forEach((data, channel) => audioBuffer.getChannelData(channel).set(data.subarray(i, i + length)))
84 | buffers.push(audioBuffer)
85 | }
86 |
87 | // save to storage
88 | let blob = new Blob([await encodeAudio(...buffers)])
89 | console.log('save', DB_KEY + ':' + data.name)
90 | storage.set(DB_KEY + ':' + data.name, blob)
91 |
92 | return buffers
93 | },
94 |
95 | del(from, to) {
96 | from = Number(from), to = Number(to)
97 |
98 | let origBuffers = [...buffers]
99 | history.push(() => {
100 | buffers = origBuffers
101 | })
102 |
103 | let start = bufferIndex(from)
104 | let end = bufferIndex(to)
105 |
106 | // correct tail: pointing to head of the next buffer unnecessarily joins buffers in result
107 | // but we may want to preserve segmentation
108 | if (!end[1] && end[0]) end[0] -= 1, end[1] = buffers[end[0]].length
109 |
110 | let startBuffer = buffers[start[0]]
111 | let endBuffer = buffers[end[0]]
112 |
113 | let length = start[1] + (endBuffer.length - end[1])
114 | if (!length) return buffers = []
115 |
116 | let outBuffer = new AudioBuffer({
117 | length,
118 | sampleRate: startBuffer.sampleRate,
119 | numberOfChannels: startBuffer.numberOfChannels
120 | })
121 |
122 | for (let c = 0; c < startBuffer.numberOfChannels; c++) {
123 | let i = 0,
124 | outData = outBuffer.getChannelData(c),
125 | startData = startBuffer.getChannelData(c),
126 | endData = endBuffer.getChannelData(c)
127 |
128 | // transfer remaining head samples
129 | for (i = 0; i < start[1]; i++) outData[i] = startData[i]
130 | // transfer remaining tail samples
131 | for (let j = end[1]; j < endData.length; j++) outData[i] = endData[j], i++
132 | }
133 |
134 | let deleted = buffers.splice(start[0], end[0] - start[0] + 1, outBuffer)
135 |
136 | return buffers
137 | },
138 |
139 | /*
140 | // normalize audio
141 | norm() {
142 | let origBuffers = buffers.map(buffer => cloneAudio(buffer))
143 |
144 | // remove static - calculate avg and subtract
145 | let sum = 0, total = 0
146 | for (let buffer of buffers) {
147 | for (let c = 0; c < buffer.numberOfChannels; c++) {
148 | let channelData = buffer.getChannelData(c);
149 | total += channelData.length
150 | for (let i = 0; i < channelData.length; i++)
151 | sum += channelData[i]
152 | }
153 | }
154 | let avg = sum / total
155 | for (let buffer of buffers) {
156 | for (let c = 0; c < buffer.numberOfChannels; c++) {
157 | let channelData = buffer.getChannelData(c);
158 | total += channelData.length
159 | for (let i = 0; i < channelData.length; i++)
160 | channelData[i] -= avg
161 | }
162 | }
163 |
164 | // amplify max to meet 1
165 | let max = 0
166 | for (let buffer of buffers) {
167 | for (let c = 0; c < buffer.numberOfChannels; c++) {
168 | let channelData = buffer.getChannelData(c);
169 | for (let i = 0; i < channelData.length; i++)
170 | max = Math.max(Math.abs(channelData[i]), max)
171 | }
172 | }
173 |
174 | let amp = Math.max(1 / max, 1);
175 |
176 | for (let buffer of buffers) {
177 | for (let c = 0; c < buffer.numberOfChannels; c++) {
178 | let channelData = buffer.getChannelData(c);
179 | for (let i = 0; i < channelData.length; i++)
180 | channelData[i] = Math.min(1, Math.max(-1, channelData[i] * amp));
181 | }
182 | }
183 |
184 | return () => origBuffers
185 | },
186 |
187 | // insert breaks / split
188 | br(buffers, ...offsets) {
189 | for (let offset of offsets) {
190 | let [bufIdx, bufOffset] = bufferIndex(offset);
191 | let buf = buffers[bufIdx]
192 |
193 | if (bufOffset > 0 && bufOffset < buf.length) {
194 | let left = sliceAudio(buf, 0, bufOffset)
195 | let right = sliceAudio(buf, bufOffset)
196 |
197 | buffers.splice(bufIdx, 1,
198 | left, right
199 | )
200 | }
201 | }
202 |
203 | return buffers
204 | },
205 |
206 | join(offset) {
207 | let [bufIdx, bufOffset] = bufferIndex(offset)
208 |
209 | if (bufOffset) return console.warn('Wrong buffer offset', offset)
210 |
211 | let left = buffers[bufIdx-1], right = buffers[bufIdx]
212 | buffers.splice(bufIdx-1, 2,
213 | joinAudio(left, right)
214 | )
215 |
216 | return buffers
217 | },
218 |
219 | mute(...parts) {
220 | for (let part of parts) {
221 | let [offset, count] = part
222 | let [bufIdx, bufOffset] = bufferIndex(offset)
223 |
224 | // end of segment: insert to prev buffer - conventionally better have end space than have spaced beginning
225 | if (!bufOffset && bufIdx) bufIdx -= 1, bufOffset = buffers[bufIdx].length
226 |
227 | let silenceBuffer = new AudioBuffer({
228 | length: count * BLOCK_SIZE,
229 | numberOfChannels: buffers?.[0].numberOfChannels || 1,
230 | sampleRate: buffers?.[0].sampleRate || SAMPLE_RATE
231 | })
232 | buffers[bufIdx] = insertAudio(buffers[bufIdx], bufOffset, silenceBuffer)
233 | }
234 | return buffers
235 | },
236 |
237 | // clip to indicated fragment
238 | clip(from, to) {
239 |
240 | },
241 |
242 | // either add external URL or silence (count)
243 | add(offset, src) {
244 |
245 | },
246 |
247 | // copy offset/cout to another position (rewrites data underneath)
248 | cp(offset, count, to) {
249 |
250 | }
251 | */
252 |
253 | // apply ops to history
254 | goto(id) {
255 |
256 | }
257 | }
258 |
259 |
260 | // return [bufIdx, bufOffset] from absolute offset
261 | const bufferIndex = (blockOffset) => {
262 | let frameOffset = blockOffset * BLOCK_SIZE
263 | if (frameOffset === 0) return [0, 0]
264 | var start = 0, end
265 | for (let i = 0; i < buffers.length; i++) {
266 | end = start + buffers[i].length
267 | if (frameOffset < end) return [i, frameOffset - start]
268 | start = end
269 | }
270 |
271 | // special case: the offset points at/past the end of the last buffer. We return an offset just after its last frame,
272 | // which is useful for denoting the end of a range,
273 | // e.g. the getSelection() API likewise returns an offset _after_ the last item.
274 | return [buffers.length - 1, buffers[buffers.length - 1].length]
275 | }
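
For reference, here is a minimal standalone sketch of how `bufferIndex` resolves offsets, including the end-of-range case described above. The `BLOCK_SIZE` value and the two 2048-frame buffers are illustrative assumptions, not values taken from this repo:

```js
// assumed values for this sketch only
const BLOCK_SIZE = 1024
const buffers = [{ length: 2048 }, { length: 2048 }] // stand-ins for AudioBuffers

// same walk as bufferIndex above: convert blocks to frames, find the containing buffer
const bufferIndex = (blockOffset) => {
  let frameOffset = blockOffset * BLOCK_SIZE
  if (frameOffset === 0) return [0, 0]
  let start = 0, end
  for (let i = 0; i < buffers.length; i++) {
    end = start + buffers[i].length
    if (frameOffset < end) return [i, frameOffset - start]
    start = end
  }
  // past the end: point just after the last frame of the last buffer
  return [buffers.length - 1, buffers[buffers.length - 1].length]
}

console.log(bufferIndex(1)) // [0, 1024] : second block of the first buffer
console.log(bufferIndex(2)) // [1, 0]    : first block of the second buffer
console.log(bufferIndex(4)) // [1, 2048] : end-of-range marker, one frame past the last sample
```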
276 |
277 | const DB_KEY = 'wavearea-audio'
278 |
--------------------------------------------------------------------------------
/todo.md:
--------------------------------------------------------------------------------
1 | ## [ ] Restructure
2 |
3 | * [ ] Audio loading via web audio codecs
4 | * [ ] Audio playback via audio worklet
5 | * [ ] History operations built-in
6 | * [ ] settings-panel
7 | * [ ] Move all handlers to markup, leave app for logic
8 |
9 |
10 | ## Backlog
11 |
12 | * [x] Finish sprae 10
13 | * [x] no-caret (0 focus) play bug
14 | * [x] space repeat
15 | * [x] head of audio is boosted for some reason
16 | * [~] discrepancy of caret with sound -> can't reproduce
17 | * [ ] play button position via sticky
18 | * [ ] parameters manager (based on settings panel)
19 | * [ ] click on time must not reload anything
20 | * [ ] loading link with time should navigate to the line
21 | * [ ] Automatic tests
22 | * [ ] Shift play button to the left
23 | * [ ] Display current time
24 | * [ ] All editing operations
25 | * [ ] delete
26 | * [ ] ctrl+C / ctrl+V
27 | * [ ] Undo/redo history (separate from browser history)
28 | * [ ] Save result
29 | * [ ] Adjustable view
30 | * [ ] block size
31 | * [ ] color theme
32 | * [ ] player backends
33 | * [ ] Audio
34 | * [ ] WAA
35 | * [ ] Drag-n-drop
36 | * [ ] Separate by fragments (scenes) via enter
37 | * [ ] Playback bar with current time, play/stop, more
38 | * [ ] Position: bottom floating/appearing, bottom fixed, balloon next to cursor, none (melded into the UI)
39 | * [ ] Operations
40 | * [ ] Normalize audio (from playback bar?)
41 | * [ ] Revolume selected fragments
42 | * [ ] Noise-gate plugin
43 | * [ ] Speedup silences (plugin?)
44 | * [ ] Change number of channels
45 | * [ ] 11labs integration: generate speech of length
46 | * [ ] Switchable main-thread / worker / GPU processing
47 |
48 | ## Reiterating
49 |
50 | * [ ] display audio
51 | * [ ] play audio
52 | * [ ] caret indication
53 | * [ ] Don't update caret in raf: update only on playback and time change
54 | * [ ] Don't track caret on every focus: only when user selects by mouse
55 | * [ ] Make playback within the selection
56 | * [ ] Fix safari
57 | * [ ] loses caret on play, like insert silence, press play etc
58 | * [ ] serialize file in url: ?src=path/to/url/file/to/fetch
59 | * [ ] sprae :onfile-attachment-accepted
60 | * [ ] add preloader (sprae mount-unmount)
61 | * [ ] delete fragments -> updates audio
62 | * [ ] create silence by space
63 | * [ ] download
64 | * [ ] caret must be able to be reoriented during the playback
65 | * [ ] Safari: wrong current time positioning
66 | * [ ] BUG: stopping drops focus
67 | * [ ] Make 'Enter' create segments
68 | * [ ] time codes next to lines
69 | * [ ] br
70 | * [ ] del
71 | * [ ] fix deleting tail properly
72 | * [ ] normalize
73 | * [ ] BUG: setting caret to the beginning of segment (a bit from the left of segment) doesn't start playback properly
74 | * [ ] faster encoder by copying only the changed subbuffer data, as opposed to a full rerender
75 | * [ ] fix playback multiple segments
76 | * [ ] Add vertical shift of average
77 | * [ ] Shift + select
78 | * [ ] ~~interleaved buffers pointing to chunks of wav file, rather than audiobuffers~~ same as below
79 | * [ ] ~~immediate audio ops via copy~~ - saves 15ms, takes a lot in terms of losing AudioBuffer primitive
80 | * [ ] worker processor
81 | * [ ] actions via beforeinput inputType
82 |
83 | ## [ ] MVP: basic dubs editor
84 |
85 | * [ ] Make delete: `from-to` signature instead of `from-count`
86 | * [ ] Debounce delete better
87 | * [ ] "Open audio"
88 | * [ ] ~~"Generate speech" or "Pick random audio" intro screen. (+ button at the right)~~ -> use more complete sources config
89 | * [ ] Reflect operations in URL
90 | * [ ] Backspace-deleting from the beginning of segment doesn't remove break but deletes tail of prev segment instead
91 | * [ ] join operation that serializes as removing break
92 | * [ ] mute
93 | * [ ] take source from URL.
94 | * [ ] if there's none - take random source
95 | * [ ] support dropping files
96 | * [ ] save dropped files to storage
97 | * [ ] Make history of changes with undo/redo
98 | * [ ] Time-codes of following segments are messed up: make them href-able
99 | * [ ] Bug: insert silence at the beginning of new segment -> feature
100 | * [ ] ~~Save local file edits to kv-storage~~ - saved in history
101 | * [ ] BUG: 0:60 in timing
102 | * [ ] OPTIMIZATION: use onbeforeinput/oninput for handling operations
103 | * [ ] BUG: deleting from left & then from right of caret is different
104 | * [ ] BUG: fix playback from caret
105 | * [ ] Alt-Space for start/stop
106 | * [ ] Loop play selection
107 |
108 |
109 | * [ ] Outsource audio-decode, add missing codecs
110 | * [ ] Outsource media loopStart / loopEnd
111 | * [ ] Better selection logic: must be immediate
112 | * [ ] Display open/loading status
113 | * [ ] Display + for newlines
114 | * [ ] . for silence
115 | * [ ] Empty URL shows "Open file"
116 | * [ ] Loads source from url on init
117 | * [?] ~~Display loading status in playback~~ not sure still if we need playback
118 | * [ ] ~~Show average line in samples~~ use dots instead
119 | * [ ] Deleting, changing caret, deleting again causes UI waveform assertion fail
120 | * [ ] Small screens wrongly wrap waveform timing
121 | * [ ] Deleting part of audio screws up play button position
122 | * [ ] End of file caret positioning is wrong
123 | * [ ] Delete-all case doesn't get saved
124 | * [ ] Big file editing generates tons of error logs: should be clean
125 | * [ ] Big files break caret line at the end (see bvg)
126 | * [ ] Loaded file misses offset
127 | * [ ] Played waveform update on big files is very slow. Use overlap technique or virtual list via intersection observer
128 | * [ ] Safari: smooth audio currentTime (opposed to glitchy now)
129 | * [ ] A way to download / reverse / etc selected fragment (... at the right)
130 | * [ ] Stopping playback causes glitch
131 | * [ ] Bug with assets/1s.wav playback - end line caret shifts down
132 | * [ ] Problematic mobile rendering
133 | * [ ] Mobile playback doesn't start from selection
134 | * [ ] Bug: renavigating during play
135 | * [ ] Bug: mobile safari play button sticks glitchily (alternative to intersection observer?)
136 | * [ ] Bug: multiline selection is damaged
137 | * [ ] Bug: empty head starts playing something non-silence
138 | * [ ] Bug: needs enhanced lines calculation, ideally chars-per-line
139 | * [ ] Bug: playback with space is glitchy (resets caret)
140 | * [ ] Bug: deleting is broken
141 | * [ ] Bug: doesn't renavigate by click
142 | * [ ] Bug: doesn't scroll on caret offset
143 | * [ ] Bug: loop playback selection is broken
144 | * [ ] Make play always cover the time, then it leaves space for "record" button
145 | * [ ] Zoom
146 | * [ ] Render only visible part (virtual) - must reduce rendering load significantly
147 | * [ ] Resize throttle
148 | * [ ] ``
149 | * [ ] ``
150 | * [ ] ``
151 | * [ ] Autoplay, loop, current line - can be navigatable from URL
152 | * [ ] Add info icon: support, github, brahman
153 | * tips, generate theme
154 | * adjust settings: audio loudness metric, block size
155 | * [ ] support paste fragment of itself
156 | * [ ] Mark loop selection
157 | * [ ] Mark fragments
158 | * [ ] Detect characters per line via ranges method: https://www.bennadel.com/blog/4310-detecting-rendered-line-breaks-in-a-text-node-in-javascript.htm
159 | * [ ] Separate audio-decode module with all codecs...
160 | * [ ] Make play button clickable area _big_
161 | * [ ] Recent files
162 | * [ ] use media-offset for looping -> own function play-loop
163 | * [ ] detect cmd/ctrl key depending on platform
164 | * [ ] make player responsive in mobile as bottom play button with overlay
165 | * [ ] make playback sticky to avoid hiding playback (intersection observer + position change)
166 | * [ ] ~~use plain (interleaved?) arrays instead of audio buffers - faster decoding, faster transferring to worker~~ - limited maintainability, no need to transfer to worker
167 | * [ ] use decodeAudioData main thread "worker" for faster decode, detect supported native codecs & video
168 | * [ ] Loudness weighting
169 | * https://github.com/MTG/essentia/blob/master/src/algorithms/temporal/loudnessebur128.cpp
170 | * https://github.com/domchristie/needles
171 | * [ ] Better loudness display: it is inadequate now
172 | * [ ] Display left/right channels with half-transparent blacks, and black is their intersection
173 | * [ ] time codes as # hrefs
174 | * [ ] make navigatable
175 | * [ ] Faster updates: maybe no point rerendering/encoding full waveform, or parallelize, or faster wav encoder (wasm?)
176 | * ? should we work straight on wav buffer maybe instead of audio buffers?
177 | * [ ] Highlight of playable/playing region via diff color
178 | * [ ] ~~use audio-buffer-list for faster ops?~~ -> use own implementation
179 | * [ ] theme selector: color gradientish, inverse, cool, hot, dynamic, bw, font style
180 | * [ ] move loading/decoding/encoding to worker
181 | * [ ] random phrase player (from URL - like thetamath) via free speech api
182 | * [ ] broken sprae condition of `:if :ref`
183 | * [ ] convert ops units to h/s/ms
184 | * [ ] replace file selector with + under caret?
185 | * [ ] open file
186 | * [ ] ~~Make nicer playback UI (bottom of page player)~~ -> not proved to be the best
187 | * [ ] Errors and loading state must be indicated there
188 | * [ ] Precise current playback time
189 | * [ ] delete file fully -> displays open file again
190 | * [ ] save file in storage? -> can be done via browser caching
191 | * [ ] ~~Safari: initial audio loading state displays Error (show silent buffer)~~
192 | * [ ] scroll must follow the current caret position
193 | * [ ] save edits in URL, so that any audio URL can be opened, edited, played.
194 | * [ ] More audio transforms
195 | * [ ] Make reusable (web-) component
196 | * [ ] Textarea mode: no-line-breaks simple renderer on any textarea, no autosizer
197 | * [ ] Adjustable timecodes
198 | * [ ] Adjustable menu
199 | * [ ] Adjustable theme
200 | * [ ] Adjustable line breaks / ops
201 | * [ ] Think of embeddable links
202 | * [ ] Recording capability
203 | * [ ] Add tests (playwright?)
204 | * [ ] Measure via LUFS and other methods
205 | * [ ] Process audio with lino?
206 | * [ ] Vary color based on spectrum
207 | * [ ] ~~?Use timing object https://github.com/chrisguttandin/timing-object~~ -> nah
208 | * [ ] Editable labeling / phrases
209 |
210 | ## Ideas
211 |
212 | * Export as audiobook (choose cover)
213 |
--------------------------------------------------------------------------------
/experiments/phraser.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Wavearea
5 |
6 |
7 |
8 |
27 |
155 |
156 |
157 |
158 |
159 |
160 |
161 | Te Re Khe Ta
162 |
163 |
177 |
178 |
195 |
196 |
197 |
198 |
201 |
202 |
304 |
--------------------------------------------------------------------------------
/experiments/audio-recorder.js:
--------------------------------------------------------------------------------
1 | const recordIcon = ``
2 | const stopIcon = ``
3 | const playIcon = ``
4 | const pauseIcon = ``
5 | const downloadIcon = ``
6 | const settingsIcon = ``
7 |
8 | // Design considerations
9 | // There are 2 strategies to handle data:
10 | // a. as sequence of blobs from media stream
11 | // b. as sequence of audio buffers, somehow converted to media stream
12 | // For input we must support media streams (mic), oscillators/noise (raw buffers), and files (blobs)
13 | // For output we must be able to download (blob) and play back (blob/raw)
14 | // For editing we can use buffers or blobs
15 | // If we support oscillators, editing must use buffers. (That also gives precise audio-manipulation benefits.)
16 | // If we support immediate download of the edited file, editing must use blobs. (Otherwise we're limited to wav encoding.)
17 | // So it seems we need 1. raw chunks (audio buffers) and 2. a MediaRecorder to encode them to a small size
18 |
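
A minimal sketch of strategy (b), turning raw AudioBuffers back into a MediaStream so a MediaRecorder can encode them (the same bridge `recordParallel` at the bottom of this file uses); the mime type and the single-buffer signature are illustrative assumptions:

```js
// encode one AudioBuffer to a compressed Blob via MediaStreamAudioDestinationNode + MediaRecorder
async function encodeBuffer(audioBuffer, mimeType = 'audio/webm;codecs=opus') {
  const ctx = new AudioContext()
  const dest = ctx.createMediaStreamDestination() // AudioBuffer -> MediaStream bridge
  const recorder = new MediaRecorder(dest.stream, { mimeType })

  const source = ctx.createBufferSource()
  source.buffer = audioBuffer
  source.connect(dest)

  const chunks = []
  recorder.ondataavailable = e => { if (e.data.size) chunks.push(e.data) }
  const stopped = new Promise(resolve => (recorder.onstop = resolve))

  recorder.start()
  source.start()
  source.onended = () => recorder.stop()

  await stopped
  return new Blob(chunks, { type: mimeType }) // downloadable / playable blob
}
```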
19 | export default class Wavearea {
20 | paused = true
21 | chunks = []
22 | header = []
23 | timeslice = null
24 | mimeType = 'audio/webm;codecs=opus'
25 |
26 | get recording () {return this.recorder?.state === 'recording'}
27 | get playing () {return !this.playback?.paused}
28 |
29 | constructor (textarea, o={}) {
30 | // DOM
31 | this.textarea = textarea
32 |
33 | // this.textarea.style.setProperty('--size', 100)
34 | Object.assign(this.textarea.style, {
35 | lineHeight: 1,
36 | // paddingTop: 0,
37 | // fontSize: `calc(var(--size) * 1px)`,
38 | // backgroundSize: `10px calc(var(--size) * 1px)`,
39 | // backgroundPosition: `0 calc(var(--size) * 0.5px)`,
40 | // backgroundImage: `linear-gradient(to bottom, rgb(230, 245, 255) 1px, transparent 1px)`,
41 | })
42 |
43 | if (!(this.settingsButton = o.settingsButton)) {
44 | this.textarea.after(this.settingsButton = document.createElement('button'))
45 | this.settingsButton.innerHTML = settingsIcon
46 | }
47 | this.settingsButton.addEventListener('click', e => {
48 | // !this.recording ? this.record() : this.stop()
49 | // this.textarea.focus()
50 | })
51 |
52 | if (!(this.downloadButton = o.downloadButton)) {
53 | this.textarea.after(this.downloadButton = document.createElement('button'))
54 | this.downloadButton.innerHTML = downloadIcon
55 | }
56 | this.downloadButton.addEventListener('click', e => {
57 | // !this.playing ? this.play() : this.pause()
58 | // this.textarea.focus()
59 | })
60 |
61 | if (!(this.recordButton = o.recordButton)) {
62 | this.textarea.after(this.recordButton = document.createElement('button'))
63 | this.recordButton.innerHTML = recordIcon
64 | }
65 | this.recordButton.addEventListener('click', e => {
66 | !this.recording ? this.record() : this.stop()
67 | this.textarea.focus()
68 | })
69 |
70 | if (!(this.playButton = o.playButton)) {
71 | this.textarea.after(this.playButton = document.createElement('button'))
72 | this.playButton.innerHTML = playIcon
73 | }
74 | this.playButton.addEventListener('click', e => {
75 | !this.playing ? this.play() : this.pause()
76 | this.textarea.focus()
77 | })
78 |
79 | // audio
80 | this.playback = document.createElement('audio')
81 | }
82 |
83 | // init recorder
84 | async init() {
85 | const audioContext = new AudioContext();
86 |
87 | // FIXME: make configurable
88 | // https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints#properties_of_audio_tracks
89 | const constraints = {audio: {
90 | autoGainControl: false,
91 | echoCancellation: false,
92 | latency: 0,
93 | noiseSuppression: false,
94 | sampleRate: audioContext.sampleRate,
95 | sampleSize: 16
96 | }}
97 | console.time('init recorder')
98 | let stream = this.stream = await navigator.mediaDevices.getUserMedia(constraints)
99 | console.timeEnd('init recorder')
100 |
101 | const streamSource = audioContext.createMediaStreamSource(stream);
102 | const analyser = audioContext.createAnalyser();
103 | analyser.fftSize = 2048;
104 | analyser.smoothingTimeConstant = 0;
105 | streamSource.connect(analyser);
106 | this.recorder = new MediaRecorder(stream, {mimeType: this.mimeType});
107 |
108 | const dataArray = new Float32Array(analyser.fftSize);
109 | this.timeslice = (analyser.fftSize)/audioContext.sampleRate;
110 |
111 | this.recorder.ondataavailable = (e) => {
112 | if (!e.data.size) return
113 | // console.log(last - (last = Date.now()), this.timeslice)
114 | // no need to turn data into array buffers, unless we're able to read them instead of Web-Audio-API
115 | // FIXME: capture header, initial 2 chunks are required for playback source validity
116 | if (this.header.length < 2) {
117 | this.header.push(e.data)
118 | }
119 | else {
120 | // reading loudness data from chunks is hard
121 | analyser.getFloatTimeDomainData(dataArray);
122 | let ssum = 0
123 | for (let i = 0; i < dataArray.length; i++) ssum += dataArray[i] * dataArray[i]
124 | const rms = Math.sqrt(ssum / dataArray.length)
125 | const bar = String.fromCharCode(0x0100 + Math.floor(rms * 100))
126 |
127 | // append
128 | if (this.textarea.selectionStart === this.textarea.textLength) {
129 | this.chunks.push(e.data)
130 | this.textarea.append( bar )
131 | this.textarea.selectionStart = this.textarea.textLength
132 | }
133 | // insert
134 | else {
135 | const text = this.textarea.textContent, caret = this.textarea.selectionStart
136 | const chunkStart = text.slice(0, caret).replace(/\s/ig,'').length;
137 | this.chunks.splice(chunkStart, 0, e.data)
138 | this.textarea.textContent = text.slice(0, caret) + bar + text.slice(caret)
139 | this.textarea.selectionStart = caret + 1
140 | }
141 | // console.log('dataavailable', this.textarea.textLength)
142 | }
143 | }
144 | }
145 |
146 | async record() {
147 | if (this.playing) this.pause()
148 |
149 | this.paused = false
150 | this.recordButton.innerHTML = `${pauseIcon}`
151 |
152 | if (!this.recorder) await this.init()
153 |
154 | // reset header to re-init it from the new recording part
155 | this.header = []
156 |
157 | // NOTE: real time intervals are different from timeslice
158 | this.recorder.start(1000 * this.timeslice)
159 | }
160 |
161 | // stop recording
162 | async stop() {
163 | if (this.paused) return
164 |
165 | this.paused = true
166 | this.recordButton.innerHTML = `${recordIcon}`
167 | this.playButton.innerHTML = `${playIcon}`
168 |
169 | if (this.recording) {
170 | this.recorder.stop()
171 |
172 | // it still can generate the last ondataavailable event, so we wait
173 | await event(this.recorder, 'stop')
174 |
175 | // create playback chunk
176 | console.log(this.chunks)
177 | this.blob = new Blob([...this.header, ...this.chunks], { type: this.recorder.mimeType })
178 | this.playback.src = window.URL.createObjectURL(this.blob)
179 | // this.playback.srcObject = stream
180 |
181 | await event(this.playback, 'loadedmetadata')
182 |
183 | // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=642012
184 | console.log('loadedmetadata', this.playback.duration)
185 | if (this.playback.duration === Infinity || isNaN(this.playback.duration)) {
186 | this.playback.currentTime = Number.MAX_SAFE_INTEGER
187 | await event(this.playback, 'timeupdate')
188 | console.log('ontimeupdate',this.playback.duration,this.playback.currentTime)
189 | // playback.currentTime = 0
190 | }
191 | // Normal behavior
192 | // else console.log('immediate',playback.duration)
193 | }
194 | }
195 |
196 | async play() {
197 | // reset recording
198 | if (!this.paused) this.pause();
199 |
200 | if (this.textarea.selectionStart >= this.textarea.textLength) this.textarea.selectionStart = this.textarea.selectionEnd = 0
201 |
202 | const from = this.playback.currentTime = this.playback.duration * this.textarea.selectionStart / this.textarea.textLength
203 |
204 | // Bug? Setting currentTime to 0 doesn't reset playback
205 | this.playback.currentTime = Math.max(from, 0.001)
206 |
207 | const to = this.textarea.selectionStart === this.textarea.selectionEnd ? this.playback.duration :
208 | this.playback.duration * this.textarea.selectionEnd / this.textarea.textLength
209 |
210 | this.paused = false
211 | this.playButton.innerHTML = `${pauseIcon}`
212 |
213 | this.playback.play()
214 |
215 | await Promise.any([event(this.playback, 'ended'), until(() => {
216 | // update caret
217 | const framesPlayed = Math.floor(this.chunks.length * this.playback.currentTime / this.playback.duration)
218 | this.textarea.selectionStart = this.textarea.selectionEnd = framesPlayed
219 |
220 | return this.paused || this.playback.currentTime >= to
221 | })])
222 | this.pause()
223 | }
224 |
225 | // pause playback
226 | pause() {
227 | if (this.paused) return
228 |
229 | this.paused = true
230 | this.playButton.innerHTML = `${playIcon}`
231 |
232 | if (this.playing) this.playback.pause()
233 | }
234 | }
235 |
236 |
237 | // wait until event
238 | const event = (target, evt) => new Promise(r => target.addEventListener(evt, function fn(){target.removeEventListener(evt, fn),r()}))
239 |
240 | // wait until condition
241 | const until = (cond) => new Promise(r => {
242 | const check = () => cond() ? r() : requestAnimationFrame(check)
243 | check()
244 | })
245 |
246 |
247 |
248 | // try splitting buffer to N parts, recording in parallel, generating blob
249 | async function recordParallel() {
250 | const audioContext = new AudioContext();
251 | const mimeType = 'audio/webm;codecs=opus'
252 |
253 | const N = 10, len = 4096
254 | let src = new Float32Array(len * N)
255 | for (let j = 0; j < src.length; j++) src[j] = Math.sin(j / 5)
256 |
257 | const bufs = [], all = []
258 | for (let i = 0; i < N; i++ ) {
259 | // 2705 - min chunk length for opus encoder in Chrome, so we increase block size to len plus silent header
260 | // let buf = new AudioBuffer({length: len, sampleRate: audioContext.sampleRate})
261 | const buf = new AudioBuffer({length: len, sampleRate: audioContext.sampleRate})
262 | let data = buf.getChannelData(0)
263 | data.set(src.slice(i * len, (i+1) * len))
264 | bufs.push(buf)
265 |
266 | // create recorders
267 | const source = audioContext.createBufferSource();
268 | source.buffer = buf;
269 |
270 | const chunks = []
271 | all.push(new Promise(r => {
272 | const dest = audioContext.createMediaStreamDestination();
273 | const recorder = new MediaRecorder(dest.stream, {mimeType});
274 | source.connect(dest)
275 |
276 | recorder.start()
277 | // a delay is needed to shift encoding blocks
278 | source.start(0)
279 |
280 | recorder.ondataavailable = (e) => {
281 | const blob = e.data
282 | if (blob.size) chunks.push(blob)
283 | }
284 | recorder.onstop = e => {
285 | r(chunks)
286 | }
287 | source.onended = e => {
288 | recorder.stop()
289 | }
290 | }))
291 | }
292 |
293 | const blobs = await Promise.all(all);
294 |
295 | // combine multiple recorders back
296 | let buf = await blobs[0][0].arrayBuffer()
297 | console.hex(buf)
298 |
299 | var blob = new Blob([...blobs[0]], { type : mimeType });
300 | let audio = document.createElement('audio')
301 | audio.src = URL.createObjectURL(blob);
302 | audio.play()
303 | }
304 |
305 |
306 | console.hex = (d) => console.log((Object(d).buffer instanceof ArrayBuffer ? new Uint8Array(d.buffer) :
307 | typeof d === 'string' ? (new TextEncoder('utf-8')).encode(d) :
308 | new Uint8ClampedArray(d)).reduce((p, c, i, a) => p + (i % 16 === 0 ? i.toString(16).padStart(6, 0) + ' ' : ' ') +
309 | c.toString(16).padStart(2, 0) + (i === a.length - 1 || i % 16 === 15 ?
310 | ' '.repeat((15 - i % 16) * 3) + Array.from(a).splice(i - i % 16, 16).reduce((r, v) =>
311 | r + (v > 31 && v < 127 || v > 159 ? String.fromCharCode(v) : '.'), ' ') + '\n' : ''), ''));
312 |
--------------------------------------------------------------------------------
/experiments/waveedit.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Audio textarea
5 |
6 |
7 |
8 |
78 |
79 |
86 |
87 |
88 |
89 |
90 |
91 |
94 |
95 |
319 |
320 |
--------------------------------------------------------------------------------
/src/wavearea.js:
--------------------------------------------------------------------------------
1 | // UI part of wavearea
2 | // handles user interactions and sends commands to worker
3 | // all the data is stored and processed in worker
4 | import sprae from 'sprae';
5 | import { fileToArrayBuffer } from './audio-util.js';
6 | import playClip from './play-loop.js';
7 | import { measureLatency } from './measure-latency.js';
8 | import { selection, cleanText } from './selection.js';
9 |
10 | history.scrollRestoration = 'manual'
11 |
12 |
13 | // refs
14 | const wavearea = document.querySelector('#wavearea')
15 | const editarea = wavearea.querySelector('#editarea')
16 | const timecodes = wavearea.querySelector('#timecodes')
17 | const playButton = wavearea.querySelector('#play')
18 | const waveform = wavearea.querySelector('#waveform')
19 | const caretLinePointer = wavearea.querySelector('#caret-line')
20 | const audio = new Audio
21 |
22 |
23 | // init backend - receives messages from worker with rendered audio & waveform
24 | const worker = new Worker('./dist/worker.js', { type: "module" });
25 | const audioCtx = new AudioContext()
26 |
27 |
28 | // UI
29 | let state = sprae(wavearea, {
30 | // mode
31 | loading: false,
32 | recording: false,
33 | playing: false,
34 | selecting: false,
35 | isMouseDown: false,
36 | scrolling: false,
37 |
38 | // current playback start/end time
39 | clipStart: 0,
40 | loop: false,
41 | clipEnd: null,
42 |
43 | // ???
44 | _startTime: 0,
45 | _startTimeOffset: 0,
46 |
47 | audio,
48 | volume: 1,
49 | latency: 0, // time between playback and the first sample
50 |
51 | // waveform segments
52 | segments: [],
53 | total: 0, // # segments
54 | duration: 0, // duration (received from backend)
55 |
56 | caretOffscreen: 0, // +1 if caret is below, -1 above viewport
57 | caretOffset: 0, // current caret offset, characters
58 | caretLine: 0, // caret line number
59 | caretY: waveform.getBoundingClientRect().top,
60 | caretX: 0, // caret x (column) coordinate
61 |
62 | // chars per line (~5s with block==1024)
63 | cols: 216,
64 |
65 |
66 | // handle beforeinput event to process deletions & insertions
67 | async handleBeforeInput(e) {
68 | let handler = inputHandlers[e.inputType];
69 | if (!handler) {
70 | e.preventDefault();
71 | e.stopPropagation();
72 | // avoid double space insertion (osx)
73 | if (e.data === '. ') selection.set(state.caretOffset)
74 | } else {
75 | handler.call(this, e);
76 | }
77 | },
78 |
79 | // handle file drop
80 | async handleDrop(e) {
81 | let files = e.dataTransfer.files
82 | let file = files[0]
83 | if (!file.type.startsWith('audio')) return false;
84 | // FIXME: save file to storage under the name
85 |
86 | // recode into wav
87 | state.loading = true;
88 | state.segments = [];
89 |
90 | let arrayBuf = await fileToArrayBuffer(file);
91 | let audioBuf = await decodeAudio(arrayBuf);
92 | let wavBuffer = await encodeAudio(audioBuf);
93 | let blob = new Blob([wavBuffer], { type: 'audio/wav' });
94 | let url = URL.createObjectURL(blob);
95 | await applyOp(['src', url]);
96 |
97 | state.loading = false;
98 |
99 | return arrayBuf;
100 | },
101 |
102 | async handleFile(e) {
103 | // let url = URL.createObjectURL(e.target.files[0])
104 | // pushOp(['src', url])
105 | state.loading = 'Decoding'
106 | let file = e.target.files[0];
107 | let arrayBuf = await fileToArrayBuffer(file);
108 | let audioBuf = await audioCtx.decodeAudioData(arrayBuf);
109 | let channelData = Array.from({ length: audioBuf.numberOfChannels }, (_, i) => audioBuf.getChannelData(i))
110 |
111 | await pushOp(['file', {
112 | name: file.name,
113 | numberOfChannels: audioBuf.numberOfChannels,
114 | sampleRate: audioBuf.sampleRate,
115 | length: audioBuf.length,
116 | channelData
117 | }])
118 | state.loading = false
119 | },
120 |
121 | scrollIntoCaret() {
122 | if (state.caretOffscreen && !state.scrolling) {
123 | caretLinePointer.scrollIntoView({ behavior: 'smooth', block: 'center' })
124 | state.scrolling = true
125 | setTimeout(() => (state.scrolling = false), 108)
126 | }
127 | },
128 |
129 | // start playback
130 | play(e) {
131 | state.playing = true;
132 | state.scrolling = false;
133 | editarea.focus();
134 | console.log('play from', state.caretOffset);
135 |
136 | // from the end to the beginning
137 | if (state.caretOffset === state.total) selection.set(state.caretOffset = state.clipStart = 0)
138 |
139 | state.scrollIntoCaret();
140 |
141 | let { clipStart, clipEnd, loop } = state;
142 |
143 | const toggleStop = () => (playButton.click())
144 |
145 | // since audio.currentTime is inaccurate, esp. in Safari, we measure precise played time
146 | let animId
147 | state._startTime = 0 // reset; set precisely in resetStartTime once playback actually starts
148 | state._startTimeOffset = state.caretOffset
149 | const resetStartTime = async () => {
150 | await new Promise(ok => setTimeout(ok, state.latency)) // Safari needs visual/audio latency compensation
151 | state._startTime = performance.now() * 0.001;
152 | clearInterval(animId)
153 | animId = setInterval(syncCaret, 10.8)
154 | }
155 |
156 | // detect scrolling state, to prevent forcing scroll-into-caret
157 | let scrollY = editarea.getBoundingClientRect().top
158 | const checkScroll = () => {
159 | if (state.scrolling) return
160 | let curY = editarea.getBoundingClientRect().top
161 | if (curY !== scrollY) (state.scrolling = true, setTimeout(() => (state.scrolling = false, checkScroll()), 1080))
162 | else state.scrolling = false
163 | scrollY = curY
164 | }
165 |
166 | const syncCaret = () => {
167 | checkScroll()
168 | if (state.selecting) return
169 |
170 | let playedTime = (performance.now() * 0.001 - state._startTime);
171 | let currentBlock = Math.min(state._startTimeOffset + Math.round(state.total * playedTime / state.duration), state.total)
172 | if (loop) currentBlock = Math.min(currentBlock, clipEnd)
173 |
174 | // FIXME: optimize this chunk, just animate via CSS
175 | let sel = selection.set(state.caretOffset = currentBlock)
176 | state.caretLine = Math.floor(sel.end / state.cols);
177 | let rects = sel.range.getClientRects()
178 | let rect = rects[rects.length - 1]
179 | state.caretX = rect.right
180 |
181 | // FIXME
182 | state.scrollIntoCaret();
183 | }
184 |
185 | // audio takes time to initialize before 'play' on mobile, so we hold the caret until then
186 | audio.addEventListener('play', resetStartTime, { once: true })
187 |
188 | // audio looped - reset caret
189 | if (state.loop) audio.addEventListener('seeked', resetStartTime)
190 |
191 | const stopAudio = playClip(audio, state.loop && {
192 | start: state.duration * state.clipStart / state.total,
193 | end: state.duration * state.clipEnd / state.total
194 | });
195 | // TODO: markLoopRange()
196 |
197 | audio.addEventListener('ended', toggleStop);
198 |
199 | return () => {
200 | audio.removeEventListener('seeked', resetStartTime)
201 | audio.removeEventListener('ended', toggleStop);
202 |
203 | clearInterval(animId)
204 | stopAudio();
205 | state.playing = false
206 | state.scrolling = false
207 |
208 | // return selection if there was any
209 | //TODO: unmarkLoopRange()
210 | if (state.loop) selection.set(clipStart, clipEnd)
211 |
212 | // adjust end caret position
213 | else if (audio.currentTime >= audio.duration) selection.set(state.total)
214 |
215 | editarea.focus()
216 | }
217 | },
218 |
219 | // navigate to history state
220 | async goto(params) {
221 | try {
222 | await renderAudio(params)
223 | }
224 | catch (e) {
225 | // failed to load audio means likely history is discontinuous:
226 | // try updating blob in history state by rebuilding audio
227 | await loadAudioFromURL()
228 | }
229 | selection.set(state.caretOffset)
230 | },
231 |
232 | // produce display time from frames
233 | timecode(block, ms = 0) {
234 | let time = ((block / state?.total)) * state?.duration || 0
235 | return `${Math.floor(time / 60).toFixed(0)}:${(Math.floor(time) % 60).toFixed(0).padStart(2, 0)}${ms ? `.${(time % 1).toFixed(ms).slice(2).padStart(ms)}` : ''}`
236 | },
237 |
238 | selection
239 | });
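
The caret tracking in `play()` above deliberately avoids `audio.currentTime` (inaccurate, especially in Safari) and derives the caret position from wall-clock time instead. Reduced to its core, the mapping used by `syncCaret` is roughly:

```js
// playedTime: seconds elapsed since _startTime; startOffset: caret block at playback start
// total: number of blocks; duration: audio duration in seconds
const caretBlock = (playedTime, startOffset, total, duration, clipEnd = total, loop = false) => {
  let block = Math.min(startOffset + Math.round(total * playedTime / duration), total)
  return loop ? Math.min(block, clipEnd) : block
}
```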
240 |
241 |
242 | const inputHandlers = {
243 | // insertText(){},
244 | // insertReplacementText(){},
245 | // insertLineBreak(){},
246 | // insertParagraph(){},
247 | insertFromDrop(e) {
248 | console.log('insert from drop', e)
249 | },
250 | // insertFromPaste(){},
251 | // insertLink(){},
252 | // deleteWordBackward(){},
253 | // deleteWordForward(){},
254 | // deleteSoftLineBackward(){},
255 | // deleteSoftLineForward(){},
256 | // deleteEntireSoftLine(){},
257 | // deleteHardLineBackward(){},
258 | // deleteHardLineForward(){},
259 | // deleteByDrag(){},
260 | // deleteByCut(){},
261 | // deleteContent(){},
262 | async deleteContentBackward(e) {
263 | let range = e.getTargetRanges()[0]
264 | let fromNode = range.startContainer.parentNode.closest('.segment'),
265 | toNode = range.endContainer.parentNode.closest('.segment'),
266 | fromId = Number(fromNode.dataset.id), toId = Number(toNode.dataset.id)
267 | let from = range.startOffset + state.segments.slice(0, fromId).reduce((off, seg) => off + seg.length, 0),
268 | to = range.endOffset + state.segments.slice(0, toId).reduce((off, seg) => off + seg.length, 0)
269 |
270 | // debounce push op to collect multiple deletes
271 | if (this._deleteTimeout) {
272 | clearTimeout(this._deleteTimeout)
273 | this._deleteOp[1]--
274 | }
275 | else this._deleteOp = ['del', from, to]
276 |
277 | const pushDeleteOp = () => {
278 | pushOp(this._deleteOp)
279 | this._deleteOp = this._deleteTimeout = null
280 | }
281 | this._deleteTimeout = setTimeout(pushDeleteOp, 280)
282 | },
283 | // deleteContentForward(){},
284 | // historyUndo(){},
285 | // historyRedo(){},
286 |
287 | }
288 |
289 | // measure safari latency
290 | const whatsLatency = async () => {
291 | wavearea.removeEventListener('touchstart', whatsLatency)
292 | wavearea.removeEventListener('mousedown', whatsLatency)
293 | wavearea.removeEventListener('keydown', whatsLatency)
294 | state.latency = await measureLatency()
295 | }
296 | wavearea.addEventListener('touchstart', whatsLatency)
297 | wavearea.addEventListener('mousedown', whatsLatency)
298 | wavearea.addEventListener('keydown', whatsLatency)
299 |
300 |
301 | // create play button position observer
302 | const caretObserver = new IntersectionObserver(([item]) => {
303 | state.caretOffscreen = item.isIntersecting ? 0 :
304 | (item.intersectionRect.top <= item.rootBounds.top ? 1 :
305 | item.intersectionRect.bottom >= item.rootBounds.bottom ? -1 :
306 | 0);
307 | }, {
308 | // root: document,
309 | threshold: 0.999,
310 | rootMargin: '0px'
311 | });
312 | caretObserver.observe(caretLinePointer);
313 |
314 |
315 | // create line width observer
316 | const resizeObserver = new ResizeObserver((entries) => {
317 | // let width = entries[0].contentRect.width
318 | state.cols = measureLines()
319 | })
320 | resizeObserver.observe(editarea);
321 |
322 | // inspired by https://www.bennadel.com/blog/4310-detecting-rendered-line-breaks-in-a-text-node-in-javascript.htm
323 | // measure number of characters per line
324 | function measureLines() {
325 | let range = new Range();
326 | let textNode = editarea.firstChild.firstChild
327 | if (!textNode?.textContent) return
328 | let str = textNode.textContent
329 |
330 | range.setStart(textNode, 0), range.setEnd(textNode, 1)
331 | let y = range.getClientRects()[0].y
332 | for (var i = 0, offset = 0; i < str.length; offset++) {
333 | let skip = 1; while (str[i + skip] >= '\u0300') skip++;
334 | range.setStart(textNode, 0), range.setEnd(textNode, i = i + skip);
335 | // 2nd line means we counted chars per line
336 | let rects = range.getClientRects()
337 | if (rects[rects.length - 1].y > y) return offset
338 | }
339 |
340 | return str.length
341 | }
342 |
343 | // update history, post operation & schedule update
344 | // NOTE: we assume each op is applied exactly once, not multiple times,
345 | // so that ops can be combined as del=0-10..20-30 instead of del=0-10&del=20-30
346 | async function pushOp(...ops) {
347 | let url = new URL(location)
348 |
349 | for (let op of ops) {
350 | let [name, ...args] = op
351 | if (args[0].name) url.searchParams.set(name, args[0].name)
352 | else if (url.searchParams.has(name)) url.searchParams.set(name, `${url.searchParams.get(name)}..${args.join('-')}`)
353 | else url.searchParams.append(name, args.join('-'))
354 | }
355 | state.loading = 'Processing'
356 | let params = await runOp(...ops)
357 | history.pushState(params, '', decodeURI(url)); // decodeURI needed to avoid escaping `:`
358 | state.loading = false
359 |
360 | if (editarea.textContent) console.assert(params.segments.join('') === editarea.textContent, 'Rendered waveform is different from UI')
361 |
362 | return renderAudio(params)
363 | }
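
To make the NOTE above concrete, this is roughly how successive delete ops get serialized into the URL by `pushOp` (the `src` parameter and the offsets are illustrative):

```js
// starting from ?src=track.wav (hypothetical)
await pushOp(['del', 0, 10])   // -> ?src=track.wav&del=0-10
await pushOp(['del', 20, 30])  // -> ?src=track.wav&del=0-10..20-30

// loadAudioFromURL() below reverses this: each param is split on '..' into ops,
// and each op's arguments on '-', yielding [['del','0','10'], ['del','20','30']]
```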
364 |
365 | // post op message and wait for update response
366 | function runOp(...ops) {
367 | return new Promise(resolve => {
368 | // worker manages history, so id indicates which point in history we commit changes to
369 | console.log('Post message', ops)
370 | worker.postMessage({ id: history.state?.id || 0, ops })
371 | worker.addEventListener('message', e => {
372 | resolve(e.data)
373 | }, { once: true })
374 | })
375 | }
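
For reference, the round trip that `runOp` wraps has roughly this shape; the field names are the ones this file actually reads from the response (the worker side lives in worker.js and is not shown here):

```js
// request: commit ops against a point in the worker-side history
worker.postMessage({ id: history.state?.id || 0, ops: [['del', 0, 10]] })

// response (e.data), as consumed by renderAudio():
// {
//   url,       // audio URL assigned to the <audio> element
//   segments,  // waveform strings, one per segment
//   duration,  // total duration in seconds
//   offsets    // segment offsets (destructured but not used in this file)
// }
```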
376 |
377 | // update audio url & assert waveform
378 | function renderAudio({ url, segments, duration, offsets }) {
379 | // assert waveform same as current content (must be!)
380 | state.total = segments.reduce((total, seg) => total += cleanText(seg).length, 0);
381 | state.duration = duration
382 | state.segments = segments
383 | if (!state.cols) state.cols = measureLines()
384 | // URL.revokeObjectURL(audio.src) // can be persisted from history, so we keep it
385 | audio.src = url
386 | audio.preload = "metadata" // preload avoids redundant fetch requests and is needed by Safari
387 | return new Promise((ok, nok) => {
388 | audio.addEventListener('error', nok)
389 | audio.addEventListener('loadedmetadata', () => {
390 | audio.currentTime = duration * state.caretOffset / state.total || 0; ok() // resolve once metadata has loaded
391 | }, { once: true });
392 | })
393 | }
394 |
395 | // reconstruct audio from url
396 | async function loadAudioFromURL(url = new URL(location)) {
397 | state.loading = ' '
398 |
399 | let ops = []
400 | for (const [op, arg] of url.searchParams) ops.push(...arg.split('..').map(arg => {
401 | // keep src/file URLs as a single argument (don't split them on '-')
402 | return [op, ...(op === 'src' || op === 'file' ? [arg] : arg.split('-'))]
403 | }))
404 |
405 | // shortcut for src op
406 | if (ops[0][0] === 'src') {
407 | let [, src] = ops.shift()
408 | let resp = await fetch(src, { cache: 'force-cache' });
409 | let arrayBuf = await resp.arrayBuffer();
410 | state.loading = 'Decoding'
411 | let audioBuf = await audioCtx.decodeAudioData(arrayBuf);
412 | let channelData = Array.from({ length: audioBuf.numberOfChannels }, (_, i) => audioBuf.getChannelData(i))
413 | ops.push(['file', {
414 | name: src,
415 | numberOfChannels: audioBuf.numberOfChannels,
416 | sampleRate: audioBuf.sampleRate,
417 | length: audioBuf.length,
418 | channelData
419 | }])
420 | }
421 |
422 | let params = await runOp(...ops)
423 | history.replaceState(params, '', decodeURI(url))
424 | renderAudio(params)
425 | state.loading = false
426 | }
427 |
428 |
429 | // if URL has no operations - put random sample
430 | // if (location.search.length < 2) {
431 | // const sampleSources = [
432 | // // 'https://upload.wikimedia.org/wikipedia/commons/9/9c/Vivaldi_-_Magnificat_01_Magnificat.oga',
433 | // 'https://upload.wikimedia.org/wikipedia/commons/c/cf/Caja_de_m%C3%BAsica_%28PianoConcerto5_Beethoven%29.ogg',
434 | // // 'https://upload.wikimedia.org/wikipedia/commons/9/96/Carcassi_Op_60_No_1.ogg',
435 | // ]
436 | // let src = sampleSources[Math.floor(Math.random() * sampleSources.length)];
437 | // location.search = `?src=${src}`
438 | // }
439 | // history.replaceState({segments:[]}, '', '/')
440 |
441 | // apply operations from URL, like src=path/to/file&clip=from-to&br=a..b..c
442 | if (location.search.length) {
443 | loadAudioFromURL()
444 | }
445 |
--------------------------------------------------------------------------------
/dist/wavearea.js:
--------------------------------------------------------------------------------
1 | import{f as K}from"./chunk-ZZPGCUWV.js";import{b as Ae}from"./chunk-A576P2ZR.js";var N,m,$,F,j;function ee(e){N=e.signal,m=e.effect,j=e.computed,F=e.batch||(r=>r()),$=e.untracked||F}var T=Symbol("signals"),L=Symbol("length");function x(e,r){if(!e||e[T])return e;if(Array.isArray(e))return Ce(e);if(e.constructor!==Object)return e;let t={...r?.[T]},n=N(Object.values(e).length),s=new Proxy(t,{get:(a,o)=>o===L?n:o===T?t:t[o]?.valueOf(),set:(a,o,l,f)=>(f=t[o],V(t,o,l),f??++n.value,1),deleteProperty:(a,o)=>(t[o]&&(te(t,o),n.value--),1),ownKeys(){return n.value,Reflect.ownKeys(t)}});for(let a in e){let o=Object.getOwnPropertyDescriptor(e,a);o?.get?(t[a]=j(o.get.bind(s)))._set=o.set?.bind(s):(t[a]=void 0,V(t,a,e[a]))}return s}var Oe={push:1,pop:1,shift:1,unshift:1,splice:1};function Ce(e){let r;if(e[T])return e;let t=N(e.length),n=Array(e.length).fill(),s=new Proxy(n,{get(a,o){if(typeof o=="symbol")return o===L?t:o===T?n:n[o];if(o==="length")return Oe[r]?t.peek():t.value;if(r=o,n[o])return n[o].valueOf();if(o=t.peek()&&(t.value=n.length=Number(o)+1),!0},deleteProperty:(a,o)=>(n[o]&&te(n,o),1)});return s}function V(e,r,t){let n=e[r];if(r[0]==="_")e[r]=t;else if(!n)e[r]=n=t?.peek?t:N(x(t));else if(t!==n.peek())if(n._set)n._set(t);else if(Array.isArray(t)&&Array.isArray(n.peek())){let s=n.peek();s[L]?$(()=>{F(()=>{let a=0,o=t.length;for(;a{for(;n.length;)n.pop()();E.delete(e)},t;function s(a,o=a.parentNode){if(a.attributes)for(let l=0;l{if(t=ne[e=e.trim()])return t;try{t=se(e)}catch(n){X(n,r,e)}return ne[e]=t},X=(e,r,t="")=>{throw Object.assign(e,{message:`\u2234 ${e.message}
2 |
3 | ${r}${t?`="${t}"
4 |
5 | `:""}`,expr:t})},se;y.use=e=>{e.signal&&ee(e),e.compile&&(se=e.compile)};var Z={};Ae(Z,{batch:()=>Se,computed:()=>xe,effect:()=>ie,signal:()=>ae,untracked:()=>Te});var _,R,ae=(e,r,t=new Set)=>(r={get value(){return _?.deps.push(t.add(_)),e},set value(n){if(n!==e){e=n;for(let s of t)R?R.add(s):s()}},peek(){return e}},r.toJSON=r.then=r.toString=r.valueOf=()=>r.value,r),ie=(e,r,t,n)=>(t=s=>{r?.call?.(),s=_,_=t;try{r=e()}finally{_=s}},n=t.deps=[],t(),s=>{for(r?.call?.();s=n.pop();)s.delete(t)}),xe=(e,r=ae(),t,n)=>(t={get value(){return n||=ie(()=>r.value=e()),r.value},peek:r.peek},t.toJSON=t.then=t.toString=t.valueOf=()=>t.value,t),Se=e=>{let r=R;r||(R=new Set);try{e()}finally{if(!r){r=R,R=null;for(let t of r)t()}}},Te=(e,r,t)=>(r=_,_=null,t=e(),_=r,t);var q=Symbol(":each");d.each=(e,[r,t,n],s)=>{let a=e[q]=document.createTextNode("");e.replaceWith(a);let o,l,f=0,p=j(()=>{l=null;let c=n(s);return typeof c=="number"&&(c=Array.from({length:c},(v,C)=>C+1)),c?.constructor===Object&&(l=Object.keys(c),c=Object.values(c)),c||[]}),g=()=>{$(()=>{let c=0,v=p.value,C=v.length;if(o&&!o[L]){for(let A of o[T]||[])A[Symbol.dispose]();o=null,f=0}if(CW.remove())}}:D;a.before(D),y(P,J),((o[T]||=[])[c]||={})[Symbol.dispose]=()=>{P[Symbol.dispose](),P.remove()}}}f=C})},u=0;return m(()=>{p.value[L]?.value,u?u++:(g(),queueMicrotask(()=>(u&&g(),u=0)))})};d.each.parse=(e,r)=>{let[t,n]=e.split(/\s+in\s+/),[s,a="$"]=t.split(/\s*,\s*/);return[s,a,r(n)]};var le=Symbol("if");d.if=(e,r,t)=>{let n=e.parentNode,s=e.nextElementSibling,a=document.createTextNode(""),o,l,f,p=[];return e.after(a),e.content?(o=p,e.remove(),l=[...e.content.childNodes]):l=o=[e],s?.hasAttribute(":else")?(s.removeAttribute(":else"),s.hasAttribute(":if")?f=p:(s.remove(),f=s.content?[...s.content.childNodes]:[s])):f=p,m(()=>{let g=r(t)?l:e[le]?p:f;if(s&&(s[le]=g===l),o!=g){o[0]?.[q]&&(o=[o[0][q]]);for(let u of o)u.remove();o=g;for(let u of o)n.insertBefore(u,a),y(u,t)}})};d.default=(e,r,t,n)=>{if(!n.startsWith("on"))return m(()=>{let u=r(t);if(n)Q(e,n,S(u,t));else for(let c in u)Q(e,Le(c),S(u[c],t))});let s=n.split("..").map(u=>{let c={evt:"",target:e,test:()=>!0};return c.evt=(u.startsWith("on")?u.slice(2):u).replace(/\.(\w+)?-?([-\w]+)?/g,(v,C,A="")=>(c.test=_e[C]?.(c,...A.split("-"))||c.test,"")),c});if(s.length==1)return m(()=>g(r(t),s[0]));let a,o,l,f=0,p=u=>{l=g(c=>(l(),o=u?.(c),(f=++f%s.length)?p(o):a&&p(a)),s[f])};return m(()=>(a=r(t),!l&&p(a),()=>a=null));function g(u,{evt:c,target:v,test:C,defer:A,stop:J,prevent:D,immediate:P,...W}){A&&(u=A(u));let z=M=>{try{C(M)&&(J&&(P?M.stopImmediatePropagation():M.stopPropagation()),D&&M.preventDefault(),u?.(M))}catch(be){X(be,`:on${c}`,u)}};return v.addEventListener(c,z,W),()=>v.removeEventListener(c,z,W)}};var _e={prevent(e){e.prevent=!0},stop(e){e.stop=!0},immediate(e){e.immediate=!0},once(e){e.once=!0},passive(e){e.passive=!0},capture(e){e.capture=!0},window(e){e.target=window},document(e){e.target=document},throttle(e,r){e.defer=t=>ke(t,r?Number(r)||0:108)},debounce(e,r){e.defer=t=>Ne(t,r?Number(r)||0:108)},outside:e=>r=>{let 
t=e.target;return!(t.contains(r.target)||r.target.isConnected===!1||t.offsetWidth<1&&t.offsetHeight<1)},self:e=>r=>r.target===e.target,ctrl:(e,...r)=>t=>h.ctrl(t)&&r.every(n=>h[n]?h[n](t):t.key===n),shift:(e,...r)=>t=>h.shift(t)&&r.every(n=>h[n]?h[n](t):t.key===n),alt:(e,...r)=>t=>h.alt(t)&&r.every(n=>h[n]?h[n](t):t.key===n),meta:(e,...r)=>t=>h.meta(t)&&r.every(n=>h[n]?h[n](t):t.key===n),arrow:()=>h.arrow,enter:()=>h.enter,esc:()=>h.esc,tab:()=>h.tab,space:()=>h.space,delete:()=>h.delete,digit:()=>h.digit,letter:()=>h.letter,char:()=>h.char},h={ctrl:e=>e.ctrlKey||e.key==="Control"||e.key==="Ctrl",shift:e=>e.shiftKey||e.key==="Shift",alt:e=>e.altKey||e.key==="Alt",meta:e=>e.metaKey||e.key==="Meta"||e.key==="Command",arrow:e=>e.key.startsWith("Arrow"),enter:e=>e.key==="Enter",esc:e=>e.key.startsWith("Esc"),tab:e=>e.key==="Tab",space:e=>e.key==="\xA0"||e.key==="Space"||e.key===" ",delete:e=>e.key==="Delete"||e.key==="Backspace",digit:e=>/^\d$/.test(e.key),letter:e=>/^\p{L}$/gu.test(e.key),char:e=>/^\S$/.test(e.key)},Q=(e,r,t)=>{t==null||t===!1?e.removeAttribute(r):e.setAttribute(r,t===!0?"":typeof t=="number"||typeof t=="string"?t:"")},ke=(e,r)=>{let t,n,s=a=>{t=!0,setTimeout(()=>{if(t=!1,n)return n=!1,s(a),e(a)},r)};return a=>t?n=!0:(s(a),e(a))},Ne=(e,r)=>{let t;return n=>{clearTimeout(t),t=setTimeout(()=>{t=null,e(n)},r)}},Le=e=>e.replace(/[A-Z\u00C0-\u00D6\u00D8-\u00DE]/g,r=>"-"+r.toLowerCase()),S=(e,r)=>e?.replace?e.replace(/\$<([^>]+)>/g,(t,n)=>r[n]??""):e;d.ref=(e,r,t)=>{t[S(r,t)]=e};d.ref.parse=e=>e;d.with=(e,r,t)=>{let n;return m(()=>{let s=r(t);y(e,n?s:n=x(s,t))})};d.html=(e,r,t)=>{let n=r(t);if(!n)return;let s=(n.content||n).cloneNode(!0);e.replaceChildren(s),y(e,t)};d.text=(e,r,t)=>(e.content&&e.replaceWith(e=document.createTextNode("")),m(()=>{let n=r(t);e.textContent=n??""}));d.class=(e,r,t)=>{let n=new Set;return m(()=>{let s=r(t),a=new Set;s&&(typeof s=="string"?S(s,t).split(" ").map(o=>a.add(o)):Array.isArray(s)?s.map(o=>(o=S(o,t))&&a.add(o)):Object.entries(s).map(([o,l])=>l&&a.add(o)));for(let o of n)a.has(o)?a.delete(o):e.classList.remove(o);for(let o of n=a)e.classList.add(o)})};d.style=(e,r,t)=>{let n=e.getAttribute("style")||"";return n.endsWith(";")||(n+="; "),m(()=>{let s=r(t);if(typeof s=="string")e.setAttribute("style",n+S(s,t));else{e.setAttribute("style",n);for(let a in s)e.style.setProperty(a,S(s[a],t))}})};d.value=(e,r,t)=>{let n,s,a=e.type==="text"||e.type===""?o=>e.setAttribute("value",e.value=o??""):e.tagName==="TEXTAREA"||e.type==="text"||e.type===""?o=>(n=e.selectionStart,s=e.selectionEnd,e.setAttribute("value",e.value=o??""),n&&e.setSelectionRange(n,s)):e.type==="checkbox"?o=>(e.checked=o,Q(e,"checked",o)):e.type==="select-one"?o=>{for(let l in e.options)l.removeAttribute("selected");e.value=o,e.selectedOptions[0]?.setAttribute("selected","")}:o=>e.value=o;return m(()=>a(r(t)))};d.fx=(e,r,t)=>m(()=>r(t));y.use(Z);y.use({compile:e=>y.constructor("__scope",`with (__scope) { return ${e} };`)});var ce=y;function G(e,r){if(!r)return e.play(),()=>e.pause();r.start||=0,e.currentTime=r.start;let t=()=>{if(e.readyState===0)return;let l=e.preload==="auto";l&&(e.preload="none"),e.currentTime<0&&(e.currentTime=0),e.currentTime>r.end&&(e.currentTime=r.end),l&&(e.preload="auto")},n,s=()=>{if(clearInterval(n),e.currentTime>=r.end){if(e.loop){e.currentTime=r.start;return}e.pause(),e.dispatchEvent(new 
Event("ended"));return}e.currentTime+.2>r.end&&(n=setInterval(s,10))},a=()=>{e.currentTime>=r.end&&(e.currentTime=r.start)};e.addEventListener("durationchange",t),e.addEventListener("seeking",t),e.addEventListener("timeupdate",s);let o=setInterval(s,50);return e.addEventListener("playing",a),e.play(),()=>{e.removeEventListener("durationchange",t),e.removeEventListener("seeking",t),e.removeEventListener("timeupdate",s),e.removeEventListener("playing",a),clearInterval(o),clearInterval(n),e.pause()}}var I=new Audio("data:audio/wav;base64,UklGRmgAAABXQVZFZm10IBAAAAABAAEAgLsAAAB3AQACABAAZGF0YQIAAABpNUxJU1Q6AAAASU5GT0lTRlQUAAAAcHJvYmUuYXVkaW90b29sLmNvbQBJQ1JEEQAAADIwMjMtMDMtMDIgMDctNDQAAA==");I.preload="metadata";I.load();I.volume=0;async function fe(){return new Promise(e=>{I.play();let r;I.onplaying=()=>r=performance.now(),I.onended=()=>{e(performance.now()-r)}})}var k={get(){let e=window.getSelection();if(!e.anchorNode||!e.anchorNode.parentNode.closest(".w-editarea"))return;let r=Y(e.anchorNode,e.anchorOffset),t=Y(e.focusNode,e.focusOffset),n=e.anchorNode.parentNode.closest(".w-segment"),s=e.anchorOffset,a=e.focusNode.parentNode.closest(".w-segment"),o=e.focusOffset;return r>t&&([t,a,o,r,n,s]=[r,n,s,t,a,o]),{start:r,startNode:n,startNodeOffset:s,end:t,endNode:a,endNodeOffset:o,collapsed:e.isCollapsed,range:e.getRangeAt(0)}},set(e,r){let t=window.getSelection();Array.isArray(e)&&(e=Y(...e)),Array.isArray(r)&&(r=Y(...r)),e=Math.max(0,e),r==null&&(r=e);let n=document.querySelector(".w-editarea"),[s,a]=ue(n,e),[o,l]=ue(n,r),f=t.getRangeAt(0);if(!(f.startContainer===s.firstChild&&f.startOffset===a)&&!(f.endContainer===o.firstChild&&f.endOffset===l)){t.removeAllRanges();let p=new Range;p.setStart(s.firstChild,a),p.setEnd(o.firstChild,l),t.addRange(p)}return{start:e,startNode:s,end:r,endNode:o,startNodeOffset:a,endNodeOffset:l,collapsed:t.isCollapsed,range:t.getRangeAt(0)}}};function Y(e,r){let t=e.parentNode.closest(".w-segment"),n=U(t.textContent.slice(0,r)).length;for(;t=t.previousSibling;)n+=U(t.textContent).length;return n}function ue(e,r){let t=e.firstChild,n;for(;r>(n=U(t.textContent).length);)r-=n,t=t.nextSibling;let s=0;for(let a=t.textContent,o=0;o="\u0300";)s++;return[t,r+s]}function U(e){return e.replace(/\u0300|\u0301/g,"")}history.scrollRestoration="manual";var b=document.querySelector(".wavearea"),O=b.querySelector(".w-editarea"),de=b.querySelector(".w-timecodes"),Ee=b.querySelector(".w-play"),Re=b.querySelector(".w-waveform"),me=b.querySelector(".w-caret-line"),w=new Audio,pe=new Worker("./dist/worker.js",{type:"module"}),he=new AudioContext,i=ce(b,{raf:e=>window.requestAnimationFrame(e),loading:!1,recording:!1,playing:!1,selecting:!1,isMouseDown:!1,scrolling:!1,clipStart:0,loop:!1,clipEnd:null,_startTime:0,_startTimeOffset:0,volume:1,latency:0,segments:[],total:0,duration:0,caretOffscreen:0,caretOffset:0,caretY:Re.getBoundingClientRect().top,caretX:0,cols:216,async handleCaret(){let e=k.get();!e||e.start===i.caretOffset&&e.collapsed||(i.caretOffset=e.start,i.updateCaretLine(e),i.clipStart=i.caretOffset,i.playing?(i._startTime=(performance.now()+i.latency)*.001,i._startTimeOffset=i.caretOffset):(i.clipEnd=e.collapsed?i.total:e.end,i.loop=w.loop=!e.collapsed),w.currentTime=i.duration*i.caretOffset/i.total)},async handleBeforeInput(e){let r=Ie[e.inputType];r?r.call(this,e):(e.preventDefault(),e.stopPropagation(),e.data===". 
"&&k.set(i.caretOffset))},async handleDrop(e){let t=e.dataTransfer.files[0];if(!t.type.startsWith("audio"))return!1;i.loading=!0,i.segments=[];let n=await K(t),s=await decodeAudio(n),a=await encodeAudio(s),o=new Blob([a],{type:"audio/wav"}),l=URL.createObjectURL(o);return await applyOp(["src",l]),i.loading=!1,n},async handleFile(e){i.loading="Decoding";let r=e.target.files[0],t=await K(r),n=await he.decodeAudioData(t),s=Array.from({length:n.numberOfChannels},a=>n.getChannelData(a));await ye(["file",{name:r.name,numberOfChannels:n.numberOfChannels,sampleRate:n.sampleRate,length:n.length,channelData:s}]),i.loading=!1},scrollIntoCaret(){i.caretOffscreen&&!i.scrolling&&(me.scrollIntoView({behavior:"smooth",block:"center"}),i.scrolling=!0,setTimeout(()=>i.scrolling=!1,108))},play(e){i.playing=!0,i.scrolling=!1,O.focus(),i.caretOffset===i.total&&k.set(i.caretOffset=i.clipStart=0),i.scrollIntoCaret();let{clipStart:r,clipEnd:t,loop:n}=i,s=()=>Ee.click(),a;i._startTime,i._startTimeOffset=i.caretOffset;let o=async()=>{await new Promise(u=>setTimeout(u,i.latency)),i._startTime=performance.now()*.001,clearInterval(a),a=setInterval(p,10.8)},l=O.getBoundingClientRect().top,f=()=>{if(i.scrolling)return;let u=O.getBoundingClientRect().top;u!==l?(i.scrolling=!0,setTimeout(()=>(i.scrolling=!1,f()),1080)):i.scrolling=!1,l=u},p=()=>{if(f(),i.selecting)return;let u=performance.now()*.001-i._startTime,c=Math.min(i._startTimeOffset+Math.round(i.total*u/i.duration),i.total);n&&(c=Math.min(c,t));let v=k.set(i.caretOffset=c);i.updateCaretLine(v),i.scrollIntoCaret()};w.addEventListener("play",o,{once:!0}),i.loop&&w.addEventListener("seeked",o);let g=G(w,i.loop&&{start:i.duration*i.clipStart/i.total,end:i.duration*i.clipEnd/i.total});return w.addEventListener("ended",s),()=>{w.removeEventListener("seeked",o),w.removeEventListener("ended",s),clearInterval(a),g(),i.playing=!1,i.scrolling=!1,i.loop?k.set(r,t):w.currentTime>=w.duration&&k.set(i.total),O.focus()}},async goto(e){try{await H(e)}catch{await ve()}k.set(i.caretOffset)},updateCaretLine(e){let r=e.range.getClientRects(),t=r[r.length-1];i.caretX=t.right,i.caretY=t.top},updateTimecodes(){if(de.replaceChildren(),!O.textContent)return;let e=0;for(let r of O.children){let t=new Range;t.selectNodeContents(O);let n=Math.round(t.getBoundingClientRect().height/t.getClientRects()[1].height);for(let s=0;sp+g.length,0),l=r.endOffset+i.segments.slice(0,a).reduce((p,g)=>p+g.length,0);this._deleteTimeout?(clearTimeout(this._deleteTimeout),this._deleteOp[1]--):this._deleteOp=["del",o,l];let f=()=>{ye(this._deleteOp),this._deleteOp=this._deleteTimeout=null};this._deleteTimeout=setTimeout(f,280)}},B=async()=>{b.removeEventListener("touchstart",B),b.removeEventListener("mousedown",B),b.removeEventListener("keydown",B),i.latency=await fe()};b.addEventListener("touchstart",B);b.addEventListener("mousedown",B);b.addEventListener("keydown",B);var Be=new IntersectionObserver(([e])=>{i.caretOffscreen=e.isIntersecting?0:e.intersectionRect.top<=e.rootBounds.top?1:e.intersectionRect.bottom>=e.rootBounds.bottom?-1:0},{threshold:.999,rootMargin:"0px"});Be.observe(me);var De=new ResizeObserver(e=>{i.cols=ge(),i.updateTimecodes()});De.observe(O);function ge(){let e=new Range,r=O.firstChild.firstChild;if(!r?.textContent)return;let t=r.textContent;e.setStart(r,0),e.setEnd(r,1);let n=e.getClientRects()[0].y;for(var s=0,a=0;s="\u0300";)o++;e.setStart(r,0),e.setEnd(r,s=s+o);let l=e.getClientRects();if(l[l.length-1].y>n)return a}return t.length}async function ye(...e){let r=new URL(location);for(let 
n of e){let[s,...a]=n;a[0].name?r.searchParams.set(s,a[0].name):r.searchParams.has(s)?r.searchParams.set(s,`${r.searchParams.get(s)}..${a.join("-")}`):r.searchParams.append(s,a.join("-"))}i.loading="Processing";let t=await we(...e);return history.pushState(t,"",decodeURI(r)),i.loading=!1,O.textContent&&console.assert(t.segments.join("")===O.textContent,"Rendered waveform is different from UI"),H(t)}function we(...e){return new Promise(r=>{pe.postMessage({id:history.state?.id||0,ops:e}),pe.addEventListener("message",t=>{r(t.data)},{once:!0})})}function H({url:e,segments:r,duration:t,offsets:n}){return i.total=r.reduce((s,a)=>s+=U(a).length,0),i.duration=t,i.segments=r,i.cols||(i.cols=ge()),i.updateTimecodes(),w.src=e,w.preload="metadata",new Promise((s,a)=>{w.addEventListener("error",a),w.addEventListener("loadedmetadata",()=>{w.currentTime=t*i.caretOffset/i.total||0},{once:!0})})}async function ve(e=new URL(location)){i.loading=" ";let r=[];for(let[n,s]of e.searchParams)r.push(...s.split("..").map(a=>[n,...n==="src"||n==="file"?[a]:a.split("-")]));if(r[0][0]==="src"){let[,n]=r.shift(),a=await(await fetch(n,{cache:"force-cache"})).arrayBuffer();i.loading="Decoding";let o=await he.decodeAudioData(a),l=Array.from({length:o.numberOfChannels},f=>o.getChannelData(f));r.push(["file",{name:n,numberOfChannels:o.numberOfChannels,sampleRate:o.sampleRate,length:o.length,channelData:l}])}let t=await we(...r);history.replaceState(t,"",decodeURI(e)),H(t),i.loading=!1}location.search.length&&ve();
6 | //# sourceMappingURL=wavearea.js.map
7 |
--------------------------------------------------------------------------------
/dist/flac-ZGCWAMZE.js:
--------------------------------------------------------------------------------
1 | import{a as v}from"./chunk-6T6HPD5B.js";import{a as P,b as Y,c as x}from"./chunk-SCMJQOGN.js";import"./chunk-A576P2ZR.js";function u(a){var t=t;function o(e){console.log(e)}function h(e){console.error(e)}function m(){}t={};function _(e){throw e}for(var i=new Uint8Array(123),c=25;c>=0;--c)i[48+c]=52+c,i[65+c]=c,i[97+c]=26+c;i[43]=62,i[47]=63;function p(e){for(var r,s,n=0,d=0,y=e.length,S=new Uint8Array((y*3>>2)-(e[y-2]=="=")-(e[y-1]=="="));n>4,S[d+1]=r<<4|s>>2,S[d+2]=s<<6|i[e.charCodeAt(n+3)];return S}u.wasm||Object.defineProperty(u,"wasm",{get:()=>String.raw`dynEncode00d9