├── .travis.yml
├── src
├── .eslintrc
├── index.test.js
├── styles.module.css
└── index.js
├── .eslintignore
├── example
├── public
│ ├── favicon.ico
│ ├── manifest.json
│ └── index.html
├── src
│ ├── index.js
│ ├── App.test.js
│ ├── index.css
│ └── App.js
├── README.md
└── package.json
├── .editorconfig
├── .prettierrc
├── .gitignore
├── .github
└── workflows
│ └── npm-publish-github-packages.yml
├── .eslintrc
├── package.json
└── README.md
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 | node_js:
3 | - 12
4 | - 10
5 |
--------------------------------------------------------------------------------
/src/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "jest": true
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/.eslintignore:
--------------------------------------------------------------------------------
1 | build/
2 | dist/
3 | node_modules/
4 | .snapshots/
5 | *.min.js
--------------------------------------------------------------------------------
/example/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/doppelgunner/audio-react-recorder/HEAD/example/public/favicon.ico
--------------------------------------------------------------------------------
/src/index.test.js:
--------------------------------------------------------------------------------
1 | import { ExampleComponent } from '.'
2 |
3 | describe('ExampleComponent', () => {
4 | it('is truthy', () => {
5 | expect(ExampleComponent).toBeTruthy()
6 | })
7 | })
8 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | indent_style = space
6 | indent_size = 2
7 | end_of_line = lf
8 | insert_final_newline = true
9 | trim_trailing_whitespace = true
10 |
--------------------------------------------------------------------------------
/example/src/index.js:
--------------------------------------------------------------------------------
1 | import './index.css'
2 |
3 | import React from 'react'
4 | import ReactDOM from 'react-dom'
5 | import App from './App'
6 |
7 | ReactDOM.render(<App />, document.getElementById('root'))
8 |
--------------------------------------------------------------------------------
/src/styles.module.css:
--------------------------------------------------------------------------------
1 | /* add css module styles here (optional) */
2 |
3 | .test {
4 | margin: 2em;
5 | padding: 0.5em;
6 | border: 2px solid #000;
7 | font-size: 2em;
8 | text-align: center;
9 | }
10 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true,
3 | "jsxSingleQuote": true,
4 | "semi": false,
5 | "tabWidth": 2,
6 | "bracketSpacing": true,
7 | "jsxBracketSameLine": false,
8 | "arrowParens": "always",
9 | "trailingComma": "none"
10 | }
11 |
--------------------------------------------------------------------------------
/example/README.md:
--------------------------------------------------------------------------------
1 | This example was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
2 |
3 | It is linked to the audio-react-recorder package in the parent directory for development purposes.
4 |
5 | You can run `npm install` and then `npm start` to test your package.
6 |
--------------------------------------------------------------------------------
/example/src/App.test.js:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import ReactDOM from 'react-dom'
3 | import App from './App'
4 |
5 | it('renders without crashing', () => {
6 | const div = document.createElement('div')
7 |   ReactDOM.render(<App />, div)
8 | ReactDOM.unmountComponentAtNode(div)
9 | })
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # See https://help.github.com/ignore-files/ for more about ignoring files.
3 |
4 | # dependencies
5 | node_modules
6 |
7 | # builds
8 | build
9 | dist
10 | .rpt2_cache
11 |
12 | # misc
13 | .DS_Store
14 | .env
15 | .env.local
16 | .env.development.local
17 | .env.test.local
18 | .env.production.local
19 |
20 | npm-debug.log*
21 | yarn-debug.log*
22 | yarn-error.log*
23 |
--------------------------------------------------------------------------------
/example/public/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "audio-react-recorder",
3 | "name": "audio-react-recorder",
4 | "icons": [
5 | {
6 | "src": "favicon.ico",
7 | "sizes": "64x64 32x32 24x24 16x16",
8 | "type": "image/x-icon"
9 | }
10 | ],
11 | "start_url": ".",
12 | "display": "standalone",
13 | "theme_color": "#000000",
14 | "background_color": "#ffffff"
15 | }
16 |
--------------------------------------------------------------------------------
/example/src/index.css:
--------------------------------------------------------------------------------
1 | body {
2 | margin: 0;
3 | padding: 0;
4 | font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
5 | 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
6 | sans-serif;
7 | -webkit-font-smoothing: antialiased;
8 | -moz-osx-font-smoothing: grayscale;
9 | }
10 |
11 | code {
12 | font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
13 | monospace;
14 | }
15 |
--------------------------------------------------------------------------------
/.github/workflows/npm-publish-github-packages.yml:
--------------------------------------------------------------------------------
1 | name: Publish package to npmjs
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 | publish-gpr:
9 | runs-on: ubuntu-latest
10 | permissions:
11 | contents: read
12 | packages: write
13 | steps:
14 | - uses: actions/checkout@v3
15 | - uses: actions/setup-node@v3
16 | with:
17 | node-version: 16
18 | registry-url: https://registry.npmjs.org
19 | - run: npm ci
20 | - run: npm publish
21 | env:
22 | NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
23 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "parser": "babel-eslint",
3 | "extends": [
4 | "standard",
5 | "standard-react",
6 | "plugin:prettier/recommended",
7 | "prettier/standard",
8 | "prettier/react"
9 | ],
10 | "env": {
11 | "node": true
12 | },
13 | "parserOptions": {
14 | "ecmaVersion": 2020,
15 | "ecmaFeatures": {
16 | "legacyDecorators": true,
17 | "jsx": true
18 | }
19 | },
20 | "settings": {
21 | "react": {
22 | "version": "16"
23 | }
24 | },
25 | "rules": {
26 | "space-before-function-paren": 0,
27 | "react/prop-types": 0,
28 | "react/jsx-handler-names": 0,
29 | "react/jsx-fragments": 0,
30 | "react/no-unused-prop-types": 0,
31 | "import/export": 0
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/example/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audio-react-recorder-example",
3 | "homepage": ".",
4 | "version": "0.0.0",
5 | "private": true,
6 | "scripts": {
7 | "start": "node ../node_modules/react-scripts/bin/react-scripts.js start",
8 | "build": "node ../node_modules/react-scripts/bin/react-scripts.js build",
9 | "test": "node ../node_modules/react-scripts/bin/react-scripts.js test",
10 | "eject": "node ../node_modules/react-scripts/bin/react-scripts.js eject"
11 | },
12 | "dependencies": {
13 | "react": "file:../node_modules/react",
14 | "react-dom": "file:../node_modules/react-dom",
15 | "react-scripts": "file:../node_modules/react-scripts",
16 | "audio-react-recorder": "file:.."
17 | },
18 | "devDependencies": {
19 | "@babel/plugin-syntax-object-rest-spread": "^7.8.3"
20 | },
21 | "eslintConfig": {
22 | "extends": "react-app"
23 | },
24 | "browserslist": [
25 | ">0.2%",
26 | "not dead",
27 | "not ie <= 11",
28 | "not op_mini all"
29 | ]
30 | }
31 |
--------------------------------------------------------------------------------
/example/src/App.js:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 |
3 | import AudioReactRecorder, { RecordState } from 'audio-react-recorder'
4 | import 'audio-react-recorder/dist/index.css'
5 |
6 | class App extends React.Component {
7 | constructor(props) {
8 | super(props)
9 |
10 | this.state = {
11 | recordState: null,
12 | audioData: null
13 | }
14 | }
15 |
16 | start = () => {
17 | this.setState({
18 | recordState: RecordState.START
19 | })
20 | }
21 |
22 | pause = () => {
23 | this.setState({
24 | recordState: RecordState.PAUSE
25 | })
26 | }
27 |
28 | stop = () => {
29 | this.setState({
30 | recordState: RecordState.STOP
31 | })
32 | }
33 |
34 | onStop = (data) => {
35 | this.setState({
36 | audioData: data
37 | })
38 | console.log('onStop: audio data', data)
39 | }
40 |
41 | render() {
42 | const { recordState } = this.state
43 |
44 | return (
45 |       <div>
46 |         <AudioReactRecorder
47 |           state={recordState}
48 |           onStop={this.onStop}
49 |           backgroundColor='rgb(255,255,255)'
50 |         />
51 |         <audio
52 |           id='audio'
53 |           controls
54 |           src={this.state.audioData ? this.state.audioData.url : null}
55 |         ></audio>
56 |         <button id='record' onClick={this.start}>
57 |           Start
58 |         </button>
59 |         <button id='pause' onClick={this.pause}>
60 |           Pause
61 |         </button>
62 |         <button id='stop' onClick={this.stop}>
63 |           Stop
64 |         </button>
65 |       </div>
66 | )
67 | }
68 | }
69 |
70 | export default App
71 |
--------------------------------------------------------------------------------
/example/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
10 |
11 |
12 |
16 |
17 |
18 |
27 | audio-react-recorder
28 |
29 |
30 |
31 |
34 |
35 |
36 |
37 |
47 |
48 |
49 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audio-react-recorder",
3 | "version": "1.0.5",
4 | "description": "Audio / Voice Recorder for React",
5 | "author": "noobieprogrammer",
6 | "license": "MIT",
7 | "repository": {
8 | "type": "git",
9 | "url": "https://github.com/doppelgunner/audio-react-recorder.git"
10 | },
11 | "main": "dist/index.js",
12 | "module": "dist/index.modern.js",
13 | "source": "src/index.js",
14 | "engines": {
15 | "node": ">=10"
16 | },
17 | "homepage": "https://github.com/doppelgunner/audio-react-recorder#readme",
18 | "keywords": [
19 | "react",
20 | "record",
21 | "audio",
22 | "voice",
23 | "record.js",
24 | "mic",
25 | "recorder",
26 | "audiowave"
27 | ],
28 | "scripts": {
29 | "build": "microbundle-crl --no-compress --format modern,cjs",
30 | "start": "microbundle-crl watch --no-compress --format modern,cjs",
31 | "prepare": "run-s build",
32 | "test": "run-s test:unit test:lint test:build",
33 | "test:build": "run-s build",
34 | "test:lint": "eslint .",
35 | "test:unit": "cross-env CI=1 react-scripts test --env=jsdom",
36 | "test:watch": "react-scripts test --env=jsdom",
37 | "predeploy": "cd example && npm install && npm run build",
38 | "deploy": "gh-pages -d example/build"
39 | },
40 | "peerDependencies": {
41 | "react": "^16.0.0"
42 | },
43 | "devDependencies": {
44 | "microbundle-crl": "^0.13.10",
45 | "babel-eslint": "^10.0.3",
46 | "cross-env": "^7.0.2",
47 | "eslint": "^6.8.0",
48 | "eslint-config-prettier": "^6.7.0",
49 | "eslint-config-standard": "^14.1.0",
50 | "eslint-config-standard-react": "^9.2.0",
51 | "eslint-plugin-import": "^2.18.2",
52 | "eslint-plugin-node": "^11.0.0",
53 | "eslint-plugin-prettier": "^3.1.1",
54 | "eslint-plugin-promise": "^4.2.1",
55 | "eslint-plugin-react": "^7.17.0",
56 | "eslint-plugin-standard": "^4.0.1",
57 | "gh-pages": "^2.2.0",
58 | "npm-run-all": "^4.1.5",
59 | "prettier": "^2.0.4",
60 | "react": "^16.13.1",
61 | "react-dom": "^16.13.1",
62 | "react-scripts": "^3.4.1"
63 | },
64 | "files": [
65 | "dist"
66 | ],
67 | "dependencies": {
68 | "prop-types": "^15.7.2"
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # audio-react-recorder
2 |
3 | > Audio / Voice Recorder w/ Audio Wave for React using the [Web Audio API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API)
4 |
5 | [](https://www.npmjs.com/package/audio-react-recorder) [](https://standardjs.com)
6 |
7 | ## Install
8 |
9 | ```bash
10 | npm install --save audio-react-recorder
11 | ```
12 |
13 | ## DEMO
14 |
15 | [AudioReactRecorder demo](https://doppelgunner.github.io/audio-react-recorder/)
16 |
17 | ## Usage
18 |
19 | ```jsx
20 | import React, { Component } from 'react'
21 |
22 | import AudioReactRecorder, { RecordState } from 'audio-react-recorder'
23 |
24 | class App extends Component {
25 | constructor(props) {
26 | super(props)
27 |
28 | this.state = {
29 | recordState: null
30 | }
31 | }
32 |
33 | start = () => {
34 | this.setState({
35 | recordState: RecordState.START
36 | })
37 | }
38 |
39 | stop = () => {
40 | this.setState({
41 | recordState: RecordState.STOP
42 | })
43 | }
44 |
45 | //audioData contains blob and blobUrl
46 | onStop = (audioData) => {
47 | console.log('audioData', audioData)
48 | }
49 |
50 | render() {
51 | const { recordState } = this.state
52 |
53 | return (
54 |       <div>
55 |         <AudioReactRecorder state={recordState} onStop={this.onStop} />
56 |
57 |         <button onClick={this.start}>start</button>
58 |         <button onClick={this.stop}>stop</button>
59 |       </div>
60 | )
61 | }
62 | }
63 | ```
64 |
65 | ## Supported props
66 |
67 | | Property name | Type | Default | Description |
68 | | --------------- | ------------- | ------------------ | ---------------------------------------------------- |
69 | | state | string | RecordState.NONE | RecordState.(NONE,START,STOP,PAUSE) |
70 | | type | string | audio/wav | MIME type of the audio file |
71 | | backgroundColor | string | rgb(200, 200, 200) | Background color of the audio wave / canvas |
72 | | foregroundColor | string | rgb(0, 0, 0) | Foreground color of the audio wave / canvas |
73 | | canvasWidth | number,string | 500 | Canvas width (you can use css to make it responsive) |
74 | | canvasHeight | number,string | 300 | canvas height |
75 |
76 | ## License
77 |
78 | MIT © [noobieprogrammer](https://github.com/noobieprogrammer)
79 |
80 | ## Buy me a coffee or just follow me
81 |
82 | [](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=GCJGP8MTETUYU)
83 |
84 | - [My Blog](https://noobieprogrammer.blogspot.com/)
85 | - [Twitter](https://twitter.com/noobieprogrmmer)
86 | - [Youtube](https://www.youtube.com/channel/UCpzMkMzGopmft5welUr8QZg)
87 |
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import styles from './styles.module.css'
3 | import PropTypes from 'prop-types' // ES6
4 |
5 | // export const AudioReactRecorder = ({ text }) => {
6 | // return BULLSWEET: {text}
7 | // }
8 |
9 | export const RecordState = Object.freeze({
10 | START: 'start',
11 | PAUSE: 'pause',
12 | STOP: 'stop',
13 | NONE: 'none'
14 | })
15 |
16 | export default class AudioReactRecorder extends React.Component {
17 | //0 - constructor
18 | constructor(props) {
19 | super(props)
20 |
21 | this.canvasRef = React.createRef()
22 | }
23 |
24 | //TODO: add the props definitions
25 | static propTypes = {
26 | state: PropTypes.string,
27 | type: PropTypes.string.isRequired,
28 | backgroundColor: PropTypes.string,
29 | foregroundColor: PropTypes.string,
30 | canvasWidth: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
31 | canvasHeight: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
32 |
33 | //method calls
34 | onStop: PropTypes.func
35 | }
36 | static defaultProps = {
37 | state: RecordState.NONE,
38 | type: 'audio/wav',
39 | backgroundColor: 'rgb(200, 200, 200)',
40 | foregroundColor: 'rgb(0, 0, 0)',
41 | canvasWidth: 500,
42 | canvasHeight: 300
43 | }
44 |
45 | //2 - mount
46 | componentDidMount() {
47 | this.init()
48 | }
49 |
50 | componentDidUpdate(prevProps, prevState) {
51 | const { state } = this.props
52 |
53 | this.checkState(prevProps.state, state)
54 | }
55 |
56 | checkState(previousState) {
57 | switch (previousState) {
58 | case RecordState.START:
59 | this.doIfState(RecordState.PAUSE, this.pause)
60 | this.doIfState(RecordState.STOP, this.stop)
61 | break
62 | case RecordState.PAUSE:
63 | this.doIfState(RecordState.START, this.resume)
64 | this.doIfState(RecordState.STOP, this.stop)
65 | break
66 | case RecordState.STOP:
67 | this.doIfState(RecordState.START, this.start)
68 | break
69 | default:
70 | this.doIfState(RecordState.START, this.start)
71 | break
72 | }
73 | }
74 |
75 | doIfState(state, cb) {
76 | if (this.props.state == state) {
77 | cb && cb()
78 | }
79 | }
80 |
81 | //TODO: destroy request animation frame
82 | componentWillUnmount() {}
83 |
84 | //TODO: change to state some conditionals
85 | init = async () => {
86 | this.leftchannel = []
87 | this.rightchannel = []
88 | this.recorder = null
89 | this.recording = false
90 | this.recordingLength = 0
91 | this.volume = null
92 | this.audioInput = null
93 | this.sampleRate = null
94 | this.AudioContext = window.AudioContext || window.webkitAudioContext
95 | this.context = null
96 | this.analyser = null
97 | this.canvas = this.canvasRef.current
98 | this.canvasCtx = this.canvas.getContext('2d')
99 | this.stream = null
100 | this.tested = false
101 |
102 | navigator.getUserMedia =
103 | navigator.getUserMedia ||
104 | navigator.webkitGetUserMedia ||
105 | navigator.mozGetUserMedia
106 | }
107 |
108 | //get mic stream
109 | getStream = (constraints) => {
110 | if (!constraints) {
111 | constraints = { audio: true, video: false }
112 | }
113 |
114 | return navigator.mediaDevices.getUserMedia(constraints)
115 | }
116 |
117 | setUpRecording = () => {
118 | this.context = new this.AudioContext()
119 | this.sampleRate = this.context.sampleRate
120 |
121 | // creates a gain node
122 | this.volume = this.context.createGain()
123 |
124 |     // creates an audio node from the microphone incoming stream
125 | this.audioInput = this.context.createMediaStreamSource(this.stream)
126 |
127 | // Create analyser
128 | this.analyser = this.context.createAnalyser()
129 |
130 | // connect audio input to the analyser
131 | this.audioInput.connect(this.analyser)
132 |
133 | // connect analyser to the volume control
134 | // analyser.connect(volume);
135 |
136 | let bufferSize = 2048
137 | this.recorder = this.context.createScriptProcessor(bufferSize, 2, 2)
138 |
139 | // we connect the volume control to the processor
140 | // volume.connect(recorder);
141 |
142 | this.analyser.connect(this.recorder)
143 |
144 | // finally connect the processor to the output
145 | this.recorder.connect(this.context.destination)
146 |
147 | const self = this
148 | this.recorder.onaudioprocess = function (e) {
149 | // Check
150 | if (!self.recording) return
151 | // Do something with the data, i.e Convert this to WAV
152 | let left = e.inputBuffer.getChannelData(0)
153 | let right = e.inputBuffer.getChannelData(1)
154 | if (!self.tested) {
155 | self.tested = true
156 | // if this reduces to 0 we are not getting any sound
157 | if (!left.reduce((a, b) => a + b)) {
158 | console.log('Error: There seems to be an issue with your Mic')
159 | // clean up;
160 | self.stop()
161 | self.stream.getTracks().forEach(function (track) {
162 | track.stop()
163 | })
164 | // self.context.close() // error being thrown on this line
165 | }
166 | }
167 | // we clone the samples
168 | self.leftchannel.push(new Float32Array(left))
169 | self.rightchannel.push(new Float32Array(right))
170 | self.recordingLength += bufferSize
171 | }
172 | this.visualize()
173 | }
174 |
175 | mergeBuffers = (channelBuffer, recordingLength) => {
176 | let result = new Float32Array(recordingLength)
177 | let offset = 0
178 | let lng = channelBuffer.length
179 | for (let i = 0; i < lng; i++) {
180 | let buffer = channelBuffer[i]
181 | result.set(buffer, offset)
182 | offset += buffer.length
183 | }
184 | return result
185 | }
186 |
187 | interleave = (leftChannel, rightChannel) => {
188 | let length = leftChannel.length + rightChannel.length
189 | let result = new Float32Array(length)
190 |
191 | let inputIndex = 0
192 |
193 | for (let index = 0; index < length; ) {
194 | result[index++] = leftChannel[inputIndex]
195 | result[index++] = rightChannel[inputIndex]
196 | inputIndex++
197 | }
198 | return result
199 | }
200 |
201 | writeUTFBytes = (view, offset, string) => {
202 | let lng = string.length
203 | for (let i = 0; i < lng; i++) {
204 | view.setUint8(offset + i, string.charCodeAt(i))
205 | }
206 | }
207 |
208 | visualize = () => {
209 | const { backgroundColor, foregroundColor } = this.props
210 |
211 | this.WIDTH = this.canvas.width
212 | this.HEIGHT = this.canvas.height
213 | this.CENTERX = this.canvas.width / 2
214 | this.CENTERY = this.canvas.height / 2
215 |
216 | if (!this.analyser) return
217 |
218 | this.analyser.fftSize = 2048
219 | const bufferLength = this.analyser.fftSize
220 | const dataArray = new Uint8Array(bufferLength)
221 |
222 | this.canvasCtx.clearRect(0, 0, this.WIDTH, this.HEIGHT)
223 |
224 | //reference this using self
225 | let self = this
226 | const draw = function () {
227 | self.drawVisual = requestAnimationFrame(draw)
228 |
229 | self.analyser.getByteTimeDomainData(dataArray)
230 |
231 | self.canvasCtx.fillStyle = backgroundColor
232 | self.canvasCtx.fillRect(0, 0, self.WIDTH, self.HEIGHT)
233 |
234 | self.canvasCtx.lineWidth = 2
235 | self.canvasCtx.strokeStyle = foregroundColor
236 |
237 | self.canvasCtx.beginPath()
238 |
239 | var sliceWidth = (self.WIDTH * 1.0) / bufferLength
240 | var x = 0
241 |
242 | for (var i = 0; i < bufferLength; i++) {
243 | var v = dataArray[i] / 128.0
244 | var y = (v * self.HEIGHT) / 2
245 |
246 | if (i === 0) {
247 | self.canvasCtx.moveTo(x, y)
248 | } else {
249 | self.canvasCtx.lineTo(x, y)
250 | }
251 |
252 | x += sliceWidth
253 | }
254 |
255 | self.canvasCtx.lineTo(self.canvas.width, self.canvas.height / 2)
256 | self.canvasCtx.stroke()
257 | }
258 |
259 | draw()
260 | }
261 |
262 | setupMic = async () => {
263 | //TODO: only get stream after clicking start
264 | try {
265 | window.stream = this.stream = await this.getStream()
266 | //TODO: on got stream
267 | } catch (err) {
268 | //TODO: error getting stream
269 | console.log('Error: Issue getting mic', err)
270 | }
271 |
272 | this.setUpRecording()
273 | }
274 |
275 | start = async () => {
276 | await this.setupMic()
277 |
278 | this.recording = true
279 | // reset the buffers for the new recording
280 | this.leftchannel.length = this.rightchannel.length = 0
281 | this.recordingLength = 0
282 | }
283 |
284 | stop = () => {
285 | const { onStop, type } = this.props
286 |
287 | this.recording = false
288 | this.closeMic()
289 |
290 | // we flat the left and right channels down
291 | this.leftBuffer = this.mergeBuffers(this.leftchannel, this.recordingLength)
292 | this.rightBuffer = this.mergeBuffers(
293 | this.rightchannel,
294 | this.recordingLength
295 | )
296 | // we interleave both channels together
297 | let interleaved = this.interleave(this.leftBuffer, this.rightBuffer)
298 |
299 | ///////////// WAV Encode /////////////////
300 | // from http://typedarray.org/from-microphone-to-wav-with-getusermedia-and-web-audio/
301 | //
302 |
303 | // we create our wav file
304 | let buffer = new ArrayBuffer(44 + interleaved.length * 2)
305 | let view = new DataView(buffer)
306 |
307 | // RIFF chunk descriptor
308 | this.writeUTFBytes(view, 0, 'RIFF')
309 | view.setUint32(4, 44 + interleaved.length * 2, true)
310 | this.writeUTFBytes(view, 8, 'WAVE')
311 | // FMT sub-chunk
312 | this.writeUTFBytes(view, 12, 'fmt ')
313 | view.setUint32(16, 16, true)
314 | view.setUint16(20, 1, true)
315 | // stereo (2 channels)
316 | view.setUint16(22, 2, true)
317 | view.setUint32(24, this.sampleRate, true)
318 | view.setUint32(28, this.sampleRate * 4, true)
319 | view.setUint16(32, 4, true)
320 | view.setUint16(34, 16, true)
321 | // data sub-chunk
322 | this.writeUTFBytes(view, 36, 'data')
323 | view.setUint32(40, interleaved.length * 2, true)
324 |
325 | // write the PCM samples
326 | let lng = interleaved.length
327 | let index = 44
328 | let volume = 1
329 | for (let i = 0; i < lng; i++) {
330 | view.setInt16(index, interleaved[i] * (0x7fff * volume), true)
331 | index += 2
332 | }
333 |
334 | // our final binary blob
335 | const blob = new Blob([view], { type: type })
336 | const audioUrl = URL.createObjectURL(blob)
337 |
338 | onStop &&
339 | onStop({
340 | blob: blob,
341 | url: audioUrl,
342 | type
343 | })
344 | }
345 |
346 | pause = () => {
347 | this.recording = false
348 | this.closeMic()
349 | }
350 |
351 | resume = () => {
352 | this.setupMic()
353 | this.recording = true
354 | }
355 |
356 | closeMic = () => {
357 | this.stream.getAudioTracks().forEach((track) => {
358 | track.stop()
359 | })
360 | this.audioInput.disconnect(0)
361 | this.analyser.disconnect(0)
362 | this.recorder.disconnect(0)
363 | }
364 |
365 | //1 - render
366 | render() {
367 | const { canvasWidth, canvasHeight } = this.props
368 |
369 | return (
370 |       <div>
371 |         <canvas
372 |           ref={this.canvasRef}
373 |           width={canvasWidth}
374 |           height={canvasHeight}
375 |           className={styles.canvas}
376 |         />
377 |       </div>
378 | )
379 | }
380 | }
381 |
--------------------------------------------------------------------------------