├── .babelrc
├── .editorconfig
├── .eslintignore
├── .eslintrc
├── .github
├── FUNDING.yml
└── workflows
│ ├── ci-lint.yml
│ └── ci-test.yml
├── .gitignore
├── .prettierignore
├── .prettierrc
├── Gruntfile.js
├── LICENSE
├── README.md
├── docs
├── css
│ ├── f
│ │ ├── AvenirNextLTW01-Medium.eot
│ │ ├── AvenirNextLTW01-Medium.svg
│ │ ├── AvenirNextLTW01-Medium.ttf
│ │ ├── AvenirNextLTW01-Medium.woff
│ │ ├── inconsolata.eot
│ │ ├── inconsolata.otf
│ │ ├── inconsolata.svg
│ │ ├── inconsolata.ttf
│ │ └── inconsolata.woff
│ ├── main.css
│ ├── normalize.css
│ └── prism.css
├── img
│ ├── .gitignore
│ ├── first_sketch.png
│ ├── p5js-svg.txt
│ ├── p5js.svg
│ ├── sublime.png
│ ├── test-image-0.jpg
│ ├── test-image-1.jpg
│ ├── thick-asterisk-alone-gray.svg
│ ├── thick-asterisk-alone-stroke.svg
│ ├── thick-asterisk-alone.svg
│ └── thick-asterisk-alone.txt
├── js
│ ├── main.js
│ ├── p5.js
│ ├── render.js
│ └── vendor
│ │ ├── jquery-1.9.1.min.js
│ │ ├── modernizr-2.6.2.min.js
│ │ └── prism.js
├── progress.txt
├── reference
│ ├── api.js
│ ├── assets
│ │ ├── css
│ │ │ ├── external-small.png
│ │ │ ├── logo.png
│ │ │ └── main.css
│ │ ├── favicon.png
│ │ ├── img
│ │ │ └── spinner.gif
│ │ ├── index.html
│ │ ├── js
│ │ │ ├── api-filter.js
│ │ │ ├── api-list.js
│ │ │ ├── api-search.js
│ │ │ ├── apidocs.js
│ │ │ ├── p5.dom.js
│ │ │ ├── p5.sound.js
│ │ │ ├── reference.js
│ │ │ ├── reference.js.map
│ │ │ ├── require.min.js
│ │ │ └── yui-prettify.js
│ │ └── vendor
│ │ │ └── prettify
│ │ │ ├── CHANGES.html
│ │ │ ├── COPYING
│ │ │ ├── README.html
│ │ │ ├── prettify-min.css
│ │ │ └── prettify-min.js
│ ├── classes
│ │ ├── Amplitude.html
│ │ ├── AudioIn.html
│ │ ├── Env.html
│ │ ├── FFT.html
│ │ ├── Noise.html
│ │ ├── Oscillator.html
│ │ ├── Pulse.html
│ │ ├── SoundFile.html
│ │ ├── p5.Element.html
│ │ ├── p5.MediaElement.html
│ │ ├── p5.dom.html
│ │ └── p5.sound.html
│ ├── data.json
│ ├── files
│ │ └── lib_addons_p5.dom.js.html
│ ├── index.html
│ └── modules
│ │ ├── p5.dom.html
│ │ └── p5.sound.html
└── wiki-assets
│ ├── before-after.png
│ ├── browser-tests.png
│ ├── file-architecture.png
│ └── mocha-chrome-tests.png
├── examples
├── Compressor
│ ├── index.html
│ └── sketch.js
├── DelayNoiseEnvelope
│ ├── index.html
│ └── sketch.js
├── FFT_freqRange
│ ├── index.html
│ └── sketch.js
├── FFT_frequency_spectrum
│ ├── index.html
│ └── sketch.js
├── FFT_linAverages
│ ├── index.html
│ └── sketch.js
├── FFT_logAverages
│ ├── index.html
│ └── sketch.js
├── FFT_scaleNeighbors
│ ├── index.html
│ └── sketch.js
├── FFT_scaleOneThirdOctave
│ ├── index.html
│ └── sketch.js
├── Filter_BandPass
│ ├── index.html
│ └── sketch.js
├── Filter_LowPass
│ ├── index.html
│ └── sketch.js
├── Reverb_basic
│ ├── index.html
│ └── sketch.js
├── Reverb_convolve
│ ├── index.html
│ └── sketch.js
├── Reverb_convolve_FFT
│ ├── index.html
│ └── sketch.js
├── _monosynth_basic
│ ├── index.html
│ └── sketch.js
├── _polyphonic_synth
│ ├── Piano.json
│ ├── Piano_imag.txt
│ ├── Piano_real.txt
│ ├── Trombone.json
│ ├── Trombone_imag.txt
│ ├── Trombone_real.txt
│ ├── Twelve String Guitar 1.json
│ ├── TwelveStringGuitar_imag.txt
│ ├── TwelveStringGuitar_real.txt
│ ├── Wurlitzer_2.json
│ ├── Wurlitzer_2_imag.txt
│ ├── Wurlitzer_2_real.txt
│ ├── index.html
│ └── sketch.js
├── _sound_loop
│ ├── index.html
│ └── sketch.js
├── amplitude_analysis
│ ├── index.html
│ └── sketch.js
├── array_of_notes
│ ├── index.html
│ └── sketch.js
├── array_of_notes_soundloop
│ ├── index.html
│ └── sketch.js
├── audioIn_Multiple_Sources
│ ├── index.html
│ └── sketch.js
├── autoCorrelation
│ ├── index.html
│ └── sketch.js
├── bells_envelope_test
│ ├── assets
│ │ └── LadyChapelStAlbansCathedral.wav
│ ├── index.html
│ └── sketch.js
├── distortion
│ ├── index.html
│ └── sketch.js
├── envAmpFreq
│ ├── index.html
│ └── sketch.js
├── envExp
│ ├── index.html
│ └── sketch.js
├── envelope
│ ├── index.html
│ └── sketch.js
├── envelopeMultipleSources
│ ├── index.html
│ └── sketch.js
├── envelopeOnOff
│ ├── index.html
│ └── sketch.js
├── envelopeRamp
│ ├── index.html
│ └── sketch.js
├── envelope_designer
│ ├── index.html
│ └── sketch.js
├── envelope_exponential_play
│ ├── index.html
│ └── sketch.js
├── envelope_exponential_trig_rel
│ ├── index.html
│ └── sketch.js
├── files
│ ├── Damscray_-_Dancing_Tiger_01.mp3
│ ├── Damscray_-_Dancing_Tiger_01.ogg
│ ├── Damscray_-_Dancing_Tiger_02.mp3
│ ├── Damscray_-_Dancing_Tiger_02.ogg
│ ├── Damscray_DancingTiger.mp3
│ ├── Damscray_DancingTiger.ogg
│ ├── Soni_Ventorum_Wind_Quintet_-_08_-_Danzi_Wind_Quintet_Op_67_No_3_In_E-Flat_Major_4_Allegretto.mp3
│ ├── Tripping.mp3
│ ├── Tripping.ogg
│ ├── beat.mp3
│ ├── beat.ogg
│ ├── beatbox.mp3
│ ├── beatbox.ogg
│ ├── bx-spring.mp3
│ ├── bx-spring.ogg
│ ├── concrete-tunnel.mp3
│ ├── concrete-tunnel.ogg
│ ├── doorbell.mp3
│ ├── doorbell.ogg
│ ├── drum.mp3
│ ├── drum.ogg
│ ├── large-dark-plate.mp3
│ ├── large-dark-plate.ogg
│ ├── lucky_dragons_-_power_melody.mp3
│ ├── lucky_dragons_-_power_melody.ogg
│ ├── small-plate.mp3
│ ├── small-plate.ogg
│ ├── studio-b.mp3
│ └── studio-b.ogg
├── fractal_music
│ ├── index.html
│ └── sketch.js
├── genetic_music
│ ├── index.html
│ └── sketch.js
├── granular_sampler
│ ├── index.html
│ └── sketch.js
├── granular_sampler_psynth
│ ├── index.html
│ └── sketch.js
├── graphical_eq
│ ├── index.html
│ └── sketch.js
├── grid_sequencer
│ ├── index.html
│ └── sketch.js
├── learningProcessingExamples
│ ├── 01a_loadSound_playback
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 01b_preloadSound_playback
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 01c_soundFormats
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 02_sound_effect
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 03_manipulate_sound
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 04_pan
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 06_noise
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 06_oscillator_frequency
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 07_envelope
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 08_amplitude_analysis
│ │ ├── index.html
│ │ └── sketch.js
│ ├── 09_live_input
│ │ ├── index.html
│ │ └── sketch.js
│ └── 10_mic_threshold
│ │ ├── index.html
│ │ └── sketch.js
├── loadSound_404ErrorHandling
│ ├── index.html
│ └── sketch.js
├── loadSound_callback
│ ├── index.html
│ └── sketch.js
├── loadSound_preload
│ ├── index.html
│ └── sketch.js
├── loadSound_with_Drag_and_Drop
│ ├── index.html
│ └── sketch.js
├── loop_stepSequencer
│ ├── index.html
│ └── sketch.js
├── looper_simple
│ ├── index.html
│ └── sketch.js
├── markov_music
│ ├── index.html
│ └── sketch.js
├── micFFT
│ ├── index.html
│ └── sketch.js
├── micLevel
│ ├── index.html
│ └── sketch.js
├── micLevel_on_off
│ ├── index.html
│ └── sketch.js
├── mixingSounds
│ ├── index.html
│ └── sketch.js
├── noiseMod_AM
│ ├── index.html
│ └── sketch.js
├── onended_callback
│ ├── index.html
│ └── sketch.js
├── oscillatorMod_AM
│ ├── index.html
│ └── sketch.js
├── oscillatorMod_FM
│ ├── index.html
│ └── sketch.js
├── oscillatorSecondsFromNow
│ ├── index.html
│ └── sketch.js
├── oscillatorWaveform
│ ├── index.html
│ └── sketch.js
├── oscillator_FMSynth
│ ├── index.html
│ └── sketch.js
├── outOfPhase
│ ├── index.html
│ └── sketch.js
├── pan_soundfile
│ ├── index.html
│ └── sketch.js
├── pause_soundfile
│ ├── index.html
│ └── sketch.js
├── peakDetect
│ ├── index.html
│ └── sketch.js
├── peakDetect_basic
│ ├── index.html
│ └── sketch.js
├── play_soundfile
│ ├── index.html
│ └── sketch.js
├── playbackRate
│ ├── index.html
│ └── sketch.js
├── playbackRatePart
│ ├── index.html
│ └── sketch.js
├── polyphonicSynth-Keyboard
│ ├── index.html
│ └── sketch.js
├── pulseWaveform
│ ├── index.html
│ └── sketch.js
├── record
│ ├── index.html
│ └── sketch.js
├── recordLoops
│ ├── index.html
│ └── sketch.js
├── removeSketch
│ ├── index.html
│ └── sketch.js
├── soundFormats
│ ├── index.html
│ └── sketch.js
├── soundfileMod_AM
│ ├── index.html
│ └── sketch.js
├── soundfile_playMode
│ ├── index.html
│ └── sketch.js
├── soundfile_remove_cue
│ ├── index.html
│ └── sketch.js
├── spatial_panning
│ ├── index.html
│ └── sketch.js
├── spatial_panning_listener
│ ├── boid.js
│ ├── flock.js
│ ├── index.html
│ └── sketch.js
├── virtual_piano
│ ├── index.html
│ └── sketch.js
├── visualize_pentatonic
│ ├── index.html
│ └── sketch.js
├── waveform
│ ├── index.html
│ └── sketch.js
└── waveform_peaks_with_playhead
│ ├── index.html
│ └── sketch.js
├── fragments
├── after.frag
└── before.frag
├── lib
├── p5.sound.js
├── p5.sound.js.map
├── p5.sound.min.js
└── p5.sound.min.js.map
├── package-lock.json
├── package.json
├── src
├── amplitude.js
├── app.js
├── audioVoice.js
├── audioWorklet
│ ├── .eslintrc
│ ├── amplitudeProcessor.js
│ ├── index.js
│ ├── processorNames.js
│ ├── recorderProcessor.js
│ ├── ringBuffer.js
│ └── soundFileProcessor.js
├── audiocontext.js
├── audioin.js
├── compressor.js
├── delay.js
├── deprecations
│ ├── Env.js
│ └── Signal.js
├── distortion.js
├── effect.js
├── envelope.js
├── eq.js
├── eqFilter.js
├── errorHandler.js
├── fft.js
├── filter.js
├── gain.js
├── helpers.js
├── listener3d.js
├── looper.js
├── main.js
├── metro.js
├── monosynth.js
├── noise.js
├── onsetDetect.js
├── oscillator.js
├── panner.js
├── panner3d.js
├── peakDetect.js
├── polysynth.js
├── pulse.js
├── reverb.js
├── shims.js
├── soundLoop.js
├── soundRecorder.js
└── soundfile.js
├── templates
└── pre-commit-hook.js
├── test
├── index.html
├── setup.js
├── testAudio
│ ├── bx-spring.mp3
│ ├── bx-spring.ogg
│ ├── drum.mp3
│ └── drum.ogg
├── tests.js
└── tests
│ ├── main.js
│ ├── p5.Amplitude.js
│ ├── p5.AudioContext.js
│ ├── p5.AudioIn.js
│ ├── p5.AudioVoice.js
│ ├── p5.Compressor.js
│ ├── p5.Delay.js
│ ├── p5.Distortion.js
│ ├── p5.EQ.js
│ ├── p5.Effect.js
│ ├── p5.Envelope.js
│ ├── p5.FFT.js
│ ├── p5.Filter.js
│ ├── p5.Gain.js
│ ├── p5.Helpers.js
│ ├── p5.Listener3d.js
│ ├── p5.Looper.js
│ ├── p5.Metro.js
│ ├── p5.MonoSynth.js
│ ├── p5.Noise.js
│ ├── p5.OnsetDetect.js
│ ├── p5.Oscillator.js
│ ├── p5.Panner.js
│ ├── p5.Panner3d.js
│ ├── p5.PeakDetect.js
│ ├── p5.PolySynth.js
│ ├── p5.Pulse.js
│ ├── p5.Reverb.js
│ ├── p5.SoundFile.js
│ ├── p5.SoundLoop.js
│ └── p5.SoundRecorder.js
├── todo.md
└── webpack.config.js
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": ["@babel/preset-env"],
3 | "plugins": ["preval"]
4 | }
5 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig: http://EditorConfig.org
2 |
3 | # Top-most EditorConfig file
4 | root = true
5 |
6 | # Rules for JavaScript files:
7 |
8 | [*.js]
9 | # 2 space indentation
10 | indent_style = space
11 | indent_size = 2
12 | # No trailing spaces
13 | trim_trailing_whitespace = true
14 | # Unix-style newlines
15 | end_of_line = lf
16 | # Newline ending every file
17 | insert_final_newline = true
--------------------------------------------------------------------------------
/.eslintignore:
--------------------------------------------------------------------------------
1 | Gruntfile.js
2 | webpack.config.js
3 | lib/
4 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "jasmine": true,
4 | "node": true,
5 | "mocha": true,
6 | "browser": true,
7 | "builtin": true,
8 | "es6": true
9 | },
10 | "globals": {
11 | "p5": true,
12 | "define": true,
13 | "Float32Array": true,
14 | "Uint8Array": true
15 | },
16 | "extends": ["eslint:recommended", "prettier"],
17 | "plugins": ["prettier"],
18 | "rules": {
19 | "prettier/prettier": ["error"],
20 | "no-cond-assign": [2, "except-parens"],
21 | "eqeqeq": ["error", "smart"],
22 | "no-use-before-define": [
23 | 2,
24 | {
25 | "functions": false
26 | }
27 | ],
28 | "new-cap": 0,
29 | "no-caller": 2,
30 | "no-undef": 0,
31 | "no-unused-vars": ["error", { "args": "none" }],
32 | "no-empty": ["error", { "allowEmptyCatch": true }],
33 | "no-console": "off"
34 |
35 | },
36 | "parserOptions": {
37 | "ecmaVersion": 8,
38 | "sourceType": "module",
39 | "allowImportExportEverywhere": true
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: processing
2 | custom: https://processingfoundation.org/
3 |
--------------------------------------------------------------------------------
/.github/workflows/ci-lint.yml:
--------------------------------------------------------------------------------
1 | # This workflow will do a clean install of node dependencies and run the linter across different versions of node
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
3 |
4 | name: Linting
5 |
6 | on:
7 | push:
8 | branches: [main]
9 | pull_request:
10 | branches: [main]
11 |
12 | jobs:
13 | lint:
14 | runs-on: ubuntu-latest
15 |
16 | strategy:
17 | matrix:
18 | node: [16.x, 18.x]
19 |
20 | steps:
21 | - uses: actions/checkout@v3
22 | - name: Use Node.js ${{ matrix.node }}
23 | uses: actions/setup-node@v3
24 | with:
25 | node-version: ${{ matrix.node }}
26 | - run: npm ci
27 | - run: npm run lint
28 |
--------------------------------------------------------------------------------
/.github/workflows/ci-test.yml:
--------------------------------------------------------------------------------
1 | name: Testing
2 |
3 | on:
4 | push:
5 | branches: [ main ]
6 | pull_request:
7 | branches: [ main ]
8 |
9 | jobs:
10 | test:
11 | runs-on: ubuntu-latest
12 | strategy:
13 | matrix:
14 | node-version: [16.x]
15 |
16 |
17 | steps:
18 | - uses: actions/checkout@v3
19 | - name: Use Node.js ${{ matrix.node-version }}
20 | uses: actions/setup-node@v3
21 | with:
22 | node-version: ${{ matrix.node-version }}
23 | - name: Get node modules
24 | run: npm ci
25 | env:
26 | CI: true
27 | - name: Build
28 | run: npm run build
29 | env:
30 | CI: true
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Apple
2 | *.DS_Store
3 |
4 | # npm
5 | node_modules
6 |
7 | # Visual Studio
8 | .vscode
9 |
10 | # others
11 | docs
12 | examples/__test
13 | lib/p5.*
14 | p5soundnotes
15 |
16 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | Gruntfile.js
2 | webpack.config.js
3 | lib/
4 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true
3 | }
4 |
--------------------------------------------------------------------------------
/Gruntfile.js:
--------------------------------------------------------------------------------
1 | const webpackConfig = require('./webpack.config.js');
2 |
3 | module.exports = function(grunt) {
4 |
5 | grunt.initConfig({
6 | pkg: grunt.file.readJSON('package.json'),
7 | decomment: {
8 | any: {
9 | // remove comments added by webpack from the build
10 | files: {
11 | "./lib/p5.sound.js": "./lib/p5.sound.js",
12 | },
13 | options: {
14 | ignore: [
15 | // keep JSDoc comments (p5.js repo's YUIDoc task parses those for documentation)
16 | /\/\*\*\s*\n([^\*]|(\*(?!\/)))*\*\//g,
17 | // keep the version number
18 | /.*Version.*/
19 | ]
20 | }
21 | }
22 | },
23 | // Configure style consistency
24 | eslint: {
25 | source: {
26 | options: {
27 | configFile: './.eslintrc',
28 | fix: true
29 | },
30 | src: ['src/**/*.js', 'test/tests/**/*.js']
31 | },
32 | sourceNofix: {
33 | options: {
34 | configFile: './.eslintrc',
35 | fix: false
36 | },
37 | src: ['src/**/*.js', 'test/tests/**/*.js']
38 | }
39 | },
40 | webpack: {
41 | prod: webpackConfig,
42 | dev: Object.assign({ watch: true }, webpackConfig)
43 | },
44 | open: {
45 | testChrome: {
46 | path: 'http://localhost:8000/test',
47 | app: 'Chrome'
48 | },
49 | testFirefox : {
50 | path: 'http://localhost:8000/test',
51 | app: 'Firefox'
52 | },
53 | testSafari : {
54 | path: 'http://localhost:8000/test',
55 | app: 'Safari'
56 | }
57 | },
58 | connect: {
59 | server: {
60 | options: {
61 | port: 8000,
62 | livereload: 35727,
63 | hostname: '*'
64 | }
65 | }
66 | },
67 | githooks: {
68 | all: {
69 | options:{
70 | template:"templates/pre-commit-hook.js"
71 | },
72 | 'pre-commit':'lint-nofix' //runs eslint in -nofix mode before every git commit
73 | }
74 | }
75 | });
76 |
77 |
78 | grunt.loadNpmTasks('grunt-webpack');
79 | grunt.loadNpmTasks('grunt-eslint');
80 | grunt.loadNpmTasks('grunt-contrib-connect');
81 | grunt.loadNpmTasks('grunt-open');
82 | grunt.loadNpmTasks('grunt-decomment');
83 | grunt.loadNpmTasks('grunt-githooks');
84 |
85 | grunt.registerTask('lint', ['eslint:source']);
86 | grunt.registerTask('lint-nofix', ['eslint:sourceNofix']);
87 | grunt.registerTask('default', ['webpack:prod', 'decomment']);
88 | grunt.registerTask('dev', ['eslint','connect','webpack:dev', 'decomment']);
89 | grunt.registerTask('serve', 'connect:server:keepalive');
90 | grunt.registerTask('run-tests', ['serve', 'open']);
91 | };
92 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014-2015 The Processing Foundation
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs/css/f/AvenirNextLTW01-Medium.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/css/f/AvenirNextLTW01-Medium.eot
--------------------------------------------------------------------------------
/docs/css/f/AvenirNextLTW01-Medium.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/css/f/AvenirNextLTW01-Medium.ttf
--------------------------------------------------------------------------------
/docs/css/f/AvenirNextLTW01-Medium.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/css/f/AvenirNextLTW01-Medium.woff
--------------------------------------------------------------------------------
/docs/css/f/inconsolata.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/css/f/inconsolata.eot
--------------------------------------------------------------------------------
/docs/css/f/inconsolata.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/css/f/inconsolata.otf
--------------------------------------------------------------------------------
/docs/css/f/inconsolata.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/css/f/inconsolata.ttf
--------------------------------------------------------------------------------
/docs/css/f/inconsolata.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/css/f/inconsolata.woff
--------------------------------------------------------------------------------
/docs/img/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/img/.gitignore
--------------------------------------------------------------------------------
/docs/img/first_sketch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/img/first_sketch.png
--------------------------------------------------------------------------------
/docs/img/sublime.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/img/sublime.png
--------------------------------------------------------------------------------
/docs/img/test-image-0.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/img/test-image-0.jpg
--------------------------------------------------------------------------------
/docs/img/test-image-1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/img/test-image-1.jpg
--------------------------------------------------------------------------------
/docs/img/thick-asterisk-alone-gray.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/docs/img/thick-asterisk-alone-stroke.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/docs/img/thick-asterisk-alone.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/docs/img/thick-asterisk-alone.txt:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
9 |
--------------------------------------------------------------------------------
/docs/js/main.js:
--------------------------------------------------------------------------------
1 | /*
2 |
3 | Hastily written scroll to fixed position:
4 |
5 | TO DO:
6 |
7 | - needs to not fail on resize, or on resize after scrolling,
8 | - and needs to use animate() to add jquery easing
9 |
10 | */
11 |
12 | var elementPosition = $('#menu').offset();
13 |
14 | $(window).scroll(function(){
15 | if($(window).scrollTop() > elementPosition.top){
16 | $('#menu').css({'position':'fixed','top':'0', 'z-index': '9999'});
17 |
18 | } else {
19 | $('#menu').css({'position':'static'});
20 | }
21 | });
22 |
23 | window.onload = function() {
24 | //renderCode('demo');
25 | }
--------------------------------------------------------------------------------
/docs/progress.txt:
--------------------------------------------------------------------------------
1 |
2 |
3 | //// Environment
4 | preload()
5 |
6 |
7 | //// DOM
8 |
9 |
10 | //// SHAPE
11 |
12 | createShape()
13 | loadShape()
14 | PShape
15 |
16 |
17 |
18 |
19 | curveTightness()
20 | curveVertex()
21 |
22 |
23 |
24 |
25 | // Loading & Displaying
26 | shape()
27 | shapeMode()
28 |
29 |
30 | //// INPUT
31 |
32 |
33 |
34 | // Files
35 | BufferedReader
36 | createInput()
37 | createReader()
38 | loadBytes()
39 |
40 | loadTable()
41 | open()
42 | parseXML()
43 | selectFolder()
44 | selectInput()
45 |
46 |
47 | //// OUTPUT
48 |
49 | // Image
50 | save()
51 | saveFrame()
52 |
53 | // Files
54 | beginRaw()
55 | beginRecord()
56 | createOutput()
57 | createWriter()
58 | endRaw()
59 | endRecord()
60 | PrintWriter
61 | saveBytes()
62 | saveJSONArray()
63 | saveJSONObject()
64 | saveStream()
65 | saveStrings()
66 | saveTable()
67 | saveXML()
68 | selectOutput()
69 |
70 |
71 |
72 | // Image
73 | // Loading & Displaying
74 |
75 | noTint()
76 | tint()
77 |
78 |
79 |
80 | // Rendering
81 |
82 | blendMode()
83 | createGraphics()
84 | PGraphics
85 |
86 |
87 |
88 | // Typography
89 |
90 |
91 | PFont
92 | // Loading & Displaying
93 | createFont()
94 | loadFont()
95 |
96 |
97 |
98 |
99 | // Metrics
100 | textAscent()
101 | textDescent()
102 |
103 |
104 |
105 |
106 |
107 | // Math
108 |
109 | // Random
110 | noise()
111 | noiseDetail()
112 | noiseSeed()
113 | random()
114 | randomGaussian()
115 | randomSeed()
116 |
--------------------------------------------------------------------------------
/docs/reference/api.js:
--------------------------------------------------------------------------------
1 | YUI.add("yuidoc-meta", function(Y) {
2 | Y.YUIDoc = { meta: {
3 | "classes": [
4 | "Amplitude",
5 | "AudioIn",
6 | "Env",
7 | "FFT",
8 | "Noise",
9 | "Oscillator",
10 | "Pulse",
11 | "SoundFile",
12 | "p5.Element",
13 | "p5.MediaElement",
14 | "p5.dom",
15 | "p5.sound"
16 | ],
17 | "modules": [
18 | "p5.dom",
19 | "p5.sound"
20 | ],
21 | "allModules": [
22 | {
23 | "displayName": "p5.dom",
24 | "name": "p5.dom",
25 | "description": "This is the p5.dom library."
26 | },
27 | {
28 | "displayName": "p5.sound",
29 | "name": "p5.sound",
30 | "description": "p5.sound extends p5 with Web Audio functionality including audio input, playback, analysis and synthesis."
31 | }
32 | ]
33 | } };
34 | });
--------------------------------------------------------------------------------
/docs/reference/assets/css/external-small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/reference/assets/css/external-small.png
--------------------------------------------------------------------------------
/docs/reference/assets/css/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/reference/assets/css/logo.png
--------------------------------------------------------------------------------
/docs/reference/assets/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/reference/assets/favicon.png
--------------------------------------------------------------------------------
/docs/reference/assets/img/spinner.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/reference/assets/img/spinner.gif
--------------------------------------------------------------------------------
/docs/reference/assets/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Redirector
5 |
6 |
7 |
8 | Click here to redirect
9 |
10 |
11 |
--------------------------------------------------------------------------------
/docs/reference/assets/js/api-filter.js:
--------------------------------------------------------------------------------
1 | YUI.add('api-filter', function (Y) {
2 |
3 | Y.APIFilter = Y.Base.create('apiFilter', Y.Base, [Y.AutoCompleteBase], {
4 | // -- Initializer ----------------------------------------------------------
5 | initializer: function () {
6 | this._bindUIACBase();
7 | this._syncUIACBase();
8 | },
9 | getDisplayName: function(name) {
10 |
11 | Y.each(Y.YUIDoc.meta.allModules, function(i) {
12 | if (i.name === name && i.displayName) {
13 | name = i.displayName;
14 | }
15 | });
16 |
17 | return name;
18 | }
19 |
20 | }, {
21 | // -- Attributes -----------------------------------------------------------
22 | ATTRS: {
23 | resultHighlighter: {
24 | value: 'phraseMatch'
25 | },
26 |
27 | // May be set to "classes" or "modules".
28 | queryType: {
29 | value: 'classes'
30 | },
31 |
32 | source: {
33 | valueFn: function() {
34 | var self = this;
35 | return function(q) {
36 | var data = Y.YUIDoc.meta[self.get('queryType')],
37 | out = [];
38 | Y.each(data, function(v) {
39 | if (v.toLowerCase().indexOf(q.toLowerCase()) > -1) {
40 | out.push(v);
41 | }
42 | });
43 | return out;
44 | };
45 | }
46 | }
47 | }
48 | });
49 |
50 | }, '3.4.0', {requires: [
51 | 'autocomplete-base', 'autocomplete-highlighters', 'autocomplete-sources'
52 | ]});
53 |
--------------------------------------------------------------------------------
/docs/reference/assets/js/yui-prettify.js:
--------------------------------------------------------------------------------
1 | YUI().use('node', function(Y) {
2 | var code = Y.all('.prettyprint.linenums');
3 | if (code.size()) {
4 | code.each(function(c) {
5 | var lis = c.all('ol li'),
6 | l = 1;
7 | lis.each(function(n) {
8 | n.prepend('');
9 | l++;
10 | });
11 | });
12 | var h = location.hash;
13 | location.hash = '';
14 | h = h.replace('LINE_', 'LINENUM_');
15 | location.hash = h;
16 | }
17 | });
18 |
--------------------------------------------------------------------------------
/docs/reference/assets/vendor/prettify/prettify-min.css:
--------------------------------------------------------------------------------
1 | .pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee}
--------------------------------------------------------------------------------
/docs/wiki-assets/before-after.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/wiki-assets/before-after.png
--------------------------------------------------------------------------------
/docs/wiki-assets/browser-tests.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/wiki-assets/browser-tests.png
--------------------------------------------------------------------------------
/docs/wiki-assets/file-architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/wiki-assets/file-architecture.png
--------------------------------------------------------------------------------
/docs/wiki-assets/mocha-chrome-tests.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/docs/wiki-assets/mocha-chrome-tests.png
--------------------------------------------------------------------------------
/examples/Compressor/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/DelayNoiseEnvelope/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/DelayNoiseEnvelope/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: p5.Delay w/ p5.Noise, p5.Envelope & p5.Amplitude
3 | *
4 | * Click the mouse to hear the p5.Delay process a Noise Envelope.
5 | *
6 | * MouseX controls the p5.Delay Filter Frequency.
7 | * MouseY controls both the p5.Delay Time and Resonance.
8 | */
9 |
10 | var noise, env, analyzer, delay;
11 |
12 | function setup() {
13 | createCanvas(710, 400);
14 | noise = new p5.Noise('white'); // other types include 'brown' and 'pink'
15 |
16 | // Turn down because we'll control .amp with a p5.Envelope
17 | noise.amp(0);
18 |
19 | noise.start();
20 | noise.disconnect(); // so we will only hear the p5.Delay effect
21 |
22 | delay = new p5.Delay();
23 | delay.process(noise, .12, .7, 2300); // tell delay to process noise
24 |
25 | // the Envelope ADSR: attackTime, decayTime, sustainLevel, releaseTime
26 | env = new p5.Envelope();
27 | env.setADSR(0.01, 0.2, 0.2, 0.1);
28 | env.setRange(1, 0);
29 |
30 | // p5.Amplitude will analyze all sound in the sketch
31 | analyzer = new p5.Amplitude();
32 | }
33 |
34 | function draw() {
35 | background(0);
36 |
37 | // get volume reading from the p5.Amplitude analyzer
38 | var level = analyzer.getLevel();
39 | // then use level to draw a green rectangle
40 | var levelHeight = map(level, 0, .4, 0, height);
41 | fill(100,250,100);
42 | rect(0, height, width, - levelHeight);
43 |
44 | // map mouseX and mouseY to p5.Delay parameters
45 | var filterFreq = map(mouseX, 0, width, 60, 15000);
46 | filterFreq = constrain(filterFreq, 60, 15000);
47 | var filterRes = map(mouseY, 0, height, 3, 0.01);
48 | filterRes = constrain(filterRes, 0.01, 3);
49 | delay.filter(filterFreq, filterRes);
50 | var delTime = map(mouseY, 0, height, .2, .01);
51 | delTime = constrain(delTime, .01, .2);
52 | delay.delayTime(delTime);
53 | }
54 |
55 | function mousePressed() {
56 | env.play(noise, 0, 0.1, 0);
57 | }
--------------------------------------------------------------------------------
/examples/FFT_freqRange/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/FFT_freqRange/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Display the average amount of energy (amplitude) across a range
3 | * of frequencies using the p5.FFT class and its methods analyze()
4 | * and getEnergy().
5 | *
6 | * This example divides the frequency spectrum into eight bands.
7 | */
8 |
9 | var soundFile;
10 | var fft;
11 |
12 | var description = 'loading';
13 | var p;
14 |
15 | function preload() {
16 | soundFormats('mp3', 'ogg');
17 | soundFile = loadSound('../files/beat');
18 | }
19 |
20 | function setup() {
21 | createCanvas(1024, 400);
22 | fill(255, 40, 255);
23 | noStroke();
24 | textAlign(CENTER);
25 |
26 | fft = new p5.FFT();
27 |
28 | p = createP(description);
29 | var p2 = createP('Description: Using getEnergy(low, high) to measure amplitude within a range of frequencies.');
30 | }
31 |
32 | function draw() {
33 | background(30,20,30);
34 | updateDescription();
35 |
36 | fft.analyze(); // analyze before calling fft.getEnergy()
37 |
38 | // Generate 8 bars to represent 8 different frequency ranges
39 | for (var i = 0; i < 8; i++){
40 | noStroke();
41 | fill((i*30) % 100 + 50, 195, (i*25 + 50) % 255 )
42 |
43 | // Each bar has a unique frequency range
44 | var centerFreq = (pow(2,i)*125)/2;
45 | var loFreq = (pow(2,i-1)*125)/2 + centerFreq/4;
46 | var hiFreq = (centerFreq + centerFreq/2);
47 |
48 | // get the average value in a frequency range
49 | var freqValue = fft.getEnergy(loFreq, hiFreq - 1);
50 |
51 | // Rectangle height represents the average value of this frequency range
52 | var h = -height + map(freqValue, 0, 255, height, 0);
53 | rect((i+1)*width/8 - width/8, height, width/8, h);
54 |
55 | fill(255);
56 | text( loFreq.toFixed(0) +' Hz - ' + hiFreq.toFixed(0)+' Hz', (i+1)*width/8 - width/8/2, 30);
57 | }
58 | }
59 |
60 | function keyPressed() {
61 | if (soundFile.isPlaying()){
62 | soundFile.pause();
63 | } else {
64 | soundFile.loop();
65 | }
66 | }
67 |
68 | // Change description text if the song is loading, playing or paused
69 | function updateDescription() {
70 | if (soundFile.isPaused()) {
71 | description = 'Paused...';
72 | p.html(description);
73 | }
74 | else if (soundFile.isPlaying()){
75 | description = 'Playing!';
76 | p.html(description);
77 | }
78 | else {
79 | for (var i = 0; i < frameCount%3; i++ ) {
80 |
81 | // add periods to loading to create a fun loading bar effect
82 | if (frameCount%4 == 0){
83 | description += '.';
84 | }
85 | if (frameCount%25 == 0) {
86 | description = 'loading';
87 |
88 | }
89 | }
90 | p.html(description);
91 | }
92 | }
--------------------------------------------------------------------------------
/examples/FFT_frequency_spectrum/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/FFT_linAverages/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/FFT_logAverages/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/FFT_scaleNeighbors/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/FFT_scaleNeighbors/sketch.js:
--------------------------------------------------------------------------------
1 | var source, fft;
2 |
3 | function setup() {
4 | createCanvas(windowWidth, windowHeight);
5 | noFill();
6 |
7 | source = new p5.AudioIn();
8 | source.start();
9 |
10 | fft = new p5.FFT(0.8, 1024);
11 | fft.setInput(source);
12 | }
13 |
14 | function draw() {
15 | background(220);
16 | var spectrum = fft.analyze();
17 | var newBuffer = [];
18 |
19 | if (source.freq) {
20 | source.freq(map(mouseX, 0, width, 100, 300));
21 | }
22 |
23 | var quarterSpectrum = spectrum.length/2;
24 |
25 | beginShape();
26 | for (var i = 0; i < quarterSpectrum; i++) {
27 | var point = smoothPoint(spectrum, i);
28 | newBuffer.push(point);
29 | var x = map(i, 0, quarterSpectrum, 0, width);
30 | var y = map(point, 0, 255, height, 0);
31 | curveVertex(x, y);
32 | }
33 | endShape();
34 | }
35 |
36 |
37 |
38 | // average each point with its neighbors
39 | function smoothPoint(spectrum, index) {
40 | var neighbors = 20;
41 |
42 | var val = 0;
43 |
44 | for (var i = index; i < (index+neighbors); i++) {
45 | val += spectrum[i];
46 | }
47 |
48 | // TO DO: scale value logarithmically
49 | // val *= logScale(index, spectrum.length);
50 |
51 | return val/neighbors;
52 | }
53 |
54 |
55 | /**
56 | * Given an index and the total number of entries, return the
57 | * log-scaled value.
58 | *
59 | * https://github.com/borismus/spectrograph/blob/master/g-spectrograph.js
60 | * MIT license
61 | */
62 | function logScale(index, total, opt_base) {
63 | var base = opt_base || 2;
64 | var logmax = logBase(total + 1, base);
65 | var exp = logmax * index / total;
66 | return Math.round(Math.pow(base, exp) - 1);
67 | }
68 |
69 | function logBase(val, base) {
70 | return Math.log(val) / Math.log(base);
71 | }
--------------------------------------------------------------------------------
/examples/FFT_scaleOneThirdOctave/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/Filter_BandPass/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/examples/Filter_BandPass/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: Apply a p5.BandPass filter to white noise.
3 | * Visualize the sound with FFT.
4 | * Map mouseX to the bandpass frequency
5 |  * and mouseY to the resonance/width of the BandPass filter
6 | */
7 |
8 | var description = 'loading';
9 | var p;
10 | var noise;
11 | var fft;
12 | var filter, filterFreq, filterWidth;
13 |
14 | function setup() {
15 | createCanvas(710, 256);
16 | fill(255, 40, 255);
17 |
18 | filter = new p5.BandPass();
19 |
20 | noise = new p5.Noise();
21 |
22 | noise.disconnect(); // Disconnect noise from main output...
23 | filter.process(noise); // ...and connect to filter so we'll only hear BandPass.
24 | noise.start();
25 |
26 | fft = new p5.FFT();
27 |
28 | // update description text
29 | p = createP(description);
30 | var p2 = createP('Draw the array returned by FFT.analyze( ). This represents the frequency spectrum, from lowest to highest frequencies.');
31 | }
32 |
33 | function draw() {
34 | background(30);
35 |
36 | // Map mouseX to a bandpass freq from the FFT spectrum range: 10Hz - 22050Hz
37 | filterFreq = map (mouseX, 0, width, 10, 22050);
38 | // Map mouseY to resonance/width
39 | filterWidth = map(mouseY, 0, height, 0, 90);
40 | // set filter parameters
41 | filter.set(filterFreq, filterWidth);
42 |
43 | // Draw every value in the FFT spectrum analysis where
44 | // x = lowest (10Hz) to highest (22050Hz) frequencies,
45 | // h = energy / amplitude at that frequency
46 | var spectrum = fft.analyze();
47 | noStroke();
48 | for (var i = 0; i< spectrum.length; i++){
49 | var x = map(i, 0, spectrum.length, 0, width);
50 | var h = -height + map(spectrum[i], 0, 255, height, 0);
51 | rect(x, height, width/spectrum.length, h) ;
52 | }
53 |
54 | updateDescription();
55 | }
56 |
57 | // display current Filter params
58 | function updateDescription() {
59 | description = 'Playing! Filter Frequency = ' + filterFreq + ' Filter Width = ' + filterWidth;
60 | p.html(description);
61 | }
62 |
--------------------------------------------------------------------------------
/examples/Filter_LowPass/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/examples/Filter_LowPass/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: Apply a p5.LowPass filter to a p5.SoundFile.
3 | * Visualize the sound with FFT.
4 |  * Map mouseX to the filter's cutoff frequency
5 |  * and mouseY to the filter's resonance/width
6 | */
7 |
8 | var soundFile;
9 | var fft;
10 |
11 | var description = 'loading';
12 | var p;
13 |
14 | var filter, filterFreq, filterRes;
15 |
16 | function preload() {
17 | soundFormats('mp3', 'ogg');
18 | soundFile = loadSound('../files/beat');
19 | }
20 |
21 | function setup() {
22 | createCanvas(710, 256);
23 | fill(255, 40, 255);
24 |
25 | // loop the sound file
26 | soundFile.loop();
27 |
28 | filter = new p5.LowPass();
29 |
30 | // Disconnect soundFile from main output.
31 | // Then, connect it to the filter, so that we only hear the filtered sound
32 | soundFile.disconnect();
33 | soundFile.connect(filter);
34 |
35 | fft = new p5.FFT();
36 |
37 | // update description text
38 | p = createP(description);
39 | var p2 = createP('Draw the array returned by FFT.analyze( ). This represents the frequency spectrum, from lowest to highest frequencies.');
40 | }
41 |
42 | function draw() {
43 | background(30);
44 |
45 | // Map mouseX to the cutoff frequency for our lowpass filter
46 | filterFreq = map (mouseX, 0, width, 10, 22050);
47 | // Map mouseY to resonance/width
48 | filterRes = map(mouseY, 0, height, 15, 5);
49 | // set filter parameters
50 | filter.set(filterFreq, filterRes);
51 |
52 | // Draw every value in the FFT spectrum analysis where
53 | // x = lowest (10Hz) to highest (22050Hz) frequencies,
54 | // h = energy / amplitude at that frequency
55 | var spectrum = fft.analyze();
56 | noStroke();
57 | for (var i = 0; i< spectrum.length; i++){
58 | var x = map(i, 0, spectrum.length, 0, width);
59 | var h = -height + map(spectrum[i], 0, 255, height, 0);
60 | rect(x, height, width/spectrum.length, h) ;
61 | }
62 |
63 | updateDescription();
64 | }
65 |
66 |
67 | // Change description text if the song is loading, playing or paused
68 | function updateDescription() {
69 | description = 'Filter Frequency = ' + filterFreq + ' Filter Res = ' + filterRes;
70 | p.html(description);
71 | }
72 |
--------------------------------------------------------------------------------
/examples/Reverb_basic/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/Reverb_basic/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: Reverb
3 | */
4 |
5 | var soundFile, reverb;
6 |
7 | function preload() {
8 | soundFormats('mp3', 'ogg');
9 | soundFile = loadSound('../files/Damscray_-_Dancing_Tiger_02');
10 |
11 | // disconnect the default connection
12 | // so that we only hear the sound via the reverb.process
13 | soundFile.disconnect();
14 | }
15 |
16 | function setup() {
17 | createCanvas(720,100);
18 | background(0);
19 |
20 | reverb = new p5.Reverb();
21 |
22 | // connects soundFile to reverb with a
23 | // reverbTime of 6 seconds, decayRate of 0.2%
24 | reverb.process(soundFile, 6, 0.2);
25 |
26 | reverb.amp(3); // turn it up!
27 | }
28 |
29 | function mousePressed() {
30 | soundFile.play();
31 | }
--------------------------------------------------------------------------------
/examples/Reverb_convolve/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/Reverb_convolve/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: Convolution Reverb
3 | *
4 | * The p5.Convolver can recreate the sound of actual spaces using convolution.
5 | *
6 | * Toggle between five different buffer sources
7 | *
8 | * Convolution samples Creative Commons BY recordinghopkins, via freesound.org
9 | * https://www.freesound.org/people/recordinghopkins/
10 | */
11 |
12 | var sound, cVerb;
13 | var currentIR = 0;
14 | var p;
15 |
16 | function preload() {
17 | // we have included both MP3 and OGG versions of all the impulses/sounds
18 | soundFormats('ogg', 'mp3');
19 |
20 | // create a p5.Convolver
21 | cVerb = createConvolver('../files/bx-spring');
22 |
23 | // add Impulse Responses to cVerb.impulses array, in addition to bx-spring
24 | cVerb.addImpulse('../files/small-plate');
25 | cVerb.addImpulse('../files/drum');
26 | cVerb.addImpulse('../files/concrete-tunnel');
27 |
28 | // load a sound that will be processed by the p5.Convolver (convolution reverb)
29 | sound = loadSound('../files/Damscray_DancingTiger');
30 | }
31 |
32 | function setup() {
33 | // disconnect from main output...
34 | sound.disconnect();
35 | // ... and process with cVerb so that we only hear the reverb
36 | cVerb.process(sound);
37 |
38 | createP('Click to play a sound and change the impulse');
39 | p = createP('');
40 | }
41 |
42 | function mousePressed() {
43 |
44 | // cycle through the array of cVerb.impulses
45 | currentIR++;
46 | if (currentIR >= cVerb.impulses.length) {
47 | currentIR = 0;
48 | }
49 | cVerb.toggleImpulse(currentIR);
50 |
51 | // play the sound through the impulse
52 | sound.play();
53 |
54 | // display the current Impulse Response name (the filepath)
55 | p.html('Convolution Impulse Response: ' + cVerb.impulses[currentIR].name);
56 | }
57 |
--------------------------------------------------------------------------------
/examples/Reverb_convolve_FFT/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/Reverb_convolve_FFT/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: Convolution Reverb w/ FFT
3 | *
4 | * The p5.Convolver can recreate the sound of actual spaces using convolution.
5 | *
6 | * Toggle between different impulses with the mouse. Press any key to hear the
7 | * original impulse recording.
8 | *
9 | * Convolution samples Creative Commons BY recordinghopkins, via freesound.org
10 | * https://www.freesound.org/people/recordinghopkins/
11 | */
12 | var sound, env, cVerb, fft;
13 | var currentIR = 0;
14 | var p;
15 | var rawImpulse;
16 |
17 | function preload() {
18 |
19 | // we have included both MP3 and OGG versions of all the impulses/sounds
20 | soundFormats('ogg', 'mp3');
21 |
22 | // create a p5.Convolver
23 | cVerb = createConvolver('../files/bx-spring');
24 |
25 | // add Impulse Responses to cVerb.impulses array, in addition to bx-spring
26 | cVerb.addImpulse('../files/small-plate');
27 | cVerb.addImpulse('../files/drum');
28 | cVerb.addImpulse('../files/beatbox');
29 | cVerb.addImpulse('../files/concrete-tunnel');
30 |
31 | // load a sound that will be processed by the p5.Convolver (convolution reverb)
32 | sound = loadSound('../files/Damscray_DancingTiger');
33 | }
34 |
35 | function setup() {
36 | createCanvas(710, 400);
37 | rawImpulse = loadSound('../files/' + cVerb.impulses[currentIR].name);
38 |
39 | // disconnect from main output...
40 | sound.disconnect();
41 | // ... and process with cVerb
42 | // so that we only hear the reverb
43 | cVerb.process(sound);
44 |
45 | createP('Click to play a sound and change the impulse');
46 | createP('Press any key to play the impulse source as a SoundFile');
47 | p = createP('');
48 |
49 | fft = new p5.FFT();
50 | }
51 |
52 | function draw() {
53 | background(30);
54 | fill(0,255,40);
55 |
56 | var spectrum = fft.analyze();
57 |
58 | // Draw every value in the frequencySpectrum array as a rectangle
59 | noStroke();
60 | for (var i = 0; i< spectrum.length; i++){
61 | var x = map(i, 0, spectrum.length, 0, width);
62 | var h = -height + map(spectrum[i], 0, 255, height, 0);
63 | rect(x, height, width/spectrum.length, h) ;
64 | }
65 | }
66 |
67 | function mousePressed() {
68 |
69 | // cycle through the array of cVerb.impulses
70 | currentIR++;
71 | if (currentIR >= cVerb.impulses.length) {
72 | currentIR = 0;
73 | }
74 | cVerb.toggleImpulse(currentIR);
75 |
76 | // play the sound through the impulse
77 | sound.play();
78 |
79 | // display the current Impulse Response name (the filepath)
80 | p.html('Convolution Impulse Response: ' + cVerb.impulses[currentIR].name);
81 |
82 | rawImpulse.setPath('../files/' + cVerb.impulses[currentIR].name);
83 | }
84 |
85 | function keyPressed() {
86 | rawImpulse.play();
87 | }
88 |
--------------------------------------------------------------------------------
/examples/_monosynth_basic/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/_monosynth_basic/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Play a random note
3 | * every time you press a key
4 | */
5 |
6 | var monoSynth;
7 |
8 | function setup() {
9 | monoSynth = new p5.MonoSynth();
10 |
11 | createCanvas(400, 400);
12 | text('press to play a random note at a random velocity', 20, 20);
13 | }
14 |
15 | function mousePressed() {
16 | // pick a random midi note and convert it to a frequency
17 | var freq = midiToFreq(round( random(50,72) ));
18 | monoSynth.triggerAttack(freq, random() );
19 | }
20 |
21 | function mouseReleased() {
22 | monoSynth.triggerRelease();
23 | }
24 |
--------------------------------------------------------------------------------
/examples/_polyphonic_synth/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/examples/_sound_loop/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/_sound_loop/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Create a sequence using a Part with callbacks that play back soundfiles.
3 | * The callback includes parameters (the value at that position in the Phrase array)
4 | * as well as time, which should be used to schedule playback with precision.
5 | *
6 | */
7 |
8 | var click, beatbox ;
9 |
10 | var looper1, looper2;
11 |
12 |
13 | function preload() {
14 | soundFormats('mp3', 'ogg');
15 | click = loadSound('../files/drum');
16 | beatbox = loadSound('../files/beatbox');
17 |
18 | }
19 |
20 | function setup() {
21 |
22 | //Hemiola! 2 loops, playing sounds in a 4 over 3 pattern
23 | //gradually increase the tempo of both loops
24 | //
25 | //the looper's callback is passed the timeFromNow
26 | //this value should be used as a reference point from
27 | //which to schedule sounds
28 |
29 | looper1 = new p5.SoundLoop(function(timeFromNow){
30 | click.play(timeFromNow);
31 | looper1.bpm += 0.5;
32 | }, "8n");
33 |
34 | looper2 = new p5.SoundLoop(function(timeFromNow){
35 | beatbox.play(timeFromNow);
36 | looper2.bpm = looper1.bpm;
37 | }, "12n");
38 |
39 | //start the loops together
40 | looper1.syncedStart(looper2);
41 | }
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/examples/amplitude_analysis/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/amplitude_analysis/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * DEMO: Use p5.Amplitude (volume) to change the size of an ellipse
3 | */
4 |
5 | var soundFile;
6 | var amplitude;
7 |
8 | // description text
9 | var description;
10 | var p1;
11 |
12 | var smoothing = .01;
13 | var smoothSlider, smoothLabel;
14 |
15 | function preload() {
16 | soundFile = loadSound(['../files/beat.mp3', '../files/beat.ogg']);
17 | }
18 |
19 | function setup() {
20 | createCanvas(400, 400);
21 | background(0);
22 | noStroke();
23 | fill(255);
24 |
25 | soundFile.loop();
26 |
27 | // create a new p5.Amplitude. Optionally, give it a 'smoothing' value between 0.0 and .999
28 | amplitude = new p5.Amplitude(smoothing);
29 |
30 | // instruction text
31 | description = 'Spacebar: pause/unpause the loop. Press "N" to toggle Normalize';
32 | p1 = createP(description);
33 |
34 | smoothSlider = createSlider(0.0, 99.9, smoothing*100);
35 | smoothLabel = createP('Smoothing: ' + smoothing);
36 | }
37 |
38 | function draw() {
39 | background(0);
40 |
41 | // get volume from the amplitude process
42 | var volume = amplitude.getLevel();
43 |
44 | // print the volume to the canvas. It is a float between 0 and 1.0.
45 | text('volume: ' + volume, 20, 20);
46 |
47 | // Change size based on volume. First, map to useful values.
48 | var diameter = map(volume, 0, 1.0, 25, 400);
49 | ellipse(width/2, height/2, diameter, diameter);
50 |
51 | // instruction text
52 | description = 'Spacebar: pause/unpause the loop. Press "N" to toggle Normalize. Normalized is ' + amplitude.normalize;
53 | p1.html(description);
54 |
55 | // change smoothing
56 | smoothing = smoothSlider.value()/100;
57 | smoothLabel.html('Smoothing: ' + smoothing);
58 | amplitude.smooth(smoothing);
59 | }
60 |
61 | // on key pressed...
62 | function keyPressed(e) {
63 |
64 | // spacebar pauses
65 | if (e.keyCode == 32) {
66 | if (soundFile.isPlaying()) {
67 | soundFile.pause();
68 | } else {
69 | soundFile.play();
70 | }
71 | }
72 |
73 | // 'n' keypress toggles normalize on/off
74 | if (e.keyCode == 78) {
75 | amplitude.toggleNormalize();
76 | }
77 |
78 | }
79 |
80 | function mouseClicked() {
81 | if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
82 | if (getOutputVolume() == 0) {
83 | outputVolume(1, 0.1); // output is silent: fade it back up
84 | } else {
85 | outputVolume(0, 1); // fade the output down to silence over 1 second
86 | }
87 | }
88 | }
89 |
90 |
91 |
--------------------------------------------------------------------------------
/examples/array_of_notes/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/array_of_notes/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Basic example of playing back a static array of notes.
3 | *
4 | * Note that this implementation does not allow for starting/stopping
5 | * or dynamically modifying playback once it has started. For a more
6 | * versatile example of playback, see the SoundLoop version.
7 | */
8 |
9 | var synth;
10 | var songStarted = false;
11 | var song = [
12 | // Note pitch, velocity (between 0-1), start time (s), note duration (s)
13 | { pitch: 'E4', velocity: 1, time: 0, duration: 1 },
14 | { pitch: 'D4', velocity: 1, time: 1, duration: 1 },
15 | { pitch: 'C4', velocity: 1, time: 2, duration: 1 },
16 | { pitch: 'D4', velocity: 1, time: 3, duration: 1 },
17 | { pitch: 'E4', velocity: 1, time: 4, duration: 1 },
18 | { pitch: 'E4', velocity: 1, time: 5, duration: 1 },
19 | { pitch: 'E4', velocity: 1, time: 6, duration: 1 },
20 | // Rest indicated by offset in start time
21 | { pitch: 'D4', velocity: 1, time: 8, duration: 1 },
22 | { pitch: 'D4', velocity: 1, time: 9, duration: 1 },
23 | { pitch: 'E4', velocity: 1, time: 10, duration: 1 },
24 | { pitch: 'D4', velocity: 1, time: 11, duration: 1 },
25 | // Chord indicated by simultaneous note start times
26 | { pitch: 'C4', velocity: 1, time: 12, duration: 2 },
27 | { pitch: 'E4', velocity: 1, time: 12, duration: 2 },
28 | { pitch: 'G4', velocity: 1, time: 12, duration: 2 },
29 | ];
30 |
31 | function setup() {
32 | textAlign(CENTER, CENTER);
33 | synth = new p5.PolySynth();
34 | }
35 |
36 | function draw() {
37 | background(255);
38 | if (songStarted) {
39 | text('song started', width / 2, height / 2);
40 | } else {
41 | text('click to play song', width / 2, height / 2);
42 | }
43 | }
44 |
45 | function touchStarted() {
46 | if (!songStarted) { // Only play once
47 | for (var i = 0; i < song.length; i++) {
48 | var note = song[i];
49 | synth.play(note.pitch, note.velocity, note.time, note.duration);
50 | }
51 | songStarted = true;
52 | }
53 | }
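Because synth.play() takes its start time and duration in seconds, the song array above can be reused at other tempos by treating time and duration as beats and scaling them. A small sketch of that idea; bpm, secondsPerBeat and playSongAtTempo are illustrative names, not part of the example above.

// Illustrative tempo scaling: interpret 'time' and 'duration' as beats
// and convert them to seconds before scheduling with synth.play().
var bpm = 120;                  // hypothetical tempo
var secondsPerBeat = 60 / bpm;  // 0.5 s per beat at 120 bpm

function playSongAtTempo() {
  for (var i = 0; i < song.length; i++) {
    var note = song[i];
    synth.play(
      note.pitch,
      note.velocity,
      note.time * secondsPerBeat,     // start time, scaled to the tempo
      note.duration * secondsPerBeat  // duration, scaled to the tempo
    );
  }
}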
--------------------------------------------------------------------------------
/examples/array_of_notes_soundloop/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/audioIn_Multiple_Sources/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/examples/audioIn_Multiple_Sources/sketch.js:
--------------------------------------------------------------------------------
1 | /*
2 | This sketch shows how to use the .getSources function of p5.AudioIn().
3 | Calling getSources allows access to the available devices within a callback.
4 | This function is not compatible with IE, Safari, or Firefox. Firefox allows for
5 | user input access but provides a selection dialogue on request
6 | instead of allowing for enumeration of devices. For Chrome, an
7 | HTTPS connection is required to access device names.
8 |
9 | */
10 | var audioGrab;
11 | var numSources = 0;
12 | var fft = [];
13 | var audioGrabArray = [];
14 | var sourceNames = [];
15 |
16 | function setup() {
17 | createCanvas(512, 400);
18 | textSize(32);
19 | textAlign(LEFT, CENTER);
20 | //we will use a new p5.AudioIn to enumerate the
21 | //audio devices. This won't connect to any output.
22 | audioGrab = new p5.AudioIn();
23 | audioGrab.getSources(function(data){
24 | getSourcesCallback(data)
25 | });
26 | }
27 |
28 | function getSourcesCallback(sourceList) {
29 | numSources = sourceList.length;
30 | //creating an array of all the available sources
31 | for (var i = 0; i < numSources; i++) {
32 | audioGrabArray[i] = new p5.AudioIn();
33 | audioGrabArray[i].setSource(i);
34 | audioGrabArray[i].start();
35 |
36 | //from the FFT example
37 | fft[i] = new p5.FFT();
38 | fft[i].setInput(audioGrabArray[i]);
39 |
40 | //see if the browser is allowing us
41 | //to access the name of the device
42 | //otherwise we will ID the device with its
43 | //input array position
44 | if(sourceList[i].label) {
45 | sourceNames[i] = "device name: " + sourceList[i].label;
46 | } else {
47 | sourceNames[i] = "input array position: " + i;
48 | }
49 | }
50 | }
51 |
52 | function draw() {
53 | background(200);
54 | for (var i = 0; i < numSources; i++) {
55 | var yPos = ((i + 1) / numSources) * height;
56 | var spectrum = fft[i].analyze();
57 |
58 | stroke(0);
59 | line(0, ((i + 1) / numSources) * height, width, yPos);
60 | beginShape();
61 | vertex(0, ((i + 1) / numSources) * height);
62 |
63 | noStroke();
64 | fill(0, 255, 255);
65 | for (var j = 0; j < spectrum.length; j++) {
66 | vertex(j, map(spectrum[j], 0, 1023, yPos, 0));
67 | }
68 | endShape();
69 |
70 | //without using HTTPS, Chrome cannot give us device names (such as
71 | //'mic' or 'soundflower'). We can use this visualization to determine
72 | //which input array position number is the one we want.
73 | //The first two are typically "default" and "mic"
74 | //(default usually is the mic)
75 |
76 | fill(0);
77 | text(sourceNames[i], 10, yPos - 0.5 * height / numSources);
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/examples/autoCorrelation/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/autoCorrelation/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 |  * Auto Correlation multiplies each sample in a buffer by all
3 |  * of the other samples. This emphasizes the fundamental
4 |  * frequency. Auto Correlation is useful for pitch detection,
5 |  * as well as for visualization.
6 | *
7 | * This example is a Correlogram which is a plot
8 | * of the autocorrelations.
9 | *
10 | * Example by Jason Sigal and Golan Levin.
11 | */
12 |
13 | var source, fft;
14 | var bNormalize = true;
15 | var centerClip = false;
16 |
17 | function setup() {
18 | createCanvas(windowWidth, windowHeight);
19 | noFill();
20 |
21 | source = new p5.AudioIn();
22 | source.start();
23 |
24 | fft = new p5.FFT();
25 | fft.setInput(source);
26 | }
27 |
28 | function draw() {
29 | background(200);
30 |
31 | // array of values from -1 to 1
32 | var timeDomain = fft.waveform(2048, 'float32');
33 | var corrBuff = autoCorrelate(timeDomain);
34 |
35 | beginShape();
36 | for (var i = 0; i < corrBuff.length; i++) {
37 | var w = map(i, 0, corrBuff.length, 0, width);
38 | var h = map(corrBuff[i], -1, 1, height, 0);
39 | curveVertex(w, h);
40 | }
41 | endShape();
42 | }
43 |
44 |
45 | function autoCorrelate(buffer) {
46 | var newBuffer = [];
47 | var nSamples = buffer.length;
48 |
49 | var autocorrelation = [];
50 |
51 | // center clip removes any samples under 0.1
52 | if (centerClip) {
53 | var cutoff = 0.1;
54 | for (var i = 0; i < buffer.length; i++) {
55 | var val = buffer[i];
56 | buffer[i] = Math.abs(val) > cutoff ? val : 0;
57 | }
58 | }
59 |
60 | for (var lag = 0; lag < nSamples; lag++){
61 | var sum = 0;
62 | for (var index = 0; index < nSamples; index++){
63 | var indexLagged = index+lag;
64 | if (indexLagged < nSamples){
65 | var sound1 = buffer[index];
66 | var sound2 = buffer[indexLagged];
67 | var product = sound1 * sound2;
68 | sum += product;
69 | }
70 | }
71 |
72 | // average to a value between -1 and 1
73 | newBuffer[lag] = sum/nSamples;
74 | }
75 |
76 | if (bNormalize){
77 | var biggestVal = 0;
78 | for (var index = 0; index < nSamples; index++){
79 | if (abs(newBuffer[index]) > biggestVal){
80 | biggestVal = abs(newBuffer[index]);
81 | }
82 | }
83 | for (var index = 0; index < nSamples; index++){
84 | newBuffer[index] /= biggestVal;
85 | }
86 | }
87 |
88 | return newBuffer;
89 | }
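The first strong peak after lag 0 in the normalized correlogram corresponds to the period of the fundamental, so a rough pitch estimate falls out by converting that lag to Hz with the sample rate. A hedged sketch of that idea, built on the autoCorrelate() output above; the 0.9 peak threshold is an arbitrary illustrative value.

// Rough pitch estimate (illustrative): find the first strong peak after
// lag 0 in the normalized autocorrelation and convert the lag to Hz.
function estimatePitch(corrBuff) {
  var threshold = 0.9; // arbitrary: how strong a peak must be to count
  for (var lag = 1; lag < corrBuff.length - 1; lag++) {
    var isPeak = corrBuff[lag] > corrBuff[lag - 1] &&
                 corrBuff[lag] > corrBuff[lag + 1];
    if (isPeak && corrBuff[lag] > threshold) {
      return sampleRate() / lag; // period in samples -> frequency in Hz
    }
  }
  return 0; // no clear pitch found
}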
--------------------------------------------------------------------------------
/examples/bells_envelope_test/assets/LadyChapelStAlbansCathedral.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/bells_envelope_test/assets/LadyChapelStAlbansCathedral.wav
--------------------------------------------------------------------------------
/examples/bells_envelope_test/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | bells_envelope_test
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/examples/distortion/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 | click to trigger saw oscillator through heavy distortion
13 |
14 |
15 |
--------------------------------------------------------------------------------
/examples/distortion/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Trigger an oscillator processed through distortion.
3 | */
4 |
5 | var env; // this is the amplitude envelope
6 | var osc; // this oscillator will be affected by the distortion
7 | var distortion; // this is the waveshaper distortion effect
8 |
9 | var fft;
10 |
11 | function setup() {
12 | createCanvas(windowWidth, windowHeight);
13 | fft = new p5.FFT(0, 256);
14 |
15 |
16 | env = new p5.Envelope();
17 | env.setADSR(0.01, 0.2, 0.1, 0.3);
18 | env.setRange(1.0, 0.0);
19 |
20 | osc = new p5.SawOsc(); // connects to main output by default
21 | osc.start(0);
22 | osc.freq(220);
23 | osc.amp(env);
24 | osc.disconnect(); // Disconnect from output to process through distortion
25 |
26 | // Create a waveshaper distortion with 4x oversampling
27 | distortion = new p5.Distortion(1, '4x');
28 | osc.connect(distortion);
29 | }
30 |
31 | function draw() {
32 | var samples = fft.waveform();
33 | drawOscilloscope(samples);
34 | }
35 |
36 | function drawOscilloscope(samples) {
37 | var yTranslateScope = 50;
38 | var xTranslateScope = 50;
39 | var scopeWidth = width / 5;
40 | var scopeHeight = height / 4;
41 |
42 | fill(177, 177, 177);
43 | rect(xTranslateScope, yTranslateScope, scopeWidth, scopeHeight);
44 |
45 | stroke(0, 0, 0);
46 | strokeWeight(0.5);
47 |
48 | beginShape();
49 | for (var sampleIndex in samples) {
50 | var x = map(sampleIndex, 0, samples.length, 0, scopeWidth);
51 | var y = map(samples[sampleIndex], -1, 1, -scopeHeight / 2, scopeHeight / 2);
52 | vertex(x + xTranslateScope, y + scopeHeight/2 + yTranslateScope);
53 | }
54 | endShape();
55 | }
56 |
57 | function mousePressed() {
58 | env.triggerAttack();
59 | }
60 |
61 | function mouseReleased() {
62 | env.triggerRelease();
63 | }
64 |
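The distortion amount can also be changed after construction. The sketch below is an illustrative addition that assumes p5.Distortion's set(amount, oversample) method and reuses the distortion variable from the example above.

// Illustrative: vary the distortion amount (typically 0..1) with mouseX.
function mouseMoved() {
  var amount = constrain(map(mouseX, 0, width, 0, 1), 0, 1);
  distortion.set(amount, '4x');
}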
--------------------------------------------------------------------------------
/examples/envAmpFreq/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 | click to trigger amplitude and frequency envelopes
13 |
14 |
--------------------------------------------------------------------------------
/examples/envAmpFreq/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Control the level of an envelope
3 | */
4 |
5 | var env; // this is the env
6 | var osc; // this oscillator will modulate the amplitude of the carrier
7 | var freqEnv; // env for frequency
8 |
9 | function setup() {
10 | env = new p5.Envelope();
11 | env.setADSR(0.01, 0.2, 0.2, 0.3);
12 | env.setRange(0, 1);
13 |
14 | freqEnv = new p5.Envelope();
15 | freqEnv.setADSR(0.01, 0.2, 0.2, 0.3);
16 | freqEnv.setRange(300, 5000);
17 |
18 |
19 | osc = new p5.Oscillator(); // connects to main output by default
20 | osc.start(0);
21 | osc.freq(220);
22 | // osc.freq(env.scale(0,1,800,300));
23 | osc.freq(freqEnv);
24 | osc.amp(env);
25 | }
26 |
27 | function mousePressed() {
28 | env.triggerAttack();
29 | freqEnv.triggerAttack();
30 | }
31 |
32 | function mouseReleased() {
33 | env.triggerRelease();
34 | freqEnv.triggerRelease();
35 | }
36 |
--------------------------------------------------------------------------------
/examples/envExp/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/envelope/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/envelope/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapting Wilm Thoben's Envelope example from the Processing Handbook ex2
2 |
3 | /*
4 | This sketch shows how to use envelopes and oscillators. Envelopes are pre-defined amplitude
5 | distributions over time. The sound library provides an ADSR envelope, which stands for attack,
6 | decay, sustain, release. The amplitude rises, then decays to a sustain level, then decays slowly
7 | toward the release value.
8 |
9 | .
10 | . . _______
11 | . ---
12 | . ---
13 | A D S R
14 |
15 | */
16 |
17 | var triOsc;
18 | var env;
19 | var a;
20 |
21 | // Times and levels for the ADSR envelope
22 | var attackTime = 0.001;
23 | var attackLevel = 0.9;
24 | var decayTime = 0.3;
25 | var susPercent = 0.2;
26 | var sustainTime = 0.1;
27 | var releaseTime = 0.5;
28 | var releaseLevel = 0;
29 |
30 | var midiSequence = [ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72 ];
31 | var duration = 1000;
32 | // Set the note trigger
33 | var trigger;
34 |
35 | // An index to count up the notes
36 | var note = 0;
37 |
38 | function setup(){
39 | createCanvas(600, 400);
40 | fill(0, 255, 0);
41 |
42 | trigger = millis();
43 |
44 | triOsc = new p5.TriOsc();
45 | triOsc.amp(0);
46 | triOsc.start();
47 |
48 | env = new p5.Envelope();
49 | env.setADSR(attackTime, decayTime, susPercent, releaseTime);
50 | env.setRange(attackLevel, releaseLevel);
51 |
52 | a = new p5.Amplitude();
53 | }
54 |
55 | function draw(){
56 | var size = 10;
57 | background(20, 20, 20, 70);
58 | ellipse(map ( (trigger - millis()) % duration, 1000, 0, 0, width), map ( a.getLevel(), 0, .5, height-size, 0), size, size);
59 |
60 | // If the scheduled trigger time has been reached on the computer clock, and the
61 | // sequence of notes hasn't finished yet, the next note gets played.
62 | if ((millis() > trigger)){
63 | // midiToFreq transforms the MIDI value into a frequency in Hz which we use to control the triangle oscillator
64 | triOsc.freq(midiToFreq(midiSequence[note]));
65 |
66 | // The envelope gets triggered with the oscillator as input and the times and levels we defined earlier
67 | // play accepts an object to play, time from now, and a sustain time—how long to hold before the release.
68 | env.play(triOsc, 0, sustainTime);
69 |
70 | // Schedule the next trigger according to the predefined duration
71 | trigger = millis() + duration;
72 |
73 | // Advance by one note in the midiSequence;
74 | note++;
75 |
76 | // Loop the sequence, notice the jitter
77 | if(note == 12) {
78 | note = 0;
79 | }
80 | }
81 | }
--------------------------------------------------------------------------------
/examples/envelopeMultipleSources/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/envelopeOnOff/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/envelopeOnOff/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: Create an Envelope (p5.Envelope) to control oscillator amplitude.
3 | * Trigger the Attack portion of the envelope when the mouse is clicked.
4 | * Trigger the Release when the mouse is released.
5 | */
6 |
7 | var osc;
8 | var env;
9 | var a;
10 |
11 | // Times and levels for the ADSR envelope
12 | var attackTime = 0.001;
13 | var attackLevel = 0.6;
14 | var decayTime = 0.1;
15 | var susPercent = 0.2;
16 | var releaseTime = 0.5;
17 | var releaseLevel = 0;
18 |
19 | var duration = 1000;
20 | // Set the note trigger
21 | var trigger;
22 |
23 | // An index to count up the notes
24 | var note = 0;
25 |
26 |
27 | function setup(){
28 | createCanvas(600, 300);
29 | background(20);
30 | fill(0,255,0);
31 |
32 | trigger = millis();
33 |
34 | osc = new p5.SinOsc();
35 | osc.freq(220);
36 | osc.start();
37 |
38 | env = new p5.Envelope();
39 | env.setADSR(attackTime, decayTime, susPercent, releaseTime);
40 | env.setRange(attackLevel, releaseLevel);
41 |
42 | osc.amp(env);
43 | createP('click mouse to triggerAttack, release mouse to triggerRelease');
44 |
45 | a = new p5.Amplitude();
46 | }
47 |
48 | function draw(){
49 | var size = 10;
50 | background(20, 20, 20, 70);
51 | ellipse( map ( (trigger - millis()) % duration, 1000, 0, 0, width) % width, map ( a.getLevel(), 0, .5, height-size, 0), size, size);
52 | }
53 |
54 | function mousePressed(){
55 | env.triggerAttack();
56 | trigger = millis() + duration;
57 | }
58 |
59 | function mouseReleased(){
60 | env.triggerRelease();
61 | }
--------------------------------------------------------------------------------
/examples/envelopeRamp/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | envelope_ramp
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/examples/envelopeRamp/sketch.js:
--------------------------------------------------------------------------------
1 | var osc, envelope, fft;
2 | var myPhraseAttack, myPhraseRelease, myPart;
3 | var atPattern = [1, 0,0,0];
4 | var scaleArray = [60, 62, 64, 65, 67, 69, 71, 72];
5 | var note = 0;
6 | var startPoint = 0;
7 | var endPoint = 0;
8 | var numWaveforms = 50;
9 |
10 | function setup() {
11 | createCanvas(710, 200);
12 | osc = new p5.SinOsc();
13 | envelope = new p5.Envelope();
14 | envelope.setADSR(.005,0.02);
15 | osc.amp(0.);
16 | osc.start();
17 | myPhraseAttack = new p5.Phrase('testerAttack', makeSoundAttack, atPattern);
18 | myPart = new p5.Part();
19 | myPart.addPhrase(myPhraseAttack);
20 | myPart.setBPM(240);
21 | myPart.loop();
22 | myPart.start();
23 | fft = new p5.FFT();
24 | endPoint = width / numWaveforms;
25 | noFill();
26 | background(20);
27 | }
28 |
29 | function draw() {
30 |
31 |
32 | var waveform = fft.waveform(); // analyze the waveform
33 | beginShape();
34 | stroke(255, 255, 0);
35 | for (var i = 0; i < waveform.length; i++){
36 | var x = map(i, 0, waveform.length, startPoint, endPoint);
37 | var y = map(waveform[i], -1, 1, height, 0);
38 | vertex(x, y);
39 | }
40 | endShape();
41 | startPoint = endPoint + 1;
42 | endPoint += (width / numWaveforms);
43 | if (endPoint > width)
44 | {
45 | background(20);
46 | startPoint = 0;
47 | endPoint = (width / numWaveforms);
48 | }
49 | }
50 |
51 |
52 | function makeSoundAttack(time, playbackRate)
53 | {
54 | var midiValue = scaleArray[note];
55 | var freqValue = midiToFreq(midiValue);
56 | osc.freq(freqValue * 2, .001, time);
57 | envelope.ramp(osc, time, 1, 0);
58 | note = (note + 1) % scaleArray.length;
59 | setTimeout(redrawWaveform, time * 1000.0);
60 | }
61 |
62 |
63 | function redrawWaveform()
64 | {
65 | background(20);
66 | startPoint = 0;
67 | endPoint = (width / numWaveforms);
68 | }
69 |
--------------------------------------------------------------------------------
/examples/envelope_designer/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/envelope_exponential_play/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | envelope_exponential_play
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/examples/envelope_exponential_play/sketch.js:
--------------------------------------------------------------------------------
1 | var osc, envelope, fft;
2 | var myPhraseAttack, myPhraseRelease, myPart;
3 | var atPattern = [1, 0,0,0];
4 | var relPattern = [0, 0,1,0];
5 | var scaleArray = [60, 62, 64, 65, 67, 69, 71, 72];
6 | var note = 0;
7 | var startPoint = 0;
8 | var endPoint = 0;
9 | var numWaveforms = 50;
10 |
11 | function setup() {
12 | createCanvas(710, 200);
13 | osc = new p5.SinOsc();
14 | envelope = new p5.Envelope(0.1, 1.0, 0.1, .5, .1, .5, .1, 0.0); //
15 | envelope.setExp(true);
16 | //envelope.setADSR(0.1, 1.0, 1.0, 0.2, 5.0, 0.0); //AT, AL, DT, SL, RT, RL
17 | osc.amp(0.);
18 | osc.start();
19 | myPhraseAttack = new p5.Phrase('testerAttack', makeSoundAttack, atPattern);
20 | myPhraseRelease = new p5.Phrase('testerRelease', makeSoundRelease, relPattern);
21 | myPart = new p5.Part();
22 | myPart.addPhrase(myPhraseAttack);
23 | myPart.addPhrase(myPhraseRelease); // comment this back in to check release
24 | myPart.setBPM(100);
25 | myPart.loop();
26 | myPart.start();
27 | fft = new p5.FFT();
28 | endPoint = width / numWaveforms;
29 | noFill();
30 | background(20);
31 | }
32 |
33 | function draw() {
34 |
35 |
36 | var waveform = fft.waveform(); // analyze the waveform
37 | beginShape();
38 | stroke(255, 255, 0);
39 | for (var i = 0; i < waveform.length; i++){
40 | var x = map(i, 0, waveform.length, startPoint, endPoint);
41 | var y = map(waveform[i], -1, 1, height, 0);
42 | vertex(x, y);
43 | }
44 | endShape();
45 | startPoint = endPoint + 1;
46 | endPoint += (width / numWaveforms);
47 | if (endPoint > width)
48 | {
49 | background(20);
50 | startPoint = 0;
51 | endPoint = (width / numWaveforms);
52 | }
53 | }
54 |
55 |
56 | function makeSoundAttack(time, playbackRate)
57 | {
58 | var midiValue = scaleArray[note];
59 | var freqValue = midiToFreq(midiValue);
60 | osc.freq(freqValue * 2, .01, time); // comment this back in to check pitch changes
61 | envelope.play(osc, time);
62 | //envelope.triggerAttack(osc, time);
63 | note = (note + 1) % scaleArray.length;
64 | setTimeout(redrawWaveform, time * 1000.0);
65 |
66 | }
67 |
68 | function makeSoundRelease(time, playbackRate)
69 | {
70 | //envelope.triggerRelease(osc, time); // comment this back in to check release
71 | }
72 |
73 | function redrawWaveform()
74 | {
75 | background(20);
76 | startPoint = 0;
77 | endPoint = (width / numWaveforms);
78 | }
79 |
--------------------------------------------------------------------------------
/examples/envelope_exponential_trig_rel/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | envelope_exponential_trig_rel
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/examples/envelope_exponential_trig_rel/sketch.js:
--------------------------------------------------------------------------------
1 | var osc, envelope, fft;
2 | var myPhraseAttack, myPhraseRelease, myPart;
3 | var atPattern = [1, 0,0,0];
4 | var relPattern = [0, 0,1,0];
5 | var scaleArray = [60, 62, 64, 65, 67, 69, 71, 72];
6 | var note = 0;
7 | var startPoint = 0;
8 | var endPoint = 0;
9 | var numWaveforms = 50;
10 |
11 | function setup() {
12 | createCanvas(710, 200);
13 | osc = new p5.SinOsc();
14 | envelope = new p5.Envelope();
15 | envelope.setExp(true);
16 | envelope.setADSR(0.1, 1.0, .1, 0.2, .01, 0.0); //AT, AL, DT, SL, RT, RL
17 | osc.amp(0.);
18 | osc.start();
19 | myPhraseAttack = new p5.Phrase('testerAttack', makeSoundAttack, atPattern);
20 | myPhraseRelease = new p5.Phrase('testerRelease', makeSoundRelease, relPattern);
21 | myPart = new p5.Part();
22 | myPart.addPhrase(myPhraseAttack);
23 | myPart.addPhrase(myPhraseRelease); // comment this back in to check release
24 | myPart.setBPM(100);
25 | myPart.loop();
26 | myPart.start();
27 | fft = new p5.FFT();
28 | endPoint = width / numWaveforms;
29 | noFill();
30 | background(20);
31 | }
32 |
33 | function draw() {
34 |
35 |
36 | var waveform = fft.waveform(); // analyze the waveform
37 | beginShape();
38 | stroke(255, 255, 0);
39 | for (var i = 0; i < waveform.length; i++){
40 | var x = map(i, 0, waveform.length, startPoint, endPoint);
41 | var y = map(waveform[i], -1, 1, height, 0);
42 | vertex(x, y);
43 | }
44 | endShape();
45 | startPoint = endPoint + 1;
46 | endPoint += (width / numWaveforms);
47 | if (endPoint > width)
48 | {
49 | background(20);
50 | startPoint = 0;
51 | endPoint = (width / numWaveforms);
52 | }
53 | }
54 |
55 |
56 | function makeSoundAttack(time, playbackRate)
57 | {
58 | var midiValue = scaleArray[note];
59 | var freqValue = midiToFreq(midiValue);
60 | osc.freq(freqValue * 2, .01, time); // comment this back in to check pitch changes
61 | envelope.triggerAttack(osc, time);
62 | note = (note + 1) % scaleArray.length;
63 | setTimeout(redrawWaveform, time * 1000.0);
64 |
65 | }
66 |
67 | function makeSoundRelease(time, playbackRate)
68 | {
69 | envelope.triggerRelease(osc, time); // comment this back in to check release
70 | }
71 |
72 | function redrawWaveform()
73 | {
74 | background(20);
75 | startPoint = 0;
76 | endPoint = (width / numWaveforms);
77 | }
78 |
--------------------------------------------------------------------------------
/examples/files/Damscray_-_Dancing_Tiger_01.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Damscray_-_Dancing_Tiger_01.mp3
--------------------------------------------------------------------------------
/examples/files/Damscray_-_Dancing_Tiger_01.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Damscray_-_Dancing_Tiger_01.ogg
--------------------------------------------------------------------------------
/examples/files/Damscray_-_Dancing_Tiger_02.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Damscray_-_Dancing_Tiger_02.mp3
--------------------------------------------------------------------------------
/examples/files/Damscray_-_Dancing_Tiger_02.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Damscray_-_Dancing_Tiger_02.ogg
--------------------------------------------------------------------------------
/examples/files/Damscray_DancingTiger.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Damscray_DancingTiger.mp3
--------------------------------------------------------------------------------
/examples/files/Damscray_DancingTiger.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Damscray_DancingTiger.ogg
--------------------------------------------------------------------------------
/examples/files/Soni_Ventorum_Wind_Quintet_-_08_-_Danzi_Wind_Quintet_Op_67_No_3_In_E-Flat_Major_4_Allegretto.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Soni_Ventorum_Wind_Quintet_-_08_-_Danzi_Wind_Quintet_Op_67_No_3_In_E-Flat_Major_4_Allegretto.mp3
--------------------------------------------------------------------------------
/examples/files/Tripping.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Tripping.mp3
--------------------------------------------------------------------------------
/examples/files/Tripping.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/Tripping.ogg
--------------------------------------------------------------------------------
/examples/files/beat.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/beat.mp3
--------------------------------------------------------------------------------
/examples/files/beat.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/beat.ogg
--------------------------------------------------------------------------------
/examples/files/beatbox.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/beatbox.mp3
--------------------------------------------------------------------------------
/examples/files/beatbox.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/beatbox.ogg
--------------------------------------------------------------------------------
/examples/files/bx-spring.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/bx-spring.mp3
--------------------------------------------------------------------------------
/examples/files/bx-spring.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/bx-spring.ogg
--------------------------------------------------------------------------------
/examples/files/concrete-tunnel.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/concrete-tunnel.mp3
--------------------------------------------------------------------------------
/examples/files/concrete-tunnel.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/concrete-tunnel.ogg
--------------------------------------------------------------------------------
/examples/files/doorbell.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/doorbell.mp3
--------------------------------------------------------------------------------
/examples/files/doorbell.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/doorbell.ogg
--------------------------------------------------------------------------------
/examples/files/drum.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/drum.mp3
--------------------------------------------------------------------------------
/examples/files/drum.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/drum.ogg
--------------------------------------------------------------------------------
/examples/files/large-dark-plate.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/large-dark-plate.mp3
--------------------------------------------------------------------------------
/examples/files/large-dark-plate.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/large-dark-plate.ogg
--------------------------------------------------------------------------------
/examples/files/lucky_dragons_-_power_melody.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/lucky_dragons_-_power_melody.mp3
--------------------------------------------------------------------------------
/examples/files/lucky_dragons_-_power_melody.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/lucky_dragons_-_power_melody.ogg
--------------------------------------------------------------------------------
/examples/files/small-plate.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/small-plate.mp3
--------------------------------------------------------------------------------
/examples/files/small-plate.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/small-plate.ogg
--------------------------------------------------------------------------------
/examples/files/studio-b.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/studio-b.mp3
--------------------------------------------------------------------------------
/examples/files/studio-b.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/examples/files/studio-b.ogg
--------------------------------------------------------------------------------
/examples/fractal_music/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/genetic_music/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
13 |
14 |
--------------------------------------------------------------------------------
/examples/granular_sampler/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/granular_sampler_psynth/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/granular_sampler_psynth/sketch.js:
--------------------------------------------------------------------------------
1 | // mouseX = playback position
2 | // mouseY = playback rate
3 | // up arrow increase grain duration
4 | // down arrow decrease grain duration
5 |
6 | var source_file; // sound file
7 | var src_length; // hold its duration
8 | var peaks; // an array of peaks for the visual
9 | var pg;
10 |
11 | var psynth;
12 | var grainDur = 1; // length of the grain
13 |
14 | function preload(){
15 | source_file = loadSound('../files/Soni_Ventorum_Wind_Quintet_-_08_-_Danzi_Wind_Quintet_Op_67_No_3_In_E-Flat_Major_4_Allegretto.mp3'); // preload the sound
16 | }
17 |
18 | function setup() {
19 | createCanvas(800, 250);
20 | frameRate(25);
21 |
22 | psynth = new p5.PolySynth(25,GranularVoice);
23 |
24 | src_length = source_file.duration(); // store the sound duration
25 | peaks = source_file.getPeaks(); // get an array of peaks
26 | // draw the waveform to an off-screen graphic
27 | pg = createGraphics(width,height);
28 | pg.background(180);
29 | pg.noFill();
30 | pg.stroke(0);
31 | for (var i = 0 ; i < peaks.length ; i++){
32 | var x = map(i,0,peaks.length,0,width);
33 | var y = map(peaks[i],0,1,0,height);
34 | pg.line(x,height/2,x,height/2+y);
35 | pg.line(x,height/2,x,height/2-y);
36 | }
37 | }
38 |
39 | function draw() {
40 | background(180);
41 |
42 | if (mouseIsPressed){
43 | var start_play = map(mouseX,0,width,0,src_length); // map mouseX to position in the source
44 | var pitch = map(mouseY,0,height,1.5,0.5); // map mouseY to the rate the sound will be played
45 | //console.log(psynth.poly_counter);
46 | psynth.setADSR(grainDur*2/5,0,0,grainDur*2/5);
47 | psynth.voices[psynth.poly_counter].playGrain(start_play, pitch,grainDur);
48 | psynth.play();
49 | }
50 |
51 | image(pg,0,0); // display our waveform representation
52 | // draw playhead position
53 | fill(255,255,180,150);
54 | noStroke();
55 | rect(mouseX,0,map(grainDur,0,src_length,0,width),height);
56 |
57 | fill(0);
58 | text('Grain Duration : ' + grainDur , 5,25);
59 | }
60 |
61 | function keyPressed(){
62 | if (keyCode === DOWN_ARROW){
63 | grainDur -=0.05;
64 | }
65 | else if (keyCode === UP_ARROW){
66 | grainDur += 0.05;
67 | }
68 | grainDur = constrain(grainDur,0.1,25);
69 | }
70 |
71 |
72 | function GranularVoice(){
73 |
74 | p5.AudioVoice.call(this);
75 |
76 | this.amp = 0.05;
77 |
78 | source_file.connect(this.synthOut);
79 |
80 | this.playGrain = function(start,rate,grainDur){
81 | source_file.play(0,rate,this.amp,start,grainDur); // we need to play longer than grainDur because of the rate
82 | }
83 |
84 | this.setParams = function(params){
85 |
86 | }
87 | }
88 | GranularVoice.prototype = Object.create(p5.AudioVoice.prototype);
89 | GranularVoice.prototype.constructor = GranularVoice;
90 |
--------------------------------------------------------------------------------
/examples/graphical_eq/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/grid_sequencer/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/01a_loadSound_playback/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/01a_loadSound_playback/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapted from Learning Processing by Daniel Shiffman
2 | // http://www.learningprocessing.com
3 |
4 | var song;
5 |
6 | function setup() {
7 | song = loadSound('../../files/lucky_dragons_-_power_melody.mp3');
8 | createCanvas(640, 360);
9 | background(255,0,0);
10 | }
11 |
12 | function mousePressed() {
13 | if ( song.isPlaying() ) { // .isPlaying() returns a boolean
14 | song.stop();
15 | background(255,0,0);
16 | } else {
17 | song.play();
18 | background(0,255,0);
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/01b_preloadSound_playback/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/01b_preloadSound_playback/sketch.js:
--------------------------------------------------------------------------------
1 | // loadSound during preload()
2 | // This ensures it will be loaded and ready to play by setup()
3 |
4 | var song;
5 |
6 | function preload() {
7 | song = loadSound('../../files/lucky_dragons_-_power_melody.mp3');
8 | }
9 |
10 | function setup() {
11 | createCanvas(720, 200);
12 | song.loop(); // song is ready to play during setup() because it was loaded during preload
13 | background(0,255,0);
14 | }
15 |
16 | function mousePressed() {
17 | if ( song.isPlaying() ) { // .isPlaying() returns a boolean
18 | song.pause(); // .play() will resume from .pause() position
19 | background(255,0,0);
20 | } else {
21 | song.play();
22 | background(0,255,0);
23 | }
24 | }
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/01c_soundFormats/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/01c_soundFormats/sketch.js:
--------------------------------------------------------------------------------
1 | // There is no single sound format that is supported
2 | // by all web browsers. For example, mp3 support is not native to
3 | // Firefox and Opera because the mp3 codec is patented.
4 | //
5 | // To ensure compatibility, you can include the same sound file
6 | // in multiple formats, i.e. sound.mp3 and sound.ogg. Ogg is an
7 | // open source alternative to mp3. You can convert audio files
8 | // into web friendly formats for free online at http://media.io/
9 | //
10 | // The soundFormats() method tells loadSound which formats we have
11 | // included with our sketch. Then, loadSound will attempt to load
12 | // the first format that is supported by the client's web browser.
13 |
14 | var song;
15 |
16 | function preload() {
17 | // we have included both an .ogg file and an .mp3 file
18 | soundFormats('ogg', 'mp3');
19 |
20 | // if mp3 is not supported by this browser,
21 | // loadSound will load the ogg file we have included with our sketch
22 | song = loadSound('../../files/lucky_dragons_-_power_melody.mp3');
23 | }
24 |
25 | function setup() {
26 | createCanvas(720, 200);
27 |
28 | song.play(); // song loaded during preload(), ready to play in setup()
29 | background(0,255,0);
30 | }
31 |
32 | function mousePressed() {
33 | if ( song.isPlaying() ) { // .isPlaying() returns a boolean
34 | song.pause();
35 | background(255,0,0);
36 | } else {
37 | song.play(); // playback will resume from the pause position
38 | background(0,255,0);
39 | }
40 | }
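An alternative to soundFormats() is to pass loadSound an array of paths, as the amplitude_analysis example above does; loadSound then plays the first format the browser supports. A minimal sketch of that form, using the .ogg version of the same file listed under examples/files:

// Alternative: list every available format and let loadSound choose.
function preload() {
  song = loadSound([
    '../../files/lucky_dragons_-_power_melody.mp3',
    '../../files/lucky_dragons_-_power_melody.ogg'
  ]);
}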
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/02_sound_effect/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/02_sound_effect/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapted from Learning Processing by Daniel Shiffman
2 | // http://www.learningprocessing.com
3 | // Doorbell sample by Corsica_S via freesound.org, Creative Commons BY 3.0
4 |
5 | // A sound file object
6 | var dingdong;
7 |
8 | // A doorbell object (that will trigger the sound)
9 | var doorbell;
10 |
11 | function setup() {
12 | createCanvas(200, 200);
13 |
14 | // Load the sound file.
15 | // We have included both an MP3 and an OGG version.
16 | soundFormats('mp3', 'ogg');
17 | dingdong = loadSound('../../files/doorbell.mp3');
18 |
19 | // Create a new doorbell
20 | doorbell = new Doorbell(width/2, height/2, 64);
21 | }
22 |
23 | function draw() {
24 | background(255);
25 | // Show the doorbell
26 | doorbell.display(mouseX, mouseY);
27 | }
28 |
29 | function mousePressed() {
30 | // If the user clicks on the doorbell, play the sound!
31 | if (doorbell.contains(mouseX, mouseY)) {
32 | dingdong.play();
33 | }
34 | }
35 |
36 | // A Class to describe a "doorbell" (really a button)
37 | var Doorbell = function(x_, y_, r_) {
38 | // Location and size
39 | var x = x_;
40 | var y = y_;
41 | var r = r_;
42 |
43 | // Is a point inside the doorbell? (used for mouse rollover, etc.)
44 | this.contains = function(mx, my) {
45 | if (dist(mx, my, x, y) < r) {
46 | return true;
47 | } else {
48 | return false;
49 | }
50 | };
51 |
52 | // Show the doorbell (hardcoded colors, could be improved)
53 | this.display = function(mx, my) {
54 | if (this.contains(mx, my)) {
55 | fill(100);
56 | } else {
57 | fill(175);
58 | }
59 | stroke(0);
60 | strokeWeight(4);
61 | ellipse(x, y, r, r);
62 | };
63 | };
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/03_manipulate_sound/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/03_manipulate_sound/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapted from Learning Processing
2 | // Daniel Shiffman
3 | // http://www.learningprocessing.com
4 |
5 | // A sound file object
6 | var song;
7 |
8 | function preload() {
9 | // Load a sound file
10 | song = loadSound('../../files/Damscray_DancingTiger.mp3');
11 | }
12 |
13 | function setup() {
14 | createCanvas(720, 720);
15 |
16 | // Loop the sound forever
17 | // (well, at least until stop() is called)
18 | song.loop();
19 | }
20 |
21 | function draw() {
22 | background(200);
23 |
24 | // Set the volume to a range between 0 and 1.0
25 | var volume = map(mouseX, 0, width, 0, 1);
26 | volume = constrain(volume, 0, 1);
27 | song.amp(volume);
28 |
29 | // Set the rate to a range between 0.01 and 4
30 | // Changing the rate alters the pitch
31 | var speed = map(mouseY, 0.1, height, 0, 2);
32 | speed = constrain(speed, 0.01, 4);
33 | song.rate(speed);
34 |
35 | // Draw some circles to show what is going on
36 | stroke(0);
37 | fill(51, 100);
38 | ellipse(mouseX, 100, 48, 48);
39 | stroke(0);
40 | fill(51, 100);
41 | ellipse(100, mouseY, 48, 48);
42 | }
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/04_pan/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/04_pan/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapted from Learning Processing
2 | // Daniel Shiffman
3 | // http://www.learningprocessing.com
4 |
5 | var song;
6 |
7 | function preload() {
8 | song = loadSound('../../files/Damscray_DancingTiger.mp3');
9 | }
10 |
11 | function setup() {
12 | createCanvas(720, 200);
13 | song.loop();
14 | }
15 |
16 | function draw() {
17 | background(200);
18 | // Map mouseX to a panning value (between -1.0 and 1.0)
19 | var panning = map(mouseX, 0., width, -1.0, 1.0);
20 | panning = constrain(panning, -1.0, 1.0);
21 | song.pan(panning);
22 |
23 | // Draw a circle
24 | stroke(0);
25 | fill(51, 100);
26 | ellipse(mouseX, 100, 48, 48);
27 | }
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/06_noise/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/06_noise/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapted from Learning Processing
2 | // Daniel Shiffman
3 | // http://www.learningprocessing.com
4 |
5 | // Example: Make Some Noise
6 |
7 | var noise;
8 |
9 | function setup() {
10 | createCanvas(780, 200);
11 | noise = new p5.Noise(); // other types include 'brown' and 'pink'
12 | noise.start();
13 | }
14 |
15 | function draw() {
16 | background(0);
17 |
18 | var vol = map(mouseX, 0, width, 0, 1);
19 | vol = constrain(vol, 0, 1);
20 | noise.amp(vol);
21 | ellipse(mouseX, 100, 32, 32);
22 | }
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/06_oscillator_frequency/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/06_oscillator_frequency/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapted from Learning Processing
2 | // Daniel Shiffman
3 | // http://www.learningprocessing.com
4 |
5 | // Example: Oscillator Frequency
6 |
7 | var osc;
8 |
9 | function setup() {
10 | createCanvas(720, 200);
11 |
12 | // Instantiate a Sine Wave Oscillator
13 | osc = new p5.SinOsc();
14 |
15 | // Tell the Oscillator to start oscillating.
16 | // We hear the frequency of these oscillators as a pitch.
17 | osc.start();
18 |
19 | // Oscillator has an output amplitude of 0.5 by default.
20 | // We can make it louder.
21 | osc.amp(1);
22 | }
23 |
24 | function draw() {
25 | background(200);
26 |
27 | // map mouseX to a frequency between 40 and 880 Hz
28 | var freq = map(mouseX, 0, width, 40, 880);
29 | osc.freq(freq);
30 | ellipse(mouseX, 100, 32, 32);
31 | }
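To step through musical pitches instead of a continuous sweep, mouseX can be mapped to a MIDI note number and converted with midiToFreq(), as the envelope examples do. An illustrative variant of the draw() above (the note range C3..C5 is an arbitrary choice):

// Illustrative variant: quantize mouseX to MIDI notes (C3..C5).
function draw() {
  background(200);
  var midiNote = floor(map(mouseX, 0, width, 48, 72));
  osc.freq(midiToFreq(midiNote));
  ellipse(mouseX, 100, 32, 32);
}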
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/07_envelope/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/07_envelope/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapted from Learning Processing
2 | // Daniel Shiffman
3 | // http://www.learningprocessing.com
4 |
5 | // Example: Playing Notes With Envelope
6 |
7 | // An Envelope is a series of fades, defined
8 | // as time / value pairs. In this example, the envelope
9 | // will be used to "play" a note by controlling the output
10 | // amplitude of an oscillator.
11 | //
12 | // HOW THIS WORKS: The p5.Oscillator sends its output through
13 | // an internal Web Audio GainNode (p5.Oscillator.output).
14 | // By default, that node has a constant value of 0.5. It can
15 | // be reset with the osc.amp() method. Or, in this example, an
16 | // Envelope takes control of that node, turning the amplitude
17 | // up and down like a volume knob.
18 |
19 | var osc;
20 | var envelope;
21 |
22 | var scaleArray = [60, 62, 64, 65, 67, 69, 71, 72];
23 | var note = 0;
24 |
25 | function setup() {
26 | createCanvas(200, 200);
27 | osc = new p5.SinOsc();
28 |
29 | // Instantiate the envelope with time / value pairs
30 | envelope = new p5.Envelope(0.01, 0.5, 1, 0.5);
31 |
32 | osc.start();
33 | }
34 |
35 | function draw() {
36 | background(255);
37 |
38 | if (frameCount % 60 == 0) {
39 | var midiValue = scaleArray[note];
40 | var freqValue = midiToFreq(midiValue);
41 | osc.freq(freqValue);
42 |
43 | envelope.play(osc);
44 | note = (note + 1) % scaleArray.length;
45 | }
46 | }
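The constructor time/value pairs above can also be written with setADSR() and setRange(), the form most of the other envelope examples use. A roughly equivalent setup, as an illustrative sketch; the 0.2 s release time is an added assumption, since the constructor form above does not specify one.

// Roughly equivalent to new p5.Envelope(0.01, 0.5, 1, 0.5):
envelope = new p5.Envelope();
envelope.setADSR(0.01, 1, 1, 0.2); // attackTime, decayTime, susRatio, releaseTime
envelope.setRange(0.5, 0);         // attackLevel, releaseLevel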
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/08_amplitude_analysis/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/08_amplitude_analysis/sketch.js:
--------------------------------------------------------------------------------
1 | // Learning Processing
2 | // Daniel Shiffman
3 | // http://www.learningprocessing.com
4 |
5 | // Playback Amplitude Analysis
6 |
7 | var song;
8 |
9 | var analyzer;
10 |
11 | function preload() {
12 | song = loadSound('../../files/lucky_dragons_-_power_melody.mp3');
13 | }
14 |
15 | function setup() {
16 | createCanvas(720, 200);
17 | song.loop();
18 |
19 | // create a new Amplitude analyzer
20 | analyzer = new p5.Amplitude();
21 |
22 | // Patch the input to a volume analyzer
23 | analyzer.setInput(song);
24 | }
25 |
26 | function draw() {
27 | background(255);
28 |
29 | // Get the overall volume (between 0 and 1.0)
30 | var vol = analyzer.getLevel();
31 | fill(127);
32 | stroke(0);
33 |
34 | // Draw an ellipse with size based on volume
35 | ellipse(width/2, height/2, 10+vol*200, 10+vol*200);
36 | }
37 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/09_live_input/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/09_live_input/sketch.js:
--------------------------------------------------------------------------------
1 | // make some noise to float the ellipse
2 | // p5.AudioIn contains its own p5.Amplitude object,
3 | // so you can call getLevel on p5.AudioIn without
4 | // creating a p5.Amplitude.
5 |
6 | var input;
7 | var analyzer;
8 |
9 | function setup() {
10 | createCanvas(200, 200);
11 |
12 | // Create an Audio input
13 | input = new p5.AudioIn();
14 |
15 | // start the Audio Input
16 | input.start();
17 |
18 | // create a new Amplitude analyzer
19 | analyzer = new p5.Amplitude();
20 |
21 | // Patch the input to a volume analyzer
22 | analyzer.setInput(input);
23 | }
24 |
25 | function draw() {
26 | background(200);
27 |
28 | // Get the overall volume (between 0 and 1.0)
29 | var vol = analyzer.getLevel();
30 | fill(127);
31 | stroke(0);
32 |
33 | // Draw an ellipse with height based on volume
34 | var h = map(vol, 0, 1, height, 0);
35 | ellipse(width/2, h - 25, 50, 50);
36 | }
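
// As the comment at the top notes, the p5.Amplitude analyzer above is optional:
// p5.AudioIn exposes getLevel() directly. The unused, illustrative variant below
// reads the level straight from the mic input instead.
function drawUsingMicLevelOnly() {
  background(200);
  var vol = input.getLevel(); // same 0 - 1.0 range as analyzer.getLevel()
  var h = map(vol, 0, 1, height, 0);
  ellipse(width / 2, h - 25, 50, 50);
}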
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/10_mic_threshold/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/learningProcessingExamples/10_mic_threshold/sketch.js:
--------------------------------------------------------------------------------
1 | // Adapted from Learning Processing
2 | // Daniel Shiffman
3 | // http://www.learningprocessing.com
4 |
5 | // in the previous example, we created an Amplitude object to getLevel().
6 | // AudioIn contains its own internal amplitude object, so we can actually
7 | // just call getLevel on the AudioIn itself.
8 |
9 | var input;
10 | var analyzer;
11 |
12 | function setup() {
13 | createCanvas(200, 200);
14 | background(255);
15 |
16 | // Create an Audio input
17 | input = new p5.AudioIn();
18 |
19 | input.start();
20 | }
21 |
22 | function draw() {
23 | // Get the overall volume (between 0 and 1.0)
24 | var volume = input.getLevel();
25 |
26 | // If the volume is greater than 0.1 a rectangle is drawn at a random location in the window.
27 | // The louder the volume, the larger the rectangle.
28 | var threshold = 0.1;
29 | if (volume > threshold) {
30 | stroke(0);
31 | fill(0, 100);
32 | rect(random(40, width), random(height), volume*50, volume*50);
33 | }
34 |
35 | // Graph the overall potential volume, with a line at the threshold
36 | var y = map(volume, 0, 1, height, 0);
37 | var ythreshold = map(threshold, 0, 1, height, 0);
38 |
39 | noStroke();
40 | fill(175);
41 | rect(0, 0, 20, height);
42 | // Then draw a rectangle on the graph, sized according to volume
43 | fill(0);
44 |   rect(0, y, 20, height - y);
45 | stroke(0);
46 | line(0, ythreshold, 19, ythreshold);
47 | }
--------------------------------------------------------------------------------
/examples/loadSound_404ErrorHandling/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/loadSound_404ErrorHandling/sketch.js:
--------------------------------------------------------------------------------
1 | // error handler
2 |
3 | function setup() {
4 | createCanvas(800,200);
5 | loadSound('http://badURL.mp3', soundReady, soundError);
6 | loadSound('../badPath.mp3', soundReady, soundError);
7 |
8 | createConvolver('http://badURL.mp3', soundReady, soundError);
9 | createConvolver('../badPath.mp3', soundReady, soundError);
10 | }
11 |
12 | function soundReady(soundFile){
13 | soundFile.play();
14 | }
15 |
16 | // the error has the following properties:
17 | function soundError(e) {
18 | console.log('New error:');
19 | console.log('- name: ' + e.name);
20 | console.log('- message: ' + e.message);
21 | console.log('- stack: ' + e.stack);
22 | console.log('- failed path: ' + e.failedPath);
23 | }
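
// One possible use of the error callback: fall back to a file that is known to
// exist. This variant is illustrative only (it is not wired up above);
// '../files/beat.mp3' is the local file used by other examples in this repo.
function soundErrorWithFallback(e) {
  console.log('Could not load ' + e.failedPath + ', trying a local file instead');
  loadSound('../files/beat.mp3', soundReady, soundError);
}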
--------------------------------------------------------------------------------
/examples/loadSound_callback/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/loadSound_callback/sketch.js:
--------------------------------------------------------------------------------
1 | // Sound samples from Damscray - "Dancing Tiger",
2 | // Creative Commons BY-NC-SA
3 |
4 |
5 | function setup() {
6 | createCanvas(400,200);
7 | soundFormats('ogg', 'mp3');
8 | soundFile = loadSound('../files/Damscray_-_Dancing_Tiger_01', soundReady);
9 | }
10 |
11 | function soundReady(){
12 | soundFile.rate(1.75);
13 | soundFile.loop();
14 |
15 | text('File is ready! Click to pause / unpause', 50, 10);
16 |
17 | // draw the waveform
18 | peaks = soundFile.getPeaks();
19 | beginShape();
20 |   for (var i = 0; i < peaks.length; i++) {
21 | vertex(map(i, 0, peaks.length, 0, width), map(peaks[i], -1, 1, height, 0) );
22 | }
23 | endShape();
24 | }
25 |
26 | function mousePressed(){
27 | if (soundFile.isPlaying()){
28 | soundFile.pause();
29 | } else {
30 | soundFile.play();
31 | }
32 | }
--------------------------------------------------------------------------------
/examples/loadSound_preload/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/loadSound_preload/sketch.js:
--------------------------------------------------------------------------------
1 | // Sound samples from Damscray - "Dancing Tiger",
2 | // Creative Commons BY-NC-SA
3 |
4 |
5 | function preload(){
6 | soundFormats('ogg', 'mp3');
7 | soundFile = loadSound('../files/Damscray_-_Dancing_Tiger_01');
8 | }
9 |
10 | function setup() {
11 | createCanvas(400,200);
12 |
13 | text('File is ready! Click to pause / play.', 50, 10);
14 |
15 | soundFile.rate(.8);
16 | soundFile.reverseBuffer();
17 | soundFile.loop();
18 |
19 | peaks = soundFile.getPeaks();
20 | beginShape();
21 |   for (var i = 0; i < peaks.length; i++) {
22 | vertex(map(i, 0, peaks.length, 0, width), map(peaks[i], -1, 1, height, 0) );
23 | }
24 | endShape();
25 |
26 | }
27 |
28 | function mousePressed(){
29 | if (soundFile.isPlaying()){
30 | soundFile.pause();
31 | } else {
32 | soundFile.play();
33 | }
34 | }
--------------------------------------------------------------------------------
/examples/loadSound_with_Drag_and_Drop/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 | Pick sounds from a menu here:
27 |
33 |
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/examples/loadSound_with_Drag_and_Drop/sketch.js:
--------------------------------------------------------------------------------
1 | var soundArray = [];
2 |
3 | var fileInput;
4 | var dndSelect;
5 | var container;
6 |
7 | function setup(){
8 | createCanvas(500,100);
9 |
10 | container = getElement('container');
11 |
12 | dndSelect = createDiv();
13 | dndSelect.addClass('dndSelect');
14 | dndSelect.html('Drag and Drop sounds here.');
15 |
16 | //Event listeners for the file selection elements
17 | dndSelect.drop(gotFile);
18 | dndSelect.dragOver(dndDragHandler);
19 | dndSelect.dragLeave(dndResetBackground);
20 |
21 | fileInput = createFileInput(gotFile);
22 | fileInput.parent(container);
23 |
24 | }
25 |
26 |
27 | function draw(){
28 | background(240);
29 |
30 | for(i =0; i 77*i && mouseX < (77*i)+75 && mouseY > 12.5 && mouseY < 87.5){
44 | if(soundArray[i].isLoaded()){
45 | if(soundArray[i].isPlaying()){
46 | soundArray[i].stop();
47 | }
48 | else{
49 | soundArray[i].play();
50 | }
51 | break;
52 | }
53 | }
54 | }
55 | }
56 |
57 |
58 | // callback from fileInput and drag and drop
59 | function gotFile(file){
60 | var newSF = loadSound(file);
61 | soundArray.push(newSF);
62 | dndResetBackground();
63 | }
64 |
65 | function dndDragHandler(evt){
66 | dndSelect.style('background-color', 'rgb(255,0,0)');
67 | evt.dataTransfer.dropEffect = 'copy';
68 | }
69 |
70 | function dndResetBackground() {
71 | dndSelect.style('background-color', 'rgb(230,230,230)');
72 | }
73 |
--------------------------------------------------------------------------------
/examples/loop_stepSequencer/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/loop_stepSequencer/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Create a sequence using a Part with callbacks that play back soundfiles.
3 | * The callback includes parameters (the value at that position in the Phrase array)
4 | * as well as time, which should be used to schedule playback with precision.
5 | *
6 | */
7 |
8 | var click, beatbox;
9 | var clickPhrase = [1, 0, 0, 0];
10 | var bboxPhrase = [0, 0, 1, 0, 0, 0, 1, 1];
11 |
12 |
13 | var part; // a part we will loop
14 |
15 | function preload() {
16 | soundFormats('mp3', 'ogg');
17 | click = loadSound('../files/drum');
18 | beatbox = loadSound('../files/beatbox');
19 |
20 | }
21 |
22 | function setup() {
23 | // create a part with 8 spaces, where each space represents 1/16th note (default)
24 | part = new p5.Part(8, 1/16);
25 |
26 | // add phrases, with a name, a callback, and
27 | // an array of values that will be passed to the callback if > 0
28 | part.addPhrase('kick', playKick, clickPhrase);
29 | part.addPhrase('snare', playSnare, bboxPhrase);
30 |
31 | // set tempo (Beats Per Minute) of the part and tell it to loop
32 | part.setBPM(80);
33 | part.loop();
34 | }
35 |
36 | function playKick(time, params) {
37 | click.rate(params);
38 | click.play(time);
39 | }
40 |
41 | function playSnare(time, params) {
42 | beatbox.rate(params);
43 | beatbox.play(time);
44 | }
45 |
46 | // draw a ball mapped to current note height
47 | function draw() {
48 | background(255);
49 | fill(255, 0, 0);
50 | }
51 |
52 | // UI
53 | var hDiv = 2;
54 | var wDiv = 16;
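
// Transport control, sketched for reference (these helpers are not called
// above): p5.Part also provides stop(), pause(), and setBPM(), so the loop
// started in setup() can be paused, reset, or retimed later.
function pauseSequence() {
  part.pause(); // stop, but remember the current position in the Part
}
function stopSequence() {
  part.stop(); // stop and reset to the beginning of the Part
}
function setTempo(bpm) {
  part.setBPM(bpm); // e.g. setTempo(120)
}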
--------------------------------------------------------------------------------
/examples/looper_simple/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/looper_simple/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Create a sequence using a Part.
3 | * Add two Phrases to the part, and tell the part to loop.
4 | *
5 | * The callback includes parameters (the value at that position in the Phrase array)
6 | * as well as time, which should be used to schedule playback with precision.
7 | */
8 |
9 | var osc, env; // used by playNote
10 | var noise, noiseEnv; // used by playSnare
11 | var part; // a part we will loop
12 | var currentBassNote = 47;
13 |
14 | var prevTime = 0;
15 |
16 | function setup() {
17 | // prepare the osc and env used by playNote()
18 | env = new p5.Envelope(0.01, 0.8, 0.2, 0);
19 | osc = new p5.TriOsc(); // connects to main output by default
20 | osc.start(0);
21 | osc.connect();
22 | env.setInput(osc);
23 |
24 | // prepare the noise and env used by playSnare()
25 | noise = new p5.Noise();
26 | // noise.amp(0.0);
27 | noise.start();
28 | noiseEnv = new p5.Envelope(0.01, 0.5, 0.1, 0);
29 | noiseEnv.setInput(noise);
30 | // create a part with 8 spaces, where each space represents 1/16th note (default)
31 | part = new p5.Part(8, 1/16);
32 |
33 | // add phrases, with a name, a callback, and
34 | // an array of values that will be passed to the callback if > 0
35 | part.addPhrase('snare', playSnare, [0, 0, 1, 0]);
36 | part.addPhrase('bass', playBass, [47, 42, 45, 47, 45,42, 40, 42]);
37 |
38 |   // set tempo (Beats Per Minute) of the part, then start it
39 | part.setBPM(60);
40 | part.noLoop();
41 | part.start();
42 |
43 | }
44 |
45 | function playBass(time, params) {
46 | prevTime = time + getAudioContext().currentTime;
47 |
48 | currentBassNote = params;
49 | osc.freq(midiToFreq(params), 0, time);
50 | env.play(osc, time);
51 | }
52 |
53 |
54 | function playSnare(time, params) {
55 | noiseEnv.play(noise, time);
56 | }
57 |
58 | // draw a ball mapped to current note height
59 | function draw() {
60 | background(255);
61 | fill(255, 0, 0);
62 | var noteHeight = map(currentBassNote, 40, 50, height, 0);
63 | ellipse(width/2, noteHeight, 30, 30);
64 | }
65 |
--------------------------------------------------------------------------------
/examples/markov_music/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
13 |
14 |
--------------------------------------------------------------------------------
/examples/micFFT/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/micFFT/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Visualize the frequency spectrum of live audio input
3 | */
4 |
5 | var mic, fft;
6 |
7 | function setup() {
8 | createCanvas(512,400);
9 | noStroke();
10 | fill(0,255,255);
11 |
12 | mic = new p5.AudioIn();
13 | mic.start();
14 | fft = new p5.FFT();
15 | fft.setInput(mic);
16 | }
17 |
18 | function draw() {
19 | background(200);
20 | var spectrum = fft.analyze();
21 |
22 | beginShape();
23 | vertex(0, height);
24 |   for (i = 0; i < spectrum.length; i++) {
25 |     vertex(map(i, 0, spectrum.length, 0, width), map(spectrum[i], 0, 255, height, 0));
26 |   }
27 |   vertex(width, height);
28 |   endShape();
29 | }
--------------------------------------------------------------------------------
/examples/micLevel/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/micLevel/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Make some noise to float the ellipse
3 | */
4 |
5 | function setup() {
6 | createCanvas(400,400);
7 | mic = new p5.AudioIn();
8 | mic.start();
9 | }
10 |
11 | function draw() {
12 | background(0);
13 |
14 | // getLevel takes an optional smoothing value, or defaults to 0.0
15 | micLevel = mic.getLevel();
16 | ellipse(width/2, height - micLevel*height, 100, 100);
17 | }
--------------------------------------------------------------------------------
/examples/micLevel_on_off/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/micLevel_on_off/sketch.js:
--------------------------------------------------------------------------------
1 | var mic;
2 | var amplitude, micLevel, outputLevel, levelLabel;
3 |
4 | var soundToggle;
5 | var soundOn = false;
6 | var micOn = true;
7 | var micToggle;
8 |
9 | var h;
10 |
11 | function setup() {
12 | createCanvas(400,400);
13 | noStroke();
14 | fill(255);
15 |
16 | mic = new p5.AudioIn();
17 | mic.start();
18 |
19 | // create controls
20 | levelLabel = createP('Output Volume: ');
21 | outputLevel = createSlider(0,100,50);
22 |
23 | soundToggle = createButton('Sound ON');
24 | soundToggle.mousePressed(toggleSound);
25 |
26 | micToggle = createButton('Stop Mic');
27 | micToggle.mousePressed(toggleMic);
28 |
29 | h = createP('enable the mic...');
30 | createP('NOTE: Mic is disconnected from main output (speakers) by default. Turning sound on with mic.connect( ) may cause a feedback loop between the mic and speakers. Try headphones.');
31 | }
32 |
33 | function draw() {
34 | background(0);
35 |
36 | // get the volume level, accepts an optional smoothing value or defaults to 0.
37 | micLevel = mic.getLevel();
38 |
39 | text('input volume: ' + micLevel, 5, 10);
40 |
41 | // if the mic picks up a level greater than zero, we can assume
42 | // that the user has allowed their browser to access the microphone.
43 | if (micLevel > 0) {
44 | h.html('Make some noise!');
45 | }
46 |
47 | ellipse(width/2,height/2, 400*micLevel + 10, 400*micLevel + 10);
48 |
49 | // set main output
50 | levelLabel.html('Output Volume: ' + outputLevel.value()/100);
51 | outputVolume(outputLevel.value()/100);
52 | }
53 |
54 |
55 | // Toggle whether mic is connected to main output
56 | function toggleSound() {
57 | if (soundOn == false) {
58 | mic.connect();
59 | soundOn = true;
60 | soundToggle.html('Sound OFF');
61 | } else {
62 | mic.disconnect();
63 | soundOn = false;
64 | soundToggle.html('Sound ON');
65 | }
66 | }
67 |
68 | // Toggle whether the mic is on (getting input) or off
69 | function toggleMic() {
70 | if (micOn == true) {
71 | mic.stop();
72 | micOn = false;
73 | micToggle.html('Start Mic');
74 | } else {
75 | mic.start();
76 | micOn = true;
77 | micToggle.html('Stop mic');
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/examples/mixingSounds/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/mixingSounds/sketch.js:
--------------------------------------------------------------------------------
1 |
2 | // load two soundfiles and crossfade between them
3 | var sound1,sound2;
4 | var gain1, gain2, gain3;
5 |
6 | function preload(){
7 | soundFormats('ogg', 'mp3');
8 | sound1 = loadSound('../files/Damscray_-_Dancing_Tiger_01');
9 | sound2 = loadSound('../files/beat.mp3');
10 | }
11 |
12 | function setup() {
13 | createCanvas(400,200);
14 |
15 | // create a 'mix bus' gain to which we will connect both soundfiles
16 | mixBus = new p5.Gain();
17 | mixBus.connect();
18 |
19 | // setup first sound for playing
20 | sound1.rate(1);
21 | sound1.loop();
22 |   sound1.disconnect(); // disconnect from p5 output
23 |
24 | gain1 = new p5.Gain(); // setup a gain node
25 | gain1.setInput(sound1); // connect the first sound to its input
26 | gain1.connect(mixBus); // connect its output to the 'mix bus'
27 |
28 | sound2.rate(1);
29 | sound2.disconnect();
30 | sound2.loop();
31 |
32 | gain2 = new p5.Gain();
33 | gain2.setInput(sound2);
34 | gain2.connect(mixBus);
35 |
36 | }
37 |
38 |
39 | function draw(){
40 | background(180);
41 |
42 |   // calculate the horizontal distance between the mouse and the right of the screen
43 | var d = dist(mouseX,0,width,0);
44 |
45 |  // map the horizontal position of the mouse to values usable for volume control of sound1
46 | var vol1 = map(mouseX,0,width,0,1);
47 | var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa
48 |
49 | gain1.amp(vol1,0.5,0);
50 | gain2.amp(vol2,0.5,0);
51 |
52 |   // map the vertical position of the mouse to values usable for 'output volume control'
53 | var vol3 = map(mouseY,0,height,0,1);
54 | mixBus.amp(vol3,0.5,0);
55 | }
56 |
57 |
58 |
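// The linear crossfade above (vol2 = 1 - vol1) dips slightly in perceived
// loudness at the midpoint. An equal-power curve is a common alternative; this
// illustrative helper (not used above) returns both gains for a fade position t
// between 0 and 1.
function equalPowerGains(t) {
  return {
    a: cos(t * HALF_PI), // 1 -> 0 as t goes from 0 to 1
    b: sin(t * HALF_PI)  // 0 -> 1 as t goes from 0 to 1
  };
}
// e.g. in draw():
//   var g = equalPowerGains(mouseX / width);
//   gain1.amp(g.a, 0.5, 0);
//   gain2.amp(g.b, 0.5, 0);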
--------------------------------------------------------------------------------
/examples/noiseMod_AM/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/onended_callback/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/onended_callback/sketch.js:
--------------------------------------------------------------------------------
1 | var soundFile;
2 |
3 | function preload() {
4 | soundFile = loadSound(['../files/beat.mp3', '../files/beat.ogg']);
5 | }
6 |
7 | function setup() {
8 | soundFile.onended(sayDone);
9 | soundFile.rate(3);
10 | soundFile.play();
11 | }
12 |
13 | function sayDone(sf) {
14 | console.log('Done playing: ');
15 | console.log(sf);
16 | console.log('Expect the argument to equal the soundfile that just ended playback: ')
17 | console.log(sf === this);
18 | }
19 |
20 |
--------------------------------------------------------------------------------
/examples/oscillatorMod_AM/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/oscillatorMod_AM/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: Amplitude Modulation involves two oscillators, referred
3 | * to as the carrier and the modulator, where the modulator controls
4 | * the carrier's amplitude.
5 | *
6 | * The carrier is typically set at an audible frequency (e.g. 440 Hz)
7 | * and connected to main output by default. The carrier.amp is
8 | * turned down low because we will have the modulator control its amplitude.
9 | *
10 | * The modulator is typically set to a frequency that is lower than
11 | * humans can hear (e.g. 1 Hz, or one cycle every second). The modulator
12 | * is disconnected from main output. Instead, it is connected
13 | * to the amplitude of the Carrier, like this: carrier.amp(modulator).
14 | *
15 | * MouseX controls the amplitude of the modulator from 0 to 1. When the
16 | * modulator's amplitude is set to 0, the amplitude modulation has no effect.
17 | *
18 | * MouseY controls the frequency of the modulator, from 4 Hz down to 0 Hz.
19 | * Both impact our perception of the Carrier frequency. A subtle amount
20 | * of Amplitude Modulation can simulate effects such as Tremolo.
21 | * Ring Modulation is a type of Amplitude Modulation where the original
22 | * carrier signal is not present.
23 | */
24 |
25 | var carrier; // this is the oscillator we will hear
26 | var modulator; // this oscillator will modulate the amplitude of the carrier
27 | var fft; // we'll visualize the waveform
28 |
29 | function setup() {
30 | createCanvas(800,400);
31 | background(30); // alpha
32 | noFill();
33 |
34 | carrier = new p5.Oscillator(); // connects to main output by default
35 | carrier.start();
36 | carrier.freq(340);
37 | carrier.amp(0.2);
38 |   // keep the carrier's own amplitude low; the modulator will take over below
39 |
40 |
41 | modulator = new p5.Oscillator('triangle');
42 | modulator.disconnect(); // disconnect the modulator from main output
43 | modulator.start();
44 | modulator.freq(5);
45 | modulator.amp(1);
46 |
47 | // Modulate the carrier's amplitude with the modulator
48 | carrier.amp(modulator);
49 |
50 | // create an fft to analyze the audio
51 | fft = new p5.FFT();
52 | }
53 |
54 | function draw() {
55 | background(30,30,30,100); // alpha
56 |
57 |   // map mouseY to modulator freq between 4 and 0 Hz
58 | var modFreq = map(mouseY, 0, height, 4, 0);
59 | modulator.freq(modFreq);
60 |
61 | var modAmp = map(mouseX, 0, width, 0, 0.5);
62 |   modulator.amp(modAmp, 0.01); // fade time of 0.01 for smooth fading
63 |
64 | // analyze the waveform
65 | waveform = fft.waveform();
66 |
67 | // draw the shape of the waveform
68 | stroke(240);
69 | strokeWeight(4);
70 | beginShape();
71 | for (var i = 0; i
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/oscillatorSecondsFromNow/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/oscillatorSecondsFromNow/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: change amplitude with fadeTime, and schedule the change to happen in the future.
3 | */
4 |
5 | function setup() {
6 | osc = new p5.TriOsc();
7 | osc.freq(260);
8 | createP('mousePressed: set amplitude to .7 over the course of .2 seconds');
9 | createP('mouseReleased: 1 second fade to 0. Start the fade 0.5 seconds from now');
10 | }
11 |
12 | function mousePressed () {
13 | osc.start();
14 | // fade amplitude to .7 over the course of .2 seconds
15 |   osc.amp(0.7, 0.2);
16 | }
17 |
18 | function mouseReleased() {
19 | // fade amplitude to zero over the course of 1 second. Start the fade after .5 seconds.
20 |   osc.amp(0, 1, 0.5);
21 | }
22 |
--------------------------------------------------------------------------------
/examples/oscillatorWaveform/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/oscillatorWaveform/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Example: change the frequency of an oscillator and visualize the waveform
3 | */
4 |
5 | var freqSlider, freqLabel, ampLabel, ampSlider, button;
6 |
7 | var osc;
8 | var freq = 220; // current frequency (updated by slider)
9 | var amp = 0.5;
10 | var fft;
11 |
12 |
13 | var oscOn = false;
14 |
15 | function setup() {
16 | createCanvas(800,400);
17 | noFill();
18 |
19 | freqLabel = createP('Frequency: ');
20 | freqSlider = createSlider(1, 700, freq);
21 |
22 | ampLabel = createP('Amplitude: ' + amp);
23 | ampSlider = createSlider(0.0, 100.0, amp*100);
24 |
25 | button = createButton('start');
26 | button.mousePressed(toggleOsc);
27 |
28 | // Other types of oscillators include TriOsc, SawOsc, SqrOsc, and generic Oscillator.
29 | osc = new p5.SinOsc(freq);
30 | osc.amp(amp);
31 |
32 | p = createP('Current Waveform: ' + osc.getType());
33 |
34 | // these buttons will change the osc's waveform
35 | sine = createButton('sine');
36 | sine.mousePressed(setSine);
37 | saw = createButton('sawtooth');
38 | saw.mousePressed(setSawtooth);
39 | tri = createButton('triangle');
40 | tri.mousePressed(setTriangle);
41 | sq = createButton('square');
42 | sq.mousePressed(setSquare);
43 |
44 | // create an fft to analyze the audio
45 | fft = new p5.FFT();
46 | }
47 |
48 | function draw() {
49 | background(30);
50 |
51 | amp = ampSlider.value()/100;
52 | osc.amp(amp);
53 | ampLabel.html('Amplitude: ' + amp + '/ 1.0');
54 |
55 | freq = freqSlider.value();
56 | osc.freq(freq);
57 | freqLabel.html('Frequency: ' + freq + ' Hz');
58 |
59 | p.html('Current Waveform: ' + osc.getType());
60 |
61 | // analyze the waveform
62 | waveform = fft.waveform();
63 |
64 | // draw the shape of the waveform
65 | stroke(255);
66 | strokeWeight(10);
67 | beginShape();
68 | for (var i = 0; i
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/oscillator_FMSynth/sketch.js:
--------------------------------------------------------------------------------
1 | var carrier, modulator;
2 |
3 | // carrier frequency signal, a p5.Signal
4 | var carrierFreq;
5 |
6 | // modulator frequency signal, a p5.Signal
7 | var modFreq;
8 |
9 |
10 | // output envelope
11 | var env;
12 |
13 | function setup() {
14 | carrier = new p5.Oscillator();
15 |
16 | carrierFreq = new p5.Signal(240);
17 | carrier.freq(carrierFreq);
18 | carrier.start();
19 |
20 | env = new p5.Envelope(0.05, 1, 0.5, 0);
21 | carrier.amp(env);
22 |
23 | modulator = new p5.Oscillator();
24 | modulator.disconnect();
25 | modFreq = new p5.SignalMult(8);
26 | modFreq.setInput(carrierFreq);
27 | modulator.freq(modFreq);
28 | modulator.start();
29 |
30 | var m1 = new p5.SignalMult();
31 | m1.setInput(modulator);
32 | m1.setValue(100);
33 | }
34 |
35 | function draw() {
36 | carrierFreq.fade(mouseX);
37 | }
38 |
39 | function mousePressed() {
40 | env.play();
41 | }
--------------------------------------------------------------------------------
/examples/outOfPhase/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/outOfPhase/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Tell two sine wave oscillators to start at the same time,
3 | * 50% out of phase. Phase Cancellation results!
4 | * Change the phase with the slider.
5 | */
6 |
7 | // create a variable for the sound file
8 | var osc1, osc2, fft;
9 | var phaseSlider;
10 |
11 | function setup() {
12 | createCanvas(800,400);
13 | noFill();
14 |
15 | osc1 = new p5.SinOsc();
16 | osc2 = new p5.SinOsc();
17 | fft = new p5.FFT();
18 | osc1.phase(.5);
19 | osc2.phase(0);
20 | osc1.amp(1);
21 | osc2.amp(1);
22 | osc1.start(); osc2.start();
23 |
24 | phaseSlider = createSlider(0, 100, 50);
25 | }
26 |
27 | function draw() {
28 | background(30);
29 |
30 | // analyze the waveform of all sound in the sketch
31 | waveform = fft.waveform();
32 |
33 | // draw the shape of the waveform
34 | stroke(255);
35 | strokeWeight(10);
36 | beginShape();
37 | for (var i = 0; i
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/pan_soundfile/sketch.js:
--------------------------------------------------------------------------------
1 | // ====================
2 | // DEMO: play a sound at a random speed/pitch when the ball hits the edge.
3 | // Pan the sound file left when ball hits left edge and vice versa.
4 | // ====================
5 |
6 |
7 |
8 | var ball;
9 | var soundFile;
10 |
11 | function preload() {
12 | soundFormats('mp3', 'ogg');
13 | soundFile = loadSound('../files/drum');
14 | }
15 |
16 | function setup() {
17 | createCanvas(400, 400);
18 |
19 |   soundFile.setVolume(0.6);
20 |
21 | // create the ball
22 | ball = {
23 | x: width/2,
24 | y: height/2,
25 | speed: 7
26 | }
27 | }
28 |
29 | function draw() {
30 | background(0);
31 |
32 | ball.x += ball.speed;
33 |
34 |
35 | // when the ball hits the wall...
36 | if (ball.x > width || ball.x < 0) {
37 |
38 | // map the ball's x location to a panning degree (float between -1.0 and 1.0)
39 | var panning = map(ball.x, 0, width, -1, 1);
40 | soundFile.pan(panning);
41 |
42 | // set a random playback speed for the sound
43 | var newSpeed = random(1);
44 | ball.speed = -ball.speed;
45 | soundFile.rate(newSpeed);
46 |
47 | // play the sound
48 | soundFile.play();
49 | }
50 | ellipse(ball.x, ball.y, 100, 100);
51 | }
--------------------------------------------------------------------------------
/examples/pause_soundfile/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/pause_soundfile/sketch.js:
--------------------------------------------------------------------------------
1 | // ====================
2 | // DEMO: pause sound when the user presses a key, resume on release
3 | // ====================
4 | 'use strict';
5 |
6 | var soundFile;
7 | var audioContextStarted = false;
8 |
9 | function preload() {
10 | // create a SoundFile
11 | soundFormats('ogg', 'mp3');
12 | soundFile = loadSound('../files/Damscray_-_Dancing_Tiger_02');
13 | }
14 |
15 | function setup() {
16 | createCanvas(400, 400);
17 | background(0, 255, 0);
18 |
19 | userStartAudio().then(function() {
20 | soundFile.loop();
21 | audioContextStarted = true;
22 | });
23 |
24 | createP('Press any key to pause. Resume when the key is released')
25 | }
26 |
27 | function keyTyped() {
28 | if (!audioContextStarted) {
29 | return;
30 | }
31 | soundFile.pause();
32 | background(255, 0, 0);
33 | }
34 |
35 | function keyReleased() {
36 | if (!audioContextStarted) {
37 | return;
38 | }
39 | soundFile.play();
40 | background(0, 255, 0);
41 | }
42 |
--------------------------------------------------------------------------------
/examples/peakDetect/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/peakDetect_basic/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 | Click canvas to play the beat!
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/examples/peakDetect_basic/sketch.js:
--------------------------------------------------------------------------------
1 | var cnv, soundFile, fft, peakDetect;
2 | var ellipseWidth = 10;
3 |
4 | function setup() {
5 | cnv = createCanvas(100,100);
6 |
7 | soundFile = loadSound('../files/beat.mp3');
8 | fft = new p5.FFT();
9 | peakDetect = new p5.PeakDetect();
10 |
11 | setupSound();
12 | }
13 |
14 | function draw() {
15 | background(0);
16 |
17 | fft.analyze();
18 | peakDetect.update(fft);
19 |
20 | if ( peakDetect.isDetected ) {
21 | ellipseWidth = 50;
22 | } else {
23 | ellipseWidth *= 0.95;
24 | }
25 |
26 | ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
27 | }
28 |
29 |
30 | function setupSound() {
31 | cnv.mouseClicked( function() {
32 | if (soundFile.isPlaying() ) {
33 | soundFile.stop();
34 | } else {
35 | soundFile.play();
36 | }
37 | });
38 | }
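
// p5.PeakDetect can also watch a single frequency band and fire a callback.
// The helper below is illustrative only: the constructor arguments are freq1,
// freq2, threshold, and frames-per-peak, and onPeak() registers the callback.
// Remember to call bassDetect.update(fft) each frame in draw().
function makeBassPeakDetect() {
  var bassDetect = new p5.PeakDetect(20, 200, 0.35, 20); // only listen to 20-200 Hz
  bassDetect.onPeak(function () {
    ellipseWidth = 50; // same visual response as the isDetected branch above
  });
  return bassDetect;
}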
--------------------------------------------------------------------------------
/examples/play_soundfile/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/play_soundfile/sketch.js:
--------------------------------------------------------------------------------
1 | // ====================
2 | // DEMO: play a sound when the user presses a key
3 | // ====================
4 |
5 | // create a variable for the sound file
6 | var soundFile;
7 |
8 | function setup() {
9 | createCanvas(400, 400);
10 | background(0);
11 |
12 | // create a SoundFile
13 | soundFile = loadSound( ['../files/beatbox.ogg', '../files/beatbox.mp3'] );
14 |
15 | createP('Press any key to play the sound');
16 | }
17 |
18 | // when a key is pressed...
19 | function keyPressed() {
20 |
21 | // play the sound file
22 | soundFile.play();
23 |
24 | // also make the background yellow
25 | background(255, 255, 0);
26 | }
27 |
28 | function keyReleased() {
29 | // make the background black again when the key is released
30 | background(0);
31 | }
32 |
--------------------------------------------------------------------------------
/examples/playbackRate/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/playbackRate/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * DEMO
3 | * - change playback rate of a soundfile based on mouseX position
4 | * - a negative playback rate will reverse the soundfile, but won't
5 | * preserve current location of the playhead.
6 | */
7 |
8 | // ====================
9 |
10 | var soundFile;
11 | var p;
12 |
13 | function preload() {
14 | soundFormats('mp3', 'ogg');
15 | soundFile = loadSound('../files/Damscray_-_Dancing_Tiger_02');
16 | }
17 |
18 | function setup() {
19 | soundFile.loop(0);
20 | p = createP();
21 | }
22 |
23 | function draw() {
24 | // map playback rate of a sound file to mouseX position
25 | var newRate = (map(mouseX, 0, 1200, -0.5, 1.5));
26 | // newRate = constrain(newRate, 0.1, 1.5);
27 | soundFile.rate(newRate);
28 | p.html( 'Playback Rate: ' + newRate.toFixed(3) )
29 | }
30 |
31 | function keyPressed() {
32 | var key = keyCode;
33 | // Spacebar: pause
34 | if (key == 32) {
35 | soundFile.pause();
36 | }
37 | }
--------------------------------------------------------------------------------
/examples/playbackRatePart/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/playbackRatePart/sketch.js:
--------------------------------------------------------------------------------
1 | var mySound, myPhrase, myPart;
2 | var pattern = [1,0,0,2,0,2,0,0];
3 | var msg = 'click to play';
4 |
5 | function preload() {
6 | mySound = loadSound('../files/beatbox.mp3');
7 | }
8 |
9 | function setup() {
10 | noStroke();
11 | fill(255);
12 | textAlign(CENTER);
13 | outputVolume(0.1);
14 |
15 | myPhrase = new p5.Phrase('bbox', makeSound, pattern);
16 | myPart = new p5.Part();
17 | myPart.addPhrase(myPhrase);
18 | myPart.setBPM(60);
19 | }
20 |
21 | function draw() {
22 | background(0);
23 | text(msg, width/2, height/2);
24 | }
25 |
26 | function makeSound(time, playbackRate) {
27 | // mySound.rate(playbackRate);
28 | // mySound.play(time);
29 | mySound.play(time, playbackRate);
30 |
31 | }
32 |
33 | function mouseClicked() {
34 | if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
35 | myPart.start();
36 | msg = 'playing pattern';
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/examples/polyphonicSynth-Keyboard/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/examples/pulseWaveform/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/pulseWaveform/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * PWM
3 | */
4 | var freqSlider, freqLabel, ampLabel, ampSlider, widthLabel, widthSlider, button;
5 |
6 | var pulse;
7 | var freq = 1; // current frequency (updated by slider)
8 | var amp = 1.0;
9 | var w = .25;
10 | var fft;
11 |
12 |
13 | var oscOn = false;
14 |
15 | function setup() {
16 | createCanvas(800,400);
17 | noFill();
18 |
19 | widthLabel = createP('Width: ' + w);
20 | widthSlider = createSlider(0.0, 100.0, w*100);
21 |
22 | button = createButton('start');
23 | button.mousePressed(toggleOsc);
24 |
25 | freqLabel = createP('Frequency: ');
26 | freqSlider = createSlider(1, 700, freq);
27 |
28 | ampLabel = createP('Amplitude: ' + amp);
29 | ampSlider = createSlider(0.0, 100.0, amp*100);
30 |
31 |
32 | pulse = new p5.Pulse(freq);
33 | pulse.amp(amp);
34 |
35 | // create an fft to analyze the audio
36 | fft = new p5.FFT();
37 |
38 | // begin sound
39 | toggleOsc();
40 | }
41 |
42 | function draw() {
43 | background(30);
44 |
45 | amp = ampSlider.value()/100;
46 | pulse.amp(amp);
47 | ampLabel.html('Amplitude: ' + amp + '/ 1.0');
48 |
49 | freq = freqSlider.value();
50 | pulse.freq(freq);
51 | freqLabel.html('Frequency: ' + freq + ' Hz');
52 |
53 |
54 | w = widthSlider.value()/100;
55 | pulse.width(w);
56 | widthLabel.html('Width: ' + w + '/ 1.0');
57 |
58 | // process the waveform
59 | waveform = fft.waveform();
60 |
61 | // draw the shape of the waveform
62 | stroke(255);
63 | strokeWeight(10);
64 | beginShape();
65 | for (var i = 0; i
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/record/sketch.js:
--------------------------------------------------------------------------------
1 | // Example: Record a sound and then play it back.
2 | // We need p5.AudioIn (mic / sound source), p5.SoundRecorder
3 | // (records the sound), and a p5.SoundFile (play back).
4 |
5 | let mic, recorder, soundFile;
6 |
7 | let isRecordingStarted = false;
8 | let isResultPlayed = false;
9 |
10 | function setup() {
11 | createCanvas(400, 400);
12 | background(200);
13 | fill(0);
14 | text('Enable mic and click the mouse to begin recording', 20, 20);
15 |
16 | // create an audio in
17 | mic = new p5.AudioIn();
18 |
19 | // create a sound recorder
20 | recorder = new p5.SoundRecorder();
21 |
22 | // connect the mic to the recorder
23 | recorder.setInput(mic);
24 |
25 | // create an empty sound file that we will use to playback the recording
26 | soundFile = new p5.SoundFile();
27 | }
28 |
29 | function mousePressed() {
30 | userStartAudio();
31 |   // step through the states: start recording, then stop, then play back
32 | if (!isRecordingStarted && !isResultPlayed) {
33 | // users must manually enable their browser microphone for recording to work properly!
34 | mic.start(function () {
35 | // Tell recorder to record to a p5.SoundFile which we will use for playback
36 | recorder.record(soundFile);
37 |
38 | background(255, 0, 0);
39 | text('Recording now! Click to stop.', 20, 20);
40 | isRecordingStarted = true;
41 | });
42 | } else if (isRecordingStarted && !isResultPlayed) {
43 | recorder.stop(); // stop recorder, and send the result to soundFile
44 | mic.dispose();
45 | background(0, 255, 0);
46 | text('Recording stopped. Click to play', 20, 20);
47 | isResultPlayed = true;
48 | } else if (isRecordingStarted && isResultPlayed) {
49 | soundFile.play(); // play the result!
50 | }
51 | }
52 |
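// Once the recording has been stopped, the p5.SoundFile can also be saved.
// Illustrative helper (not called above), using p5.sound's saveSound();
// the file name here is just an example.
function saveRecording() {
  if (isResultPlayed) {
    saveSound(soundFile, 'myRecording.wav'); // prompts a .wav download
  }
}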
--------------------------------------------------------------------------------
/examples/recordLoops/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/recordLoops/sketch.js:
--------------------------------------------------------------------------------
1 | // Example: Record a sound and then play it back.
2 | // We need p5.AudioIn (mic / sound source), p5.SoundRecorder
3 | // (records the sound), and a p5.SoundFile (play back).
4 |
5 | var mic, recorder, myLoop;
6 |
7 | var state = 0; // mousePress will increment from Record, to Stop, to Play
8 |
9 | var kick;
10 |
11 | function preload() {
12 | // load the kick
13 | kick = loadSound('../files/beatbox.mp3');
14 | }
15 |
16 | function setup() {
17 | createCanvas(400,400);
18 | background(200);
19 | fill(0);
20 |
21 | kick.loop();
22 |
23 | // create an audio in
24 | mic = new p5.AudioIn();
25 |
26 | // users must manually enable their browser microphone for recording to work properly!
27 | mic.start();
28 |
29 | // create a sound recorder
30 | recorder = new p5.SoundRecorder();
31 |
32 | // connect the mic to the recorder
33 | recorder.setInput(mic);
34 |
35 | // create an empty sound file that we will use to playback the recording
36 | soundFile = new p5.SoundFile();
37 | }
38 |
39 | function mousePressed() {
40 | // use the '.enabled' boolean to make sure user enabled the mic (otherwise we'd record silence)
41 | if (mic.enabled && !recorder.recording) {
42 |
43 | // Tell recorder to record to a p5.SoundFile which we will use for playback
44 | var fourBeats = (kick.duration() * 4);
45 | myLoop = new p5.SoundFile();
46 | recorder.record(myLoop, fourBeats, playRecording);
47 | background(255,0,0);
48 | }
49 | }
50 |
51 | function playRecording() {
52 | myLoop.loop();
53 | background(0,255,0);
54 | }
55 |
--------------------------------------------------------------------------------
/examples/removeSketch/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/removeSketch/sketch.js:
--------------------------------------------------------------------------------
1 | function init() {
2 | var s = function(c) {
3 |
4 | c.backgroundSound;
5 | c.sounds = {};
6 | c.soundFiles = [
7 | 'https://s3-us-west-2.amazonaws.com/s.cdpn.io/51676/chords1.mp3',
8 | 'https://s3-us-west-2.amazonaws.com/s.cdpn.io/51676/chords2.mp3',
9 | 'https://s3-us-west-2.amazonaws.com/s.cdpn.io/51676/chords3.mp3',
10 | ];
11 | c.count = 0;
12 |
13 | c.preload = function() {
14 | c.backgroundSound = c.loadSound("https://s3-us-west-2.amazonaws.com/s.cdpn.io/51676/FM8_synth_chords.mp3");
15 |
16 | for (var i = 0; i < c.soundFiles.length; i++) {
17 | c.sounds[i] = c.loadSound(c.soundFiles[i]);
18 | }
19 | }
20 |
21 | c.setup = function() {
22 | c.cnv = c.createCanvas(c.windowWidth, c.windowHeight);
23 | c.text('click 5 times to remove the sketch', 20, 20);
24 | c.backgroundSound.amp(1);
25 | c.backgroundSound.loop(0, 1, 1, 0, (c.backgroundSound.duration() - 0.1));
26 | }
27 |
28 | c.draw = function() {
29 |
30 | }
31 |
32 | c.mousePressed = function() {
33 | if (c.count == 5) {
34 | c.remove();
35 | } else {
36 | c.sounds[c.count % 3].play();
37 | }
38 |
39 | c.count++;
40 | }
41 | };
42 |
43 | new p5(s);
44 | }
45 |
46 | init();
--------------------------------------------------------------------------------
/examples/soundFormats/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/soundFormats/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * There is no single audio format that is supported by all
3 | * popular web browsers. Most web browsers support MP3, but
4 | * some (Firefox, Opera) do not because it is a patented codec.
5 | *
6 |  * You can ensure file format compatibility by including multiple
7 | * file extensions. Both MP3 and OGG are recommended.
8 | */
9 |
10 | var soundFile;
11 |
12 | function preload() {
13 | // set the extensions we have included
14 | soundFormats('mp3', 'ogg');
15 |
16 | // load either beatbox.mp3 or beatbox.ogg, depending on the browser
17 | soundFile = loadSound('../files/beatbox.mp3');
18 | }
19 |
20 | function setup() {
21 | createCanvas(400, 400);
22 | background(0);
23 | }
24 |
25 | function keyPressed() {
26 | soundFile.play();
27 | background(255, 255, 0);
28 | }
29 |
30 | function keyReleased() {
31 | background(0);
32 | }
33 |
--------------------------------------------------------------------------------
/examples/soundfileMod_AM/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/soundfile_playMode/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/soundfile_playMode/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 |  * Cycle the play mode between 'sustain', 'restart', and 'untilDone'.
3 | * Sustain is the default playmode for SoundFiles
4 | * Music from Damscray, "Dancing Tiger", Creative Commons BY-NC-SA
5 | */
6 |
7 | var playMode = 'sustain';
8 | var sample1, sample2, button;
9 |
10 | function setup() {
11 | createCanvas(0,0);
12 | sample1 = loadSound( ['../files/Damscray_-_Dancing_Tiger_01.ogg', '../files/Damscray_-_Dancing_Tiger_01.mp3'] );
13 | sample2 = loadSound( ['../files/Damscray_-_Dancing_Tiger_02.ogg', '../files/Damscray_-_Dancing_Tiger_02.mp3'] );
14 |
15 |   createP('Press "a" and "s" on your keyboard to play two different samples.<br>Trigger lots of sounds at once! Change mode to hear the difference');
16 |
17 | button = createButton('Current Play Mode: ');
18 | button.mousePressed(togglePlayMode);
19 | }
20 |
21 | function draw() {
22 | button.html('Current Play Mode: ' + playMode);
23 | }
24 |
25 | // cycle through 'sustain', 'restart', and 'untilDone', and set the playMode of both samples
26 | function togglePlayMode(){
27 | if (playMode == 'sustain'){
28 | playMode = 'restart';
29 | }
30 | else if (playMode == 'restart'){
31 | playMode = 'untilDone';
32 | }
33 | else {
34 | playMode = 'sustain';
35 | }
36 | sample1.playMode(playMode);
37 | sample2.playMode(playMode);
38 | }
39 |
40 | function keyPressed(k) {
41 | if (k.keyCode == 65) {
42 | sample1.play(0, 1, .6);
43 |
44 | // Get even more monophonic by only letting one sample play at a time
45 | if ( playMode =='restart' && sample2.isPlaying() ){
46 | sample2.stopAll();
47 | }
48 | }
49 | if (k.keyCode == 83) {
50 | if ( playMode =='restart' && sample1.isPlaying() ){
51 | sample1.stopAll();
52 | }
53 | sample2.play(0, 1, .6);
54 | }
55 | }
--------------------------------------------------------------------------------
/examples/soundfile_remove_cue/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/soundfile_remove_cue/sketch.js:
--------------------------------------------------------------------------------
1 | function setup() {
2 | background(0);
3 | noStroke();
4 | fill(255);
5 | textAlign(CENTER);
6 | text('click to play', width/2, height/2);
7 |
8 | mySound = loadSound('../files/beat.mp3');
9 |
10 | // schedule calls to changeText
11 | var firstCueId = mySound.addCue(0.50, changeText, "hello" );
12 | mySound.addCue(1.00, changeText, "p5" );
13 | var thirdCueId = mySound.addCue(1.50, changeText, "what" );
14 | mySound.addCue(2.00, changeText, "do" );
15 | mySound.addCue(2.50, changeText, "you" );
16 | mySound.addCue(3.00, changeText, "want" );
17 | mySound.addCue(4.00, changeText, "to" );
18 | mySound.addCue(5.00, changeText, "make" );
19 | mySound.addCue(6.00, changeText, "?" );
20 |
21 | //remove the first cue
22 | mySound.removeCue(firstCueId);
23 |
24 | //remove the third cue
25 | mySound.removeCue(thirdCueId);
26 | }
27 |
28 | function changeText(val) {
29 | background(0);
30 | text(val, width/2, height/2);
31 | }
32 |
33 | function mouseClicked() {
34 | if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
35 | if (mySound.isPlaying() ) {
36 | mySound.stop();
37 | } else {
38 | mySound.play();
39 | }
40 | }
41 | }
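
// To drop every remaining cue at once, rather than one id at a time,
// p5.SoundFile also provides clearCues(). Illustrative, unused helper:
function removeAllCues() {
  mySound.clearCues(); // changeText will no longer be called
}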
--------------------------------------------------------------------------------
/examples/spatial_panning/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/spatial_panning/sketch.js:
--------------------------------------------------------------------------------
1 | // ====================
2 | // DEMO: p5.Panner3D moves the sound in 3D space from max negative coordinates to max positive
3 | // ====================
4 |
5 |
6 | var soundFile;
7 | var panner3d;
8 | var description, position;
9 |
10 | function preload() {
11 | soundFormats('mp3', 'ogg');
12 | soundFile = loadSound('../files/lucky_dragons_-_power_melody');
13 | }
14 |
15 | var i;
16 | var factorI;
17 | function setup() {
18 | createCanvas(500, 500, WEBGL);
19 |
20 |
21 | description = createDiv('Panner3D: The cone symbolizes the soundFile '+
22 |                           'which is panning the sound in relation to the center of the '+
23 | 'canvas');
24 | p2 = createDiv(position);
25 |
26 | description.position(550,0).size(400,50);
27 | p2.position(550,50);
28 |
29 | panner1 = new p5.Panner3D();
30 |
31 |
32 | i = 0;
33 | factorI = 1;
34 | soundFile.disconnect();
35 | soundFile.loop();
36 | soundFile.connect(panner1);
37 | }
38 |
39 | function draw() {
40 | background(0);
41 |
42 | if (i > 500 || i < -500) {factorI = -1*factorI;}
43 |
44 | updateDescription();
45 |
46 | push();
47 | translate(i+=factorI*1,i + factorI*1,i + factorI*1);
48 | rotateX(frameCount* 0.01);
49 | rotateY(frameCount* 0.01);
50 | rotateZ(frameCount* 0.01);
51 | cone(100);
52 | pop();
53 |
54 | //pan the sound along with the cone
55 | panner1.set(i*10,i*10,i*10);
56 |
57 |
58 | }
59 |
60 | function updateDescription(){
61 |   position = 'positionX: '+ panner1.positionX() +
62 |              '<br>positionY: '+ panner1.positionY() +
63 |              '<br>positionZ: '+ panner1.positionZ();
64 | p2.html(position);
65 | }
--------------------------------------------------------------------------------
/examples/spatial_panning_listener/flock.js:
--------------------------------------------------------------------------------
1 | // The Nature of Code
2 | // Daniel Shiffman
3 | // http://natureofcode.com
4 | // adapted for 3D rendering.
5 | // Flock object
6 | // Does very little, simply manages the array of all the boids
7 | function Flock() {
8 | // An array for all the boids
9 | this.boids = []; // Initialize the array
10 | }
11 |
12 | Flock.prototype.run = function() {
13 | for (var i = 0; i < this.boids.length; i++) {
14 | this.boids[i].run(this.boids); // Passing the entire list of boids to each boid individually
15 | }
16 | }
17 |
18 | Flock.prototype.addBoid = function(b) {
19 | this.boids.push(b);
20 | }
21 |
--------------------------------------------------------------------------------
/examples/spatial_panning_listener/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/examples/virtual_piano/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/virtual_piano/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Virtual Piano: Example for event-driven sound playback.
3 | *
4 |  * This example uses the p5.PolySynth to produce notes instantaneously,
5 | * driven by user events.
6 | */
7 |
8 | var synth;
9 | var velocity = 0.7; // From 0-1
10 | var baseNote = 72;
11 | var keyOrder = "ASDFGHJKL";
12 | var keyStates = [0, 0, 0, 0, 0, 0, 0, 0, 0];
13 |
14 | function setup() {
15 | createCanvas(720, 400);
16 | textAlign(CENTER, CENTER);
17 | strokeWeight(3);
18 | // Create synth voice
19 | synth = new p5.PolySynth();
20 | }
21 |
22 | function draw() {
23 | var keyWidth = width / keyStates.length;
24 | // Draw keys
25 | for (var i = 0; i < keyStates.length; i++) {
26 | var keyColor;
27 | if (keyStates[i] === 1) {
28 | keyColor = color(255, 220, 120);
29 | } else {
30 | keyColor = color(150, 230, 245);
31 | }
32 | fill(keyColor);
33 | stroke(255);
34 | rect(i * keyWidth, 0, keyWidth, height);
35 | // Key label
36 | fill(40);
37 | noStroke();
38 | text(keyOrder[i], i * keyWidth + keyWidth / 2, height / 2);
39 | }
40 | }
41 |
42 | function keyPressed() {
43 | var keyIndex = keyOrder.indexOf(key);
44 | // Check if valid note key pressed
45 | if (keyIndex >= 0) {
46 | // Update key state
47 | keyStates[keyIndex] = 1;
48 | // Play synth
49 | var midiNoteNumber = baseNote + keyIndex; // 0-127; 60 is Middle C (C4)
50 | var freq = midiToFreq(midiNoteNumber);
51 | synth.noteAttack(freq, velocity, 0);
52 | }
53 | }
54 |
55 | function keyReleased() {
56 | var keyIndex = keyOrder.indexOf(key);
57 | // Check if valid note key pressed
58 | if (keyIndex >= 0) {
59 | // Update key state
60 | keyStates[keyIndex] = 0;
61 | // Stop synth
62 | var midiNoteNumber = baseNote + keyIndex; // 0-127; 60 is Middle C (C4)
63 | var freq = midiToFreq(midiNoteNumber);
64 | synth.noteRelease(freq, 0);
65 | }
66 | }
67 |
68 | function touchStarted() {
69 | var keyWidth = width / keyStates.length;
70 | var keyIndex = floor(mouseX / keyWidth);
71 | // Update key state
72 | keyStates[keyIndex] = 1;
73 | // Play synth
74 | var midiNoteNumber = baseNote + keyIndex; // 0-127; 60 is Middle C (C4)
75 | var freq = midiToFreq(midiNoteNumber);
76 | synth.noteAttack(freq, velocity, 0);
77 | }
78 |
79 | function touchEnded() {
80 | for (var i = 0; i < keyStates.length; i++) {
81 | keyStates[i] = 0;
82 | }
83 | synth.noteRelease();
84 | }
85 |
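// For fixed-length notes, p5.PolySynth also offers play(note, velocity,
// secondsFromNow, sustainTime), which schedules the attack and release
// together. Illustrative, unused helper for a key index on this keyboard:
function playTimedNote(keyIndex) {
  var freq = midiToFreq(baseNote + keyIndex);
  synth.play(freq, velocity, 0, 0.5); // hold the note for half a second
}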
--------------------------------------------------------------------------------
/examples/visualize_pentatonic/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/waveform/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/waveform/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * DEMO: Draw the waveform of a sound as it plays using p5.FFT.waveform()
3 | */
4 |
5 | var soundFile;
6 | var fft;
7 | var fftBands = 1024;
8 |
9 | // Array of amplitude values (0-255) over time.
10 | var waveform = [];
11 |
12 | function preload() {
13 | soundFormats('mp3', 'ogg');
14 | soundFile = loadSound('../files/beat');
15 | }
16 |
17 | function setup() {
18 | createCanvas(fftBands, 256);
19 | noFill();
20 |
21 | soundFile.loop();
22 |
23 | /**
24 | * Create an FFT object.
25 | * Accepts optional parameters for
26 | * - Smoothing
27 | * - Length of the FFT's analyze/waveform array. Must be a power of two between 16 and 1024 (default).
28 | */
29 | fft = new p5.FFT(.99, fftBands);
30 |
31 | p = createP('press any key to pause / play');
32 | }
33 |
34 | function draw() {
35 | background(250);
36 |
37 | /**
38 | * Analyze the sound as a waveform (amplitude over time)
39 | */
40 | waveform = fft.waveform();
41 |
42 | // Draw snapshot of the waveform
43 | beginShape();
44 | for (var i = 0; i < waveform.length; i++) {
45 | stroke(5);
46 | strokeWeight(5);
47 | vertex(i * 2, map(waveform[i], -1, 1, height, 0));
48 | }
49 | endShape();
50 | }
51 |
52 | function keyPressed() {
53 | if (soundFile.isPlaying() ) {
54 | soundFile.pause();
55 | } else {
56 | soundFile.play();
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/examples/waveform_peaks_with_playhead/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/waveform_peaks_with_playhead/sketch.js:
--------------------------------------------------------------------------------
1 | /**
2 | * DEMO
3 | * - find the peaks in an audio file to draw the entire waveform with SoundFile.getPeaks();
4 | * - draw cursor on a timeline with SoundFile.currentTime() and SoundFile.duration();
5 | */
6 |
7 | // ====================
8 |
9 | var soundFile;
10 |
11 | var p, peakCount;
12 |
13 | function preload() {
14 | soundFormats('ogg', 'mp3');
15 | soundFile = loadSound('../files/lucky_dragons_-_power_melody');
16 | }
17 |
18 | function setup() {
19 | createCanvas(800, 400);
20 | noFill();
21 |
22 | soundFile.loop();
23 | background(0);
24 | p = createP('peaks to draw: ' + peakCount);
25 | createP('Press any key to play/pause.');
26 | }
27 |
28 |
29 | function draw() {
30 | background(255);
31 |
32 | peakCount = map(mouseY, height, 0, 5, 2000);
33 | if (peakCount < 8) {
34 | peakCount = 8;
35 | }
36 | var waveform = soundFile.getPeaks(peakCount);
37 | fill(0);
38 | stroke(0);
39 | strokeWeight(2);
40 | beginShape();
41 | for (var i = 0; i < waveform.length; i++) {
42 | vertex(map(i, 0, waveform.length, 0, width), map(waveform[i], -1, 1, height, 0));
43 | }
44 | endShape();
45 |
46 | // update display text:
47 | p.html('MouseY = Visible Amplitude Peaks: ' + peakCount.toFixed(3) );
48 |
49 | drawCursor();
50 | }
51 |
52 |
53 | function drawCursor() {
54 | noStroke();
55 | fill(0,255,0);
56 | rect(map(soundFile.currentTime(), 0, soundFile.duration(), 0, width), 0, 5, height);
57 | }
58 |
59 | // Keyboard Controls
60 | function keyTyped() {
61 | if (soundFile.isPlaying()) {
62 | soundFile.pause();
63 | } else {
64 | soundFile.play();
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/fragments/after.frag:
--------------------------------------------------------------------------------
1 | }));
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "p5.sound",
3 | "repository": {
4 | "type": "git",
5 | "url": "https://github.com/processing/p5.js-sound.git"
6 | },
7 | "version": "1.0.2",
8 | "license": "MIT",
9 | "devDependencies": {
10 | "@babel/core": "^7.21.8",
11 | "@babel/preset-env": "^7.21.5",
12 | "almond": "^0.3.3",
13 | "amdclean": "^2.7.0",
14 | "babel-loader": "^8.3.0",
15 | "babel-plugin-preval": "^3.0.1",
16 | "chai": "3.4.1",
17 | "eslint-config-prettier": "^6.15.0",
18 | "eslint-plugin-prettier": "^3.4.1",
19 | "grunt": "^1.6.1",
20 | "grunt-cli": "^1.4.3",
21 | "grunt-contrib-connect": "^3.0.0",
22 | "grunt-decomment": "^0.2.4",
23 | "grunt-eslint": "^20.2.0",
24 | "grunt-githooks": "^0.6.0",
25 | "grunt-mocha": "^1.2.0",
26 | "grunt-open": "^0.2.4",
27 | "grunt-webpack": "^5.0.0",
28 | "mocha": "^10.2.0",
29 | "prettier": "^2.8.8",
30 | "raw-loader": "^4.0.2",
31 | "tslint-config-prettier": "^1.18.0",
32 | "uglify-loader": "^3.0.0",
33 | "uglifyjs-webpack-plugin": "^2.2.0",
34 | "webpack": "^4.46.0",
35 | "webpack-auto-inject-version": "^1.2.2"
36 | },
37 | "dependencies": {
38 | "audioworklet-polyfill": "^1.1.2",
39 | "p5": "^1.6.0",
40 | "sinon": "^9.2.4",
41 | "startaudiocontext": "^1.2.1",
42 | "tone": "0.10.0"
43 | },
44 | "scripts": {
45 | "build": "grunt",
46 | "test": "grunt run-tests",
47 | "lint": "grunt lint",
48 | "dev": "grunt dev",
49 | "postinstall": "cp ./node_modules/p5/lib/p5.js ./node_modules/p5/lib/p5.min.js ./lib && grunt githooks"
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/src/audioVoice.js:
--------------------------------------------------------------------------------
1 | import p5sound from './main';
2 |
3 | /**
4 | * Base class for monophonic synthesizers. Any extensions of this class
5 | * should follow the API and implement the methods below in order to
6 | * remain compatible with p5.PolySynth();
7 | *
8 | * @class p5.AudioVoice
9 | * @constructor
10 | */
11 | class AudioVoice {
12 | constructor() {
13 | this.ac = p5sound.audiocontext;
14 | this.output = this.ac.createGain();
15 | this.connect();
16 | p5sound.soundArray.push(this);
17 | }
18 | play(note, velocity, secondsFromNow, sustime) {}
19 |
20 | triggerAttack(note, velocity, secondsFromNow) {}
21 |
22 | triggerRelease(secondsFromNow) {}
23 |
24 | amp(vol, rampTime) {}
25 |
26 | setADSR(attack, decay, sustain, release) {}
27 |
28 | /**
29 | * Connect to p5 objects or Web Audio Nodes
30 | * @method connect
31 | * @for p5.AudioVoice
32 | * @param {Object} unit
33 | */
34 | connect(unit) {
35 | var u = unit || p5sound.input;
36 | this.output.connect(u.input ? u.input : u);
37 | if (unit && unit._onNewInput) {
38 | unit._onNewInput(this);
39 | }
40 | }
41 |
42 | /**
43 | * Disconnect from soundOut
44 | * @method disconnect
45 | * @for p5.AudioVoice
46 | */
47 | disconnect() {
48 | this.output.disconnect();
49 | }
50 |
51 | /**
52 | * Dispose the output if it exists
53 | * @method dispose
54 | * @for p5.AudioVoice
55 | */
56 | dispose() {
57 | if (this.output) {
58 | this.output.disconnect();
59 | delete this.output;
60 | }
61 | }
62 | }
63 |
64 | export default AudioVoice;
65 |
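// A minimal sketch of what a subclass might look like; SimpleSineVoice and its
// internals are illustrative assumptions, not library code. It fills in the
// methods listed above, treating `note` as a frequency in Hz:
//
// class SimpleSineVoice extends AudioVoice {
//   constructor() {
//     super(); // sets up this.ac and this.output and connects to soundOut
//     this.osc = this.ac.createOscillator();
//     this.env = this.ac.createGain();
//     this.env.gain.value = 0;
//     this.osc.connect(this.env);
//     this.env.connect(this.output);
//     this.osc.start();
//   }
//   triggerAttack(note, velocity, secondsFromNow) {
//     var t = this.ac.currentTime + (secondsFromNow || 0);
//     this.osc.frequency.setValueAtTime(note, t);
//     this.env.gain.linearRampToValueAtTime(velocity, t + 0.01);
//   }
//   triggerRelease(secondsFromNow) {
//     var t = this.ac.currentTime + (secondsFromNow || 0);
//     this.env.gain.linearRampToValueAtTime(0, t + 0.1);
//   }
//   play(note, velocity, secondsFromNow, sustime) {
//     this.triggerAttack(note, velocity, secondsFromNow);
//     this.triggerRelease((secondsFromNow || 0) + (sustime || 0.2));
//   }
// }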
--------------------------------------------------------------------------------
/src/audioWorklet/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "globals": {
3 | "currentFrame": true,
4 | "currentTime": true,
5 | "sampleRate": true,
6 | "preval": true
7 | },
8 | "parserOptions": {
9 | "sourceType": "module"
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/src/audioWorklet/index.js:
--------------------------------------------------------------------------------
1 | import p5sound from '../main.js';
2 | const moduleSources = [
3 | require('raw-loader!./recorderProcessor').default,
4 | require('raw-loader!./soundFileProcessor').default,
5 | require('raw-loader!./amplitudeProcessor').default,
6 | ];
7 | const ac = p5sound.audiocontext;
8 | let initializedAudioWorklets = false;
9 |
10 | function loadAudioWorkletModules() {
11 | return Promise.all(
12 | moduleSources.map(function (moduleSrc) {
13 | const blob = new Blob([moduleSrc], { type: 'application/javascript' });
14 | const objectURL = URL.createObjectURL(blob);
15 | return (
16 | ac.audioWorklet
17 | .addModule(objectURL)
18 | // in "p5 instance mode," the module may already be registered
19 | .catch(() => Promise.resolve())
20 | );
21 | })
22 | );
23 | }
24 |
25 | p5.prototype.registerMethod('init', function () {
26 | if (initializedAudioWorklets) return;
27 | // ensure that a preload function exists so that p5 will wait for preloads to finish
28 | if (!this.preload && !window.preload) {
29 | this.preload = function () {};
30 | }
31 |
32 | // use p5's preload system to load necessary AudioWorklet modules before setup()
33 | this._incrementPreload();
34 | const onWorkletModulesLoad = function () {
35 | initializedAudioWorklets = true;
36 | this._decrementPreload();
37 | }.bind(this);
38 | loadAudioWorkletModules().then(onWorkletModulesLoad);
39 | });
40 |
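// The Blob-URL pattern above lets each processor's source (inlined as a string
// by raw-loader) ship inside the single bundled p5.sound.js file rather than
// being fetched as separate worklet files. The same trick works for any inline
// processor source; a minimal sketch (illustrative only, not library code):
//
// const src =
//   'registerProcessor("noop-processor", class extends AudioWorkletProcessor {' +
//   '  process() { return true; }' +
//   '});';
// const url = URL.createObjectURL(new Blob([src], { type: 'application/javascript' }));
// ac.audioWorklet
//   .addModule(url)
//   .then(() => new AudioWorkletNode(ac, 'noop-processor'));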
--------------------------------------------------------------------------------
/src/audioWorklet/processorNames.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | recorderProcessor: 'recorder-processor',
3 | soundFileProcessor: 'sound-file-processor',
4 | amplitudeProcessor: 'amplitude-processor',
5 | };
6 |
--------------------------------------------------------------------------------
/src/audioWorklet/soundFileProcessor.js:
--------------------------------------------------------------------------------
1 | // import dependencies via preval.require so that they're available as values at compile time
2 | const processorNames = preval.require('./processorNames');
3 | const RingBuffer = preval.require('./ringBuffer').default;
4 |
5 | class SoundFileProcessor extends AudioWorkletProcessor {
6 | constructor(options) {
7 | super();
8 |
9 | const processorOptions = options.processorOptions || {};
10 | this.bufferSize = processorOptions.bufferSize || 256;
11 | this.inputRingBuffer = new RingBuffer(this.bufferSize, 1);
12 | this.inputRingBufferArraySequence = [new Float32Array(this.bufferSize)];
13 | }
14 |
15 | process(inputs) {
16 | const input = inputs[0];
17 | // we only care about the first input channel, because that contains the position data
18 | this.inputRingBuffer.push([input[0]]);
19 |
20 | if (this.inputRingBuffer.framesAvailable >= this.bufferSize) {
21 | this.inputRingBuffer.pull(this.inputRingBufferArraySequence);
22 | const inputChannel = this.inputRingBufferArraySequence[0];
23 | const position = inputChannel[inputChannel.length - 1] || 0;
24 |
25 | this.port.postMessage({ name: 'position', position: position });
26 | }
27 |
28 | return true;
29 | }
30 | }
31 |
32 | registerProcessor(processorNames.soundFileProcessor, SoundFileProcessor);
33 |
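// A sketch of how the main thread might consume these 'position' messages;
// the node construction below is an assumption for illustration (inside
// p5.sound this wiring is handled by p5.SoundFile):
//
// const node = new AudioWorkletNode(audioContext, 'sound-file-processor', {
//   processorOptions: { bufferSize: 256 },
// });
// node.port.onmessage = (event) => {
//   if (event.data.name === 'position') {
//     console.log('latest position value:', event.data.position);
//   }
// };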
--------------------------------------------------------------------------------
/src/deprecations/Env.js:
--------------------------------------------------------------------------------
1 | import Envelope from '../envelope';
2 |
3 | class Env extends Envelope {
4 | constructor(t1, l1, t2, l2, t3, l3) {
5 | console.warn(
6 | 'WARNING: p5.Env is now deprecated and may be removed in future versions. ' +
7 | 'Please use the new p5.Envelope instead.'
8 | );
9 | super(t1, l1, t2, l2, t3, l3);
10 | }
11 | }
12 |
13 | export default Env;
14 |
--------------------------------------------------------------------------------
/src/deprecations/Signal.js:
--------------------------------------------------------------------------------
1 | class Signal {
2 | constructor() {
3 | console.warn('p5.Signal is deprecated. Use Tone.js Signal instead.');
4 | }
5 | }
6 |
7 | export default Signal;
8 |
--------------------------------------------------------------------------------
/src/eqFilter.js:
--------------------------------------------------------------------------------
1 | import Filter from './filter';
2 | import p5sound from './main';
3 |
4 | /**
5 | * EQFilter extends p5.Filter with constraints
6 | * necessary for the p5.EQ
7 | *
8 | * @private
9 | */
10 | class EQFilter extends Filter {
11 | constructor(freq, res) {
12 | super('peaking');
13 |
14 | this.disconnect();
15 | this.set(freq, res);
16 | this.biquad.gain.value = 0;
17 | delete this.input;
18 | delete this.output;
19 | delete this._drywet;
20 | delete this.wet;
21 | }
22 |
23 | amp() {
24 | console.warn('`amp()` is not available for p5.EQ bands. Use `.gain()`');
25 | }
26 |
27 | drywet() {
28 | console.warn('`drywet()` is not available for p5.EQ bands.');
29 | }
30 |
31 | connect(unit) {
32 | var u = unit || p5.soundOut.input;
33 | if (this.biquad) {
34 | this.biquad.connect(u.input ? u.input : u);
35 | } else {
36 | this.output.connect(u.input ? u.input : u);
37 | }
38 | }
39 | disconnect() {
40 | if (this.biquad) {
41 | this.biquad.disconnect();
42 | }
43 | }
44 |
45 | dispose() {
46 | // remove reference from soundArray
47 | const index = p5sound.soundArray.indexOf(this);
48 | p5sound.soundArray.splice(index, 1);
49 | this.disconnect();
50 | delete this.biquad;
51 | }
52 | }
53 |
54 | export default EQFilter;
55 |
--------------------------------------------------------------------------------
/src/errorHandler.js:
--------------------------------------------------------------------------------
1 | /*
2 | Helper function to generate an error
3 | with a custom stack trace that points to the sketch
4 | and removes other parts of the stack trace.
5 |
6 | @private
7 | @class customError
8 | @constructor
9 | @param {String} name custom error name
10 | @param {String} errorTrace custom error trace
11 | @param {String} failedPath path to the file that failed to load
12 | @property {String} name custom error name
13 | @property {String} message custom error message
14 | @property {String} stack trace the error back to a line in the user's sketch.
15 | Note: this edits out stack trace within p5.js and p5.sound.
16 | @property {String} originalStack unedited, original stack trace
17 | @property {String} failedPath path to the file that failed to load
18 | @return {Error} returns a custom Error object
19 | */
20 | var CustomError = function (name, errorTrace, failedPath) {
21 | var err = new Error();
22 | var tempStack, splitStack;
23 |
24 | err.name = name;
25 | err.originalStack = err.stack + errorTrace;
26 | tempStack = err.stack + errorTrace;
27 | err.failedPath = failedPath;
28 |
29 | // only print the part of the stack trace that refers to the user code:
30 | splitStack = tempStack.split('\n').filter(function (ln) {
31 | return !ln.match(/(p5.|native code|globalInit)/g);
32 | });
33 | err.stack = splitStack.join('\n');
34 |
35 | return err; // TODO: is this really a constructor?
36 | };
37 | export default CustomError;
38 |
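// A brief usage sketch; the surrounding names (errorTrace, url, onError) are
// assumptions for illustration, not copies of the actual callers in p5.sound:
//
// var err = new CustomError('decodeAudioData', errorTrace, url);
// // err.stack now points at the user's sketch, while err.originalStack keeps
// // the full, unfiltered trace and err.failedPath records the file that failed.
// onError(err);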
--------------------------------------------------------------------------------
/src/onsetDetect.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Listen for onsets (a sharp increase in volume) within a given
3 | * frequency range.
4 | *
5 | * @class p5.OnsetDetect
6 | * @constructor
7 | * @param {Number} freqLow Low frequency
8 | * @param {Number} freqHigh High frequency
9 | * @param {Number} threshold Amplitude threshold between 0 (no energy) and 1 (maximum)
10 | * @param {Function} callback Function to call when an onset is detected
11 | */
12 | class OnsetDetect {
13 | constructor(freqLow, freqHigh, threshold, callback) {
14 | this.isDetected = false;
15 | this.freqLow = freqLow;
16 | this.freqHigh = freqHigh;
17 | this.treshold = threshold; // note: this property is spelled "treshold" throughout the codebase (tests reference it by this name)
18 | this.energy = 0;
19 | this.penergy = 0;
20 |
21 | // speed of decay
22 | this.sensitivity = 500;
23 |
24 | this.callback = callback;
25 | }
26 |
27 | // callback here too?
28 | update(fftObject, callback) {
29 | this.energy = fftObject.getEnergy(this.freqLow, this.freqHigh) / 255;
30 |
31 | if (this.isDetected === false) {
32 | if (this.energy - this.penergy > this.treshold) {
33 | this.isDetected = true;
34 |
35 | if (this.callback) {
36 | this.callback(this.energy);
37 | } else if (callback) {
38 | callback(this.energy);
39 | }
40 |
41 | var self = this;
42 | setTimeout(function () {
43 | self.isDetected = false;
44 | }, this.sensitivity);
45 | }
46 | }
47 |
48 | this.penergy = this.energy;
49 | }
50 | }
51 |
52 | export default OnsetDetect;
53 |
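// A minimal usage sketch; the variable names and threshold value are
// illustrative assumptions, and update() expects a p5.FFT that has just been
// analyzed:
//
// let mic, fft, onset;
// function setup() {
//   mic = new p5.AudioIn();
//   mic.start();
//   fft = new p5.FFT();
//   fft.setInput(mic);
//   onset = new p5.OnsetDetect(40, 120, 0.025, () => console.log('onset!'));
// }
// function draw() {
//   fft.analyze();     // refresh the spectrum first
//   onset.update(fft); // fires the callback on a sharp rise in 40-120 Hz energy
// }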
--------------------------------------------------------------------------------
/templates/pre-commit-hook.js:
--------------------------------------------------------------------------------
1 | // hooks/pre-commit.js
2 |
3 | const exec = require('child_process').exec;
4 | // Executes shell commands synchronously
5 | const sh = require('child_process').execSync;
6 |
7 | exec('git diff --cached --quiet', function (err, stdout, stderr) {
8 |
9 | // only run if there are staged changes
10 | // i.e. what you would be committing if you ran "git commit" without "-a" option.
11 | if (err) {
12 |
13 | // stash unstaged changes - only test what's being committed
14 | sh('git stash --keep-index --quiet');
15 |
16 | exec('grunt {{task}}', function (err, stdout, stderr) { // {{task}} is presumably filled in by grunt-githooks when the hook template is installed
17 |
18 | console.log(stdout);
19 |
20 | // restore stashed changes
21 | sh('git stash pop --quiet');
22 |
23 | let exitCode = 0;
24 | if (err) {
25 | console.log(stderr);
26 | exitCode = -1;
27 | }
28 | process.exit(exitCode);
29 | });
30 | }
31 |
32 | });
--------------------------------------------------------------------------------
/test/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | TESTS
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/test/setup.js:
--------------------------------------------------------------------------------
1 | const startTest = () => {
2 | // Dynamically import the test modules; this ensures the tests run only after the audioWorklet processors have been loaded properly.
3 | import('./tests.js');
4 |
5 | let test_has_run = false;
6 |
7 | document.getElementById('mocha').innerHTML = 'click to begin tests';
8 |
9 | // Chrome's autoplay policy requires a user interaction
10 | // before the AudioContext can activate
11 | const mousePressed = () => {
12 | if (!test_has_run) {
13 | document.getElementById('mocha').innerHTML = '';
14 | p5.prototype.outputVolume(0);
15 | p5.prototype.userStartAudio();
16 | mocha.run();
17 | test_has_run = true;
18 | }
19 | };
20 | document.addEventListener('click', mousePressed, false);
21 | };
22 |
23 | // Operating p5 in instance mode (read more here: https://github.com/processing/p5.js/wiki/Global-and-instance-mode)
24 | const s = (sketch) => {
25 | sketch.setup = () => {
26 | mocha.setup('bdd');
27 | startTest();
28 | };
29 | };
30 |
31 | new p5(s);
32 |
--------------------------------------------------------------------------------
/test/testAudio/bx-spring.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/test/testAudio/bx-spring.mp3
--------------------------------------------------------------------------------
/test/testAudio/bx-spring.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/test/testAudio/bx-spring.ogg
--------------------------------------------------------------------------------
/test/testAudio/drum.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/test/testAudio/drum.mp3
--------------------------------------------------------------------------------
/test/testAudio/drum.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/processing/p5.js-sound/6feca5f809b69a0c6b7fbee4d376e76d4c5e4abc/test/testAudio/drum.ogg
--------------------------------------------------------------------------------
/test/tests.js:
--------------------------------------------------------------------------------
1 | import('./tests/main.js');
2 | import('./tests/p5.Helpers.js');
3 | import('./tests/p5.PeakDetect.js');
4 | import('./tests/p5.OnsetDetect.js');
5 | import('./tests/p5.Distortion.js');
6 | import('./tests/p5.AudioContext.js');
7 | import('./tests/p5.Looper.js');
8 | import('./tests/p5.Metro.js');
9 | import('./tests/p5.Effect.js');
10 | import('./tests/p5.Filter.js');
11 | import('./tests/p5.Gain.js');
12 | import('./tests/p5.FFT.js');
13 | import('./tests/p5.SoundLoop.js');
14 | import('./tests/p5.Compressor.js');
15 | import('./tests/p5.EQ.js');
16 | import('./tests/p5.AudioIn.js');
17 | import('./tests/p5.AudioVoice.js');
18 | import('./tests/p5.MonoSynth.js');
19 | import('./tests/p5.PolySynth.js');
20 | import('./tests/p5.SoundRecorder.js');
21 | import('./tests/p5.SoundFile.js');
22 | import('./tests/p5.Amplitude.js');
23 | import('./tests/p5.Oscillator.js');
24 | import('./tests/p5.Envelope.js');
25 | import('./tests/p5.Pulse.js');
26 | import('./tests/p5.Noise.js');
27 | import('./tests/p5.Panner.js');
28 | import('./tests/p5.Panner3d.js');
29 | import('./tests/p5.Delay.js');
30 | import('./tests/p5.Reverb.js');
31 | import('./tests/p5.Listener3d.js');
32 |
--------------------------------------------------------------------------------
/test/tests/main.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 | describe('main output', function () {
3 | it('can initiate main class', function () {
4 | expect(p5.soundOut.input).to.have.property('gain');
5 | expect(p5.soundOut.input).to.have.property('context');
6 | expect(p5.soundOut.output).to.have.property('gain');
7 | expect(p5.soundOut.output).to.have.property('context');
8 | expect(p5.soundOut.limiter.threshold.value).to.equal(-3);
9 | expect(p5.soundOut.limiter.ratio.value).to.equal(20);
10 | expect(p5.soundOut.limiter.knee.value).to.equal(1);
11 | expect(p5.soundOut.audiocontext).to.have.property('audioWorklet');
12 | expect(p5.soundOut.audiocontext).to.have.property('baseLatency');
13 | expect(p5.soundOut.meter).to.have.property('gain');
14 | expect(p5.soundOut.meter).to.have.property('context');
15 | expect(p5.soundOut.fftMeter).to.have.property('gain');
16 | expect(p5.soundOut.fftMeter).to.have.property('context');
17 | expect(p5.soundOut.soundArray).to.be.an('array');
18 | expect(p5.soundOut.parts).to.be.an('array');
19 | expect(p5.soundOut.extensions).to.be.an('array');
20 |
21 | expect(p5.soundOut._silentNode).to.have.property('gain');
22 | expect(p5.soundOut._silentNode).to.have.property('context');
23 | expect(p5.soundOut._silentNode.gain.value).to.equal(0);
24 |
25 | console.log(p5.soundOut);
26 | });
27 |
28 | it('can set and return output volume', function (done) {
29 | p5.prototype.outputVolume(0.6);
30 |
31 | setTimeout(function () {
32 | expect(p5.prototype.getOutputVolume()).to.be.approximately(0.6, 0.05);
33 | expect(p5.prototype.outputVolume().value).to.be.approximately(0.6, 0.05);
34 | done();
35 | }, 100);
36 | });
37 | it('can set output volume after t seconds in future', function (done) {
38 | let t = 1;
39 | p5.prototype.outputVolume(0.9, 0, t);
40 |
41 | setTimeout(function () {
42 | expect(p5.prototype.getOutputVolume()).to.be.approximately(0.9, 0.05);
43 | done();
44 | }, 1100);
45 | });
46 |
47 | it('can create a linear fade effect in output volume ', function (done) {
48 | let t = 1;
49 | p5.prototype.outputVolume(1, t, 0);
50 |
51 | setTimeout(function () {
52 | expect(p5.prototype.getOutputVolume()).to.be.approximately(0.5, 0.5);
53 | done();
54 | }, 500);
55 | });
56 |
57 | it('can connect an audio node to p5sound output', function () {
58 | let noise = new p5.Noise();
59 | p5.prototype.outputVolume(noise);
60 | });
61 | });
62 |
--------------------------------------------------------------------------------
/test/tests/p5.AudioContext.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 |
3 | describe('p5.AudioContext', function () {
4 | describe('getAudioContext', function () {
5 | it('returns an audioContext', function () {
6 | let audioContext = p5.prototype.getAudioContext();
7 | expect(audioContext).to.have.property('baseLatency').to.be.an('number');
8 | expect(audioContext).to.have.property('destination');
9 | expect(audioContext).to.have.property('state').to.be.an('string');
10 | });
11 | });
12 |
13 | describe('userStartAudio', function () {
14 | it('can get initialized and returns a promise', function (done) {
15 | let startAudio = p5.prototype.userStartAudio();
16 | startAudio.then(() => {
17 | done();
18 | });
19 | });
20 | });
21 | });
22 |
--------------------------------------------------------------------------------
/test/tests/p5.AudioVoice.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 |
3 | describe('p5.AudioVoice', function () {
4 | it('can be created and disposed', function () {
5 | let av = new p5.AudioVoice();
6 | let audioContext = av.ac;
7 | expect(audioContext).to.have.property('baseLatency').to.be.an('number');
8 | expect(audioContext).to.have.property('destination');
9 | expect(audioContext).to.have.property('state').to.be.an('string');
10 | expect(av.output).to.have.property('gain');
11 | expect(av.output).to.have.property('context');
12 | av.dispose();
13 | expect(av).to.not.have.property('output');
14 | });
15 | it('can be connected and disconnected', function () {
16 | let av = new p5.AudioVoice();
17 | let filter = new p5.Filter();
18 |
19 | // if unit has an input property
20 | av.connect(filter);
21 | av.disconnect();
22 |
23 | // if unit does not have an input property
24 | av = new p5.AudioVoice();
25 | av.connect(filter.input);
26 | av.disconnect();
27 | });
28 | it('can execute _onNewInput() hook on connected unit', function (done) {
29 | let av = new p5.AudioVoice();
30 | const gain = new p5.Gain();
31 | gain._onNewInput = function () {
32 | done();
33 | };
34 | av.connect(gain);
35 | });
36 | });
37 |
--------------------------------------------------------------------------------
/test/tests/p5.Gain.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 | let gain;
3 |
4 | describe('p5.Gain', function () {
5 | beforeEach(function () {
6 | gain = new p5.Gain();
7 | });
8 |
9 | it('can be created', function () {
10 | expect(gain.input).to.have.property('gain');
11 | expect(gain.output).to.have.property('gain');
12 | let audioContext = gain.ac;
13 | expect(audioContext).to.have.property('baseLatency').to.be.an('number');
14 | expect(audioContext).to.have.property('destination');
15 | expect(audioContext).to.have.property('state').to.be.an('string');
16 | });
17 | it('can be created and disposed', function () {
18 | gain.dispose();
19 | expect(gain).to.not.have.property('input');
20 | expect(gain).to.not.have.property('output');
21 | });
22 |
23 | describe('methods', function () {
24 | describe('setInput', function () {
25 | it('can set Input', function () {
26 | let soundFile = p5.prototype.loadSound('./testAudio/drum.mp3');
27 | gain.setInput(soundFile);
28 | });
29 | });
30 | describe('connect, disconnect', function () {
31 | it('connects to p5.soundOut when no arguments are provided', function () {
32 | gain.connect();
33 | });
34 | it('can connect with or without input property', function () {
35 | let filter = new p5.Filter();
36 | gain.connect(filter);
37 | gain.connect(filter.input);
38 | });
39 | it('can set the output level of gain node', (done) => {
40 | let osc = new p5.Oscillator('sine');
41 | let mainGainNode = new p5.Gain();
42 | let amplitude = new p5.Amplitude();
43 |
44 | osc.amp(1);
45 | osc.start();
46 | osc.disconnect();
47 |
48 | mainGainNode.setInput(osc);
49 | amplitude.setInput(mainGainNode);
50 |
51 | mainGainNode.amp(0.5);
52 | setTimeout(function () {
53 | expect(amplitude.getLevel()).to.be.closeTo(0.25, 0.125);
54 | done();
55 | }, 100);
56 | });
57 | it('can disconnect', function () {
58 | let filter = new p5.Filter();
59 | gain.connect(filter);
60 | gain.disconnect();
61 | });
62 | it('can execute _onNewInput() hook on connected unit', function (done) {
63 | const gainToConnect = new p5.Gain();
64 | gainToConnect._onNewInput = function () {
65 | done();
66 | };
67 | gain.connect(gainToConnect);
68 | });
69 | });
70 | describe('amp', function () {
71 | it('can take only volume as input', function () {
72 | //TODO
73 | });
74 | });
75 | });
76 | });
77 |
--------------------------------------------------------------------------------
/test/tests/p5.Metro.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 | let metro;
3 |
4 | describe('p5.Metro', function () {
5 | beforeEach(function () {
6 | metro = new p5.Metro();
7 | });
8 |
9 | it('can be created', function () {
10 | expect(metro).to.have.property('bpm').to.equal(120);
11 | expect(metro).to.have.property('clock');
12 | expect(metro).to.have.property('syncedParts').to.be.an('array');
13 | });
14 |
15 | it('can be initialised with a beatlength, bpm', function () {
16 | metro.beatLength(0.0625);
17 | metro.setBPM(60);
18 | expect(metro.tatums).to.equal(4);
19 | expect(metro.tatumTime).to.equal(0.25);
20 | expect(metro.getBPM()).to.equal(60);
21 | });
22 |
23 | it('can be started and stopped', function (done) {
24 | this.timeout(2000);
25 | let ticks;
26 | metro.setBPM(600);
27 | metro.start();
28 | setTimeout(() => {
29 | ticks = metro.metroTicks;
30 | metro.stop();
31 | expect(ticks).to.not.equal(0);
32 | setTimeout(() => {
33 | expect(metro.metroTicks).to.equal(ticks);
34 | done();
35 | }, 100);
36 | }, 1000);
37 | });
38 |
39 | it('can be started and stopped with delay', function (done) {
40 | let ticks;
41 | metro.setBPM(600);
42 | metro.start(0.1);
43 | setTimeout(() => {
44 | ticks = metro.metroTicks;
45 | metro.stop(0.2);
46 | expect(ticks).to.not.equal(0);
47 | setTimeout(() => {
48 | expect(metro.metroTicks).to.be.above(ticks);
49 | }, 100);
50 | setTimeout(() => {
51 | ticks = metro.metroTicks;
52 | setTimeout(() => {
53 | expect(metro.metroTicks).to.equal(ticks);
54 | done();
55 | }, 100);
56 | }, 200);
57 | }, 1000);
58 | });
59 |
60 | it('can sync parts', function () {
61 | let part = new p5.Part();
62 | part.addPhrase('snare', () => {}, [0, 0, 1, 0]);
63 | part.setBPM(60);
64 | part.noLoop();
65 | part.start();
66 | expect(metro.syncedParts.length).to.equal(0);
67 | metro.resetSync(part);
68 | expect(metro.syncedParts.length).to.equal(1);
69 | });
70 |
71 | it('parts can be pushed into syncedParts', function () {
72 | let phraseAttack = new p5.Phrase('testerAttack', () => {}, [1, 0, 0, 0]);
73 | let part = new p5.Part();
74 | part.addPhrase(phraseAttack);
75 | part.setBPM(60);
76 | part.start();
77 | expect(metro.syncedParts.length).to.equal(0);
78 | metro.pushSync(part);
79 | expect(metro.syncedParts.length).to.equal(1);
80 | metro.pushSync(part);
81 | expect(metro.syncedParts.length).to.equal(2);
82 | metro.resetSync(part);
83 | expect(metro.syncedParts.length).to.equal(1);
84 | });
85 | });
86 |
--------------------------------------------------------------------------------
/test/tests/p5.Noise.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 |
3 | describe('p5.Noise', function () {
4 | it('can be created and disposed', function () {
5 | let noise = new p5.Noise();
6 | expect(noise).to.not.have.property('f');
7 | expect(noise).to.not.have.property('oscillator');
8 | expect(noise).to.have.property('buffer');
9 | expect(noise.started).to.be.false;
10 | expect(noise.buffer.type).to.equal('white');
11 |
12 | noise.dispose();
13 | expect(noise.output).to.be.null;
14 | expect(noise.panner).to.be.null;
15 | expect(noise.buffer).to.be.null;
16 | expect(noise.noise).to.be.null;
17 | });
18 | describe('methods', function () {
19 | it('can get and set type', function (done) {
20 | let noise = new p5.Noise();
21 | noise.start();
22 | noise.setType('brown');
23 | expect(noise.getType()).to.equal('brown');
24 | noise.setType();
25 | expect(noise.getType()).to.equal('white');
26 | setTimeout(() => {
27 | expect(noise.started).to.be.true;
28 | done();
29 | }, 100);
30 | });
31 | it('can be started and stopped', function () {
32 | let noise = new p5.Noise();
33 | expect(noise).to.not.have.property('noise');
34 | noise.start();
35 | expect(noise).to.have.property('noise');
36 | expect(noise.noise).to.have.property('buffer');
37 | expect(noise.noise.loop).to.be.true;
38 | expect(noise.started).to.be.true;
39 | noise.stop();
40 | expect(noise.started).to.be.false;
41 | });
42 | //TODO: test noise buffer generator functions
43 | });
44 | });
45 |
--------------------------------------------------------------------------------
/test/tests/p5.OnsetDetect.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 |
3 | describe('p5.OnsetDetect', function () {
4 | it('can be initialized', function () {
5 | const onsetDetect = new p5.OnsetDetect(40, 120, 0.8, () => {});
6 | expect(onsetDetect.freqLow).to.equal(40);
7 | expect(onsetDetect.freqHigh).to.equal(120);
8 | expect(onsetDetect.treshold).to.equal(0.8);
9 | expect(onsetDetect.energy).to.equal(0);
10 | });
11 |
12 | describe('methods', function () {
13 | //TODO : test update functions by mocking or using a FFT
14 | });
15 | });
16 |
--------------------------------------------------------------------------------
/test/tests/p5.Panner.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 |
3 | describe('p5.Panner', function () {
4 | let ac, input;
5 | beforeEach(function () {
6 | ac = p5.prototype.getAudioContext();
7 | input = ac.createGain();
8 | });
9 | it('can be created', function () {
10 | new p5.Panner();
11 | });
12 | it('can be connected and disconnected', function () {
13 | let panner = new p5.Panner();
14 | panner.connect(input);
15 | panner.disconnect();
16 | });
17 | it('can be panned without a delay', function (done) {
18 | let panner = new p5.Panner();
19 | panner.pan(0.4);
20 | setTimeout(() => {
21 | expect(panner.getPan()).to.be.approximately(0.4, 0.01);
22 | done();
23 | }, 25);
24 | });
25 | it('can be panned with a delay', function (done) {
26 | let panner = new p5.Panner();
27 | panner.pan(-0.7, 0.1);
28 | setTimeout(() => {
29 | expect(panner.getPan()).to.be.approximately(-0.7, 0.01);
30 | done();
31 | }, 125);
32 | });
33 | });
34 |
--------------------------------------------------------------------------------
/test/tests/p5.PeakDetect.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 |
3 | describe('p5.PeakDetect', function () {
4 | it('can be initialized without any arguments', function () {
5 | const peakDetect = new p5.PeakDetect();
6 | expect(peakDetect.cutoff).to.equal(0);
7 | expect(peakDetect.framesSinceLastPeak).to.equal(0);
8 | expect(peakDetect.energy).to.equal(0);
9 | expect(peakDetect.isDetected).to.equal(false);
10 | });
11 | it('can be initialized with arguments', function () {
12 | const peakDetect = new p5.PeakDetect(40, 120, 0.8, 20);
13 | expect(peakDetect.f1).to.equal(40);
14 | expect(peakDetect.f2).to.equal(120);
15 | expect(peakDetect.threshold).to.equal(0.8);
16 | expect(peakDetect.framesPerPeak).to.equal(20);
17 | });
18 | describe('methods', function () {
19 | //TODO : test update, onPeak functions by mocking or using a FFT
20 | });
21 | });
22 |
--------------------------------------------------------------------------------
/test/tests/p5.Pulse.js:
--------------------------------------------------------------------------------
1 | const expect = chai.expect;
2 |
3 | describe('p5.Pulse', function () {
4 | it('can be created without any arguments', function () {
5 | let pulse = new p5.Pulse();
6 | expect(pulse.w).to.equal(0);
7 | expect(pulse.oscillator.type).to.equal('sawtooth');
8 | expect(pulse.f).to.equal(440);
9 | expect(pulse.osc2).to.have.property('connection');
10 | expect(pulse.osc2).to.have.property('oscMods');
11 | expect(pulse.osc2).to.have.property('oscillator');
12 | expect(pulse.dcOffset).to.have.property('buffer');
13 | expect(pulse.dcOffset).to.have.property('channelCount');
14 | expect(pulse.dcGain).to.have.property('gain');
15 | expect(pulse.output.gain.value).to.equal(1);
16 | });
17 | it('can be created with arguments', function () {
18 | let pulse = new p5.Pulse(220, 0.4);
19 | expect(pulse.w).to.equal(0.4);
20 | expect(pulse.f).to.equal(220);
21 | expect(pulse.dNode.delayTime.value).to.be.approximately(0.0009, 0.00001);
22 | expect(pulse.dcGain.gain.value).to.be.approximately(0.17, 0.001);
23 | });
24 | describe('methods', function () {
25 | it('can set width', function (done) {
26 | let pulse = new p5.Pulse();
27 | pulse.width(0.3);
28 | expect(pulse.dNode.delayTime.value).to.be.approximately(0.00068, 0.00001);
29 | expect(pulse.dcGain.gain.value).to.be.approximately(0.34, 0.001);
30 |
31 | //can take non-numerical value
32 | let osc = new p5.Oscillator();
33 | pulse.width(osc);
34 | done();
35 | });
36 | it('can be started and stopped', function (done) {
37 | let pulse = new p5.Pulse(444, 0.1);
38 | expect(pulse.started).to.be.false;
39 | pulse.start(221, 0.1);
40 | setTimeout(() => {
41 | expect(pulse.oscillator.frequency.value).to.equal(221);
42 | expect(pulse.oscillator.type).to.equal('sawtooth');
43 | expect(pulse.osc2.oscillator.type).to.equal('sawtooth');
44 | done();
45 | }, 500);
46 | expect(pulse.started).to.be.true;
47 | expect(pulse.osc2.started).to.be.true;
48 | pulse.stop();
49 | expect(pulse.started).to.be.false;
50 | expect(pulse.osc2.started).to.be.false;
51 | });
52 | it('can set frequency', function () {
53 | //TODO
54 | });
55 | });
56 | });
57 |
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
1 | const webpack = require('webpack');
2 | const path = require('path');
3 | const fs = require('fs');
4 | const UglifyJsPlugin = require('uglifyjs-webpack-plugin');
5 | const WebpackAutoInject = require('webpack-auto-inject-version');
6 |
7 | const autoInjectVersionConfig = {
8 | SILENT: true,
9 | SHORT: 'p5.sound',
10 | components: {
11 | AutoIncreaseVersion: false,
12 | InjectAsComment: true,
13 | InjectByTag: false
14 | },
15 | componentsOptions: {
16 | InjectAsComment: {
17 | tag: 'Version: {version} - {date}',
18 | dateFormat: 'yyyy-mm-dd',
19 | multiLineCommentType: true,
20 | },
21 | }
22 | };
23 |
24 | module.exports = {
25 | context: __dirname + '/src',
26 | entry: {
27 | 'p5.sound': './app.js',
28 | 'p5.sound.min': './app.js'
29 | },
30 | output: {
31 | // where we want to output built files
32 | path: __dirname + "/lib"
33 | },
34 | mode: 'production',
35 | devtool: 'source-map',
39 | plugins: [
40 | new webpack.NormalModuleReplacementPlugin(/Tone(\.*)/, function(resource) {
41 | resource.request = path.join(__dirname, './node_modules/tone/', resource.request);
42 | }),
43 | new webpack.BannerPlugin({
44 | banner: fs.readFileSync('./fragments/before.frag').toString(),
45 | raw: true,
46 | }),
47 | new WebpackAutoInject(autoInjectVersionConfig)
48 | ],
49 | module: {
50 | rules: [
51 | {
52 | test: /node_modules(\.*)/,
53 | use: {
54 | loader: 'uglify-loader'
55 | }
56 | },
57 | {
58 | test: /\.js$/,
59 | exclude: /(node_modules)/,
60 | use: {
61 | loader: 'babel-loader'
62 | }
63 | },
64 | ]
65 | },
66 | optimization: {
67 | minimize: true,
68 | minimizer: [
69 | new UglifyJsPlugin({
70 | include: [/\.min\.js$/],
71 | cache: true,
72 | parallel: true,
73 | uglifyOptions: {
74 | compress: {
75 | drop_console: true
76 | },
77 | ecma: 6,
78 | mangle: true,
79 | output: {
80 | comments: false
81 | }
82 | },
83 | sourceMap: true,
84 | })
85 | ]
86 | },
87 | resolve: {
88 | modules: [path.resolve(__dirname, 'src'), 'node_modules']
89 | }
90 | }
91 |
--------------------------------------------------------------------------------