├── .eslintrc.js
├── .gitignore
├── audio-utils.js
├── canvas-utils.js
├── filter.js
├── image-filtering.js
├── index.html
├── lena.jpg
├── package.json
├── peels.mp3
├── playing-array
├── index.html
└── playing-array.js
├── plot-image-lines
├── image-lines-filtered.html
├── image-lines-filtered.js
├── index.html
└── plot-image.js
├── plot-utils.js
├── simple-filter
├── index.html
└── simple-filter.js
├── sound-example
├── filter-results.html
├── filter-results.js
├── index.html
└── sound-example.js
└── vendor
└── Chart.js
/.eslintrc.js:
--------------------------------------------------------------------------------
// ESLint configuration: extends eslint-config-standard and whitelists the
// browser/Web Audio globals (plus cross-file helpers loaded via script tags)
// that the demo scripts reference without importing.
module.exports = {
  "extends": "standard",
  globals: {
    Image: true,
    AudioContext: true,
    Filter: true,
    plotFiltered: true,
    splitRGB: true,
    XMLHttpRequest: true
  }
}
12 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 |
--------------------------------------------------------------------------------
/audio-utils.js:
--------------------------------------------------------------------------------
// Plays a raw sample array through the speakers via the Web Audio API.
// `signal` holds amplitude samples (typically in [-1, 1]); `sampleRate` tells
// the API how many of those samples make up one second of sound, so a
// 2-second tone at sampleRate 3000 needs a 6000-element array.
const playSignal = (signal = [], audioContext, sampleRate) => {
  const sampleCount = signal.length

  // Source node that will stream our hand-built buffer.
  const sourceNode = audioContext.createBufferSource()

  // Mono buffer (1 channel) sized exactly to the signal.
  const monoBuffer = audioContext.createBuffer(1, sampleCount, sampleRate)

  // The freshly created buffer starts out silent; copy every amplitude in.
  const channelData = monoBuffer.getChannelData(0)
  signal.forEach((amplitude, index) => {
    channelData[index] = amplitude
  })

  sourceNode.buffer = monoBuffer

  // audioContext.destination represents the speakers.
  sourceNode.connect(audioContext.destination)

  // Start playback immediately.
  sourceNode.start(audioContext.currentTime)
}
33 |
// Fetches an audio file and decodes it into an AudioBuffer.
// Returns a promise that resolves with the decoded buffer and rejects on
// either a network error or a decoding error.
const loadSound = (path, audioContext) => {
  const request = new XMLHttpRequest()
  request.open('GET', path, true)
  request.responseType = 'arraybuffer'

  const promise = new Promise((resolve, reject) => {
    request.onload = () => {
      audioContext.decodeAudioData(
        request.response,
        (buffer) => resolve(buffer),
        // Fix: this callback previously only logged via console.error, which
        // left the promise pending forever on a decode failure. Reject so
        // callers can observe and handle the error.
        (error) => reject(error)
      )
    }

    request.onerror = (error) => reject(error)
  })

  request.send()

  return promise
}
55 |
// Plays the first two seconds of an already-decoded AudioBuffer.
const playBuffer = (buffer, audioContext) => {
  const now = audioContext.currentTime

  const sourceNode = audioContext.createBufferSource()
  sourceNode.buffer = buffer

  // Route the source straight to the speakers.
  sourceNode.connect(audioContext.destination)

  // Play from `now`, stopping after exactly two seconds.
  sourceNode.start(now)
  sourceNode.stop(now + 2)
}
67 |
--------------------------------------------------------------------------------
/canvas-utils.js:
--------------------------------------------------------------------------------
// Splits a flat RGBA byte array (r, g, b, a, r, g, b, a, ...) into four
// parallel channel arrays, one entry per pixel.
const splitRGB = (data) => {
  const red = []
  const green = []
  const blue = []
  const alpha = []

  // Walk the array one pixel (4 bytes) at a time.
  for (let offset = 0; offset < data.length - 1; offset += 4) {
    red.push(data[offset])
    green.push(data[offset + 1])
    blue.push(data[offset + 2])
    alpha.push(data[offset + 3])
  }

  return { red, green, blue, alpha }
}
18 |
// Re-interleaves four channel arrays back into a flat RGBA byte array
// (the inverse of splitRGB). The result is clamped to 0-255 by the
// Uint8ClampedArray, which is what canvas ImageData expects.
const mountRGB = ({red, green, blue, alpha}) => {
  const packed = new Uint8ClampedArray(red.length * 4)

  red.forEach((value, pixel) => {
    const offset = pixel * 4

    packed[offset] = value
    packed[offset + 1] = green[pixel]
    packed[offset + 2] = blue[pixel]
    packed[offset + 3] = alpha[pixel]
  })

  return packed
}
36 |
// Draws a filtered RGBA signal into a canvas 2D context. Alpha is forced to
// 255 (fully opaque) for every pixel covered by the signal; pixels beyond the
// signal's length stay zeroed (transparent black).
const plotFiltered = (signal, plotContext) => {
  const { width, height } = plotContext.canvas

  const imgData = plotContext.getImageData(0, 0, width, height)
  const pixels = new Uint8ClampedArray(imgData.data.length)

  // Copy RGB from the signal, overriding alpha per pixel.
  for (let offset = 0; offset < signal.length; offset += 4) {
    pixels[offset] = signal[offset]
    pixels[offset + 1] = signal[offset + 1]
    pixels[offset + 2] = signal[offset + 2]
    pixels[offset + 3] = 255
  }

  imgData.data.set(pixels)
  plotContext.putImageData(imgData, 0, 0)
}
53 |
// Returns a plain-array copy of an RGBA pixel array with the alpha bytes
// dropped: [r, g, b, a, ...] becomes [r, g, b, ...].
const removeAlpha = (data) => {
  const rgb = []

  for (let offset = 0; offset < data.length - 1; offset += 4) {
    rgb.push(data[offset], data[offset + 1], data[offset + 2])
  }

  return rgb
}
65 |
--------------------------------------------------------------------------------
/filter.js:
--------------------------------------------------------------------------------
// Filters an arbitrary numeric signal by streaming it through a Web Audio
// BiquadFilterNode and capturing the processed samples with a
// ScriptProcessorNode.
class Filter {
  /**
   * @param {AudioContext} audioContext - context used to build every node.
   */
  constructor (audioContext) {
    this.audioContext = audioContext
  }

  /**
   * Runs `signal` through a biquad filter and resolves with the filtered
   * samples once at least `signal.length` samples have been captured.
   *
   * @param {number[]} signal - raw samples to filter.
   * @param {string} type - BiquadFilterNode type ('highpass', 'lowpass', ...).
   * @param {number} frequency - filter cutoff frequency in Hz.
   * @param {number} sampleRate - sample rate used to build the source buffer.
   *   Defaults to the context's own rate. Callers such as
   *   sound-example/filter-results.js already pass this fourth argument,
   *   which was silently ignored before this parameter existed.
   * @returns {Promise<number[]>} the filtered samples.
   */
  filterSignal (signal, type = 'highpass', frequency = 10000, sampleRate = this.audioContext.sampleRate) {
    const node = this.audioContext.createBufferSource()
    const buffer = this.audioContext.createBuffer(1, signal.length, sampleRate)
    const data = buffer.getChannelData(0)
    const processed = []

    const filter = this.audioContext.createBiquadFilter()
    filter.type = type
    filter.frequency.value = frequency

    // 4096-sample chunks, stereo in/out; onaudioprocess collects the output
    // chunks as the graph plays through the signal.
    const processorNode = this.audioContext.createScriptProcessor(4096, 2, 2)

    const length = signal.length

    const promise = new Promise((resolve, reject) => {
      processorNode.onaudioprocess = this.onAudioProcess.bind(this, processed, processorNode, length, resolve)
    })

    // Copy the raw signal into the (initially silent) source buffer.
    for (let i = 0; i < length; i++) {
      data[i] = signal[i]
    }

    // Wire source -> filter -> processor -> speakers.
    node.buffer = buffer
    node.connect(filter)
    filter.connect(processorNode)
    processorNode.connect(this.audioContext.destination)

    node.start(this.audioContext.currentTime)

    return promise
  }

  // Accumulates each processing block's samples into `processed`; once enough
  // samples have been captured, disconnects the processor and resolves the
  // promise created in filterSignal.
  onAudioProcess (processed, processorNode, finalLength, resolve, ev) {
    const inputBuffer = ev.inputBuffer
    const inputData = inputBuffer.getChannelData(0)
    const length = inputData.length

    for (let sample = 0; sample < length; sample++) {
      processed.push(inputData[sample])
    }

    if (processed.length >= finalLength) {
      processorNode.disconnect()
      resolve(processed)
    }
  }
}
55 |
--------------------------------------------------------------------------------
/image-filtering.js:
--------------------------------------------------------------------------------
// Filters lena.jpg by running each RGB channel through a Web Audio highpass
// filter (filter.js) and drawing the result onto a second canvas.
// Filter, splitRGB, mountRGB and plotFiltered are globals provided by
// filter.js and canvas-utils.js, loaded via script tags in index.html.
const canvas1 = document.querySelector('#canvas')
const context1 = canvas1.getContext('2d')

const canvasResult = document.querySelector('#canvas-result')
const contextResult = canvasResult.getContext('2d')

const imageFilter = new Filter(new AudioContext())

const baseImage = new Image()
baseImage.src = 'lena.jpg'

baseImage.onload = () => {
  const { width, height } = context1.canvas
  context1.drawImage(baseImage, 0, 0, width, height)

  // Pixel data is a flat RGBA array; split it into per-channel arrays so each
  // color channel can be filtered as an independent 1-D signal.
  const imageData = context1.getImageData(0, 0, context1.canvas.width, context1.canvas.height)
  const splitted = splitRGB(imageData.data)

  // Filter the three color channels in parallel; alpha is passed through.
  Promise.all([
    imageFilter.filterSignal(splitted.red, 'highpass', 4000),
    imageFilter.filterSignal(splitted.green, 'highpass', 4000),
    imageFilter.filterSignal(splitted.blue, 'highpass', 4000)
  ])
    .then((values) => {
      // Re-interleave the filtered channels with the original alpha bytes.
      const arr = mountRGB({
        red: values[0],
        green: values[1],
        blue: values[2],
        alpha: splitted.alpha
      })
      console.log(arr) // NOTE(review): leftover debug output — consider removing
      plotFiltered(arr, contextResult)
    })
}
35 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/lena.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rssilva/web-audio-image-filtering/3e4cd6ff812a988dcb4da8b1e19c6564e29ed579/lena.jpg
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "image-filtering-with-webaudio-api",
3 | "version": "0.0.1",
4 | "description": "",
5 | "main": "image-filtering.js",
6 | "dependencies": {
7 | "chart.js": "^2.7.0",
8 | "eslint": "^4.8.0"
9 | },
10 | "devDependencies": {
11 | "eslint-config-standard": "^10.2.1",
12 | "eslint-plugin-import": "^2.7.0",
13 | "eslint-plugin-node": "^5.2.0",
14 | "eslint-plugin-promise": "^3.5.0",
15 | "eslint-plugin-standard": "^3.0.1"
16 | },
17 | "scripts": {
18 | "test": "echo \"Error: no test specified\" && exit 1"
19 | },
20 | "author": "@rssilva",
21 | "license": "MIT"
22 | }
23 |
--------------------------------------------------------------------------------
/peels.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rssilva/web-audio-image-filtering/3e4cd6ff812a988dcb4da8b1e19c6564e29ed579/peels.mp3
--------------------------------------------------------------------------------
/playing-array/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/playing-array/playing-array.js:
--------------------------------------------------------------------------------
// Generates one second of audio from a user-supplied equation of `t`
// (time in seconds), plays it through the Web Audio API, and plots the
// first 100 samples. playSignal and plot are globals from audio-utils.js
// and plot-utils.js, loaded via script tags.

// Let's add a canvas to see the graphs, a textarea to write the equation
// and a button to start playing
const context = document.querySelector('#canvas').getContext('2d')
const textArea = document.querySelector('#function-area')
const button = document.querySelector('#play-button')

const audioContext = new AudioContext()

const SAMPLE_RATE = 3000

// Setting the duration interval in seconds which the sound will be played
const duration = 1

// Time increment is the interval between each sample
// so we are determining that each second will have 3000 values
const increment = 1 / SAMPLE_RATE

// on every button click
button.addEventListener('click', () => {
  let signal = []
  let axis = []

  // t will have the time in seconds
  for (let t = 0; t < duration - increment; t += increment) {
    // maybe the JS 👮 will arrest me for this, but let's get the string on the textArea
    // and use eval to calculate the amplitude
    // SECURITY NOTE: eval executes arbitrary code from the textarea. It is
    // tolerable here only because the page evaluates the current user's own
    // local input — never feed it remote or third-party strings.
    const value = eval(textArea.value.trim())

    // then we store it to the signal array
    signal.push(value)
    axis.push(t)
  }

  // and let's call the playSignal function that was explained earlier
  playSignal(signal, audioContext, SAMPLE_RATE)

  // then we plot to the canvas
  // (only the first 100 samples are labeled, keeping the chart readable)
  plot({
    context,
    axis: axis.slice(0, 100),
    signals: [signal],
    colors: ['orange'],
    suggestedMin: -1,
    suggestedMax: 1
  })
})
47 |
--------------------------------------------------------------------------------
/plot-image-lines/image-lines-filtered.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/plot-image-lines/image-lines-filtered.js:
--------------------------------------------------------------------------------
// Draws lena.jpg, highpass-filters its red channel, and plots a 400-sample
// window of the original vs. filtered signal with Chart.js.
// Filter, splitRGB and plot are globals from filter.js, canvas-utils.js and
// plot-utils.js (script tags in the HTML).
const canvas = document.querySelector('#canvas')
const context = canvas.getContext('2d')

const graphContext1 = document.querySelector('#graph').getContext('2d')
// NOTE(review): graphContext2-4 are currently unused in this file.
const graphContext2 = document.querySelector('#graph2').getContext('2d')
const graphContext3 = document.querySelector('#graph3').getContext('2d')
const graphContext4 = document.querySelector('#graph4').getContext('2d')

const baseImage = new Image()
baseImage.src = '../lena.jpg'

baseImage.onload = () => {
  const { width, height } = context.canvas

  context.drawImage(baseImage, 0, 0, width, height)

  const imageData = context.getImageData(0, 0, width, height)

  // One x-axis label per byte of pixel data; sliced below when plotting.
  const axis = []

  imageData.data.forEach((val, i) => {
    axis.push(i)
  })

  const imageFilter = new Filter(new AudioContext())

  const splitted = splitRGB(imageData.data)

  // Plot a window from the middle of the image (samples 80000-80400), where
  // the signal has more interesting variation than the top rows.
  imageFilter.filterSignal(splitted.red, 'highpass', 4000)
    .then((filtered) => {
      plot({
        signals: [
          splitted.red.slice(80000, 80400),
          filtered.slice(80000, 80400)
        ],
        axis: axis.slice(0, 400),
        context: graphContext1,
        colors: ['red', 'lightBlue']
      })
    })
}
42 |
--------------------------------------------------------------------------------
/plot-image-lines/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/plot-image-lines/plot-image.js:
--------------------------------------------------------------------------------
// Draws lena.jpg and plots several views of its pixel data: raw RGBA bytes,
// RGB with alpha stripped, and the separated R/G/B channels at two offsets.
// removeAlpha, splitRGB and plot are globals from canvas-utils.js and
// plot-utils.js (script tags in the HTML).
const canvas = document.querySelector('#canvas')
const context = canvas.getContext('2d')

const graphContext1 = document.querySelector('#graph').getContext('2d')
const graphContext2 = document.querySelector('#graph2').getContext('2d')
const graphContext3 = document.querySelector('#graph3').getContext('2d')
const graphContext4 = document.querySelector('#graph4').getContext('2d')

const baseImage = new Image()
baseImage.src = '../lena.jpg'

baseImage.onload = () => {
  const { width, height } = context.canvas

  context.drawImage(baseImage, 0, 0, width, height)

  const imageData = context.getImageData(0, 0, width, height)

  // One x-axis label per byte of pixel data; sliced when plotting.
  const axis = []

  imageData.data.forEach((val, i) => {
    axis.push(i)
  })

  // Raw interleaved RGBA bytes — alpha shows as a constant line at 255.
  plot({
    signals: [imageData.data.slice(0, 200)],
    axis: axis.slice(0, 200),
    context: graphContext1,
    colors: ['orange']
  })

  // Same data with the alpha bytes stripped.
  const withoutAlpha = removeAlpha(imageData.data)

  plot({
    signals: [withoutAlpha.slice(0, 150)],
    axis: axis.slice(0, 150),
    context: graphContext2,
    colors: ['orange']
  })

  // Each color channel plotted as its own signal.
  const splitted = splitRGB(imageData.data)

  plot({
    signals: [
      splitted.red.slice(0, 400),
      splitted.green.slice(0, 400),
      splitted.blue.slice(0, 400)
    ],
    axis: axis.slice(0, 400),
    context: graphContext3,
    colors: ['red', 'green', 'blue']
  })

  // The same three channels from the middle of the image (offset 80000),
  // where there is more variation.
  plot({
    signals: [
      splitted.red.slice(80000, 80400),
      splitted.green.slice(80000, 80400),
      splitted.blue.slice(80000, 80400)
    ],
    axis: axis.slice(0, 400),
    context: graphContext4,
    colors: ['red', 'green', 'blue']
  })
}
65 |
--------------------------------------------------------------------------------
/plot-utils.js:
--------------------------------------------------------------------------------
// Named stroke colors available to the plotting helpers.
const COLORS = {
  orange: 'rgba(247, 158, 2, 1)',
  red: 'rgba(255, 0, 0, 1)',
  green: 'rgba(0, 255, 0, 1)',
  blue: 'rgba(0, 0, 255, 1)',
  lightBlue: 'rgba(27, 156, 229, 1)'
}

// Builds one Chart.js line dataset per signal, pairing each signal with the
// COLORS entry named at the same index of `colors`.
const getDataSets = (signals, colors) =>
  signals.map((signal, index) => ({
    data: signal,
    borderWidth: 1,
    fill: false,
    borderColor: COLORS[colors[index]],
    pointRadius: 0
  }))
24 |
// Renders one or more signals as a Chart.js line chart on `context`.
// `axis` supplies the x labels; `suggestedMin`/`suggestedMax` bound the
// y axis (defaults suit 8-bit pixel values).
const plot = ({signals, axis, context, colors, suggestedMin = 0, suggestedMax = 255}) => {
  const options = {
    axes: { display: 'none' },
    legend: { display: false },
    // No animation: charts are redrawn often and should appear instantly.
    animation: { duration: 0 },
    // tension 0 draws straight segments between samples.
    elements: { line: { tension: 0 } },
    scales: {
      xAxes: [],
      yAxes: [{ ticks: { suggestedMin, suggestedMax } }]
    }
  }

  return new Chart(context, {
    type: 'line',
    data: {
      labels: axis,
      datasets: getDataSets(signals, colors)
    },
    options
  })
}
61 |
--------------------------------------------------------------------------------
/simple-filter/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/simple-filter/simple-filter.js:
--------------------------------------------------------------------------------
// Plays ../peels.mp3 through a configurable biquad filter. The filter type
// and cutoff frequency are driven by the <select> and range input on the
// page. loadSound comes from ../audio-utils.js (script tag).
const audioContext = new AudioContext()
const select = document.querySelector('#filter-selector')
const frequencyRange = document.querySelector('#frequency-range')
const currentFrequency = document.querySelector('#current-frequency')

// Both are assigned once the mp3 has loaded and playback starts.
let sourceNode = null
let filter = null

// NOTE(review): if either control fires before loadSound resolves,
// sourceNode is still null here and disconnect()/play() will throw.
select.addEventListener('change', () => {
  disconnect(sourceNode)
  play(audioContext, sourceNode)
})

frequencyRange.addEventListener('change', () => {
  currentFrequency.value = frequencyRange.value
  disconnect(sourceNode)
  play(audioContext, sourceNode)
})

loadSound('../peels.mp3', audioContext)
  .then((buffer) => {
    sourceNode = getSourceNode(audioContext, buffer)
    play(audioContext, sourceNode)

    sourceNode.start(audioContext.currentTime)
  })
27 |
// Builds a looping buffer source for the decoded mp3.
const getSourceNode = (audioContext, buffer) => {
  const source = audioContext.createBufferSource()

  source.buffer = buffer
  // Fix: the standard AudioBufferSourceNode property is `loop`; the previous
  // `looping = true` assignment set a nonexistent property, so the track
  // never actually looped.
  source.loop = true

  return source
}
36 |
// Creates a BiquadFilterNode configured with the given type and cutoff
// frequency (Hz).
const getFilter = (audioContext, type, frequency) => {
  const biquad = audioContext.createBiquadFilter()

  biquad.type = type
  biquad.frequency.value = frequency

  return biquad
}
45 |
// (Re)builds the filter from the current UI values and wires
// source -> filter -> speakers. Only one filter is kept active at a time;
// `filter`, `select` and `frequencyRange` live at module scope.
const play = (audioContext, sourceNode) => {
  // Detach any previously-created filter before replacing it.
  if (filter) filter.disconnect()

  filter = getFilter(audioContext, select.value, frequencyRange.value)

  sourceNode.connect(filter)
  filter.connect(audioContext.destination)
}
56 |
// Detaches the source and filter from the audio graph. Guards against being
// called before the sound has loaded, when sourceNode (and possibly the
// module-level filter) is still null — previously this threw a TypeError if
// a control fired before loadSound resolved.
const disconnect = (sourceNode) => {
  if (sourceNode) {
    sourceNode.disconnect()
  }

  if (filter) {
    filter.disconnect()
  }
}
61 |
--------------------------------------------------------------------------------
/sound-example/filter-results.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/sound-example/filter-results.js:
--------------------------------------------------------------------------------
// Demonstrates the Filter class on a synthetic chirp: plots the raw signal,
// then lowpass- and highpass-filtered versions on separate canvases.
// Filter and plot are globals from ../filter.js and ../plot-utils.js.
const context1 = document.querySelector('#canvas1').getContext('2d')
const context2 = document.querySelector('#canvas2').getContext('2d')
const context3 = document.querySelector('#canvas3').getContext('2d')
const audioContext = new AudioContext()
const filter = new Filter(audioContext)
const SAMPLE_RATE = 8000

let signal = []
const tAxis = []
// One second of samples at SAMPLE_RATE samples per second.
const duration = 1
const increment = 1 / SAMPLE_RATE

// Chirp: sin(2*pi*f(t)*t) where 6.283 ~= 2*pi and the frequency term
// (50 + 500*t) sweeps upward from 50 Hz over the second.
for (let t = 0; t < duration; t += increment) {
  const value = Math.sin(6.283 * (50 + 500 * t) * t)

  signal.push(value)
  tAxis.push(t)
}

// Raw chirp.
plot({
  context: context1,
  colors: ['orange'],
  suggestedMin: -1.2,
  suggestedMax: 1.2,
  signals: [signal],
  axis: tAxis.slice(0, 1500)
})

// NOTE(review): the fourth argument (SAMPLE_RATE) is only honored if
// Filter#filterSignal accepts a sampleRate parameter — verify against
// filter.js.
filter
  .filterSignal(signal, 'lowpass', 400, SAMPLE_RATE)
  .then((result) => {
    plot({
      context: context2,
      colors: ['orange', 'lightBlue'],
      suggestedMin: -1.2,
      suggestedMax: 1.2,
      signals: [signal, result],
      axis: tAxis.slice(0, 1500)
    })
  })

filter
  .filterSignal(signal, 'highpass', 700, SAMPLE_RATE)
  .then((result) => {
    plot({
      context: context3,
      colors: ['orange', 'lightBlue'],
      suggestedMin: -1.2,
      suggestedMax: 1.2,
      signals: [signal, result],
      axis: tAxis.slice(0, 1500)
    })
  })
54 |
--------------------------------------------------------------------------------
/sound-example/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/sound-example/sound-example.js:
--------------------------------------------------------------------------------
// Loads ../peels.mp3, plays it, and plots a short window of raw vs.
// highpass-filtered samples. Filter and plot are globals from ../filter.js
// and ../plot-utils.js (script tags in the HTML).
const audioContext = new AudioContext()
const filter = new Filter(audioContext)
3 |
// Downloads a file as an ArrayBuffer. Unlike audio-utils' loadSound, this
// resolves with the raw (undecoded) response bytes; decoding happens at the
// call site.
const loadSound = (path) =>
  new Promise((resolve, reject) => {
    const request = new XMLHttpRequest()

    request.open('GET', path, true)
    request.responseType = 'arraybuffer'

    request.onload = () => resolve(request.response)
    request.onerror = (error) => reject(error)

    request.send()
  })
19 |
// Plays two seconds of the decoded buffer and, in parallel, filters a short
// slice of its samples and plots raw vs. filtered.
// Uses the module-level audioContext, filter and plotGraph.
const playBuffer = (buffer) => {
  const startTime = audioContext.currentTime

  // A 150-sample window offset into the track (skips the leading samples).
  const signal = buffer.getChannelData(0).slice(1000, 1150)

  filter.filterSignal(signal, 'highpass', 15000)
    .then((filtered) => plotGraph(signal, filtered))

  const source = audioContext.createBufferSource()
  source.buffer = buffer

  source.connect(audioContext.destination)

  // Play for exactly two seconds.
  source.start(startTime)
  source.stop(startTime + 2)
}
37 |
// Plots the raw and filtered sample windows on the page's canvas using the
// global plot helper from ../plot-utils.js.
const plotGraph = (signal, filtered) => {
  const context = document.querySelector('#canvas').getContext('2d')

  // One x-axis label per sample index.
  const axis = Array.from(signal, (value, i) => i)

  plot({
    signals: [signal, filtered],
    colors: ['orange', 'lightBlue'],
    axis,
    context,
    suggestedMin: 0,
    suggestedMax: 0.0005
  })
}
53 |
// Kick everything off: fetch the mp3, decode it, then play + plot.
loadSound('../peels.mp3')
  .then((data) => {
    audioContext.decodeAudioData(
      data,
      (buffer) => {
        playBuffer(buffer)
      },
      (error) => console.error(error)
    )
  })
  // Fix: the chain previously had no rejection handler, so a network failure
  // became an unhandled promise rejection; surface it instead.
  .catch((error) => console.error(error))
64 |
--------------------------------------------------------------------------------