├── Orbiter.jucer
├── OrbiterUnitTests
├── OrbiterUnitTests.jucer
└── Source
│ ├── Main.cpp
│ ├── MainComponent.cpp
│ └── MainComponent.h
├── README.md
├── Source
├── AzimuthUIComponent.cpp
├── AzimuthUIComponent.h
├── HRTFProcessor.cpp
├── HRTFProcessor.h
├── PluginEditor.cpp
├── PluginEditor.h
├── PluginProcessor.cpp
└── PluginProcessor.h
├── readme_resources
├── .DS_Store
├── Orbiter_GUI.png
├── Orbiter_GUI_v0.0.png
└── Orbiter_GUI_v0_1.png
└── signal_validation.py
/Orbiter.jucer:
--------------------------------------------------------------------------------
1 |
2 |
3 |
6 |
7 |
8 |
10 |
12 |
14 |
15 |
16 |
18 |
20 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/OrbiterUnitTests/OrbiterUnitTests.jucer:
--------------------------------------------------------------------------------
1 |
2 |
3 |
6 |
7 |
8 |
9 |
10 |
12 |
13 |
14 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
--------------------------------------------------------------------------------
/OrbiterUnitTests/Source/Main.cpp:
--------------------------------------------------------------------------------
1 | /*
2 | ==============================================================================
3 |
4 | This file contains the basic startup code for a JUCE application.
5 |
6 | ==============================================================================
7 | */
8 |
#include <JuceHeader.h>
#include "MainComponent.h"
11 |
12 | //==============================================================================
13 | class OrbiterUnitTestsApplication : public juce::JUCEApplication
14 | {
15 | public:
16 | //==============================================================================
17 | OrbiterUnitTestsApplication() {}
18 |
19 | const juce::String getApplicationName() override { return ProjectInfo::projectName; }
20 | const juce::String getApplicationVersion() override { return ProjectInfo::versionString; }
21 | bool moreThanOneInstanceAllowed() override { return true; }
22 |
23 | //==============================================================================
24 | void initialise (const juce::String& commandLine) override
25 | {
26 | // This method is where you should put your application's initialisation code..
27 |
28 | mainWindow.reset (new MainWindow (getApplicationName()));
29 | }
30 |
31 | void shutdown() override
32 | {
33 | // Add your application's shutdown code here..
34 |
35 | mainWindow = nullptr; // (deletes our window)
36 | }
37 |
38 | //==============================================================================
39 | void systemRequestedQuit() override
40 | {
41 | // This is called when the app is being asked to quit: you can ignore this
42 | // request and let the app carry on running, or call quit() to allow the app to close.
43 | quit();
44 | }
45 |
46 | void anotherInstanceStarted (const juce::String& commandLine) override
47 | {
48 | // When another instance of the app is launched while this one is running,
49 | // this method is invoked, and the commandLine parameter tells you what
50 | // the other instance's command-line arguments were.
51 | }
52 |
53 | //==============================================================================
54 | /*
55 | This class implements the desktop window that contains an instance of
56 | our MainComponent class.
57 | */
58 | class MainWindow : public juce::DocumentWindow
59 | {
60 | public:
61 | MainWindow (juce::String name)
62 | : DocumentWindow (name,
63 | juce::Desktop::getInstance().getDefaultLookAndFeel()
64 | .findColour (juce::ResizableWindow::backgroundColourId),
65 | DocumentWindow::allButtons)
66 | {
67 | setUsingNativeTitleBar (true);
68 | setContentOwned (new MainComponent(), true);
69 |
70 | #if JUCE_IOS || JUCE_ANDROID
71 | setFullScreen (true);
72 | #else
73 | setResizable (true, true);
74 | centreWithSize (getWidth(), getHeight());
75 | #endif
76 |
77 | setVisible (true);
78 | }
79 |
80 | void closeButtonPressed() override
81 | {
82 | // This is called when the user tries to close this window. Here, we'll just
83 | // ask the app to quit when this happens, but you can change this to do
84 | // whatever you need.
85 | JUCEApplication::getInstance()->systemRequestedQuit();
86 | }
87 |
88 | /* Note: Be careful if you override any DocumentWindow methods - the base
89 | class uses a lot of them, so by overriding you might break its functionality.
90 | It's best to do all your work in your content component instead, but if
91 | you really have to override any DocumentWindow methods, make sure your
92 | subclass also calls the superclass's method.
93 | */
94 |
95 | private:
96 | JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MainWindow)
97 | };
98 |
99 | private:
100 | std::unique_ptr mainWindow;
101 | };
102 |
//==============================================================================
// This macro generates the main() routine that launches the app.
START_JUCE_APPLICATION (OrbiterUnitTestsApplication)
106 |
--------------------------------------------------------------------------------
/OrbiterUnitTests/Source/MainComponent.cpp:
--------------------------------------------------------------------------------
1 | #include "MainComponent.h"
2 |
//==============================================================================
// Builds the unit-test-runner UI (run button, category picker, results log)
// and opens the audio device, requesting record permission where required.
MainComponent::MainComponent()
{
    // Make sure you set the size of the component after
    // you add any child components.
    setSize (800, 600);

    // Some platforms require permissions to open input channels so request that here
    if (juce::RuntimePermissions::isRequired (juce::RuntimePermissions::recordAudio)
        && ! juce::RuntimePermissions::isGranted (juce::RuntimePermissions::recordAudio))
    {
        // NOTE(review): the [&] capture assumes this component outlives the
        // asynchronous permission callback — confirm there is no teardown race.
        juce::RuntimePermissions::request (juce::RuntimePermissions::recordAudio,
                                           [&] (bool granted) { setAudioChannels (granted ? 2 : 0, 2); });
    }
    else
    {
        // Specify the number of input and output channels that we want to open
        setAudioChannels (2, 2);
    }

    // Button that kicks off the currently selected test category.
    addAndMakeVisible(startTestButton);
    startTestButton.onClick = [this]{ start (); };

    // Multi-line log for the test output; monospaced so columns line up.
    addAndMakeVisible(testResultsBox);
    testResultsBox.setMultiLine(true);
    testResultsBox.setFont(juce::Font(juce::Font::getDefaultMonospacedFontName(), 12.0f, juce::Font::plain));

    // Category picker: "All Tests" (id 1) plus every registered UnitTest category.
    addAndMakeVisible(categoriesBox);
    categoriesBox.addItem("All Tests", 1);

    auto categories = juce::UnitTest::getAllCategories();
    categories.sort(true);

    categoriesBox.addItemList(categories, 2);
    categoriesBox.setSelectedId(1);

    // bool success = sofa.readSOFAFile(sofaFilePath);
    // if (!success)
    //     DBG("Failed to read SOFA file");

    // NOTE(review): this supersedes the setSize(800, 600) call above — the
    // component ends up 500x500; the earlier call looks redundant. Confirm
    // before removing.
    setSize (500, 500);
}
46 |
MainComponent::~MainComponent()
{
    // This shuts down the audio device and clears the audio source.
    shutdownAudio();
}
52 |
//==============================================================================
void MainComponent::prepareToPlay (int samplesPerBlockExpected, double sampleRate)
{
    // Called on the audio thread when the device starts or its settings
    // (sample rate, block size, etc.) change. This harness produces no audio,
    // so there are no resources to set up here.
}
64 |
void MainComponent::getNextAudioBlock (const juce::AudioSourceChannelInfo& bufferToFill)
{
    // No audio is generated by the test harness; clear the active region so
    // the device outputs silence instead of uninitialised noise.
    bufferToFill.clearActiveBufferRegion();
}
75 |
void MainComponent::releaseResources()
{
    // Called when the audio device stops or restarts after a settings change.
    // Nothing was allocated in prepareToPlay(), so nothing to release.
}
83 |
84 | //==============================================================================
85 | void MainComponent::paint (juce::Graphics& g)
86 | {
87 | // (Our component is opaque, so we must completely fill the background with a solid colour)
88 | g.fillAll (getLookAndFeel().findColour (juce::ResizableWindow::backgroundColourId));
89 |
90 | // You can add your drawing code here!
91 | }
92 |
93 | void MainComponent::resized()
94 | {
95 | auto bounds = getLocalBounds().reduced(6);
96 | auto topSlice = bounds.removeFromTop(25);
97 |
98 | startTestButton.setBounds(topSlice.removeFromLeft(200));
99 | topSlice.removeFromLeft(10);
100 |
101 | categoriesBox.setBounds(topSlice.removeFromLeft(250));
102 |
103 | bounds.removeFromTop(5);
104 | testResultsBox.setBounds(bounds);
105 | }
106 |
107 | void MainComponent::start()
108 | {
109 | startTest(categoriesBox.getText());
110 | }
111 |
112 | void MainComponent::startTest(const juce::String &category)
113 | {
114 | testResultsBox.clear();
115 | startTestButton.setEnabled(false);
116 |
117 | currentTestThread.reset(new TestRunnerThread (*this, category));
118 | currentTestThread->startThread();
119 | }
120 |
121 | void MainComponent::stopTest()
122 | {
123 | if (currentTestThread.get() != nullptr)
124 | {
125 | currentTestThread->stopThread(15000);
126 | currentTestThread.reset();
127 | }
128 | }
129 |
// Append one line to the results box, keeping the view scrolled to the end.
void MainComponent::logMessage(const juce::String &message)
{
    testResultsBox.moveCaretToEnd();
    testResultsBox.insertTextAtCaret(message + juce::newLine);
    testResultsBox.moveCaretToEnd();
}
136 |
137 | void MainComponent::testFinished()
138 | {
139 | stopTest();
140 | startTestButton.setEnabled(true);
141 | logMessage("*** Tests Finished ***");
142 | }
143 |
--------------------------------------------------------------------------------
/OrbiterUnitTests/Source/MainComponent.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include
4 | #include "HRTFProcessor.h"
5 |
6 |
7 | //==============================================================================
8 | /*
9 | This component lives inside our window, and this is where you should put all
10 | your controls and content.
11 | */
12 | class MainComponent : public juce::AudioAppComponent
13 | {
14 | public:
15 | //==============================================================================
16 | MainComponent();
17 | ~MainComponent() override;
18 |
19 | //==============================================================================
20 | void prepareToPlay (int samplesPerBlockExpected, double sampleRate) override;
21 | void getNextAudioBlock (const juce::AudioSourceChannelInfo& bufferToFill) override;
22 | void releaseResources() override;
23 |
24 | //==============================================================================
25 | void paint (juce::Graphics& g) override;
26 | void resized() override;
27 |
28 | private:
29 |
30 | class TestRunnerThread : public juce::Thread, private juce::Timer
31 | {
32 | public:
33 | TestRunnerThread (MainComponent &utd, const juce::String &ctg) : juce::Thread("Unit Tests"), owner(utd), category(ctg) {};
34 |
35 | void run() override
36 | {
37 | CustomTestRunner runner(*this);
38 | runner.runTestsInCategory(category);
39 |
40 | startTimer(50);
41 | }
42 |
43 | void logMessage(const juce::String &message)
44 | {
45 | juce::WeakReference safeOwner(&owner);
46 |
47 | juce::MessageManager::callAsync([=]
48 | {
49 | if (auto *o = safeOwner.get())
50 | o->logMessage (message);
51 | });
52 | }
53 |
54 | void timerCallback() override
55 | {
56 | if (!isThreadRunning())
57 | owner.testFinished();
58 | }
59 |
60 | private:
61 |
62 | class CustomTestRunner : public juce::UnitTestRunner
63 | {
64 | public:
65 | CustomTestRunner (TestRunnerThread &trt) : owner(trt) {}
66 |
67 | void logMessage(const juce::String &message) override
68 | {
69 | owner.logMessage(message);
70 | }
71 |
72 | bool shouldAbortTests() override
73 | {
74 | return owner.threadShouldExit();
75 | }
76 |
77 | private:
78 | TestRunnerThread &owner;
79 |
80 | JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(CustomTestRunner)
81 | };
82 |
83 | MainComponent &owner;
84 | const juce::String category;
85 | };
86 |
87 | //==============================================================================
88 | // Your private member variables go here...
89 | void logMessage(const juce::String &message);
90 | void testFinished();
91 | void startTest(const juce::String &category);
92 | void stopTest();
93 | void start();
94 |
95 |
96 | juce::TextButton startTestButton {"Run Unit Tests"};
97 | juce::TextEditor testResultsBox;
98 | juce::ComboBox categoriesBox;
99 |
100 | std::unique_ptr currentTestThread;
101 |
102 | // const std::string sofaFilePath = "/Users/superkittens/projects/sound_prototypes/hrtf/hrtfs/BRIRs_from_a_room/A/002.sofa";
103 | // BasicSOFA::BasicSOFA sofa;
104 |
105 |
106 | JUCE_DECLARE_WEAK_REFERENCEABLE(MainComponent)
107 | JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (MainComponent)
108 | };
109 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Orbiter Plugin
2 |
3 | A spatial audio plugin for DAWs. This plugin accepts user-supplied HRIRs in the form of SOFA (Spatially Oriented Format for Acoustics) files. Only HRIRs, which are then converted into HRTFs, are accepted by the plugin.
4 |
5 | 
6 |
7 | ## Supported DAWs
8 | * Ableton Live
9 | * Logic Pro X
10 | * REAPER
11 |
12 | **Currently, automation in Logic Pro X does not work. A fix is in the works.**
13 |
14 | ## Building
15 | To read SOFA files, Orbiter uses [libBasicSOFA](https://github.com/superkittens/libBasicSOFA) which in turn uses the HDF5 library. Orbiter also uses the JUCE framework.
16 | The recommended way to build is to open the project via the Projucer. You may need to add the HDF5 library and libBasicSOFA manually in your project settings.
17 |
18 | ## Instructions for Use
19 | Currently, no default SOFA file is provided by the plugin itself. You will need to have one ready before using the plugin. The following example set of [SOFA files](https://zenodo.org/record/206860#.XzygXy0ZNQI) have been known to work with Orbiter.
20 |
21 | To load a SOFA file, open the plugin GUI and click *Open SOFA* and select your desired file.
22 |
23 | Orbiter only accepts SOFA files with measurements in spherical coordinates. Theta is the source angle on the horizontal head plane, while Phi is the elevation angle. Theta can range from -179 to 180 degrees and Phi from -90 to 90 degrees; the sliders map the values 0 - 1 onto the available angles defined in the SOFA file. Radius controls the distance of the source from the listener.
24 |
25 | The left side of the GUI represents the location of the sound source. Moving the orange circle around will change the source's theta and radius parameters. The elevation vertical slider changes the elevation (phi). The rotary sliders to the right control the input/output gain and reverb settings.
26 |
27 |
--------------------------------------------------------------------------------
/Source/AzimuthUIComponent.cpp:
--------------------------------------------------------------------------------
1 | #include
2 | #include "AzimuthUIComponent.h"
3 |
4 | //==============================================================================
5 | AzimuthUIComponent::AzimuthUIComponent()
6 | {
7 | // In your constructor, you should add any child components, and
8 | // initialise any special settings that your component needs.
9 | setSize(300, 300);
10 | addAndMakeVisible(sourceComponent);
11 |
12 | circleScaleValue = 0.75;
13 | circleLineThickness = 5;
14 |
15 | maxRadius = (getWidth() / 2) * 0.75;
16 |
17 | sourceComponent.addComponentListener(this);
18 | }
19 |
AzimuthUIComponent::~AzimuthUIComponent()
{
    // Nothing to release; child components are owned by value.
}
23 |
24 | void AzimuthUIComponent::paint (juce::Graphics& g)
25 | {
26 | g.fillAll (getLookAndFeel().findColour (juce::ResizableWindow::backgroundColourId)); // clear the background
27 |
28 | // Draw Axes
29 | float lineThickness = 2;
30 |
31 | g.setColour(juce::Colours::white);
32 | g.drawLine(getWidth() / 2, 0, getWidth() / 2, getHeight(), lineThickness);
33 | g.drawLine(0, getHeight() / 2, getWidth(), getHeight() / 2, lineThickness);
34 |
35 | g.setColour(juce::Colours::white);
36 |
37 | // Draw Angle Indicators
38 | float angleTextWidth = 50;
39 | float angleTextHeight = 10;
40 |
41 | g.drawFittedText("0", getWidth() / 2, 0, angleTextWidth, angleTextHeight, juce::Justification::Flags::left, 1);
42 | g.drawFittedText("90", 0, getHeight() / 2, angleTextWidth, angleTextHeight, juce::Justification::Flags::left, 1);
43 | g.drawFittedText("180", getWidth() / 2, getHeight() - angleTextHeight, angleTextWidth, angleTextHeight, juce::Justification::Flags::left, 1);
44 | g.drawFittedText("-90", getWidth() - (angleTextWidth / 2), getHeight() / 2, angleTextWidth, angleTextHeight, juce::Justification::Flags::left, 1);
45 |
46 |
47 | // Draw "sound field"
48 | juce::Rectangle circleArea(getWidth() * circleScaleValue, getHeight() * circleScaleValue);
49 | circleArea.setCentre(getWidth() / 2, getHeight() / 2);
50 |
51 | g.setColour(juce::Colours::grey);
52 | g.fillEllipse(circleArea);
53 |
54 | g.setColour(juce::Colours::white);
55 | g.drawEllipse(circleArea, circleLineThickness);
56 |
57 | circleArea = juce::Rectangle(30, 30);
58 | circleArea.setCentre(getWidth() / 2, getHeight() / 2);
59 | g.fillEllipse(circleArea);
60 | }
61 |
void AzimuthUIComponent::resized()
{
    // Start the source marker horizontally centred, 100px from the top.
    // NOTE(review): the 100px offset is hard-coded rather than derived from
    // the component size — confirm this is intended.
    sourceComponent.setCentrePosition(getWidth() / 2, 100);
}
68 |
69 |
70 | /*
71 | * Calculate the angle and readius of the source relative to the listener position
72 | */
73 | std::pair AzimuthUIComponent::calculateSourceAngleAndRadius()
74 | {
75 | auto sourcePos = sourceComponent.getBounds().getCentre().toFloat();
76 |
77 | auto sourceXPosRelative = sourcePos.getX() - (getWidth() / 2);
78 | auto sourceYPosRelative = sourcePos.getY() - (getHeight() / 2);
79 |
80 | auto radius = sqrt((sourceXPosRelative * sourceXPosRelative) + (sourceYPosRelative * sourceYPosRelative));
81 | auto angle = (atan2(-sourceXPosRelative, -sourceYPosRelative) * 180 / juce::MathConstants::pi);
82 |
83 | auto radiusNormalised = juce::jmap(radius, 0, getWidth() / 2, 0, 1);
84 | auto angleNormalised = juce::jmap(angle, -179, 180, 0, 1);
85 |
86 | normalisedSourceAngleAndRadius.first = angleNormalised;
87 | normalisedSourceAngleAndRadius.second = radiusNormalised;
88 |
89 | return std::pair(angle, radius);
90 | }
91 |
92 |
93 | void AzimuthUIComponent::componentMovedOrResized(Component &component, bool wasMoved, bool wasResized)
94 | {
95 | std::pair angleAndRadius = calculateSourceAngleAndRadius();
96 |
97 | if (angleAndRadius.second > maxRadius)
98 | angleAndRadius.second = maxRadius;
99 |
100 | auto radiusNormalised = juce::jmap(angleAndRadius.second, 0, getWidth() / 2, 0, 1);
101 | auto angleNormalised = juce::jmap(angleAndRadius.first, -179, 180, 0, 1);
102 |
103 | normalisedSourceAngleAndRadius.first = angleNormalised;
104 | normalisedSourceAngleAndRadius.second = radiusNormalised;
105 | }
106 |
107 |
108 | void AzimuthUIComponent::updateSourcePosition(float normalisedAngle, float normalisedRadius)
109 | {
110 | auto angle = juce::jmap(normalisedAngle, 0, 1, -179, 180);
111 | auto radius = juce::jmap(normalisedRadius, 0, 1, 0, getWidth() / 2);
112 |
113 | angle = angle * juce::MathConstants::pi / 180.f;
114 |
115 | // Transform the vector to get it in terms of the window frame of reference
116 | int xPos = (getWidth() / 2) - (radius * sin(angle));
117 | int yPos = (getHeight() / 2) - (radius * cos(angle));
118 |
119 | sourceComponent.setCentrePosition(yPos, xPos);
120 | repaint();
121 | }
122 |
123 |
// Returns the cached {angle, radius} pair, both normalised to [0, 1]
// (refreshed whenever the source marker moves).
std::pair AzimuthUIComponent::getNormalisedAngleAndRadius()
{
    return normalisedSourceAngleAndRadius;
}
128 |
--------------------------------------------------------------------------------
/Source/AzimuthUIComponent.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #include
4 |
5 | //==============================================================================
6 | /*
7 | */
8 | class AzimuthUIComponent : public juce::Component, public juce::ComponentListener
9 | {
10 | public:
11 |
12 | class SourceUIComponent : public juce::Component
13 | {
14 | public:
15 | SourceUIComponent()
16 | {
17 | setSize(30, 30);
18 | sourceColour = juce::Colours::orange;
19 | }
20 |
21 | void paint(juce::Graphics &g) override
22 | {
23 | juce::Rectangle sourceArea(30, 30);
24 | g.setColour (sourceColour);
25 | g.fillEllipse(sourceArea);
26 | }
27 |
28 | void resized() override
29 | {
30 | constrainer.setMinimumOnscreenAmounts (getHeight(), getWidth(),
31 | getHeight(), getWidth());
32 | }
33 |
34 | void mouseDown(const juce::MouseEvent &e) override
35 | {
36 | dragger.startDraggingComponent(this, e);
37 | }
38 |
39 | void mouseDrag(const juce::MouseEvent &e) override
40 | {
41 | dragger.dragComponent(this, e, &constrainer);
42 | }
43 |
44 | void mouseEnter(const juce::MouseEvent &e) override
45 | {
46 | sourceColour = juce::Colours::lightgreen;
47 | repaint();
48 | }
49 |
50 | void mouseExit(const juce::MouseEvent &e) override
51 | {
52 | sourceColour = juce::Colours::orange;
53 | repaint();
54 | }
55 |
56 | private:
57 | juce::ComponentBoundsConstrainer constrainer;
58 | juce::ComponentDragger dragger;
59 |
60 | juce::Colour sourceColour;
61 |
62 | JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(SourceUIComponent);
63 | };
64 |
65 |
66 | AzimuthUIComponent();
67 | ~AzimuthUIComponent() override;
68 |
69 | void paint (juce::Graphics&) override;
70 | void resized() override;
71 | std::pair getNormalisedAngleAndRadius();
72 | void updateSourcePosition(float normalisedAngle, float normalisedRadius);
73 |
74 |
75 | SourceUIComponent sourceComponent;
76 |
77 | private:
78 |
79 | void componentMovedOrResized(Component &component, bool wasMoved, bool wasResized) override;
80 | std::pair calculateSourceAngleAndRadius();
81 |
82 |
83 | std::pair normalisedSourceAngleAndRadius;
84 | float circleScaleValue;
85 | float circleLineThickness;
86 | float maxRadius;
87 |
88 |
89 | JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AzimuthUIComponent)
90 | };
91 |
--------------------------------------------------------------------------------
/Source/HRTFProcessor.cpp:
--------------------------------------------------------------------------------
1 | #include "HRTFProcessor.h"
2 |
// Default-constructed processor is unusable until init() succeeds
// (hrirLoaded stays false and the processing entry points bail out).
HRTFProcessor::HRTFProcessor()
{
    olaWriteIndex = 0;
    hrirChanged = false;
    hrirLoaded = false;
}
9 |
10 | HRTFProcessor::HRTFProcessor(const double *hrir, size_t hrirSize, float samplingFreq, size_t audioBufferSize, size_t numDelaySamples)
11 | {
12 | olaWriteIndex = 0;
13 | hrirChanged = false;
14 | hrirLoaded = false;
15 |
16 | if (!init(hrir, hrirSize, fs, audioBufferSize, numDelaySamples))
17 | hrirLoaded = false;
18 | }
19 |
20 |
21 | bool HRTFProcessor::init(const double *hrir, size_t hrirSize, float samplingFreq, size_t audioBufferSize, size_t numDelaySamples)
22 | {
23 | if (hrirLoaded)
24 | return false;
25 |
26 | if (hrir == nullptr)
27 | return false;
28 |
29 | if (hrirSize <= 0 || samplingFreq <= 0.0 || audioBufferSize <= 1)
30 | return false;
31 |
32 | // audioBufferSize must be a power of 2
33 | // Check this constraint here
34 | auto audioBufferSizeCopy = audioBufferSize;
35 | size_t bitSum = 0;
36 | for (auto i = 0; i < sizeof(audioBufferSize) * 8; ++i)
37 | {
38 | bitSum += audioBufferSizeCopy & 0x01;
39 | audioBufferSizeCopy = audioBufferSizeCopy >> 1;
40 | }
41 |
42 | if (bitSum > 1)
43 | return false;
44 |
45 |
46 | // When performing overlap and add with windowing, we need to have 2x audioBufferSize in order to get audioBufferSize outputs
47 | fs = samplingFreq;
48 | audioBlockSize = (audioBufferSize * 2) + 1;
49 | hopSize = audioBufferSize;
50 |
51 | // Calculate buffer sizes and initialize them
52 | auto bufferPower = calculateNextPowerOfTwo(hrirSize + audioBlockSize);
53 | zeroPaddedBufferSize = pow(2, bufferPower);
54 |
55 | fftEngine.reset(new juce::dsp::FFT(bufferPower));
56 | if (fftEngine.get() == nullptr)
57 | return false;
58 |
59 | inputBuffer = std::vector(3 * audioBufferSize+ 1);
60 | std::fill(inputBuffer.begin(), inputBuffer.end(), 0.0);
61 |
62 | outputBuffer = std::vector(zeroPaddedBufferSize);
63 | std::fill(outputBuffer.begin(), outputBuffer.end(), 0.0);
64 |
65 | xBuffer = std::vector>(zeroPaddedBufferSize);
66 | std::fill(xBuffer.begin(), xBuffer.end(), std::complex(0.0, 0.0));
67 |
68 | auxBuffer = std::vector>(zeroPaddedBufferSize);
69 |
70 | olaBuffer = std::vector(zeroPaddedBufferSize);
71 | std::fill(olaBuffer.begin(), olaBuffer.end(), 0.0);
72 |
73 | shadowOLABuffer = std::vector(zeroPaddedBufferSize);
74 | std::fill(shadowOLABuffer.begin(), shadowOLABuffer.end(), 0.0);
75 |
76 | activeHRTF = std::vector>(zeroPaddedBufferSize);
77 | std::fill(xBuffer.begin(), xBuffer.end(), std::complex(0.0, 0.0));
78 |
79 | auxHRTFBuffer = std::vector>(zeroPaddedBufferSize);
80 | std::fill(xBuffer.begin(), xBuffer.end(), std::complex(0.0, 0.0));
81 |
82 | reverbBuffer = std::vector(zeroPaddedBufferSize);
83 | std::fill(reverbBuffer.begin(), reverbBuffer.end(), 0.0);
84 |
85 | outputSampleStart = 0;
86 | outputSampleEnd = 0;
87 | numSamplesAdded = 0;
88 | inputBlockStart = 0;
89 | inputSampleAddIndex = 0;
90 | numOutputSamplesAvailable = 0;
91 |
92 | reverbBufferStartIndex = 0;
93 | reverbBufferAddIndex = 0;
94 |
95 | reverb.setSampleRate(samplingFreq);
96 | juce::Reverb::Parameters reverbParam;
97 |
98 | reverbParam.roomSize = 0.5f;
99 | reverbParam.damping = 0.5f;
100 | reverbParam.dryLevel = 0.5f;
101 | reverbParam.wetLevel = 0.5f;
102 | reverbParam.width = 0.5f;
103 |
104 | reverb.setParameters(reverbParam);
105 |
106 | // Create input window
107 | // To satisfy the COLA constraint for a Hamming window, the last value should be 0
108 | window = std::vector(audioBlockSize);
109 | juce::dsp::WindowingFunction::fillWindowingTables(window.data(), audioBlockSize, juce::dsp::WindowingFunction::WindowingMethod::triangular);
110 |
111 |
112 | // Transform HRIR into HRTF
113 | if (!setupHRTF(hrir, hrirSize, numDelaySamples))
114 | return false;
115 |
116 | for (auto i = 0; i < audioBlockSize; ++i)
117 | {
118 | fadeOutEnvelope.push_back(pow(juce::dsp::FastMathApproximations::cos((i * juce::MathConstants::pi) / (2 * audioBlockSize)), 2));
119 | fadeInEnvelope.push_back(pow(juce::dsp::FastMathApproximations::sin((i * juce::MathConstants::pi) / (2 * audioBlockSize)), 2));
120 | }
121 |
122 | hrirLoaded = true;
123 |
124 | return true;
125 | }
126 |
127 |
128 | bool HRTFProcessor::swapHRIR(const double *hrir, size_t hrirSize, size_t numDelaySamples)
129 | {
130 | if (!hrirLoaded || hrirSize <= 0)
131 | return false;
132 |
133 | bool success = setupHRTF(hrir, hrirSize, numDelaySamples);
134 | if (!success)
135 | return false;
136 |
137 | hrirChanged = true;
138 |
139 | return true;
140 | }
141 |
142 |
/*
 * DO NOT CALL THIS FUNCTION ON MULTIPLE THREADS
 * Add input samples into the input buffer (and, in parallel, the reverb
 * ring buffer). Once a full analysis block (audioBlockSize samples) has
 * accumulated, a windowed block is extracted and processed.
 * Returns false if the write would overflow the unread part of the buffer.
 * To get the processed output, call getOutput()
 */
bool HRTFProcessor::addSamples(float *samples, size_t numSamples)
{
    // Reject writes that would overwrite samples not yet consumed.
    if (numSamples > inputBuffer.size() - numSamplesAdded)
        return false;

    for (auto i = 0; i < numSamples; ++i)
    {
        // Add samples into the input buffer and reverb buffer
        inputBuffer[inputSampleAddIndex] = samples[i];
        reverbBuffer[reverbBufferAddIndex] = samples[i];

        inputSampleAddIndex = (inputSampleAddIndex + 1) % inputBuffer.size();
        reverbBufferAddIndex = (reverbBufferAddIndex + 1) % reverbBuffer.size();

        numSamplesAdded++;
    }


    // Execute when we have added enough samples for processing
    if (numSamplesAdded >= audioBlockSize)
    {
        // Consume one hop; the remaining (audioBlockSize - hopSize) samples
        // overlap into the next analysis block.
        numSamplesAdded -= hopSize;
        std::vector x(audioBlockSize);
        auto blockStart = inputBlockStart;

        // Extract and window one block, wrapping around the ring buffer.
        for (auto i = 0; i < audioBlockSize; ++i)
        {
            x[i] = inputBuffer[blockStart] * window[i];
            blockStart = (blockStart + 1) % inputBuffer.size();
        }

        // Advance the read position by one hop and run the HRTF convolution.
        inputBlockStart = (inputBlockStart + hopSize) % inputBuffer.size();
        calculateOutput(x);
    }

    return true;
}
186 |
187 |
188 | std::vector HRTFProcessor::getOutput(size_t numSamples)
189 | {
190 | std::vector out(numSamples);
191 | if (numSamples > numOutputSamplesAvailable)
192 | return std::vector(0);
193 |
194 | // Get reverberated input signal
195 | reverb.processMono(reverbBuffer.data() + reverbBufferStartIndex, (int)numSamples);
196 |
197 | for (auto i = 0; i < numSamples; ++i)
198 | {
199 | out[i] = outputBuffer[outputSampleStart] + (0.5f * reverbBuffer[reverbBufferStartIndex]);
200 | outputSampleStart = (outputSampleStart + 1) % outputBuffer.size();
201 | reverbBufferStartIndex = (reverbBufferStartIndex + 1) % reverbBuffer.size();
202 | }
203 |
204 | numOutputSamplesAvailable -= numSamples;
205 |
206 | return out;
207 | }
208 |
209 |
210 | /*
211 | * Clear everything in the input, output and OLA buffers
212 | * Any indices related to these buffers are also reset
213 | */
214 | void HRTFProcessor::flushBuffers()
215 | {
216 | std::fill(inputBuffer.begin(), inputBuffer.end(), 0.0);
217 | std::fill(outputBuffer.begin(), outputBuffer.end(), 0.0);
218 | std::fill(olaBuffer.begin(), olaBuffer.end(), 0.0);
219 |
220 | inputBlockStart = 0;
221 | inputSampleAddIndex = 0;
222 | numSamplesAdded = 0;
223 | outputSamplesStart = 0;
224 | outputSamplesEnd = 0;
225 | numOutputSamplesAvailable = 0;
226 | }
227 |
228 |
/*
 * DO NOT CALL THIS FUNCTION ON MULTIPLE THREADS
 * Apply the HRTF to one windowed input block (length audioBlockSize):
 * FFT -> multiply by the active HRTF -> inverse FFT -> overlap-and-add.
 * If the HRTF was swapped (hrirChanged), the output is crossfaded between
 * the old and new HRTFs before the new one becomes active.
 * Returns a pointer to hopSize freshly produced samples inside olaBuffer,
 * or nullptr on invalid input / failed overlap-add.
 */
const float* HRTFProcessor::calculateOutput(const std::vector &x)
{
    if (!hrirLoaded || x.size() == 0)
        return nullptr;

    if (x.size() != audioBlockSize)
        return nullptr;

    // Zero the hop-sized region that was just consumed so it can accumulate
    // new overlap-add contributions, then advance the OLA write position.
    std::fill(olaBuffer.begin() + olaWriteIndex, olaBuffer.begin() + olaWriteIndex + hopSize, 0.0);

    olaWriteIndex = (olaWriteIndex + hopSize) % olaBuffer.size();

    // Load the (real) input block into the zero-padded complex FFT buffer.
    std::fill(xBuffer.begin(), xBuffer.end(), std::complex(0.0, 0.0));
    for (auto i = 0; i < x.size(); ++i)
        xBuffer.at(i) = std::complex(x.at(i), 0.0);

    fftEngine->perform(xBuffer.data(), xBuffer.data(), false);


    // Frequency-domain convolution: multiply by the active HRTF bin-by-bin.
    for (auto i = 0; i < zeroPaddedBufferSize; ++i)
        xBuffer.at(i) = xBuffer.at(i) * activeHRTF.at(i);

    fftEngine->perform(xBuffer.data(), xBuffer.data(), true);

    if (hrirChanged)
    {
        // Try-lock so the audio thread never blocks on a concurrent
        // setupHRTF(); if the lock is contended, the swap simply waits for
        // a later block.
        juce::SpinLock::ScopedTryLockType hrirChangingScopedLock(hrirChangingLock);
        if (hrirChangingScopedLock.isLocked())
        {
            hrirChanged = false;
            crossfadeWithNewHRTF(x);

            // Promote the queued HRTF to active.
            std::copy(auxHRTFBuffer.begin(), auxHRTFBuffer.end(), activeHRTF.begin());

            crossFaded = true;
        }
    }else
        crossFaded = false;

    if(!overlapAndAdd())
        return nullptr;


    // Copy outputtable audio data to the output buffer
    std::copy(olaBuffer.begin() + olaWriteIndex, olaBuffer.begin() + olaWriteIndex + hopSize, outputBuffer.begin() + outputSampleEnd);
    outputSampleEnd = (outputSampleEnd + hopSize) % outputBuffer.size();

    numOutputSamplesAvailable += hopSize;


    return olaBuffer.data() + olaWriteIndex;
}
287 |
288 |
289 |
290 | // Convert an HRIR into an HRTF and queue the new HRTF for swapping which is done in calculateOutput()
291 | bool HRTFProcessor::setupHRTF(const double *hrir, size_t hrirSize, size_t numDelaySamples)
292 | {
293 | if (hrirSize == 0 || hrirSize > zeroPaddedBufferSize)
294 | return false;
295 |
296 | if (fftEngine.get() == nullptr)
297 | return false;
298 |
299 |
300 | juce::SpinLock::ScopedLockType scopedLock(hrirChangingLock);
301 |
302 | std::fill(auxHRTFBuffer.begin(), auxHRTFBuffer.end(), std::complex(0.0, 0.0));
303 |
304 | std::vector hrirVec(hrirSize);
305 | for (auto i = 0; i < hrirSize; ++i)
306 | hrirVec[i] = hrir[i];
307 |
308 | if (numDelaySamples != 0)
309 | {
310 | if (!removeImpulseDelay(hrirVec, numDelaySamples))
311 | return false;
312 | }
313 |
314 | for (auto i = 0; i < hrirSize; ++i)
315 | {
316 | if (!hrirLoaded)
317 | activeHRTF.at(i) = std::complex((float)hrirVec[i], 0.0);
318 | else
319 | auxHRTFBuffer.at(i) = std::complex((float)hrirVec[i], 0.0);
320 | }
321 |
322 | if (!hrirLoaded)
323 | fftEngine->perform(activeHRTF.data(), activeHRTF.data(), false);
324 |
325 | else
326 | {
327 | fftEngine->perform(auxHRTFBuffer.data(), auxHRTFBuffer.data(), false);
328 | hrirChanged = true;
329 | }
330 |
331 | return true;
332 | }
333 |
334 |
335 | // Peel off a copy of the OLA buffer
336 | bool HRTFProcessor::copyOLABuffer(std::vector &dest, size_t numSamplesToCopy)
337 | {
338 | if (!hrirLoaded)
339 | return false;
340 |
341 | if (numSamplesToCopy > zeroPaddedBufferSize)
342 | return false;
343 |
344 | juce::SpinLock::ScopedLockType olaScopeLock(shadowOLACopyingLock);
345 |
346 | std::copy(shadowOLABuffer.begin(), shadowOLABuffer.begin() + numSamplesToCopy, dest.begin());
347 |
348 | return true;
349 | }
350 |
351 |
// Forward a full set of reverb parameters to the internal juce::Reverb.
// Thread-safety of setParameters() is juce::Reverb's concern; no locking here.
void HRTFProcessor::setReverbParameters(juce::Reverb::Parameters params)
{
    reverb.setParameters(params);
}
356 |
357 |
358 | bool HRTFProcessor::crossfadeWithNewHRTF(const std::vector &x)
359 | {
360 | if (!hrirLoaded)
361 | return false;
362 |
363 | // Calculate the output with the new HRTF applied before we crossfade the old and new outputs together
364 | std::fill(auxBuffer.begin(), auxBuffer.end(), std::complex(0.0, 0.0));
365 | for (auto i = 0; i < audioBlockSize; ++i)
366 | auxBuffer.at(i) = std::complex(x.at(i), 0.0);
367 |
368 | fftEngine->perform(auxBuffer.data(), auxBuffer.data(), false);
369 |
370 | for (auto i = 0; i < zeroPaddedBufferSize; ++i)
371 | auxBuffer.at(i) = auxBuffer.at(i) * auxHRTFBuffer.at(i);
372 |
373 | fftEngine->perform(auxBuffer.data(), auxBuffer.data(), true);
374 |
375 |
376 | for (auto i = 0; i < zeroPaddedBufferSize; ++i)
377 | {
378 | if (i < audioBlockSize)
379 | {
380 | auto fadedSignal = (xBuffer.at(i).real() * fadeOutEnvelope.at(i)) + (auxBuffer.at(i).real() * fadeInEnvelope.at(i));
381 | xBuffer.at(i) = std::complex(fadedSignal, 0.0);
382 | }
383 | else
384 | xBuffer.at(i) = auxBuffer.at(i);
385 | }
386 |
387 | return true;
388 | }
389 |
390 |
391 | bool HRTFProcessor::overlapAndAdd()
392 | {
393 | if (!hrirLoaded)
394 | return false;
395 |
396 | auto offset = olaWriteIndex;
397 |
398 | for (auto i = 0; i < zeroPaddedBufferSize; ++i)
399 | {
400 | olaBuffer.at(offset) += xBuffer.at(i).real();
401 | offset = (offset + 1) % zeroPaddedBufferSize;
402 | }
403 |
404 | juce::SpinLock::ScopedTryLockType olaScopeLock(shadowOLACopyingLock);
405 | if (olaScopeLock.isLocked())
406 | std::copy(olaBuffer.begin(), olaBuffer.end(), shadowOLABuffer.begin());
407 |
408 | return true;
409 | }
410 |
411 |
412 | unsigned int HRTFProcessor::calculateNextPowerOfTwo(float x)
413 | {
414 | return static_cast(log2(x)) + 1;
415 | }
416 |
417 |
418 | /*
419 | * Some impulse responses will have a delay before the impulse is actually triggered
420 | * This isn't much of a problem except when the delays between different impulses are different
421 | * which can lead to unpleasant zipper noise between HRIR transitions.
422 | * We'll attempt to remove the delay as much as possible to bring all HRIRs to start at a "common" point
423 | *
424 | * The starting point is determined by finding the first point where the impulse has a value >= abs(mean) + std dev
425 | */
426 | bool HRTFProcessor::removeImpulseDelay(std::vector &hrir, size_t numDelaySamples)
427 | {
428 | if (hrir.size() == 0) return false;
429 |
430 | std::copy(hrir.begin() + numDelaySamples, hrir.end(), hrir.begin());
431 | std::fill(hrir.end() - numDelaySamples, hrir.end(), 0.0);
432 |
433 | return true;
434 | }
435 |
436 |
437 | /*
438 | * Simple mean and std deviation calculation function
439 | * Mean and std are returned as a std::pair where mean is the first value and std is the second
440 | */
441 | std::pair HRTFProcessor::getMeanAndStd(const std::vector &x) const
442 | {
443 | std::pair stats(0, 0);
444 |
445 | if (x.size() != 0)
446 | {
447 | // Calculate mean
448 | float sum = 0;
449 | for (auto &i : x)
450 | sum += i;
451 |
452 | stats.first = sum / x.size();
453 |
454 | // Calculate std
455 | float stdSum = 0;
456 | for (auto &i : x)
457 | stdSum += (i - stats.first) * (i - stats.first);
458 |
459 | stdSum = sqrt(stdSum / x.size());
460 | stats.second = stdSum;
461 | }
462 |
463 | return stats;
464 | }
465 |
466 |
467 |
468 | #ifdef JUCE_UNIT_TESTS
469 | void HRTFProcessorTest::runTest()
470 | {
471 | HRTFProcessor processor;
472 |
473 | // Input an impulse to the HRTFProcessor
474 | // HRTF should be a constant (1)
475 | size_t fftSize = 512;
476 | std::vector hrir(fftSize);
477 | std::fill(hrir.begin(), hrir.end(), 0.0);
478 | hrir[(fftSize / 2) - 1] = 1.0;
479 |
480 | float samplingFreq = 44100.0;
481 | size_t audioBufferSize = 256;
482 |
483 | beginTest("HRTFProcessor Initialization");
484 |
485 | bool success = processor.init(hrir.data(), hrir.size(), samplingFreq, audioBufferSize);
486 | expect(success);
487 |
488 | expectEquals(processor.isHRIRLoaded(), 1);
489 | expectEquals(processor.zeroPaddedBufferSize, 2048);
490 | expectEquals(processor.fs, samplingFreq);
491 | expectEquals(processor.audioBlockSize, 2 * audioBufferSize + 1);
492 |
493 | for (auto i = 0; i < processor.zeroPaddedBufferSize; ++i)
494 | expectWithinAbsoluteError(std::abs(processor.activeHRTF[i]), 1.0, 0.01);
495 |
496 | //===================================================================================================//
497 |
498 |
499 | beginTest("HRTF Application");
500 |
501 | std::vector x(audioBufferSize);
502 | std::fill(x.begin(), x.end(), 1.0);
503 |
504 | // In order to meet the min number of samples needed for processing, call addSamples() 3 times
505 | processor.addSamples(x.data(), x.size());
506 | processor.addSamples(x.data(), x.size());
507 | processor.addSamples(x.data(), x.size());
508 |
509 | auto output = processor.getOutput(audioBufferSize);
510 |
511 | expectEquals(output.size(), audioBufferSize);
512 |
513 | // std::cout << "HRTFProcessor Output" << std::endl;
514 | // std::cout << "------------------" << std::endl;
515 | //
516 | // for (auto i = 0; i < audioBufferSize; ++i)
517 | // std::cout << output[i] << std::endl;
518 |
519 | //===================================================================================================//
520 |
521 |
522 | beginTest("Changing HRTF");
523 |
524 | size_t testSignalLength = 2048;
525 | float testFrequency = 500;
526 |
527 | // Create container to hold the results of processed data when HRIR was changed
528 | std::vector processedData(testSignalLength);
529 | std::fill(processedData.begin(), processedData.end(), 0.0);
530 |
531 | // First, reset all the internal HRTFProcessor buffers to start fresh
532 | processor.flushBuffers();
533 | output = processor.getOutput(audioBufferSize);
534 |
535 | expectEquals(output.size(), 0);
536 |
537 | std::vector signal(testSignalLength);
538 | createTestSignal(samplingFreq, testFrequency, signal);
539 |
540 | processor.addSamples(signal.data(), audioBufferSize * 3);
541 | output = processor.getOutput(audioBufferSize);
542 | std::copy(output.begin(), output.end(), processedData.begin());
543 |
544 | // Swap HRIR for an impulse response of all ones
545 | std::fill(hrir.begin(), hrir.end(), 1.0);
546 | expect(processor.swapHRIR(hrir.data(), hrir.size()));
547 |
548 | // Feed in rest of test signal and get the processed data
549 | for (auto i = 0; i < (testSignalLength / audioBufferSize) - 3; ++i)
550 | {
551 | processor.addSamples(signal.data() + ((i + 3) * audioBufferSize), audioBufferSize);
552 | output = processor.getOutput(audioBufferSize);
553 | std::copy(output.begin(), output.end(), processedData.begin() + ((i + 1) * audioBufferSize));
554 | }
555 |
556 |
557 | std::cout << "Processed Data from HRTF Change" << std::endl;
558 | std::cout << "==================" << std::endl;
559 |
560 | for (auto i = 0; i < testSignalLength; ++i)
561 | std::cout << processedData[i] << std::endl;
562 |
563 | }
564 |
565 |
566 | bool HRTFProcessorTest::createTestSignal(float fs, float f0, std::vector &dest)
567 | {
568 | if (dest.size() == 0)
569 | return false;
570 |
571 | for (auto i = 0; i < dest.size(); ++i)
572 | dest.at(i) = sin((i * 2 * juce::MathConstants::pi * f0) / fs);
573 |
574 | return true;
575 | }
576 |
577 | #endif
578 |
--------------------------------------------------------------------------------
/Source/HRTFProcessor.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include
3 | #include
4 | #include
5 |
6 |
7 | class HRTFProcessor
8 | {
9 | #ifdef JUCE_UNIT_TESTS
10 | friend class HRTFProcessorTest;
11 | #endif
12 |
13 | public:
14 |
15 | HRTFProcessor();
16 | HRTFProcessor(const double *hrir, size_t hrirSize, float samplingFreq, size_t audioBufferSize, size_t numDelaySamples);
17 |
18 | bool init(const double *hrir, size_t hrirSize, float samplingFreq, size_t audioBufferSize, size_t numDelaySamples);
19 | bool swapHRIR(const double *hrir, size_t hrirSize, size_t numDelaySamples);
20 | bool addSamples(float *samples, size_t numSamples);
21 | std::vector getOutput(size_t numSamples);
22 | void flushBuffers();
23 | bool copyOLABuffer(std::vector &dest, size_t numSamplesToCopy);
24 | bool isHRIRLoaded() { return hrirLoaded; }
25 | void setReverbParameters(juce::Reverb::Parameters params);
26 |
27 | bool crossFaded;
28 |
29 |
30 | protected:
31 |
32 | bool setupHRTF(const double *hrir, size_t hrirSize, size_t numDelaySamples);
33 | const float* calculateOutput(const std::vector &x);
34 | bool overlapAndAdd();
35 | bool crossfadeWithNewHRTF(const std::vector &x);
36 | unsigned int calculateNextPowerOfTwo(float x);
37 | bool removeImpulseDelay(std::vector &hrir, size_t numDelaySamples);
38 | std::pair getMeanAndStd(const std::vector &x) const;
39 |
40 |
41 | double fs;
42 |
43 | std::vector inputBuffer;
44 | size_t inputBlockStart;
45 | size_t inputSampleAddIndex;
46 | std::vector outputBuffer;
47 | size_t outputSamplesStart;
48 | size_t outputSamplesEnd;
49 | size_t numOutputSamplesAvailable;
50 | size_t numSamplesAdded;
51 | size_t hopSize;
52 |
53 | std::vector reverbBuffer;
54 | size_t reverbBufferStartIndex;
55 | size_t reverbBufferAddIndex;
56 |
57 | std::vector window;
58 |
59 | std::vector shadowOLABuffer;
60 | std::vector> activeHRTF;
61 | std::vector> auxHRTFBuffer;
62 | std::vector> xBuffer;
63 | std::vector olaBuffer;
64 | size_t outputSampleStart;
65 | size_t outputSampleEnd;
66 | std::vector> auxBuffer;
67 | size_t audioBlockSize;
68 |
69 | bool hrirChanged;
70 | size_t olaWriteIndex;
71 | size_t zeroPaddedBufferSize;
72 | std::vector fadeInEnvelope;
73 | std::vector fadeOutEnvelope;
74 |
75 | std::unique_ptr fftEngine;
76 |
77 | juce::Reverb reverb;
78 |
79 | juce::SpinLock hrirChangingLock;
80 | juce::SpinLock shadowOLACopyingLock;
81 |
82 | bool hrirLoaded;
83 | };
84 |
85 |
86 | #ifdef JUCE_UNIT_TESTS
87 | class HRTFProcessorTest : public juce::UnitTest
88 | {
89 | public:
90 | HRTFProcessorTest() : UnitTest("HRTFProcessorUnitTest", "HRTFProcessor") {};
91 |
92 | void runTest() override;
93 |
94 | private:
95 | bool createTestSignal(float fs, float f0, std::vector &dest);
96 | };
97 |
98 | static HRTFProcessorTest hrtfProcessorUnitTest;
99 |
100 | #endif
101 |
--------------------------------------------------------------------------------
/Source/PluginEditor.cpp:
--------------------------------------------------------------------------------
1 | /*
2 | ==============================================================================
3 |
4 | This file contains the basic framework code for a JUCE plugin editor.
5 |
6 | ==============================================================================
7 | */
8 |
9 | #include "PluginProcessor.h"
10 | #include "PluginEditor.h"
11 |
12 | //==============================================================================
13 | OrbiterAudioProcessorEditor::OrbiterAudioProcessorEditor (OrbiterAudioProcessor& p)
14 | : AudioProcessorEditor (&p),
15 | inputGainSlider(gainSliderSize, "Input Gain"),
16 | outputGainSlider(gainSliderSize, "Output Gain"),
17 | reverbRoomSizeSlider(reverbSliderSize, "Room Size"),
18 | reverbDampingSlider(reverbSliderSize, "Damping"),
19 | reverbWetLevelSlider(reverbSliderSize, "Wet"),
20 | reverbDryLevelSlider(reverbSliderSize, "Dry"),
21 | reverbWidthSlider(reverbSliderSize, "Width"),
22 | audioProcessor (p)
23 |
24 | {
25 | // Make sure that before the constructor has finished, you've set the
26 | // editor's size to whatever you need it to be.
27 | setSize (900, 350);
28 |
29 | hrtfThetaSlider.setSliderStyle(juce::Slider::SliderStyle::Rotary);
30 | hrtfThetaSlider.setTextBoxStyle(juce::Slider::TextEntryBoxPosition::TextBoxBelow, true, 50, 10);
31 | hrtfThetaSlider.setRange(0, 1);
32 | addChildComponent(hrtfThetaSlider);
33 |
34 | hrtfRadiusSlider.setSliderStyle(juce::Slider::SliderStyle::Rotary);
35 | hrtfRadiusSlider.setTextBoxStyle(juce::Slider::TextEntryBoxPosition::TextBoxBelow, true, 50, 10);
36 | hrtfRadiusSlider.setRange(0, 1);
37 | addChildComponent(hrtfRadiusSlider);
38 |
39 | hrtfPhiSlider.setSliderStyle(juce::Slider::SliderStyle::LinearVertical);
40 | hrtfPhiSlider.setTextBoxStyle(juce::Slider::TextEntryBoxPosition::NoTextBox, true, 100, 10);
41 | hrtfPhiSlider.setRange(0, 1);
42 | addAndMakeVisible(hrtfPhiSlider);
43 |
44 | addAndMakeVisible(inputGainSlider);
45 | addAndMakeVisible(outputGainSlider);
46 |
47 | addAndMakeVisible(reverbRoomSizeSlider);
48 | addAndMakeVisible(reverbDampingSlider);
49 | addAndMakeVisible(reverbWetLevelSlider);
50 | addAndMakeVisible(reverbDryLevelSlider);
51 | addAndMakeVisible(reverbWidthSlider);
52 |
53 |
54 | sofaFileButton.setButtonText("Open SOFA");
55 | sofaFileButton.onClick = [this]{ openSofaButtonClicked(); };
56 | addAndMakeVisible(sofaFileButton);
57 |
58 | hrtfThetaAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_THETA_ID, hrtfThetaSlider);
59 |
60 | hrtfPhiAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_PHI_ID, hrtfPhiSlider);
61 |
62 | hrtfRadiusAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_RADIUS_ID, hrtfRadiusSlider);
63 |
64 | inputGainAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_INPUT_GAIN_ID, inputGainSlider.slider);
65 |
66 | outputGainAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_OUTPUT_GAIN_ID, outputGainSlider.slider);
67 |
68 | reverbRoomSizeAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_REVERB_ROOM_SIZE_ID, reverbRoomSizeSlider.slider);
69 |
70 | reverbDampingAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_REVERB_DAMPING_ID, reverbDampingSlider.slider);
71 |
72 | reverbWetLevelAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_REVERB_WET_LEVEL_ID, reverbWetLevelSlider.slider);
73 |
74 | reverbDryLevelAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_REVERB_DRY_LEVEL_ID, reverbDryLevelSlider.slider);
75 |
76 | reverbWidthAttachment = std::make_unique(audioProcessor.valueTreeState, HRTF_REVERB_WIDTH_ID, reverbWidthSlider.slider);
77 |
78 |
79 | addAndMakeVisible(azimuthComp);
80 |
81 | prevAzimuthAngle = -1;
82 | prevAzimuthRadius = -1;
83 | prevParamAngle = -1;
84 | prevParamRadius = -1;
85 |
86 | startTimerHz(30);
87 | }
88 |
// Nothing to release explicitly: attachments are unique_ptrs and the Timer
// base class stops itself on destruction.
OrbiterAudioProcessorEditor::~OrbiterAudioProcessorEditor()
{
}
92 |
93 | //==============================================================================
94 | void OrbiterAudioProcessorEditor::paint (juce::Graphics& g)
95 | {
96 | // (Our component is opaque, so we must completely fill the background with a solid colour)
97 | g.fillAll (getLookAndFeel().findColour (juce::ResizableWindow::backgroundColourId));
98 |
99 | g.setColour (juce::Colours::white);
100 | g.setFont (15.0f);
101 |
102 | auto bounds = getLocalBounds();
103 | g.drawFittedText("Elevation", bounds.withTrimmedLeft(azimuthComp.getWidth() + azimuthXOffset).withTrimmedTop(paramCategoryTextOffset).withSize(gainSliderSize, sliderTextHeight), juce::Justification::Flags::centred, 1);
104 |
105 | g.drawFittedText("Gain Params", bounds.withTrimmedLeft(gainSliderXOffset - (gainSliderSize / 2)).withTrimmedTop(paramCategoryTextOffset).withSize(gainSliderSize, sliderTextHeight), juce::Justification::Flags::centred, 1);
106 |
107 | g.drawFittedText("Reverb Params", bounds.withTrimmedLeft(reverbSliderXOffset - 10).withTrimmedTop(paramCategoryTextOffset).withSize(100, sliderTextHeight), juce::Justification::Flags::centred, 1);
108 |
109 |
110 | // Draw gain parameters enclosing box
111 | g.setColour(juce::Colours::grey);
112 |
113 | juce::Rectangle gainEnclosingBox(gainSliderXOffset - (gainSliderEnclosingBoxWidth / 2), gainSliderEnclosingBoxYOffset, gainSliderEnclosingBoxWidth, gainSliderEnclosingBoxHeight);
114 | g.fillRoundedRectangle(gainEnclosingBox, 10);
115 |
116 | // Draw reverb parameters enclosing box
117 | float enclosureBuffer = (reverbSliderEnclosingBoxWidth - (reverbSliderSeparation + reverbSliderSize)) / 2;
118 |
119 | juce::Rectangle reverbEnclosingBox(reverbSliderXOffset - (reverbSliderSize / 2) - enclosureBuffer, reverbSliderEnclosingBoxYOffset, reverbSliderEnclosingBoxWidth, reverbSliderEnclosingBoxHeight);
120 | g.fillRoundedRectangle(reverbEnclosingBox, 10);
121 |
122 |
123 | // Draw SOFA load status text
124 | juce::String sofaStatus;
125 | if (audioProcessor.sofaFileLoaded)
126 | {
127 | g.setColour(juce::Colours::green);
128 | sofaStatus = "SOFA Loaded";
129 | }
130 | else
131 | {
132 | g.setColour(juce::Colours::red);
133 | sofaStatus = "SOFA Not Loaded";
134 | }
135 |
136 | g.drawFittedText(sofaStatus, getLocalBounds().withTrimmedTop(sofaStatusYOffset).withTrimmedLeft(sofaStatusXOffset).withSize(sofaStatusWidth, sofaStatusHeight), juce::Justification::Flags::centred, 1);
137 |
138 |
139 |
140 | }
141 |
142 | void OrbiterAudioProcessorEditor::resized()
143 | {
144 | // Theta and Radius sliders for debugging purposes
145 | hrtfThetaSlider.setBounds(getLocalBounds().withTrimmedTop(20).withTrimmedLeft(800).withSize(100, 100));
146 | hrtfRadiusSlider.setBounds(getLocalBounds().withTrimmedTop(150).withTrimmedLeft(800).withSize(100, 100));
147 |
148 | azimuthComp.setCentrePosition((azimuthComp.getWidth() / 2) + azimuthXOffset, getHeight() / 2);
149 |
150 | inputGainSlider.setCentrePosition(gainSliderXOffset, gainSliderYOffset);
151 | outputGainSlider.setCentrePosition(gainSliderXOffset, gainSliderYOffset + gainSliderEnclosingBoxHeight - gainSliderSize - 10);
152 |
153 | hrtfPhiSlider.setBounds(getLocalBounds().withTrimmedTop(elevationSliderYOffset).withTrimmedLeft(elevationSliderXOffset).withSize(50, 200));
154 |
155 | sofaFileButton.setBounds(getLocalBounds().withTrimmedTop(sofaButtonYOffset).withTrimmedLeft(sofaButtonXOffset).withSize(sofaButtonWidth, sofaButtonHeight));
156 |
157 | reverbRoomSizeSlider.setCentrePosition(reverbSliderXOffset, reverbSliderYOffset);
158 | reverbDampingSlider.setCentrePosition(reverbSliderXOffset + reverbSliderSeparation, reverbSliderYOffset);
159 | reverbWetLevelSlider.setCentrePosition(reverbSliderXOffset, reverbSliderYOffset + reverbSliderSeparation);
160 | reverbDryLevelSlider.setCentrePosition(reverbSliderXOffset + reverbSliderSeparation, reverbSliderYOffset + reverbSliderSeparation);
161 | reverbWidthSlider.setCentrePosition(reverbSliderXOffset + (reverbSliderSeparation / 2), reverbSliderYOffset + (2 * reverbSliderSeparation));
162 | }
163 |
164 |
165 | void OrbiterAudioProcessorEditor::openSofaButtonClicked()
166 | {
167 | juce::FileChooser fileChooser("Select SOFA File", {}, "*.sofa");
168 |
169 | if (fileChooser.browseForFileToOpen())
170 | {
171 | auto file = fileChooser.getResult();
172 | auto path = file.getFullPathName();
173 |
174 | notifyNewSOFA(path);
175 | }
176 | }
177 |
178 |
// Hand a new SOFA file path to the processor's watcher thread.
// The path is swapped into place before the "waiting" flag is raised, so a
// consumer that checks the flag first sees a complete path.
// NOTE(review): no lock is taken here — confirm newSofaFileWaiting /
// newSofaFilePath are accessed safely from the processor's thread.
void OrbiterAudioProcessorEditor::notifyNewSOFA(juce::String filePath)
{
    audioProcessor.newSofaFilePath.swapWith(filePath);
    audioProcessor.newSofaFileWaiting = true;
}
184 |
185 |
186 | void OrbiterAudioProcessorEditor::timerCallback()
187 | {
188 | if (audioProcessor.sofaFileLoaded)
189 | repaint();
190 |
191 | auto sourceAngleAndRadius = azimuthComp.getNormalisedAngleAndRadius();
192 | auto paramAngle = audioProcessor.valueTreeState.getRawParameterValue(HRTF_THETA_ID);
193 | auto paramRadius = audioProcessor.valueTreeState.getRawParameterValue(HRTF_RADIUS_ID);
194 |
195 | auto paramAngleValue = floorValue(*paramAngle, 0.00001);
196 | auto paramRadiusValue = floorValue(*paramRadius, 0.00001);
197 | auto uiAngleValue = floorValue(sourceAngleAndRadius.first, 0.00001);
198 | auto uiRadiusValue = floorValue(sourceAngleAndRadius.second, 0.00001);
199 |
200 | if (paramAngleValue != prevParamAngle || paramRadiusValue != prevParamRadius)
201 | {
202 | azimuthComp.updateSourcePosition(paramAngleValue, paramRadiusValue);
203 | prevAzimuthRadius = paramRadiusValue;
204 | prevAzimuthAngle = paramAngleValue;
205 | prevParamRadius = paramRadiusValue;
206 | prevParamAngle = paramAngleValue;
207 | }
208 | else if (uiAngleValue != prevAzimuthAngle || uiRadiusValue != prevAzimuthRadius)
209 | {
210 |
211 | audioProcessor.valueTreeState.getParameter(HRTF_THETA_ID)->setValueNotifyingHost(uiAngleValue);
212 | audioProcessor.valueTreeState.getParameter(HRTF_RADIUS_ID)->setValueNotifyingHost(uiRadiusValue);
213 |
214 | prevAzimuthRadius = uiRadiusValue;
215 | prevAzimuthAngle = uiAngleValue;
216 | prevParamRadius = uiRadiusValue;
217 | prevParamAngle = uiAngleValue;
218 | }
219 | else{}
220 |
221 | }
222 |
223 |
// Quantise `value` to an integer multiple of `epsilon`.
// NOTE(review): the int cast truncates toward zero, which differs from a true
// floor for negative inputs; callers pass normalised [0, 1] values so the two
// coincide here. Also assumes value / epsilon fits in an int — true for the
// 0..1 range with epsilon = 1e-5 used by timerCallback().
float OrbiterAudioProcessorEditor::floorValue(float value, float epsilon)
{
    int valueTruncated = value / epsilon;
    return (float)valueTruncated * epsilon;
}
229 |
230 |
--------------------------------------------------------------------------------
/Source/PluginEditor.h:
--------------------------------------------------------------------------------
1 | /*
2 | ==============================================================================
3 |
4 | This file contains the basic framework code for a JUCE plugin editor.
5 |
6 | ==============================================================================
7 | */
8 |
9 | #pragma once
10 |
11 | #include
12 | #include "PluginProcessor.h"
13 | #include "AzimuthUIComponent.h"
14 |
15 | //==============================================================================
16 | /**
17 | */
18 | class OrbiterAudioProcessorEditor : public juce::AudioProcessorEditor, public juce::Timer
19 | {
20 | public:
21 |
22 | OrbiterAudioProcessorEditor (OrbiterAudioProcessor&);
23 | ~OrbiterAudioProcessorEditor() override;
24 |
25 | //==============================================================================
26 | void paint (juce::Graphics&) override;
27 | void resized() override;
28 |
29 | void openSofaButtonClicked();
30 | void notifyNewSOFA(juce::String filePath);
31 |
32 |
33 | AzimuthUIComponent azimuthComp;
34 |
35 | private:
36 | // This reference is provided as a quick way for your editor to
37 | // access the processor object that created it.
38 | class OrbiterSliderComponent : public juce::Component
39 | {
40 | public:
41 |
42 | OrbiterSliderComponent(){};
43 |
44 | OrbiterSliderComponent(float width, juce::String sliderName)
45 | {
46 | name = sliderName;
47 |
48 | setSize(width, width);
49 | slider.setSliderStyle(juce::Slider::SliderStyle::Rotary);
50 | slider.setTextBoxStyle(juce::Slider::TextEntryBoxPosition::NoTextBox, true, width, nameHeight);
51 | slider.setRange(0, 1);
52 |
53 | addAndMakeVisible(slider);
54 | }
55 |
56 | void paint(juce::Graphics &g) override
57 | {
58 | g.setFont(nameHeight);
59 | g.setColour(juce::Colours::white);
60 | g.drawFittedText(name, getLocalBounds().withTrimmedTop(getHeight() - nameHeight).withSize(getWidth(), nameHeight), juce::Justification::Flags::centred, 1);
61 | }
62 |
63 | void resized() override
64 | {
65 | slider.setBounds(getLocalBounds().withTrimmedBottom(nameHeight).withSize(getWidth(), getHeight() - nameHeight));
66 | }
67 |
68 |
69 | juce::Slider slider;
70 | juce::String name;
71 |
72 | static constexpr float nameHeight = 15;
73 | };
74 |
75 | void timerCallback() override;
76 | float floorValue(float value, float epsilon);
77 |
78 | juce::Slider hrtfThetaSlider;
79 | juce::Slider hrtfPhiSlider;
80 | juce::Slider hrtfRadiusSlider;
81 | OrbiterSliderComponent inputGainSlider;
82 | OrbiterSliderComponent outputGainSlider;
83 | OrbiterSliderComponent reverbRoomSizeSlider;
84 | OrbiterSliderComponent reverbDampingSlider;
85 | OrbiterSliderComponent reverbWetLevelSlider;
86 | OrbiterSliderComponent reverbDryLevelSlider;
87 | OrbiterSliderComponent reverbWidthSlider;
88 |
89 | juce::TextButton sofaFileButton;
90 |
91 | std::unique_ptr hrtfThetaAttachment;
92 | std::unique_ptr hrtfPhiAttachment;
93 | std::unique_ptr hrtfRadiusAttachment;
94 | std::unique_ptr inputGainAttachment;
95 | std::unique_ptr outputGainAttachment;
96 | std::unique_ptr reverbRoomSizeAttachment;
97 | std::unique_ptr reverbDampingAttachment;
98 | std::unique_ptr reverbWetLevelAttachment;
99 | std::unique_ptr reverbDryLevelAttachment;
100 | std::unique_ptr reverbWidthAttachment;
101 |
102 | float prevAzimuthAngle;
103 | float prevAzimuthRadius;
104 | float prevParamAngle;
105 | float prevParamRadius;
106 |
107 |
108 | // UI Element Offsets
109 | float sliderTextHeight = 10;
110 | float paramCategoryTextOffset = 35;
111 |
112 | float azimuthXOffset = 15;
113 |
114 | // Elevation Slider Characteristics
115 | float elevationSliderXOffset = 330;
116 | float elevationSliderYOffset = 75;
117 |
118 | // Gain Slider Characteristics
119 | static constexpr float gainSliderSize = 80;
120 |
121 | float gainSliderXOffset = 470;
122 | float gainSliderYOffset = 125;
123 |
124 | float gainSliderEnclosingBoxWidth = 100;
125 | float gainSliderEnclosingBoxHeight = 185;
126 | float gainSliderEnclosingBoxYOffset = 80;
127 |
128 | // Reverb Slider Characteristics
129 | static constexpr float reverbSliderSize = 75;
130 |
131 | float reverbSliderXOffset = 595;
132 | float reverbSliderYOffset = 120;
133 | float reverbSliderSeparation = reverbSliderSize + 5;
134 |
135 | float reverbSliderEnclosingBoxWidth = 180;
136 | float reverbSliderEnclosingBoxHeight = 240;
137 | float reverbSliderEnclosingBoxYOffset = 80;
138 |
139 | // Sofa Button Characteristics
140 | float sofaButtonXOffset = 765;
141 | float sofaButtonYOffset = 80;
142 | float sofaButtonWidth = 100;
143 | float sofaButtonHeight = 80;
144 | float sofaStatusXOffset = 765;
145 | float sofaStatusYOffset = 150;
146 | float sofaStatusWidth = 100;
147 | float sofaStatusHeight = 80;
148 |
149 |
150 | OrbiterAudioProcessor& audioProcessor;
151 | JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OrbiterAudioProcessorEditor)
152 | };
153 |
--------------------------------------------------------------------------------
/Source/PluginProcessor.cpp:
--------------------------------------------------------------------------------
1 | /*
2 | ==============================================================================
3 |
4 | This file contains the basic framework code for a JUCE plugin processor.
5 |
6 | ==============================================================================
7 | */
8 |
9 | #include "PluginProcessor.h"
10 | #include "PluginEditor.h"
11 |
12 | //==============================================================================
// Construct the processor: stereo in/out buses (unless the host dictates a
// preferred configuration), a background thread that watches for HRTF
// parameter changes, and the APVTS parameter tree.
OrbiterAudioProcessor::OrbiterAudioProcessor()
#ifndef JucePlugin_PreferredChannelConfigurations
     : AudioProcessor (BusesProperties()
                     #if ! JucePlugin_IsMidiEffect
                      #if ! JucePlugin_IsSynth
                       .withInput ("Input", juce::AudioChannelSet::stereo(), true)
                      #endif
                       .withOutput ("Output", juce::AudioChannelSet::stereo(), true)
                     #endif
                       ),
    Thread("HRTF Parameter Watcher"),
    valueTreeState(*this, nullptr, "PARAMETERS", createParameters())
#endif
{
    // No SOFA file until the editor hands one over via the waiting flag.
    sofaFileLoaded = false;
    newSofaFileWaiting = false;
    newSofaFilePath = "";
    currentSOFA = nullptr;

    // Sentinels force the first watcher-thread pass to pick up the params.
    prevTheta = -1;
    prevPhi = -1;
    prevRadius = -1;
    hrtfParamChangeLoop = true;

    audioBlockSize = 0;

    // Any reverb parameter change flips one atomic flag, consumed elsewhere.
    valueTreeState.addParameterListener(HRTF_REVERB_ROOM_SIZE_ID, this);
    valueTreeState.addParameterListener(HRTF_REVERB_DAMPING_ID, this);
    valueTreeState.addParameterListener(HRTF_REVERB_WET_LEVEL_ID, this);
    valueTreeState.addParameterListener(HRTF_REVERB_DRY_LEVEL_ID, this);
    valueTreeState.addParameterListener(HRTF_REVERB_WIDTH_ID, this);
    reverbParamsChanged.store(false);

    // Start the HRTF parameter watcher thread (stopped in the destructor).
    startThread();
}
48 |
// Ask the watcher loop to exit, then give the thread up to 4 s to stop
// before it is forcibly killed by stopThread().
OrbiterAudioProcessor::~OrbiterAudioProcessor()
{
    hrtfParamChangeLoop = false;
    stopThread(4000);
}
54 |
55 | //==============================================================================
// Plugin display name, taken from the Projucer-generated macro.
const juce::String OrbiterAudioProcessor::getName() const
{
    return JucePlugin_Name;
}
60 |
// Standard JUCE boilerplate: reports MIDI input support per project config.
bool OrbiterAudioProcessor::acceptsMidi() const
{
   #if JucePlugin_WantsMidiInput
    return true;
   #else
    return false;
   #endif
}
69 |
// Standard JUCE boilerplate: reports MIDI output support per project config.
bool OrbiterAudioProcessor::producesMidi() const
{
   #if JucePlugin_ProducesMidiOutput
    return true;
   #else
    return false;
   #endif
}
78 |
// Standard JUCE boilerplate: true only when built as a MIDI-only effect.
bool OrbiterAudioProcessor::isMidiEffect() const
{
   #if JucePlugin_IsMidiEffect
    return true;
   #else
    return false;
   #endif
}
87 |
// No reported tail. NOTE(review): the plugin runs a reverb, so a nonzero
// tail (e.g. the reverb decay) may be more accurate — confirm intent.
double OrbiterAudioProcessor::getTailLengthSeconds() const
{
    return 0.0;
}
92 |
// Programs (presets) are not implemented; report a single dummy program.
int OrbiterAudioProcessor::getNumPrograms()
{
    return 1; // NB: some hosts don't cope very well if you tell them there are 0 programs,
              // so this should be at least 1, even if you're not really implementing programs.
}
98 |
// Only the dummy program exists, so the current index is always 0.
int OrbiterAudioProcessor::getCurrentProgram()
{
    return 0;
}
103 |
// No-op: programs are not implemented.
void OrbiterAudioProcessor::setCurrentProgram (int index)
{
}
107 |
// Programs are not implemented, so every program name is empty.
const juce::String OrbiterAudioProcessor::getProgramName (int index)
{
    return {};
}
112 |
// No-op: programs are not implemented.
void OrbiterAudioProcessor::changeProgramName (int index, const juce::String& newName)
{
}
116 |
117 | //==============================================================================
118 | void OrbiterAudioProcessor::prepareToPlay (double sampleRate, int samplesPerBlock)
119 | {
120 | audioBlockSize = samplesPerBlock;
121 | auto *inputGainParam = valueTreeState.getRawParameterValue(HRTF_INPUT_GAIN_ID);
122 | auto *outputGainParam = valueTreeState.getRawParameterValue(HRTF_OUTPUT_GAIN_ID);
123 |
124 | prevInputGain = *inputGainParam;
125 | prevOutputGain = *outputGainParam;
126 | }
127 |
void OrbiterAudioProcessor::releaseResources()
{
    // When playback stops, you can use this as an opportunity to free up any
    // spare memory, etc. (Nothing is allocated per-playback here.)
}
133 |
#ifndef JucePlugin_PreferredChannelConfigurations
// Accept only mono or stereo layouts, with matching input/output channel sets.
bool OrbiterAudioProcessor::isBusesLayoutSupported (const BusesLayout& layouts) const
{
  #if JucePlugin_IsMidiEffect
    juce::ignoreUnused (layouts);
    return true;
  #else
    // This is the place where you check if the layout is supported.
    // In this template code we only support mono or stereo.
    if (layouts.getMainOutputChannelSet() != juce::AudioChannelSet::mono()
     && layouts.getMainOutputChannelSet() != juce::AudioChannelSet::stereo())
        return false;

    // This checks if the input layout matches the output layout
   #if ! JucePlugin_IsSynth
    if (layouts.getMainOutputChannelSet() != layouts.getMainInputChannelSet())
        return false;
   #endif

    return true;
  #endif
}
#endif
157 |
158 | void OrbiterAudioProcessor::processBlock (juce::AudioBuffer& buffer, juce::MidiBuffer& midiMessages)
159 | {
160 | juce::ScopedNoDenormals noDenormals;
161 | auto totalNumInputChannels = getTotalNumInputChannels();
162 | auto totalNumOutputChannels = getTotalNumOutputChannels();
163 |
164 |
165 | for (auto i = totalNumInputChannels; i < totalNumOutputChannels; ++i)
166 | buffer.clear (i, 0, buffer.getNumSamples());
167 |
168 |
169 | if (sofaFileLoaded)
170 | {
171 | ReferenceCountedSOFA::Ptr retainedSofa(currentSOFA);
172 |
173 | for (int channel = 0; channel < 1; ++channel)
174 | {
175 | auto *channelData = buffer.getWritePointer (channel);
176 |
177 | auto *inputGainParam = valueTreeState.getRawParameterValue(HRTF_INPUT_GAIN_ID);
178 | float inputGain = *inputGainParam;
179 |
180 | buffer.applyGainRamp(0, 0, buffer.getNumSamples(), prevInputGain, inputGain);
181 | prevInputGain = inputGain;
182 |
183 | retainedSofa->leftHRTFProcessor.addSamples(channelData, buffer.getNumSamples());
184 | retainedSofa->rightHRTFProcessor.addSamples(channelData, buffer.getNumSamples());
185 |
186 | auto left = retainedSofa->leftHRTFProcessor.getOutput(buffer.getNumSamples());
187 | auto right = retainedSofa->rightHRTFProcessor.getOutput(buffer.getNumSamples());
188 |
189 | if (left.size() != 0 || right.size() != 0)
190 | {
191 | auto *outLeft = buffer.getWritePointer(0);
192 | auto *outRight = buffer.getWritePointer(1);
193 |
194 | for (auto i = 0; i < buffer.getNumSamples(); ++i)
195 | {
196 | outLeft[i] = left[i];
197 | outRight[i] = right[i];
198 | }
199 |
200 | auto *outputGainParam = valueTreeState.getRawParameterValue(HRTF_OUTPUT_GAIN_ID);
201 | float outputGain = *outputGainParam;
202 |
203 | buffer.applyGainRamp(0, 0, buffer.getNumSamples(), prevOutputGain, outputGain);
204 | buffer.applyGainRamp(1, 0, buffer.getNumSamples(), prevOutputGain, outputGain);
205 | prevOutputGain = outputGain;
206 | }
207 |
208 | }
209 | }
210 | }
211 |
212 | //==============================================================================
bool OrbiterAudioProcessor::hasEditor() const
{
    return true; // (change this to false if you choose to not supply an editor)
}
217 |
juce::AudioProcessorEditor* OrbiterAudioProcessor::createEditor()
{
    // Host takes ownership of the returned editor.
    return new OrbiterAudioProcessorEditor (*this);
}
222 |
223 | //==============================================================================
224 | void OrbiterAudioProcessor::getStateInformation (juce::MemoryBlock& destData)
225 | {
226 | // You should use this method to store your parameters in the memory block.
227 | // You could do that either as raw data, or use the XML or ValueTree classes
228 | // as intermediaries to make it easy to save and load complex data.
229 | }
230 |
231 | void OrbiterAudioProcessor::setStateInformation (const void* data, int sizeInBytes)
232 | {
233 | // You should use this method to restore your parameters from this memory block,
234 | // whose contents will have been created by the getStateInformation() call.
235 | }
236 |
237 |
238 | juce::AudioProcessorValueTreeState::ParameterLayout OrbiterAudioProcessor::createParameters()
239 | {
240 | std::vector> parameters;
241 | juce::NormalisableRange parameterRange(0, 1, 0.0000001);
242 |
243 | parameters.push_back(std::make_unique(HRTF_THETA_ID, "Theta", parameterRange, 0));
244 | parameters.push_back(std::make_unique(HRTF_PHI_ID, "Phi", parameterRange, 0));
245 | parameters.push_back(std::make_unique(HRTF_RADIUS_ID, "Radius", parameterRange, 0));
246 | parameters.push_back(std::make_unique(HRTF_INPUT_GAIN_ID, "Input Gain", parameterRange, 1));
247 | parameters.push_back(std::make_unique(HRTF_OUTPUT_GAIN_ID, "Output Gain", 0, 10, 1));
248 | parameters.push_back(std::make_unique(HRTF_REVERB_ROOM_SIZE_ID, "Room Size", 0, 1, 0.5));
249 | parameters.push_back(std::make_unique(HRTF_REVERB_DAMPING_ID, "Damping", 0, 1, 0.5));
250 | parameters.push_back(std::make_unique(HRTF_REVERB_WET_LEVEL_ID, "Wet Level", 0, 1, 0.5));
251 | parameters.push_back(std::make_unique(HRTF_REVERB_DRY_LEVEL_ID, "Dry Level", 0, 1, 0.5));
252 | parameters.push_back(std::make_unique(HRTF_REVERB_WIDTH_ID, "Reverb Width", 0, 1, 0.5));
253 |
254 | //parameters.push_back(std::make_unique("ORBIT", "Enable Orbit", false));
255 | return {parameters.begin(), parameters.end()};
256 | }
257 |
258 |
259 | void OrbiterAudioProcessor::run()
260 | {
261 | while (hrtfParamChangeLoop)
262 | {
263 | checkForNewSofaToLoad();
264 | checkForGUIParameterChanges();
265 | checkForHRTFReverbParamChanges();
266 | juce::Thread::wait(10);
267 | }
268 | }
269 |
270 |
271 | void OrbiterAudioProcessor::checkForGUIParameterChanges()
272 | {
273 | ReferenceCountedSOFA::Ptr retainedSofa(currentSOFA);
274 |
275 | if (retainedSofa != nullptr)
276 | {
277 | auto *theta = valueTreeState.getRawParameterValue(HRTF_THETA_ID);
278 | auto *phi = valueTreeState.getRawParameterValue(HRTF_PHI_ID);
279 | auto *radius = valueTreeState.getRawParameterValue(HRTF_RADIUS_ID);
280 |
281 | float t = *theta;
282 | float p = *phi;
283 | float r = *radius;
284 |
285 | auto thetaMapped = mapAndQuantize(t, 0.f, 1.f, retainedSofa->sofa.getMinTheta(), retainedSofa->sofa.getMaxTheta(), retainedSofa->sofa.getDeltaTheta());
286 | auto phiMapped = mapAndQuantize(p, 0.f, 1.f, retainedSofa->sofa.getMinPhi(), retainedSofa->sofa.getMaxPhi(), retainedSofa->sofa.getDeltaPhi());
287 | auto radiusMapped = mapAndQuantize(r, 0.f, 1.f, retainedSofa->sofa.getMinRadius(), retainedSofa->sofa.getMaxRadius(), retainedSofa->sofa.getDeltaRadius());
288 |
289 |
290 | if ((thetaMapped != prevTheta) || (phiMapped != prevPhi) || (radiusMapped != prevRadius))
291 | {
292 | auto *hrirLeft = retainedSofa->sofa.getHRIR(0, (int)thetaMapped, (int)phiMapped, radiusMapped);
293 | auto *hrirRight = retainedSofa->sofa.getHRIR(1, (int)thetaMapped, (int)phiMapped, radiusMapped);
294 |
295 | if ((hrirLeft != nullptr) && (hrirRight != nullptr))
296 | {
297 | retainedSofa->leftHRTFProcessor.swapHRIR(hrirLeft, currentSOFA->hrirSize, currentSOFA->sofa.getMinImpulseDelay() * 0.75);
298 | retainedSofa->rightHRTFProcessor.swapHRIR(hrirRight, currentSOFA->hrirSize, currentSOFA->sofa.getMinImpulseDelay() * 0.75);
299 | }
300 | prevTheta = thetaMapped;
301 | prevPhi = phiMapped;
302 | prevRadius = radiusMapped;
303 |
304 | sofaFileLoaded = true;
305 | }
306 | }
307 | }
308 |
309 |
// Loads a SOFA file requested by the editor (newSofaFileWaiting/newSofaFilePath),
// initialises both ear processors at a default position, and — only if both
// initialise successfully — publishes it as the active SOFA object.
void OrbiterAudioProcessor::checkForNewSofaToLoad()
{
    if (newSofaFileWaiting)
    {
        if (newSofaFilePath.isNotEmpty())
        {
            ReferenceCountedSOFA::Ptr newSofa = new ReferenceCountedSOFA();
            bool success = newSofa->sofa.readSOFAFile(newSofaFilePath.toStdString());

            if (success){
                // Clamp the HRIR length used for convolution to the plugin's cap.
                newSofa->hrirSize = juce::jmin((size_t)newSofa->sofa.getN(), MAX_HRIR_LENGTH);

                bool leftHRTFSuccess = false;
                bool rightHRTFSuccess = false;

                // Default position: max radius, centre theta/phi (normalised 1, 0.5, 0.5).
                auto radiusMapped = mapAndQuantize(1, 0, 1, newSofa->sofa.getMinRadius(), newSofa->sofa.getMaxRadius(), newSofa->sofa.getDeltaRadius());
                auto thetaMapped = mapAndQuantize(0.5, 0, 1, newSofa->sofa.getMinTheta(), newSofa->sofa.getMaxTheta(), newSofa->sofa.getDeltaTheta());
                auto phiMapped = mapAndQuantize(0.5, 0, 1, newSofa->sofa.getMinPhi(), newSofa->sofa.getMaxPhi(), newSofa->sofa.getDeltaPhi());

                leftHRTFSuccess = newSofa->leftHRTFProcessor.init(newSofa->sofa.getHRIR(0, (int)thetaMapped, (int)phiMapped, radiusMapped), newSofa->hrirSize, newSofa->sofa.getFs(), audioBlockSize, newSofa->sofa.getMinImpulseDelay() * 0.75);
                rightHRTFSuccess = newSofa->rightHRTFProcessor.init(newSofa->sofa.getHRIR(1, (int)thetaMapped, (int)phiMapped, radiusMapped), newSofa->hrirSize, newSofa->sofa.getFs(), audioBlockSize, newSofa->sofa.getMinImpulseDelay() * 0.75);


                if (leftHRTFSuccess && rightHRTFSuccess)
                    currentSOFA = newSofa;

                // Track the instance so checkSofaInstancesToFree() can release
                // it once nothing else references it.
                sofaInstances.add(newSofa);
            }
        }

        newSofaFileWaiting = false;
    }
}
343 |
344 |
345 | void OrbiterAudioProcessor::checkSofaInstancesToFree()
346 | {
347 | for (auto i = sofaInstances.size(); i >= 0; --i)
348 | {
349 | ReferenceCountedSOFA::Ptr sofaInstance(sofaInstances.getUnchecked(i));
350 | if (sofaInstance->getReferenceCount() == 2)
351 | sofaInstances.remove(i);
352 | }
353 | }
354 |
355 |
356 | void OrbiterAudioProcessor::checkForHRTFReverbParamChanges()
357 | {
358 | auto changeFlag = reverbParamsChanged.load();
359 | if (changeFlag)
360 | {
361 | ReferenceCountedSOFA::Ptr retainedSOFA(currentSOFA);
362 | if (retainedSOFA != nullptr)
363 | {
364 | retainedSOFA->leftHRTFProcessor.setReverbParameters(reverbParams);
365 | retainedSOFA->rightHRTFProcessor.setReverbParameters(reverbParams);
366 | reverbParamsChanged.store(false);
367 | }
368 | }
369 | }
370 |
371 |
372 | /*
373 | * Map a value from one set to another
374 | * This works similar to jmap except that the output is quantized in steps of outputDelta
375 | */
376 | float OrbiterAudioProcessor::mapAndQuantize(float value, float inputMin, float inputMax, float outputMin, float outputMax, float outputDelta)
377 | {
378 | if ((outputMax - outputMin) == 0)
379 | return 0;
380 |
381 | if ((outputMax < outputMin) || (inputMax < inputMin) || (outputDelta < 0))
382 | return 0;
383 |
384 | unsigned int totalNumSteps = (outputMax - outputMin) / outputDelta;
385 | float inputChunkSize = (inputMax - inputMin) / totalNumSteps;
386 |
387 | unsigned int deltaMultiplier = value / inputChunkSize;
388 |
389 | return (deltaMultiplier * outputDelta) + outputMin;
390 | }
391 |
392 |
393 | void OrbiterAudioProcessor::parameterChanged(const juce::String ¶meterID, float newValue)
394 | {
395 | if (parameterID == HRTF_REVERB_ROOM_SIZE_ID)
396 | {
397 | reverbParams.roomSize = newValue;
398 | reverbParamsChanged.store(true);
399 | }
400 |
401 | else if (parameterID == HRTF_REVERB_DAMPING_ID)
402 | {
403 | reverbParams.damping = newValue;
404 | reverbParamsChanged.store(true);
405 | }
406 |
407 | else if (parameterID == HRTF_REVERB_WET_LEVEL_ID)
408 | {
409 | reverbParams.wetLevel = newValue;
410 | reverbParamsChanged.store(true);
411 | }
412 |
413 | else if (parameterID == HRTF_REVERB_DRY_LEVEL_ID)
414 | {
415 | reverbParams.dryLevel = newValue;
416 | reverbParamsChanged.store(true);
417 | }
418 |
419 | else if (parameterID == HRTF_REVERB_WIDTH_ID)
420 | {
421 | reverbParams.width = newValue;
422 | reverbParamsChanged.store(true);
423 | }
424 |
425 | else{}
426 | }
427 |
428 |
429 |
//==============================================================================
// This creates new instances of the plugin.. (entry point called by JUCE;
// the host takes ownership of the returned processor)
juce::AudioProcessor* JUCE_CALLTYPE createPluginFilter()
{
    return new OrbiterAudioProcessor();
}
436 |
--------------------------------------------------------------------------------
/Source/PluginProcessor.h:
--------------------------------------------------------------------------------
1 | /*
2 | ==============================================================================
3 |
4 | This file contains the basic framework code for a JUCE plugin processor.
5 |
6 | ==============================================================================
7 | */
8 |
9 | #pragma once
10 |
11 | #include
12 | #include
13 | #include "HRTFProcessor.h"
14 |
15 | #define HRTF_THETA_ID "HRTF_THETA"
16 | #define HRTF_PHI_ID "HRTF_PHI"
17 | #define HRTF_RADIUS_ID "HRTF_RADIUS"
18 | #define HRTF_INPUT_GAIN_ID "HRTF_INPUT_GAIN"
19 | #define HRTF_OUTPUT_GAIN_ID "HRTF_OUTPUT_GAIN"
20 | #define HRTF_REVERB_ROOM_SIZE_ID "HRTF_REVERB_ROOM_SIZE"
21 | #define HRTF_REVERB_DAMPING_ID "HRTF_REVERB_DAMPING"
22 | #define HRTF_REVERB_WET_LEVEL_ID "HRTF_REVERB_WET_LEVEL"
23 | #define HRTF_REVERB_DRY_LEVEL_ID "HRTF_REVERB_DRY_LEVEL"
24 | #define HRTF_REVERB_WIDTH_ID "HRTF_REVERB_WIDTH"
25 |
26 |
27 |
//==============================================================================
/**
    Binaural spatialiser plugin processor.

    Runs a background juce::Thread (run()) that polls for: a new SOFA file to
    load, GUI position-parameter changes, and reverb parameter changes. HRIR
    data is shared with the audio thread via reference-counted
    ReferenceCountedSOFA objects.

    NOTE(review): several template argument lists and an '&' appear to have
    been stripped/garbled in this dump (e.g. juce::AudioBuffer, std::atomic,
    ReferenceCountedObjectPtr, "¶meterID") — confirm against the original file.
*/
class OrbiterAudioProcessor : public juce::AudioProcessor, public juce::Thread, public juce::AudioProcessorValueTreeState::Listener
{
public:
    //==============================================================================
    OrbiterAudioProcessor();
    ~OrbiterAudioProcessor() override;

    //==============================================================================
    void prepareToPlay (double sampleRate, int samplesPerBlock) override;
    void releaseResources() override;

#ifndef JucePlugin_PreferredChannelConfigurations
    bool isBusesLayoutSupported (const BusesLayout& layouts) const override;
#endif

    void processBlock (juce::AudioBuffer&, juce::MidiBuffer&) override;

    //==============================================================================
    juce::AudioProcessorEditor* createEditor() override;
    bool hasEditor() const override;

    //==============================================================================
    const juce::String getName() const override;

    bool acceptsMidi() const override;
    bool producesMidi() const override;
    bool isMidiEffect() const override;
    double getTailLengthSeconds() const override;

    //==============================================================================
    int getNumPrograms() override;
    int getCurrentProgram() override;
    void setCurrentProgram (int index) override;
    const juce::String getProgramName (int index) override;
    void changeProgramName (int index, const juce::String& newName) override;

    //==============================================================================
    void getStateInformation (juce::MemoryBlock& destData) override;
    void setStateInformation (const void* data, int sizeInBytes) override;

    //==============================================================================
    // juce::Thread entry point — background polling loop.
    void run() override;

    // Builds the AudioProcessorValueTreeState parameter layout.
    juce::AudioProcessorValueTreeState::ParameterLayout createParameters();


    bool newSofaFileWaiting;        // set by the editor when the user picks a new SOFA file
    bool sofaFileLoaded;            // true once a SOFA file is loaded and an HRIR selected

    juce::String newSofaFilePath;   // path of the SOFA file waiting to be loaded

    juce::AudioProcessorValueTreeState valueTreeState;


private:
    //==============================================================================

    // Ref-counted bundle of a parsed SOFA file plus the two ear convolvers,
    // so the audio and background threads can share it safely.
    class ReferenceCountedSOFA : public juce::ReferenceCountedObject
    {
    public:
        typedef juce::ReferenceCountedObjectPtr Ptr;

        ReferenceCountedSOFA(){}
        BasicSOFA::BasicSOFA *getSOFA() { return &sofa; }

        BasicSOFA::BasicSOFA sofa;
        HRTFProcessor leftHRTFProcessor;
        HRTFProcessor rightHRTFProcessor;

        size_t hrirSize;            // number of HRIR samples used (capped at MAX_HRIR_LENGTH)

    private:

        JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(ReferenceCountedSOFA)
    };


    //==============================================================================

    // Background-thread helpers (called from run()).
    void checkSofaInstancesToFree();
    void checkForNewSofaToLoad();
    void checkForGUIParameterChanges();
    void checkForHRTFReverbParamChanges();

    // Maps value from [inputMin, inputMax] to [outputMin, outputMax],
    // quantised in steps of outputDelta.
    float mapAndQuantize(float value, float inputMin, float inputMax, float outputMin, float outputMax, float outputDelta);

    void parameterChanged(const juce::String ¶meterID, float newValue) override;

    // Last quantised position sent to the HRTF processors.
    float prevTheta;
    float prevPhi;
    float prevRadius;
    bool hrtfParamChangeLoop;       // cleared by the destructor to stop run()

    int audioBlockSize;             // host block size cached in prepareToPlay()

    // Previous gains for per-block gain ramps (zipper-noise avoidance).
    float prevInputGain;
    float prevOutputGain;

    static constexpr size_t MAX_HRIR_LENGTH = 15000;

    juce::Reverb::Parameters reverbParams;      // staged by parameterChanged()
    std::atomic reverbParamsChanged;            // handoff flag to the background thread

    ReferenceCountedSOFA::Ptr currentSOFA;      // SOFA object currently in use
    juce::ReferenceCountedArray sofaInstances;  // all loaded instances, pending cleanup


    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OrbiterAudioProcessor)
};
--------------------------------------------------------------------------------
/readme_resources/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/superkittens/Orbiter/2124238b06b67a58f33c9e3f59c604caa41a2670/readme_resources/.DS_Store
--------------------------------------------------------------------------------
/readme_resources/Orbiter_GUI.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/superkittens/Orbiter/2124238b06b67a58f33c9e3f59c604caa41a2670/readme_resources/Orbiter_GUI.png
--------------------------------------------------------------------------------
/readme_resources/Orbiter_GUI_v0.0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/superkittens/Orbiter/2124238b06b67a58f33c9e3f59c604caa41a2670/readme_resources/Orbiter_GUI_v0.0.png
--------------------------------------------------------------------------------
/readme_resources/Orbiter_GUI_v0_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/superkittens/Orbiter/2124238b06b67a58f33c9e3f59c604caa41a2670/readme_resources/Orbiter_GUI_v0_1.png
--------------------------------------------------------------------------------
/signal_validation.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | import h5py
4 | import soundfile as sf
5 |
6 | %matplotlib qt
7 |
8 | #%%
def createFadeInOutEnvelope(length, fadeInOut):
    """Return a raised-sine crossfade envelope of `length` samples.

    fadeInOut selects the shape: 'fade_in' gives a sin^2 ramp starting at 0,
    'fade_out' gives a cos^2 ramp starting at 1. Any other value yields all
    zeros, as does length == 0.
    """
    envelope = np.zeros(length)
    if length == 0:
        return envelope

    # Same per-sample phase as the scalar formulation: i * pi / (2 * length).
    phase = np.arange(length) * np.pi / (2 * length)

    if fadeInOut == 'fade_in':
        envelope = np.sin(phase) ** 2
    elif fadeInOut == 'fade_out':
        envelope = np.cos(phase) ** 2

    return envelope
20 |
21 |
def overlapAndAdd(olaBuffer, x, writeIndex, numSamplesToWrite):
    """Accumulate the first numSamplesToWrite samples of x into olaBuffer,
    starting at writeIndex and wrapping circularly past the buffer's end.

    olaBuffer is modified in place and also returned for convenience.
    """
    bufferSize = len(olaBuffer)
    pos = writeIndex

    for k in range(numSamplesToWrite):
        if pos >= bufferSize:
            pos = 0          # wrap around the circular buffer
        olaBuffer[pos] += x[k]
        pos += 1

    return olaBuffer
38 |
39 |
40 |
# Setup
#%%
# FFT length: next power of two that holds an HRIR plus one full windowed
# input segment (two blocks + 1 sample).
hrir_length = 512
audio_block_length = 256
zero_padded_exponent = int(np.log2(hrir_length + (2 * audio_block_length) + 1)) + 1
zero_padded_length = np.power(2, zero_padded_exponent)

# Hamming analysis window over two blocks (+1); last sample zeroed so
# successive windows overlap-add cleanly.
w = np.hamming((2 * audio_block_length) + 1)
w[-1] = 0

# Two reference "HRIRs": a unit impulse (pass-through) and an all-ones filter.
hrir_impulse = np.zeros(hrir_length)
hrir_impulse[0] = 1.0

hrir_dc = np.ones(hrir_length)

fade_out_envelope = createFadeInOutEnvelope(2 * audio_block_length + 1, fadeInOut='fade_out')
fade_in_envelope = createFadeInOutEnvelope(2 * audio_block_length + 1, fadeInOut='fade_in')

# Zero-pad both impulse responses to the FFT length.
hrir_impulse_zp = np.zeros(zero_padded_length)
hrir_impulse_zp[0:hrir_length] = hrir_impulse

hrir_dc_zp = np.zeros(zero_padded_length)
hrir_dc_zp[0:hrir_length] = hrir_dc

# Windowed constant input segment, zero-padded to the FFT length.
x = np.ones((2 * audio_block_length) + 1) * w
x_zp = np.zeros(zero_padded_length)
x_zp[0 : len(x)] = x

ola_buffer = np.zeros(zero_padded_length)


# This block corresponds to the "HRTF Application" test in HRTFProcessorTest
# ie. feed an impulse signal into the HRTFProcessor
# The result from HRTFProcessor and this script should match
# NOTE(review): y is complex (ifft output); accumulating it into the
# real-valued ola_buffer relies on numpy discarding the imaginary part —
# newer numpy versions raise on this cast, so confirm the numpy version used.
y = np.fft.ifft(np.fft.fft(x_zp) * np.fft.fft(hrir_impulse_zp))

ola_buffer = overlapAndAdd(ola_buffer, y, 0, zero_padded_length)
out = ola_buffer[0 : audio_block_length]

plt.plot(out)
81 |
82 |
#%%
# This block corresponds to the "Changing HRTF" test
# Three consecutive blocks of a 500 Hz sine: block 1 through the impulse HRIR,
# block 2 cross-faded from the impulse HRIR to the all-ones HRIR, block 3
# through the all-ones HRIR only.
test_signal_length = 2048
f0 = 500
fs = 44100

ola_buffer = np.zeros(zero_padded_length)
processed_data = np.zeros(test_signal_length)

test_signal = np.sin(np.arange(0, test_signal_length, 1) * 2 * np.pi * f0 / fs)

# Block 1: windowed segment filtered with the impulse ("old") HRIR.
x_zp = np.zeros(zero_padded_length)
x_zp[0 : (2 * audio_block_length) + 1] = test_signal[0 : (2 * audio_block_length) + 1] * w

y = np.fft.ifft(np.fft.fft(x_zp) * np.fft.fft(hrir_impulse_zp))
ola_buffer = overlapAndAdd(ola_buffer, y, 0, zero_padded_length)
processed_data[0 : audio_block_length] = ola_buffer[0 : audio_block_length]

# Block 2: the same segment filtered with BOTH HRIRs, cross-faded (old fades
# out, new fades in) to avoid a discontinuity at the switch point.
x_zp[0 : (2 * audio_block_length) + 1] = test_signal[audio_block_length : audio_block_length + (2 * audio_block_length) + 1] * w

y_old = np.fft.ifft(np.fft.fft(x_zp) * np.fft.fft(hrir_impulse_zp))
y_new = np.fft.ifft(np.fft.fft(x_zp) * np.fft.fft(hrir_dc_zp))

y_old[0 : 2 * audio_block_length + 1] = y_old[0 : 2 * audio_block_length + 1] * fade_out_envelope
y_new[0 : 2 * audio_block_length + 1] = y_new[0 : 2 * audio_block_length + 1] * fade_in_envelope

y = y_old + y_new

ola_buffer = overlapAndAdd(ola_buffer, y, audio_block_length, zero_padded_length)
processed_data[audio_block_length : audio_block_length * 2] = ola_buffer[audio_block_length : audio_block_length * 2]


# Block 3: the new (all-ones) HRIR only.
x_zp[0 : (2 * audio_block_length) + 1] = test_signal[2 * audio_block_length : 2 * audio_block_length + (2 * audio_block_length) + 1] * w
y = np.fft.ifft(np.fft.fft(x_zp) * np.fft.fft(hrir_dc_zp))
ola_buffer = overlapAndAdd(ola_buffer, y, 2 * audio_block_length, zero_padded_length)
processed_data[2 * audio_block_length : 3 * audio_block_length] = ola_buffer[2 * audio_block_length : 3 * audio_block_length]

plt.plot(processed_data)
121 |
122 |
# This block corresponds to "Regular Operation" test in HRTFProcessorTest
#%%
f0 = 1000
fs = 44100

x = np.sin(np.arange(0, 2048, 1) * 2 * np.pi * f0 / fs)

ola_buffer = np.zeros(zero_padded_length)

# Stream three rectangular (unwindowed) blocks through the impulse HRIR,
# overlap-adding each FFT result at the block's own write offset.
for block in range(0, 3):
    x_zp = np.zeros(zero_padded_length)
    x_zp[0 : audio_block_length] = x[block * audio_block_length : block * audio_block_length + audio_block_length]

    y = np.fft.ifft(np.fft.fft(x_zp) * np.fft.fft(hrir_impulse_zp))

    ola_buffer = overlapAndAdd(ola_buffer, y, block * audio_block_length, zero_padded_length - (audio_block_length * block))
139 |
140 |
141 |
# Test using a real HRTF
#%%
sofa_filepath = '/Users/superkittens/projects/sound_prototypes/hrtf/hrtfs/BRIRs_from_a_room/B/002.sofa'

# Read the SOFA (HDF5) file directly. Data.IR is indexed
# [measurement, receiver(ear), sample].
f = h5py.File(sofa_filepath, 'r')

Fs = f['Data.SamplingRate']
Ns = int(np.size(f['N']))
EPos = f['EmitterPosition']
Azimuth = f['SourcePosition']
HRIR = f['Data.IR']

hrir_index = 54

hrir_left = HRIR[hrir_index, 0, :]
hrir_right = HRIR[hrir_index, 1, :]

# N = FFT size, M = HRIR samples actually used (matches the plugin's
# MAX_HRIR_LENGTH constant of 15000).
num_samples = 131072
N = 16384
M = 15000
ola = np.zeros((2,N))
test_signal = np.sin(np.arange(0, num_samples, 1) * 2 * np.pi * 1000 / 44100)
output = np.zeros((2,num_samples + N))

num_blocks = int(num_samples / audio_block_length)
ola_index = 0

for block in range(0, num_blocks):
    print(block)

    x = np.zeros(N)
    h_left = np.zeros(N)
    h_right = np.zeros(N)

    x[0 : audio_block_length] = test_signal[block * audio_block_length : (block * audio_block_length) + audio_block_length]
    h_left[0 : M] = hrir_left[0 : M]
    h_right[0 : M] = hrir_right[0 : M]

    # Fast convolution of the current block with each ear's HRIR.
    y_left = np.fft.ifft(np.fft.fft(x, N) * np.fft.fft(h_left, N))
    y_right = np.fft.ifft(np.fft.fft(x, N) * np.fft.fft(h_right, N))

    # Zero the region just consumed so stale data is not re-added when the
    # circular OLA buffer wraps around.
    for i in range(ola_index, ola_index + audio_block_length):
        ola[0][i] = 0
        ola[1][i] = 0

    ola_index += audio_block_length

    if ola_index >= N:
        ola_index = 0

    ola[0] = overlapAndAdd(ola[0], y_left, ola_index, N)
    ola[1] = overlapAndAdd(ola[1], y_right, ola_index, N)

    output[0][block * audio_block_length : (block * audio_block_length) + audio_block_length] = ola[0][ola_index : ola_index + audio_block_length]
    output[1][block * audio_block_length : (block * audio_block_length) + audio_block_length] = ola[1][ola_index : ola_index + audio_block_length]
197 |
198 |
199 |
200 |
201 |
# Test changing HRTFs
#%%

# Larger block size for the crossfade experiment; regenerate the envelopes to
# match.
audio_block_length = 2048
fade_in_envelope = createFadeInOutEnvelope(audio_block_length, 'fade_in')
fade_out_envelope = createFadeInOutEnvelope(audio_block_length, 'fade_out')

sofa_filepath = '/Users/superkittens/projects/sound_prototypes/hrtf/hrtfs/BRIRs_from_a_room/B/002.sofa'

f = h5py.File(sofa_filepath, 'r')

Fs = f['Data.SamplingRate']
Ns = int(np.size(f['N']))
EPos = f['EmitterPosition']
Azimuth = f['SourcePosition']
HRIR = f['Data.IR']

hrir_index = 54

hrir_left = HRIR[hrir_index, 0, :]
hrir_right = HRIR[hrir_index, 1, :]

num_samples = 131072
N = 16384
M = 15000
ola = np.zeros(N)
test_signal = np.sin(np.arange(0, num_samples, 1) * 2 * np.pi * 1000 / 44100)
output = np.zeros(num_samples + N)

num_blocks = int(num_samples / audio_block_length)
ola_index = 0

# First block convolved with the "old" HRIR (left ear, measurement hrir_index).
x = np.zeros(N)
x[0 : audio_block_length] = test_signal[0 : audio_block_length]
h = np.zeros(N)
h[0 : M] = hrir_left[0 : M]
y = np.fft.ifft(np.fft.fft(x) * np.fft.fft(h))

# NOTE(review): assigning the complex ifft result into the real-valued output
# array discards the imaginary part (or raises, depending on numpy version).
output[0:N] = y


# Second block convolved with both the old HRIR and the "new" HRIR (adjacent
# measurement), then cross-faded.
# NOTE(review): x1 is built but never used; both convolutions below use x0.
x0 = np.zeros(N)
x0[0 : audio_block_length] = test_signal[audio_block_length : audio_block_length + audio_block_length]
x1 = np.zeros(N)
x1[0 : audio_block_length] = test_signal[audio_block_length : audio_block_length + audio_block_length]

h_new = np.zeros(N)
h_new[0 : M] = HRIR[hrir_index + 1, 0, 0 : M]
y_old = np.fft.ifft(np.fft.fft(x0) * np.fft.fft(h))
y_new = np.fft.ifft(np.fft.fft(x0) * np.fft.fft(h_new))

# Envelope tails: past the crossfade region the old HRIR's contribution is
# dropped (zeros) and the new HRIR's contribution is kept at unity (ones).
fade_in = np.ones(N)
fade_out = np.zeros(N)

fade_in[0 : audio_block_length] = fade_in_envelope
fade_out[0 : audio_block_length] = fade_out_envelope

y = (y_old * fade_out) + (y_new * fade_in)

output[audio_block_length : N + audio_block_length] = output[audio_block_length : N + audio_block_length] + y

plt.plot(output)
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
273 |
--------------------------------------------------------------------------------