Mirror of https://github.com/jameshball/osci-render

Fix compilation issues on free version, add more extensive hardware acceleration support for video

parent 231e1d8234
commit a125abfa7f
@@ -158,12 +158,15 @@ void MainComponent::updateFileLabel() {
     showRightArrow = audioProcessor.getCurrentFileIndex() < audioProcessor.numFiles() - 1;

     {
 #if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
         juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
-        if (audioProcessor.objectServerRendering) {
-            fileLabel.setText("Rendering from Blender", juce::dontSendNotification);
-        } else if (audioProcessor.isSyphonInputActive()) {
+        if (audioProcessor.isSyphonInputActive()) {
             fileLabel.setText(audioProcessor.getSyphonSourceName(), juce::dontSendNotification);
-        } else if (audioProcessor.getCurrentFileIndex() == -1) {
+        } else
+#endif
+        if (audioProcessor.objectServerRendering) {
+            fileLabel.setText("Rendering from Blender", juce::dontSendNotification);
+        } else if (audioProcessor.getCurrentFileIndex() == -1) {
             fileLabel.setText("No file open", juce::dontSendNotification);
         } else {
             fileLabel.setText(audioProcessor.getCurrentFileName(), juce::dontSendNotification);
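The fix in the hunk above rests on a conditional-compilation trick that is easy to miss in the diff, so a reduced sketch may help (all names are taken from the code above). On premium desktop builds the Syphon branch participates in the if/else chain; on the free build only the tail of the chain is compiled, which keeps the statement well-formed in both configurations:

#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
    // Premium desktop builds check Syphon/Spout input first...
    if (audioProcessor.isSyphonInputActive()) {
        fileLabel.setText(audioProcessor.getSyphonSourceName(), juce::dontSendNotification);
    } else
#endif
    // ...and every build falls through to the regular file label handling.
    if (audioProcessor.objectServerRendering) {
        fileLabel.setText("Rendering from Blender", juce::dontSendNotification);
    }

The dangling "} else" before the #endif is deliberate: when the guard is compiled out, the following "if" simply starts the chain.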
@@ -526,8 +526,8 @@ void OscirenderAudioProcessorEditor::openVisualiserSettings() {
     visualiserSettingsWindow.toFront(true);
 }

+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
 void OscirenderAudioProcessorEditor::openSyphonInputDialog() {
-#if JUCE_MAC || JUCE_WINDOWS
     SyphonInputSelectorComponent* selector = nullptr;
     {
         juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
@@ -548,7 +548,6 @@ void OscirenderAudioProcessorEditor::openSyphonInputDialog() {
     options.useNativeTitleBar = true;
     options.resizable = false;
     options.launchAsync();
-#endif
 }

 void OscirenderAudioProcessorEditor::onSyphonInputSelected(const juce::String& server, const juce::String& app) {
@@ -560,3 +559,4 @@ void OscirenderAudioProcessorEditor::onSyphonInputDisconnected() {
     juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
     audioProcessor.disconnectSyphonInput();
 }
+#endif
@@ -7,19 +7,18 @@
 */

 #include "PluginProcessor.h"

 #include "PluginEditor.h"
-#include "parser/FileParser.h"
-#include "parser/FrameProducer.h"
-#include "audio/VectorCancellingEffect.h"
-#include "audio/DistortEffect.h"
-#include "audio/SmoothEffect.h"
 #include "audio/BitCrushEffect.h"
 #include "audio/BulgeEffect.h"
+#include "audio/DistortEffect.h"
+#include "audio/SmoothEffect.h"
+#include "audio/VectorCancellingEffect.h"
+#include "parser/FileParser.h"
+#include "parser/FrameProducer.h"

-#if JUCE_MAC || JUCE_WINDOWS
-#include "SyphonFrameGrabber.h"
-#include "img/ImageParser.h"
-#include "../modules/juce_sharedtexture/SharedTexture.h"
+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
+#include "img/ImageParser.h"
 #endif

 //==============================================================================
@@ -28,111 +27,103 @@ OscirenderAudioProcessor::OscirenderAudioProcessor() : CommonAudioProcessor(Buse

     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         std::make_shared<BitCrushEffect>(),
-        new osci::EffectParameter("Bit Crush", "Limits the resolution of points drawn to the screen, making the object look pixelated, and making the audio sound more 'digital' and distorted.", "bitCrush", VERSION_HINT, 0.6, 0.0, 1.0)
-    ));
+        new osci::EffectParameter("Bit Crush", "Limits the resolution of points drawn to the screen, making the object look pixelated, and making the audio sound more 'digital' and distorted.", "bitCrush", VERSION_HINT, 0.6, 0.0, 1.0)));
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         std::make_shared<BulgeEffect>(),
-        new osci::EffectParameter("Bulge", "Applies a bulge that makes the centre of the image larger, and squishes the edges of the image. This applies a distortion to the audio.", "bulge", VERSION_HINT, 0.5, 0.0, 1.0)
-    ));
+        new osci::EffectParameter("Bulge", "Applies a bulge that makes the centre of the image larger, and squishes the edges of the image. This applies a distortion to the audio.", "bulge", VERSION_HINT, 0.5, 0.0, 1.0)));
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         std::make_shared<VectorCancellingEffect>(),
-        new osci::EffectParameter("Vector Cancelling", "Inverts the audio and image every few samples to 'cancel out' the audio, making the audio quiet, and distorting the image.", "vectorCancelling", VERSION_HINT, 0.1111111, 0.0, 1.0)
-    ));
+        new osci::EffectParameter("Vector Cancelling", "Inverts the audio and image every few samples to 'cancel out' the audio, making the audio quiet, and distorting the image.", "vectorCancelling", VERSION_HINT, 0.1111111, 0.0, 1.0)));
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             return input * osci::Point(values[0], values[1], values[2]);
-        }, std::vector<osci::EffectParameter*>{
+        },
+        std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Scale X", "Scales the object in the horizontal direction.", "scaleX", VERSION_HINT, 1.0, -5.0, 5.0),
             new osci::EffectParameter("Scale Y", "Scales the object in the vertical direction.", "scaleY", VERSION_HINT, 1.0, -5.0, 5.0),
             new osci::EffectParameter("Scale Z", "Scales the depth of the object.", "scaleZ", VERSION_HINT, 1.0, -5.0, 5.0),
-        }
-    ));
+        }));
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             int flip = index % 2 == 0 ? 1 : -1;
             osci::Point jitter = osci::Point(flip * values[0], flip * values[1], flip * values[2]);
             return input + jitter;
-        }, std::vector<osci::EffectParameter*>{
+        },
+        std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Distort X", "Distorts the image in the horizontal direction by jittering the audio sample being drawn.", "distortX", VERSION_HINT, 0.0, 0.0, 1.0),
             new osci::EffectParameter("Distort Y", "Distorts the image in the vertical direction by jittering the audio sample being drawn.", "distortY", VERSION_HINT, 0.0, 0.0, 1.0),
             new osci::EffectParameter("Distort Z", "Distorts the depth of the image by jittering the audio sample being drawn.", "distortZ", VERSION_HINT, 0.1, 0.0, 1.0),
-        }
-    ));
+        }));
     auto rippleEffect = std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             double phase = values[1] * std::numbers::pi;
             double distance = 100 * values[2] * (input.x * input.x + input.y * input.y);
             input.z += values[0] * std::sin(phase + distance);
             return input;
-        }, std::vector<osci::EffectParameter*>{
+        },
+        std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Ripple Depth", "Controls how large the ripples applied to the image are.", "rippleDepth", VERSION_HINT, 0.2, 0.0, 1.0),
             new osci::EffectParameter("Ripple Phase", "Controls the position of the ripple. Animate this to see a moving ripple effect.", "ripplePhase", VERSION_HINT, 0.0, -1.0, 1.0),
             new osci::EffectParameter("Ripple Amount", "Controls how many ripples are applied to the image.", "rippleAmount", VERSION_HINT, 0.1, 0.0, 1.0),
-        }
-    );
-    rippleEffect->getParameter("ripplePhase")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth);
+        });
+    rippleEffect->getParameter("ripplePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
     toggleableEffects.push_back(rippleEffect);
     auto rotateEffect = std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             input.rotate(values[0] * std::numbers::pi, values[1] * std::numbers::pi, values[2] * std::numbers::pi);
             return input;
-        }, std::vector<osci::EffectParameter*>{
+        },
+        std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Rotate X", "Controls the rotation of the object in the X axis.", "rotateX", VERSION_HINT, 0.0, -1.0, 1.0),
             new osci::EffectParameter("Rotate Y", "Controls the rotation of the object in the Y axis.", "rotateY", VERSION_HINT, 0.0, -1.0, 1.0),
             new osci::EffectParameter("Rotate Z", "Controls the rotation of the object in the Z axis.", "rotateZ", VERSION_HINT, 0.0, -1.0, 1.0),
-        }
-    );
-    rotateEffect->getParameter("rotateY")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth);
+        });
+    rotateEffect->getParameter("rotateY")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
     rotateEffect->getParameter("rotateY")->lfoRate->setUnnormalisedValueNotifyingHost(0.2);
     toggleableEffects.push_back(rotateEffect);
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             return input + osci::Point(values[0], values[1], values[2]);
-        }, std::vector<osci::EffectParameter*>{
+        },
+        std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Translate X", "Moves the object horizontally.", "translateX", VERSION_HINT, 0.0, -1.0, 1.0),
             new osci::EffectParameter("Translate Y", "Moves the object vertically.", "translateY", VERSION_HINT, 0.0, -1.0, 1.0),
             new osci::EffectParameter("Translate Z", "Moves the object away from the camera.", "translateZ", VERSION_HINT, 0.0, -1.0, 1.0),
-        }
-    ));
+        }));
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             double length = 10 * values[0] * input.magnitude();
             double newX = input.x * std::cos(length) - input.y * std::sin(length);
             double newY = input.x * std::sin(length) + input.y * std::cos(length);
             return osci::Point(newX, newY, input.z);
-        }, std::vector<osci::EffectParameter*>{
+        },
+        std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Swirl", "Swirls the image in a spiral pattern.", "swirl", VERSION_HINT, 0.3, -1.0, 1.0),
-        }
-    ));
+        }));
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         std::make_shared<SmoothEffect>(),
-        new osci::EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0)
-    ));
+        new osci::EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0)));
     std::shared_ptr<osci::Effect> wobble = std::make_shared<osci::Effect>(
         wobbleEffect,
         std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Wobble Amount", "Adds a sine wave of the prominent frequency in the audio currently playing. The sine wave's frequency is slightly offset to create a subtle 'wobble' in the image. Increasing the slider increases the strength of the wobble.", "wobble", VERSION_HINT, 0.3, 0.0, 1.0),
             new osci::EffectParameter("Wobble Phase", "Controls the phase of the wobble.", "wobblePhase", VERSION_HINT, 0.0, -1.0, 1.0),
-        }
-    );
-    wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth);
+        });
+    wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
     toggleableEffects.push_back(wobble);
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         delayEffect,
         std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Delay Decay", "Adds repetitions, delays, or echos to the audio. This slider controls the volume of the echo.", "delayDecay", VERSION_HINT, 0.4, 0.0, 1.0),
-            new osci::EffectParameter("Delay Length", "Controls the time in seconds between echos.", "delayLength", VERSION_HINT, 0.5, 0.0, 1.0)
-        }
-    ));
+            new osci::EffectParameter("Delay Length", "Controls the time in seconds between echos.", "delayLength", VERSION_HINT, 0.5, 0.0, 1.0)}));
     toggleableEffects.push_back(std::make_shared<osci::Effect>(
         dashedLineEffect,
         std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Dash Length", "Controls the length of the dashed line.", "dashLength", VERSION_HINT, 0.2, 0.0, 1.0),
-        }
-    ));
+        }));
     toggleableEffects.push_back(custom);
     toggleableEffects.push_back(trace);
-    trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth);
+    trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);

     for (int i = 0; i < toggleableEffects.size(); i++) {
         auto effect = toggleableEffects[i];
@@ -228,13 +219,12 @@ void OscirenderAudioProcessor::addLuaSlider() {
         [this, sliderIndex](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             luaValues[sliderIndex].store(values[0]);
             return input;
-        }, new osci::EffectParameter(
+        },
+        new osci::EffectParameter(
             "Lua Slider " + sliderName,
             "Controls the value of the Lua variable called slider_" + sliderName.toLowerCase() + ".",
             "lua" + sliderName,
-            VERSION_HINT, 0.0, 0.0, 1.0
-        )
-    ));
+            VERSION_HINT, 0.0, 0.0, 1.0)));
 }

 void OscirenderAudioProcessor::addErrorListener(ErrorListener* listener) {
@@ -258,10 +248,10 @@ void OscirenderAudioProcessor::updateEffectPrecedence() {
 // parsersLock AND effectsLock must be locked before calling this function
 void OscirenderAudioProcessor::updateFileBlock(int index, std::shared_ptr<juce::MemoryBlock> block) {
     if (index < 0 || index >= fileBlocks.size()) {
-		return;
-	}
-	fileBlocks[index] = block;
-	openFile(index);
+        return;
+    }
+    fileBlocks[index] = block;
+    openFile(index);
 }

 // parsersLock AND effectsLock must be locked before calling this function
@@ -269,7 +259,7 @@ void OscirenderAudioProcessor::addFile(juce::File file) {
     fileBlocks.push_back(std::make_shared<juce::MemoryBlock>());
     fileNames.push_back(file.getFileName());
     fileIds.push_back(currentFileId++);
-	parsers.push_back(std::make_shared<FileParser>(*this, errorCallback));
+    parsers.push_back(std::make_shared<FileParser>(*this, errorCallback));
     sounds.push_back(new ShapeSound(*this, parsers.back()));
     file.createInputStream()->readIntoMemoryBlock(*fileBlocks.back());

@@ -306,9 +296,9 @@ void OscirenderAudioProcessor::setFileRemovedCallback(std::function<void(int)> c

 // parsersLock AND effectsLock must be locked before calling this function
 void OscirenderAudioProcessor::removeFile(int index) {
-	if (index < 0 || index >= fileBlocks.size()) {
-		return;
-	}
+    if (index < 0 || index >= fileBlocks.size()) {
+        return;
+    }
     fileBlocks.erase(fileBlocks.begin() + index);
     fileNames.erase(fileNames.begin() + index);
     fileIds.erase(fileIds.begin() + index);
@@ -350,9 +340,9 @@ int OscirenderAudioProcessor::numFiles() {
 // it will reparse any existing files, so it is safer.
 // parsersLock AND effectsLock must be locked before calling this function
 void OscirenderAudioProcessor::openFile(int index) {
-	if (index < 0 || index >= fileBlocks.size()) {
-		return;
-	}
+    if (index < 0 || index >= fileBlocks.size()) {
+        return;
+    }
     parsers[index]->parse(juce::String(fileIds[index]), fileNames[index], fileNames[index].fromLastOccurrenceOf(".", true, false).toLowerCase(), std::make_unique<juce::MemoryInputStream>(*fileBlocks[index], false), font);
     changeCurrentFile(index);
 }
@@ -365,9 +355,9 @@ void OscirenderAudioProcessor::changeCurrentFile(int index) {
         currentFile = -1;
         changeSound(defaultSound);
     }
-	if (index < 0 || index >= fileBlocks.size()) {
-		return;
-	}
+    if (index < 0 || index >= fileBlocks.size()) {
+        return;
+    }
     currentFile = index;
     changeSound(sounds[index]);
 }
@@ -402,7 +392,7 @@ std::shared_ptr<FileParser> OscirenderAudioProcessor::getCurrentFileParser() {

 juce::String OscirenderAudioProcessor::getCurrentFileName() {
     if (objectServerRendering || currentFile == -1) {
-		return "";
+        return "";
     } else {
         return fileNames[currentFile];
     }
@@ -446,7 +436,7 @@ void OscirenderAudioProcessor::setObjectServerPort(int port) {
 void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, juce::MidiBuffer& midiMessages) {
     juce::ScopedNoDenormals noDenormals;
     // Audio info variables
-	int totalNumInputChannels = getTotalNumInputChannels();
+    int totalNumInputChannels = getTotalNumInputChannels();
     int totalNumOutputChannels = getTotalNumOutputChannels();
     double sampleRate = getSampleRate();

@@ -514,6 +504,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
     outputBuffer3d.clear();

+    {
 #if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
         juce::SpinLock::ScopedLockType sLock(syphonLock);
         if (isSyphonInputActive()) {
             for (int sample = 0; sample < outputBuffer3d.getNumSamples(); sample++) {
@@ -521,7 +512,9 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
                 outputBuffer3d.setSample(0, sample, point.x);
                 outputBuffer3d.setSample(1, sample, point.y);
             }
-        } else if (usingInput && totalNumInputChannels >= 1) {
+        } else
+#endif
+        if (usingInput && totalNumInputChannels >= 1) {
             if (totalNumInputChannels >= 2) {
                 for (auto channel = 0; channel < juce::jmin(2, totalNumInputChannels); channel++) {
                     outputBuffer3d.copyFrom(channel, 0, inputBuffer, channel, 0, buffer.getNumSamples());
@@ -535,9 +528,10 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
         // handle all midi messages
         auto midiIterator = midiMessages.cbegin();
         std::for_each(midiIterator,
-            midiMessages.cend(),
-            [&] (const juce::MidiMessageMetadata& meta) { synth.publicHandleMidiEvent(meta.getMessage()); }
-        );
+                      midiMessages.cend(),
+                      [&](const juce::MidiMessageMetadata& meta) {
+                          synth.publicHandleMidiEvent(meta.getMessage());
+                      });
     } else {
         juce::SpinLock::ScopedLockType lock1(parsersLock);
         juce::SpinLock::ScopedLockType lock2(effectsLock);
@@ -556,7 +550,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju

     auto* channelData = buffer.getArrayOfWritePointers();

-	for (int sample = 0; sample < buffer.getNumSamples(); ++sample) {
+    for (int sample = 0; sample < buffer.getNumSamples(); ++sample) {
         if (animateFrames->getBoolValue()) {
             if (juce::JUCEApplicationBase::isStandaloneApp()) {
                 animationFrame = animationFrame + sTimeSec * animationRate->getValueUnnormalised();
@@ -573,7 +567,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
                 if (loopAnimation->getBoolValue()) {
                     animationFrame = std::fmod(animationFrame, totalFrames);
                 } else {
-                    animationFrame = juce::jlimit(0.0, (double) totalFrames - 1, animationFrame.load());
+                    animationFrame = juce::jlimit(0.0, (double)totalFrames - 1, animationFrame.load());
                 }
                 sounds[currentFile]->parser->setFrame(animationFrame);
             }
@@ -598,7 +592,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
         currentVolume = std::sqrt(squaredVolume);
         currentVolume = juce::jlimit(0.0, 1.0, currentVolume);

-        osci::Point channels = { outputBuffer3d.getSample(0, sample), outputBuffer3d.getSample(1, sample), outputBuffer3d.getSample(2, sample) };
+        osci::Point channels = {outputBuffer3d.getSample(0, sample), outputBuffer3d.getSample(1, sample), outputBuffer3d.getSample(2, sample)};

         {
             juce::SpinLock::ScopedLockType lock1(parsersLock);
@@ -621,8 +615,8 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
             }
         }

-		double x = channels.x;
-		double y = channels.y;
+        double x = channels.x;
+        double y = channels.y;

         x *= volume;
         y *= volume;
@@ -640,9 +634,9 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
         }

         if (totalNumOutputChannels >= 2) {
-			channelData[0][sample] = x;
-			channelData[1][sample] = y;
-		} else if (totalNumOutputChannels == 1) {
+            channelData[0][sample] = x;
+            channelData[1][sample] = y;
+        } else if (totalNumOutputChannels == 1) {
             channelData[0][sample] = x;
         }

@@ -650,7 +644,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
         playTimeSeconds += sTimeSec;
         playTimeBeats += sTimeBeats;
-	}
+    }
 }

 // used for any callback that must guarantee all audio is recieved (e.g. when recording to a file)
 juce::SpinLock::ScopedLockType lock(audioThreadCallbackLock);
@@ -904,7 +898,7 @@ void OscirenderAudioProcessor::envelopeChanged(EnvelopeComponent* changedEnvelop
     }
 }

-#if JUCE_MAC || JUCE_WINDOWS
+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
 // Syphon/Spout input management

 // syphonLock must be held when calling this function
@@ -11,40 +11,43 @@
 #define VERSION_HINT 2

 #include <JuceHeader.h>
-#include "audio/ShapeSound.h"
-#include "audio/ShapeVoice.h"
-#include "audio/PublicSynthesiser.h"
-#include "audio/SampleRateManager.h"

 #include <numbers>
-#include "audio/DelayEffect.h"
-#include "audio/WobbleEffect.h"
-#include "audio/PerspectiveEffect.h"
-#include "obj/ObjectServer.h"

 #include "CommonPluginProcessor.h"
 #include "UGen/Env.h"
 #include "UGen/ugen_JuceEnvelopeComponent.h"
 #include "audio/CustomEffect.h"
 #include "audio/DashedLineEffect.h"
-#include "SyphonFrameGrabber.h"
+#include "audio/DelayEffect.h"
+#include "audio/PerspectiveEffect.h"
+#include "audio/PublicSynthesiser.h"
+#include "audio/SampleRateManager.h"
+#include "audio/ShapeSound.h"
+#include "audio/ShapeVoice.h"
+#include "audio/WobbleEffect.h"
+#include "obj/ObjectServer.h"

-#if JUCE_MAC || JUCE_WINDOWS
-#include "../modules/juce_sharedtexture/SharedTexture.h"
+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
+#include "../modules/juce_sharedtexture/SharedTexture.h"
+#include "video/SyphonFrameGrabber.h"
 #endif

 //==============================================================================
 /**
  */
 class OscirenderAudioProcessor : public CommonAudioProcessor, juce::AudioProcessorParameter::Listener, public EnvelopeComponentListener
 #if JucePlugin_Enable_ARA
-    , public juce::AudioProcessorARAExtension
+    ,
+    public juce::AudioProcessorARAExtension
 #endif
 {
 public:
     OscirenderAudioProcessor();
     ~OscirenderAudioProcessor() override;

-    void prepareToPlay (double sampleRate, int samplesPerBlock) override;
-    void processBlock (juce::AudioBuffer<float>&, juce::MidiBuffer&) override;
+    void prepareToPlay(double sampleRate, int samplesPerBlock) override;
+    void processBlock(juce::AudioBuffer<float>&, juce::MidiBuffer&) override;

     juce::AudioProcessorEditor* createEditor() override;

@@ -56,21 +59,20 @@ public:
     void parameterGestureChanged(int parameterIndex, bool gestureIsStarting) override;
     void envelopeChanged(EnvelopeComponent* changedEnvelope) override;

-	std::vector<std::shared_ptr<osci::Effect>> toggleableEffects;
+    std::vector<std::shared_ptr<osci::Effect>> toggleableEffects;
     std::vector<std::shared_ptr<osci::Effect>> luaEffects;
-    std::atomic<double> luaValues[26] = { 0.0 };
+    std::atomic<double> luaValues[26] = {0.0};

     std::shared_ptr<osci::Effect> frequencyEffect = std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             frequency = values[0].load();
             return input;
-        }, new osci::EffectParameter(
+        },
+        new osci::EffectParameter(
             "Frequency",
             "Controls how many times per second the image is drawn, thereby controlling the pitch of the sound. Lower frequencies result in more-accurately drawn images, but more flickering, and vice versa.",
             "frequency",
-            VERSION_HINT, 220.0, 0.0, 4200.0
-        )
-    );
+            VERSION_HINT, 220.0, 0.0, 4200.0));

     std::shared_ptr<osci::Effect> trace = std::make_shared<osci::Effect>(
         std::vector<osci::EffectParameter*>{
@@ -78,16 +80,13 @@ public:
             "Trace Start",
             "Defines how far into the frame the drawing is started at. This has the effect of 'tracing' out the image from a single dot when animated. By default, we start drawing from the beginning of the frame, so this value is 0.0.",
             "traceStart",
-            VERSION_HINT, 0.0, 0.0, 1.0, 0.001
-        ),
+            VERSION_HINT, 0.0, 0.0, 1.0, 0.001),
         new osci::EffectParameter(
             "Trace Length",
             "Defines how much of the frame is drawn per cycle. This has the effect of 'tracing' out the image from a single dot when animated. By default, we draw the whole frame, corresponding to a value of 1.0.",
             "traceLength",
-            VERSION_HINT, 1.0, 0.0, 1.0, 0.001
-        ),
-    }
-);
+            VERSION_HINT, 1.0, 0.0, 1.0, 0.001),
+        });

     std::shared_ptr<DelayEffect> delayEffect = std::make_shared<DelayEffect>();

@@ -97,8 +96,7 @@ public:
     std::shared_ptr<CustomEffect> customEffect = std::make_shared<CustomEffect>(errorCallback, luaValues);
     std::shared_ptr<osci::Effect> custom = std::make_shared<osci::Effect>(
         customEffect,
-        new osci::EffectParameter("Lua Effect", "Controls the strength of the custom Lua effect applied. You can write your own custom effect using Lua by pressing the edit button on the right.", "customEffectStrength", VERSION_HINT, 1.0, 0.0, 1.0)
-    );
+        new osci::EffectParameter("Lua Effect", "Controls the strength of the custom Lua effect applied. You can write your own custom effect using Lua by pressing the edit button on the right.", "customEffectStrength", VERSION_HINT, 1.0, 0.0, 1.0));

     std::shared_ptr<PerspectiveEffect> perspectiveEffect = std::make_shared<PerspectiveEffect>();
     std::shared_ptr<osci::Effect> perspective = std::make_shared<osci::Effect>(
@@ -106,8 +104,7 @@ public:
         std::vector<osci::EffectParameter*>{
             new osci::EffectParameter("Perspective", "Controls the strength of the 3D perspective projection.", "perspectiveStrength", VERSION_HINT, 1.0, 0.0, 1.0),
             new osci::EffectParameter("Focal Length", "Controls the focal length of the 3D perspective effect. A higher focal length makes the image look more flat, and a lower focal length makes the image look more 3D.", "perspectiveFocalLength", VERSION_HINT, 2.0, 0.0, 10.0),
-        }
-    );
+        });

     osci::BooleanParameter* midiEnabled = new osci::BooleanParameter("MIDI Enabled", "midiEnabled", VERSION_HINT, false, "Enable MIDI input for the synth. If disabled, the synth will play a constant tone, as controlled by the frequency slider.");
     osci::BooleanParameter* inputEnabled = new osci::BooleanParameter("Audio Input Enabled", "inputEnabled", VERSION_HINT, false, "Enable to use input audio, instead of the generated audio.");
@@ -133,7 +130,7 @@ public:
     osci::FloatParameter* releaseTime = new osci::FloatParameter("Release Time", "releaseTime", VERSION_HINT, 0.4, 0.0, 1.0);
     osci::FloatParameter* attackShape = new osci::FloatParameter("Attack Shape", "attackShape", VERSION_HINT, 5, -50, 50);
     osci::FloatParameter* decayShape = new osci::FloatParameter("Decay osci::Shape", "decayShape", VERSION_HINT, -20, -50, 50);
-    osci::FloatParameter* releaseShape = new osci::FloatParameter("Release Shape", "releaseShape", VERSION_HINT, -5,-50, 50);
+    osci::FloatParameter* releaseShape = new osci::FloatParameter("Release Shape", "releaseShape", VERSION_HINT, -5, -50, 50);

     Env adsrEnv = Env::adsr(
         attackTime->getValueUnnormalised(),
@@ -141,8 +138,7 @@ public:
         sustainLevel->getValueUnnormalised(),
         releaseTime->getValueUnnormalised(),
         1.0,
-        std::vector<EnvCurve>{ attackShape->getValueUnnormalised(), decayShape->getValueUnnormalised(), releaseShape->getValueUnnormalised() }
-    );
+        std::vector<EnvCurve>{attackShape->getValueUnnormalised(), decayShape->getValueUnnormalised(), releaseShape->getValueUnnormalised()});

     juce::MidiKeyboardState keyboardState;

@@ -158,23 +154,21 @@ public:
     std::shared_ptr<osci::Effect> imageThreshold = std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             return input;
-        }, new osci::EffectParameter(
+        },
+        new osci::EffectParameter(
             "Image Threshold",
             "Controls the probability of visiting a dark pixel versus a light pixel. Darker pixels are less likely to be visited, so turning the threshold to a lower value makes it more likely to visit dark pixels.",
             "imageThreshold",
-            VERSION_HINT, 0.5, 0, 1
-        )
-    );
+            VERSION_HINT, 0.5, 0, 1));
     std::shared_ptr<osci::Effect> imageStride = std::make_shared<osci::Effect>(
         [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
             return input;
-        }, new osci::EffectParameter(
+        },
+        new osci::EffectParameter(
             "Image Stride",
             "Controls the spacing between pixels when drawing an image. Larger values mean more of the image can be drawn, but at a lower fidelity.",
             "imageStride",
-            VERSION_HINT, 4, 1, 50, 1
-        )
-    );
+            VERSION_HINT, 4, 1, 50, 1));

     std::atomic<double> animationFrame = 0.f;

@@ -202,10 +196,10 @@ public:
     void openFile(int index);
     int getCurrentFileIndex();
     std::shared_ptr<FileParser> getCurrentFileParser();
-	juce::String getCurrentFileName();
+    juce::String getCurrentFileName();
     juce::String getFileName(int index);
     juce::String getFileId(int index);
-	std::shared_ptr<juce::MemoryBlock> getFileBlock(int index);
+    std::shared_ptr<juce::MemoryBlock> getFileBlock(int index);
     void setObjectServerRendering(bool enabled);
     void setObjectServerPort(int port);
     void addErrorListener(ErrorListener* listener);
@@ -240,7 +234,6 @@ public:
     };

 private:
-
     std::atomic<bool> prevMidiEnabled = !midiEnabled->getBoolValue();

     juce::SpinLock audioThreadCallbackLock;
@@ -271,7 +264,7 @@ private:
         std::istringstream parser(input.toStdString());
         parser >> result[0];
         for (int idx = 1; idx < 3; idx++) {
-            parser.get(); //Skip period
+            parser.get(); // Skip period
             parser >> result[idx];
         }
     }
@@ -287,8 +280,7 @@ private:

     juce::AudioPlayHead* playHead;

-
-#if JUCE_MAC || JUCE_WINDOWS
+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
 public:
     bool isSyphonInputActive() const;
     bool isSyphonInputStarted() const;
@@ -297,11 +289,12 @@ public:
     juce::String getSyphonSourceName() const;

     juce::SpinLock syphonLock;

 private:
+    ImageParser syphonImageParser = ImageParser(*this);
     std::unique_ptr<SyphonFrameGrabber> syphonFrameGrabber;
 #endif

     //==============================================================================
-    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OscirenderAudioProcessor)
+    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(OscirenderAudioProcessor)
 };
@@ -1,4 +1,5 @@
 #include "SettingsComponent.h"
+
 #include "PluginEditor.h"

 SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudioProcessorEditor& editor) : audioProcessor(p), pluginEditor(editor) {
@@ -23,7 +24,6 @@ SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudi
     mainLayout.setItemLayout(2, -0.1, -0.9, -(1.0 + mainLayoutPreferredSize));
 }

-
 void SettingsComponent::resized() {
     auto area = getLocalBounds();
     area.removeFromLeft(5);
@@ -38,11 +38,11 @@ void SettingsComponent::resized() {
     juce::Component dummy;
     juce::Component dummy2;

-    juce::Component* midiComponents[] = { &dummy, &midiResizerBar, &midi };
+    juce::Component* midiComponents[] = {&dummy, &midiResizerBar, &midi};
     midiLayout.layOutComponents(midiComponents, 3, area.getX(), area.getY(), area.getWidth(), area.getHeight(), true, true);
     midi.setBounds(midi.getBounds());

-    juce::Component* columns[] = { &dummy2, &mainResizerBar, &dummy };
+    juce::Component* columns[] = {&dummy2, &mainResizerBar, &dummy};
     mainLayout.layOutComponents(columns, 3, dummy.getX(), dummy.getY(), dummy.getWidth(), dummy.getHeight(), false, true);

     auto bounds = dummy2.getBounds();
@@ -79,8 +79,25 @@ void SettingsComponent::fileUpdated(juce::String fileName) {
     juce::String extension = fileName.fromLastOccurrenceOf(".", true, false).toLowerCase();
     txt.setVisible(false);
     frame.setVisible(false);
-    bool isImage = extension == ".gif" || extension == ".png" || extension == ".jpg" || extension == ".jpeg" || extension == ".mov" || extension == ".mp4" || audioProcessor.isSyphonInputStarted();
-    if ((fileName.isEmpty() && !audioProcessor.isSyphonInputStarted()) || audioProcessor.objectServerRendering) {
+
+    // Check if the file is an image based on extension or Syphon/Spout input
+    bool isSyphonActive = false;
+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
+    isSyphonActive = audioProcessor.isSyphonInputStarted();
+#endif
+
+    bool isImage = isSyphonActive ||
+                   (extension == ".gif" ||
+                    extension == ".png" ||
+                    extension == ".jpg" ||
+                    extension == ".jpeg" ||
+                    extension == ".mov" ||
+                    extension == ".mp4");
+
+    // Skip processing if object server is rendering or if no file is selected and no Syphon input
+    bool skipProcessing = audioProcessor.objectServerRendering || (fileName.isEmpty() && !isSyphonActive);
+
+    if (skipProcessing) {
+        // do nothing
     } else if (extension == ".txt") {
         txt.setVisible(true);
@@ -62,6 +62,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
         editor.openRecordingSettings();
     });

+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
     // Add Syphon/Spout input menu item under Recording
     addMenuItem(2, audioProcessor.isSyphonInputActive() ? "Disconnect Syphon/Spout Input" : "Select Syphon/Spout Input...", [this] {
         if (audioProcessor.isSyphonInputActive())
@@ -69,6 +70,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
         else
             openSyphonInputDialog();
     });
+#endif

     if (editor.processor.wrapperType == juce::AudioProcessor::WrapperType::wrapperType_Standalone) {
         addMenuItem(3, "Settings...", [this] {
@@ -77,6 +79,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
     }
 }

+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
 void OsciMainMenuBarModel::openSyphonInputDialog() {
     editor.openSyphonInputDialog();
 }
@@ -84,3 +87,4 @@ void OsciMainMenuBarModel::openSyphonInputDialog() {
 void OsciMainMenuBarModel::disconnectSyphonInput() {
     audioProcessor.disconnectSyphonInput();
 }
+#endif
@@ -0,0 +1,343 @@
+#include "FFmpegEncoderManager.h"
+
+FFmpegEncoderManager::FFmpegEncoderManager(juce::File& ffmpegExecutable)
+    : ffmpegExecutable(ffmpegExecutable) {
+    queryAvailableEncoders();
+}
+
+juce::String FFmpegEncoderManager::buildVideoEncodingCommand(
+    VideoCodec codec,
+    int crf,
+    int videoToolboxQuality,
+    int width,
+    int height,
+    double frameRate,
+    const juce::String& compressionPreset,
+    const juce::File& outputFile) {
+    switch (codec) {
+        case VideoCodec::H264:
+            return buildH264EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile);
+        case VideoCodec::H265:
+            return buildH265EncodingCommand(crf, videoToolboxQuality, width, height, frameRate, compressionPreset, outputFile);
+        case VideoCodec::VP9:
+            return buildVP9EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile);
+#if JUCE_MAC
+        case VideoCodec::ProRes:
+            return buildProResEncodingCommand(width, height, frameRate, outputFile);
+#endif
+        default:
+            // Default to H.264 if unknown codec
+            return buildH264EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile);
+    }
+}
+
+juce::Array<FFmpegEncoderManager::EncoderDetails> FFmpegEncoderManager::getAvailableEncodersForCodec(VideoCodec codec) {
+    // Return cached list of encoders if available
+    auto it = availableEncoders.find(codec);
+    if (it != availableEncoders.end()) {
+        return it->second;
+    }
+
+    return {};
+}
+
+bool FFmpegEncoderManager::isHardwareEncoderAvailable(const juce::String& encoderName) {
+    // Check if the encoder is available and supported
+    for (auto& pair : availableEncoders) {
+        for (auto& encoder : pair.second) {
+            if (encoder.name == encoderName && encoder.isSupported && encoder.isHardwareAccelerated) {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+juce::String FFmpegEncoderManager::getBestEncoderForCodec(VideoCodec codec) {
+    auto encoders = getAvailableEncodersForCodec(codec);
+
+    // Define priority lists for each codec type
+    juce::StringArray h264Encoders = {"h264_nvenc", "h264_amf", "h264_qsv", "h264_videotoolbox", "libx264"};
+    juce::StringArray h265Encoders = {"hevc_nvenc", "hevc_amf", "hevc_qsv", "hevc_videotoolbox", "libx265"};
+    juce::StringArray vp9Encoders = {"libvpx-vp9"};
+#if JUCE_MAC
+    juce::StringArray proResEncoders = {"prores_ks", "prores"};
+#endif
+
+    // Select the appropriate priority list based on codec
+    juce::StringArray* priorityList = nullptr;
+    switch (codec) {
+        case VideoCodec::H264:
+            priorityList = &h264Encoders;
+            break;
+        case VideoCodec::H265:
+            priorityList = &h265Encoders;
+            break;
+        case VideoCodec::VP9:
+            priorityList = &vp9Encoders;
+            break;
+#if JUCE_MAC
+        case VideoCodec::ProRes:
+            priorityList = &proResEncoders;
+            break;
+#endif
+        default:
+            priorityList = &h264Encoders; // Default to H.264
+    }
+
+    // Find the highest priority encoder that is available
+    for (const auto& encoderName : *priorityList) {
+        for (const auto& encoder : encoders) {
+            if (encoder.name == encoderName && encoder.isSupported) {
+                return encoderName;
+            }
+        }
+    }
+
+    // Return default software encoder if no hardware encoder is available
+    switch (codec) {
+        case VideoCodec::H264:
+            return "libx264";
+        case VideoCodec::H265:
+            return "libx265";
+        case VideoCodec::VP9:
+            return "libvpx-vp9";
+#if JUCE_MAC
+        case VideoCodec::ProRes:
+            return "prores";
+#endif
+        default:
+            return "libx264";
+    }
+}
+
+void FFmpegEncoderManager::queryAvailableEncoders() {
+    // Query available encoders using ffmpeg -encoders
+    juce::String output = runFFmpegCommand({"-encoders", "-hide_banner"});
+    parseEncoderList(output);
+}
+
+void FFmpegEncoderManager::parseEncoderList(const juce::String& output) {
+    // Clear current encoders
+    availableEncoders.clear();
+
+    // Initialize codec-specific encoder arrays
+    availableEncoders[VideoCodec::H264] = {};
+    availableEncoders[VideoCodec::H265] = {};
+    availableEncoders[VideoCodec::VP9] = {};
+#if JUCE_MAC
+    availableEncoders[VideoCodec::ProRes] = {};
+#endif
+
+    // Split the output into lines
+    juce::StringArray lines;
+    lines.addLines(output);
+
+    // Skip the first 10 lines (header information from ffmpeg -encoders)
+    int linesToSkip = juce::jmin(10, lines.size());
+
+    // Parse each line to find encoder information
+    for (int i = linesToSkip; i < lines.size(); ++i) {
+        const auto& line = lines[i];
+
+        // Format: V..... libx264              H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10
+        juce::String flags = line.substring(0, 6).trim();
+        juce::String name = line.substring(8).upToFirstOccurrenceOf(" ", false, true);
+        juce::String description = line.substring(8 + name.length()).trim();
+
+        EncoderDetails encoder;
+        encoder.name = name;
+        encoder.description = description;
+        encoder.isHardwareAccelerated = name.contains("nvenc") || name.contains("amf") ||
+                                        name.contains("qsv") || name.contains("videotoolbox");
+        encoder.isSupported = flags.contains("V"); // Video encoder
+
+        // Add encoder to appropriate codec list
+        if (name == "libx264" || name.startsWith("h264_")) {
+            availableEncoders[VideoCodec::H264].add(encoder);
+        } else if (name == "libx265" || name.startsWith("hevc_")) {
+            availableEncoders[VideoCodec::H265].add(encoder);
+        } else if (name == "libvpx-vp9") {
+            availableEncoders[VideoCodec::VP9].add(encoder);
+        }
+#if JUCE_MAC
+        else if (name.startsWith("prores")) {
+            availableEncoders[VideoCodec::ProRes].add(encoder);
+        }
+#endif
+    }
+}
+
+juce::String FFmpegEncoderManager::runFFmpegCommand(const juce::StringArray& args) {
+    juce::ChildProcess process;
+    juce::StringArray command;
+
+    command.add(ffmpegExecutable.getFullPathName());
+    command.addArray(args);
+
+    process.start(command, juce::ChildProcess::wantStdOut);
+
+    juce::String output = process.readAllProcessOutput();
+
+    return output;
+}
+
+juce::String FFmpegEncoderManager::buildBaseEncodingCommand(
+    int width,
+    int height,
+    double frameRate,
+    const juce::File& outputFile) {
+    juce::String resolution = juce::String(width) + "x" + juce::String(height);
+    juce::String cmd = "\"" + ffmpegExecutable.getFullPathName() + "\"" +
+                       " -r " + juce::String(frameRate) +
+                       " -f rawvideo" +
+                       " -pix_fmt rgba" +
+                       " -s " + resolution +
+                       " -i -" +
+                       " -threads 4" +
+                       " -y" +
+                       " -pix_fmt yuv420p" +
+                       " -vf vflip";
+
+    return cmd;
+}
+
+juce::String FFmpegEncoderManager::addH264EncoderSettings(
+    juce::String cmd,
+    const juce::String& encoderName,
+    int crf,
+    const juce::String& compressionPreset) {
+    if (encoderName == "h264_nvenc") {
+        cmd += " -c:v h264_nvenc";
+        cmd += " -preset p7";
+        cmd += " -profile:v high";
+        cmd += " -rc vbr";
+        cmd += " -cq " + juce::String(crf);
+        cmd += " -b:v 0";
+    } else if (encoderName == "h264_amf") {
+        cmd += " -c:v h264_amf";
+        cmd += " -quality quality";
+        cmd += " -rc cqp";
+        cmd += " -qp_i " + juce::String(crf);
+        cmd += " -qp_p " + juce::String(crf);
+    } else if (encoderName == "h264_qsv") {
+        cmd += " -c:v h264_qsv";
+        cmd += " -global_quality " + juce::String(crf);
+        cmd += " -preset " + compressionPreset;
+    } else if (encoderName == "h264_videotoolbox") {
+        cmd += " -c:v h264_videotoolbox";
+        cmd += " -q " + juce::String(crf);
+    } else { // libx264 (software)
+        cmd += " -c:v libx264";
+        cmd += " -preset " + compressionPreset;
+        cmd += " -crf " + juce::String(crf);
+    }
+
+    return cmd;
+}
+
+juce::String FFmpegEncoderManager::addH265EncoderSettings(
+    juce::String cmd,
+    const juce::String& encoderName,
+    int crf,
+    int videoToolboxQuality,
+    const juce::String& compressionPreset) {
+    if (encoderName == "hevc_nvenc") {
+        cmd += " -c:v hevc_nvenc";
+        cmd += " -preset p7";
+        cmd += " -profile:v main";
+        cmd += " -rc vbr";
+        cmd += " -cq " + juce::String(crf);
+        cmd += " -b:v 0";
+    } else if (encoderName == "hevc_amf") {
+        cmd += " -c:v hevc_amf";
+        cmd += " -quality quality";
+        cmd += " -rc cqp";
+        cmd += " -qp_i " + juce::String(crf);
+        cmd += " -qp_p " + juce::String(crf);
+    } else if (encoderName == "hevc_qsv") {
+        cmd += " -c:v hevc_qsv";
+        cmd += " -global_quality " + juce::String(crf);
+        cmd += " -preset " + compressionPreset;
+    } else if (encoderName == "hevc_videotoolbox") {
+        cmd += " -c:v hevc_videotoolbox";
+        cmd += " -q:v " + juce::String(videoToolboxQuality);
+        cmd += " -tag:v hvc1";
+    } else { // libx265 (software)
+        cmd += " -c:v libx265";
+        cmd += " -preset " + compressionPreset;
+        cmd += " -crf " + juce::String(crf);
+    }
+
+    return cmd;
+}
+
+juce::String FFmpegEncoderManager::buildH264EncodingCommand(
+    int crf,
+    int width,
+    int height,
+    double frameRate,
+    const juce::String& compressionPreset,
+    const juce::File& outputFile) {
+    juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile);
+    juce::String bestEncoder = getBestEncoderForCodec(VideoCodec::H264);
+
+    cmd = addH264EncoderSettings(cmd, bestEncoder, crf, compressionPreset);
+    cmd += " \"" + outputFile.getFullPathName() + "\"";
+
+    return cmd;
+}
+
+juce::String FFmpegEncoderManager::buildH265EncodingCommand(
+    int crf,
+    int videoToolboxQuality,
+    int width,
+    int height,
+    double frameRate,
+    const juce::String& compressionPreset,
+    const juce::File& outputFile) {
+    juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile);
+    juce::String bestEncoder = getBestEncoderForCodec(VideoCodec::H265);
+
+    cmd = addH265EncoderSettings(cmd, bestEncoder, crf, videoToolboxQuality, compressionPreset);
+    cmd += " \"" + outputFile.getFullPathName() + "\"";
+
+    return cmd;
+}
+
+juce::String FFmpegEncoderManager::buildVP9EncodingCommand(
+    int crf,
+    int width,
+    int height,
+    double frameRate,
+    const juce::String& compressionPreset,
+    const juce::File& outputFile) {
+    juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile);
+
+    cmd += juce::String(" -c:v libvpx-vp9") +
+           " -b:v 0" +
+           " -crf " + juce::String(crf) +
+           " -deadline good -cpu-used 2";
+
+    cmd += " \"" + outputFile.getFullPathName() + "\"";
+
+    return cmd;
+}
+
+#if JUCE_MAC
+juce::String FFmpegEncoderManager::buildProResEncodingCommand(
+    int width,
+    int height,
+    double frameRate,
+    const juce::File& outputFile) {
+    juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile);
+    juce::String bestEncoder = getBestEncoderForCodec(VideoCodec::ProRes);
+
+    cmd += " -c:v " + bestEncoder +
+           " -profile:v 3"; // ProRes 422 HQ
+
+    cmd += " \"" + outputFile.getFullPathName() + "\"";
+
+    return cmd;
+}
+#endif
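The new FFmpegEncoderManager above is self-contained, so a short usage sketch can illustrate the intended flow. This is not code from the commit: the CRF, VideoToolbox quality, resolution, frame rate, and preset below are hypothetical example values, and VideoCodec is the enum pulled in via RecordingSettings.h.

#include "FFmpegEncoderManager.h"

juce::String makeExampleCommand(juce::File ffmpeg, const juce::File& output) {
    // Runs "ffmpeg -encoders -hide_banner" once and caches the parsed result.
    FFmpegEncoderManager manager(ffmpeg);

    // Prefers h264_nvenc, h264_amf, h264_qsv, or h264_videotoolbox when FFmpeg
    // reports one, and falls back to software libx264 otherwise.
    juce::String encoder = manager.getBestEncoderForCodec(VideoCodec::H264);
    DBG("Selected encoder: " + encoder);

    // crf 23, VideoToolbox quality 65, 1920x1080 at 60 fps, "medium" preset:
    // illustrative values only, not the plugin's actual recording settings.
    return manager.buildVideoEncodingCommand(VideoCodec::H264, 23, 65, 1920, 1080, 60.0, "medium", output);
}

With the software fallback selected, the returned command has the shape "<ffmpeg>" -r 60 -f rawvideo -pix_fmt rgba -s 1920x1080 -i - -threads 4 -y -pix_fmt yuv420p -vf vflip -c:v libx264 -preset medium -crf 23 "<output>": raw RGBA frames are piped in on stdin and flipped vertically before encoding.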
@@ -0,0 +1,112 @@
+#pragma once
+
+#include <JuceHeader.h>
+
+#include "../visualiser/RecordingSettings.h"
+
+class FFmpegEncoderManager {
+public:
+    FFmpegEncoderManager(juce::File& ffmpegExecutable);
+    ~FFmpegEncoderManager() = default;
+
+    struct EncoderDetails {
+        juce::String name;
+        juce::String description;
+        bool isHardwareAccelerated;
+        bool isSupported;
+    };
+
+    // FFMPEG command builder
+    juce::String buildVideoEncodingCommand(
+        VideoCodec codec,
+        int crf,
+        int videoToolboxQuality,
+        int width,
+        int height,
+        double frameRate,
+        const juce::String& compressionPreset,
+        const juce::File& outputFile);
+
+    // Get available encoders for a given codec
+    juce::Array<EncoderDetails> getAvailableEncodersForCodec(VideoCodec codec);
+
+    // Check if a hardware encoder is available
+    bool isHardwareEncoderAvailable(const juce::String& encoderName);
+
+    // Get the best encoder for a given codec
+    juce::String getBestEncoderForCodec(VideoCodec codec);
+
+private:
+    juce::File ffmpegExecutable;
+    std::map<VideoCodec, juce::Array<EncoderDetails>> availableEncoders;
+
+    // Query available encoders from FFmpeg
+    void queryAvailableEncoders();
+
+    // Parse encoder output from FFmpeg
+    void parseEncoderList(const juce::String& output);
+
+    // Run FFmpeg with given arguments and return output
+    juce::String runFFmpegCommand(const juce::StringArray& args);
+
+    // Common base command builder to reduce duplication
+    juce::String buildBaseEncodingCommand(
+        int width,
+        int height,
+        double frameRate,
+        const juce::File& outputFile);
+
+    // H.264 encoder settings helper
+    juce::String addH264EncoderSettings(
+        juce::String cmd,
+        const juce::String& encoderName,
+        int crf,
+        const juce::String& compressionPreset);
+
+    // H.265 encoder settings helper
+    juce::String addH265EncoderSettings(
+        juce::String cmd,
+        const juce::String& encoderName,
+        int crf,
+        int videoToolboxQuality,
+        const juce::String& compressionPreset);
+
+    // Build H.264 encoding command
+    juce::String buildH264EncodingCommand(
+        int crf,
+        int width,
+        int height,
+        double frameRate,
+        const juce::String& compressionPreset,
+        const juce::File& outputFile);
+
+    // Build H.265 encoding command
+    juce::String buildH265EncodingCommand(
+        int crf,
+        int videoToolboxQuality,
+        int width,
+        int height,
+        double frameRate,
+        const juce::String& compressionPreset,
+        const juce::File& outputFile);
+
+    // Build VP9 encoding command
+    juce::String buildVP9EncodingCommand(
+        int crf,
+        int width,
+        int height,
+        double frameRate,
+        const juce::String& compressionPreset,
+        const juce::File& outputFile);
+
+#if JUCE_MAC
+    // Build ProRes encoding command
+    juce::String buildProResEncodingCommand(
+        int width,
+        int height,
+        double frameRate,
+        const juce::File& outputFile);
+#endif
+
+    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(FFmpegEncoderManager)
+};
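For context on parseEncoderList in the implementation above: "ffmpeg -encoders -hide_banner" prints a fixed-width flags column, the encoder name, and a free-form description, roughly as in this abbreviated sample (mirroring the format comment in the code; exact flag letters vary between FFmpeg builds):

 V..... libx264              H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10
 V..... h264_videotoolbox    VideoToolbox H.264 Encoder (codec h264)

This is why the parser slices each line at fixed columns, keys off the "V" video flag, and sorts encoders into codec buckets by name prefixes such as h264_, hevc_, and prores.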
@@ -1,13 +1,12 @@
 #pragma once
 #include <JuceHeader.h>

 #include "InvisibleOpenGLContextComponent.h"

-class SyphonFrameGrabber : private juce::Thread, public juce::Component
-{
+class SyphonFrameGrabber : private juce::Thread, public juce::Component {
 public:
     SyphonFrameGrabber(SharedTextureManager& manager, juce::String server, juce::String app, ImageParser& parser, int pollMs = 16)
-        : juce::Thread("SyphonFrameGrabber"), pollIntervalMs(pollMs), manager(manager), parser(parser)
-    {
+        : juce::Thread("SyphonFrameGrabber"), pollIntervalMs(pollMs), manager(manager), parser(parser) {
         // Create the invisible OpenGL context component
         glContextComponent = std::make_unique<InvisibleOpenGLContextComponent>();
         receiver = manager.addReceiver(server, app);
@@ -45,13 +44,11 @@ public:
         }
     }

-    bool isActive() const
-    {
+    bool isActive() const {
         return receiver != nullptr && receiver->isInit && receiver->enabled;
     }

-    juce::String getSourceName() const
-    {
+    juce::String getSourceName() const {
         if (receiver) {
             return receiver->sharingName + " (" + receiver->sharingAppName + ")";
         }
@@ -1,14 +1,12 @@
-#include "../LookAndFeel.h"
 #include "VisualiserComponent.h"
-#include "../CommonPluginProcessor.h"
-#include "../CommonPluginEditor.h"

+#include "../CommonPluginEditor.h"
+#include "../CommonPluginProcessor.h"
+#include "../LookAndFeel.h"
 #include "AfterglowFragmentShader.glsl"
 #include "AfterglowVertexShader.glsl"
 #include "BlurFragmentShader.glsl"
 #include "BlurVertexShader.glsl"
-#include "WideBlurFragmentShader.glsl"
-#include "WideBlurVertexShader.glsl"
 #include "GlowFragmentShader.glsl"
 #include "GlowVertexShader.glsl"
 #include "LineFragmentShader.glsl"
@@ -19,29 +17,31 @@
 #include "SimpleVertexShader.glsl"
 #include "TexturedFragmentShader.glsl"
 #include "TexturedVertexShader.glsl"
+#include "WideBlurFragmentShader.glsl"
+#include "WideBlurVertexShader.glsl"

 VisualiserComponent::VisualiserComponent(
-    CommonAudioProcessor& processor,
-    CommonPluginEditor& pluginEditor,
+    CommonAudioProcessor &processor,
+    CommonPluginEditor &pluginEditor,
 #if OSCI_PREMIUM
-    SharedTextureManager& sharedTextureManager,
+    SharedTextureManager &sharedTextureManager,
 #endif
     juce::File ffmpegFile,
-    VisualiserSettings& settings,
-    RecordingSettings& recordingSettings,
-    VisualiserComponent* parent,
-    bool visualiserOnly
-) : audioProcessor(processor),
-    ffmpegFile(ffmpegFile),
+    VisualiserSettings &settings,
+    RecordingSettings &recordingSettings,
+    VisualiserComponent *parent,
+    bool visualiserOnly) : audioProcessor(processor),
+                           ffmpegFile(ffmpegFile),
 #if OSCI_PREMIUM
-    sharedTextureManager(sharedTextureManager),
+                           sharedTextureManager(sharedTextureManager),
+                           ffmpegEncoderManager(ffmpegFile),
 #endif
-    settings(settings),
-    recordingSettings(recordingSettings),
-    visualiserOnly(visualiserOnly),
-    osci::AudioBackgroundThread("VisualiserComponent" + juce::String(parent != nullptr ? " Child" : ""), processor.threadManager),
-    parent(parent),
-    editor(pluginEditor) {
+                           settings(settings),
+                           recordingSettings(recordingSettings),
+                           visualiserOnly(visualiserOnly),
+                           osci::AudioBackgroundThread("VisualiserComponent" + juce::String(parent != nullptr ? " Child" : ""), processor.threadManager),
+                           parent(parent),
+                           editor(pluginEditor) {
 #if OSCI_PREMIUM
     addAndMakeVisible(editor.ffmpegDownloader);
 #endif
@@ -82,13 +82,11 @@ VisualiserComponent::VisualiserComponent(
     sharedTextureButton.setTooltip("Toggles sending the oscilloscope's visuals to a Syphon/Spout receiver.");
     sharedTextureButton.onClick = [this] {
         if (sharedTextureSender != nullptr) {
-            openGLContext.executeOnGLThread([this](juce::OpenGLContext& context) {
-                closeSharedTexture();
-            }, false);
+            openGLContext.executeOnGLThread([this](juce::OpenGLContext &context) { closeSharedTexture(); },
+                                            false);
         } else {
-            openGLContext.executeOnGLThread([this](juce::OpenGLContext& context) {
-                initialiseSharedTexture();
-            }, false);
+            openGLContext.executeOnGLThread([this](juce::OpenGLContext &context) { initialiseSharedTexture(); },
+                                            false);
         }
     };
 #endif

@@ -113,9 +111,7 @@ VisualiserComponent::VisualiserComponent(
     audioInputButton.setClickingTogglesState(false);
     audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification);
     audioPlayer.onParserChanged = [this] {
-        juce::MessageManager::callAsync([this] {
-            audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification);
-        });
+        juce::MessageManager::callAsync([this] { audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification); });
     };
     audioInputButton.onClick = [this] {
         audioProcessor.stopAudioFile();

@@ -124,7 +120,7 @@

     addChildComponent(audioPlayer);
     audioPlayer.setVisible(visualiserOnly);
-    audioPlayer.addMouseListener(static_cast<juce::Component*>(this), true);
+    audioPlayer.addMouseListener(static_cast<juce::Component *>(this), true);

     openGLContext.setRenderer(this);
     openGLContext.attachTo(*this);

@@ -138,9 +134,7 @@ VisualiserComponent::~VisualiserComponent() {
         audioProcessor.haltRecording = nullptr;
     }
     openGLContext.detach();
-    setShouldBeRunning(false, [this] {
-        renderingSemaphore.release();
-    });
+    setShouldBeRunning(false, [this] { renderingSemaphore.release(); });
 }

 void VisualiserComponent::setFullScreen(bool fullScreen) {

@@ -165,13 +159,13 @@ void VisualiserComponent::enableFullScreen() {
     grabKeyboardFocus();
 }

-void VisualiserComponent::mouseDoubleClick(const juce::MouseEvent& event) {
+void VisualiserComponent::mouseDoubleClick(const juce::MouseEvent &event) {
     if (event.originalComponent == this) {
         enableFullScreen();
     }
 }

-void VisualiserComponent::runTask(const std::vector<osci::Point>& points) {
+void VisualiserComponent::runTask(const std::vector<osci::Point> &points) {
     {
         juce::CriticalSection::ScopedLockType lock(samplesLock);

@@ -187,7 +181,7 @@ void VisualiserComponent::runTask(const std::vector<osci::Point>& points) {
     zSamples.clear();

     auto applyEffects = [&](osci::Point point) {
-        for (auto& effect : settings.parameters.audioEffects) {
+        for (auto &effect : settings.parameters.audioEffects) {
             point = effect->apply(0, point);
         }
 #if OSCI_PREMIUM

@@ -208,7 +202,7 @@ void VisualiserComponent::runTask(const std::vector<osci::Point>& points) {
         double triggerValue = settings.getTriggerValue();
         bool belowTrigger = false;

-        for (const osci::Point& point : points) {
+        for (const osci::Point &point : points) {
             long samplePosition = sampleCount - lastTriggerPosition;
             double startPoint = 1.135;
             double sweep = samplePosition * sweepIncrement * 2 * startPoint - startPoint;

@@ -231,7 +225,7 @@ void VisualiserComponent::runTask(const std::vector<osci::Point>& points) {
             sampleCount++;
         }
     } else {
-        for (const osci::Point& rawPoint : points) {
+        for (const osci::Point &rawPoint : points) {
             osci::Point point = applyEffects(rawPoint);

 #if OSCI_PREMIUM

@@ -266,7 +260,7 @@ void VisualiserComponent::runTask(const std::vector<osci::Point>& points) {
             double thisSample = xSamples[index];
             double nextSample = xSamples[index + 1];
             if (nextSample > thisSample) {
-                smoothedXSamples[i] = xSamples[index] + (i % (int) RESAMPLE_RATIO) * (nextSample - thisSample) / RESAMPLE_RATIO;
+                smoothedXSamples[i] = xSamples[index] + (i % (int)RESAMPLE_RATIO) * (nextSample - thisSample) / RESAMPLE_RATIO;
             } else {
                 smoothedXSamples[i] = xSamples[index];
             }

@@ -323,11 +317,11 @@ void VisualiserComponent::setPaused(bool paused, bool affectAudio) {
     repaint();
 }

-void VisualiserComponent::mouseDrag(const juce::MouseEvent& event) {
+void VisualiserComponent::mouseDrag(const juce::MouseEvent &event) {
     timerId = -1;
 }

-void VisualiserComponent::mouseMove(const juce::MouseEvent& event) {
+void VisualiserComponent::mouseMove(const juce::MouseEvent &event) {
     if (event.getScreenX() == lastMouseX && event.getScreenY() == lastMouseY) {
         return;
     }

@@ -356,14 +350,13 @@ void VisualiserComponent::mouseMove(const juce::MouseEvent& event) {
                     resized();
                 }
             }
-        }
-        });
+        } });
     }
     resized();
 }
 }

-void VisualiserComponent::mouseDown(const juce::MouseEvent& event) {
+void VisualiserComponent::mouseDown(const juce::MouseEvent &event) {
     if (event.originalComponent == this) {
         if (event.mods.isLeftButtonDown() && child == nullptr && !record.getToggleState()) {
             setPaused(active);

@@ -371,7 +364,7 @@ void VisualiserComponent::mouseDown(const juce::MouseEvent& event) {
     }
 }

-bool VisualiserComponent::keyPressed(const juce::KeyPress& key) {
+bool VisualiserComponent::keyPressed(const juce::KeyPress &key) {
     if (key.isKeyCode(juce::KeyPress::escapeKey)) {
         if (fullScreenCallback) {
             fullScreenCallback(FullScreenMode::MAIN_COMPONENT);
@@ -417,15 +410,13 @@ void VisualiserComponent::setRecording(bool recording) {
                 downloading = false;
                 resized();
             });
-        });
-    });
+        }); });
     };
     auto onDownloadStart = [this] {
         juce::MessageManager::callAsync([this] {
             record.setEnabled(false);
             downloading = true;
-            resized();
-        });
+            resized(); });
     };
     if (!audioProcessor.ensureFFmpegExists(onDownloadStart, onDownloadSuccess)) {
         record.setToggleState(false, juce::NotificationType::dontSendNotification);
@@ -436,47 +427,16 @@ void VisualiserComponent::setRecording(bool recording) {
         juce::String fileExtension = recordingSettings.getFileExtensionForCodec();
         tempVideoFile = std::make_unique<juce::TemporaryFile>("." + fileExtension);

-        juce::String resolution = std::to_string(renderTexture.width) + "x" + std::to_string(renderTexture.height);
-        juce::String cmd = "\"" + ffmpegFile.getFullPathName() + "\"" +
-            " -r " + juce::String(recordingSettings.getFrameRate()) +
-            " -f rawvideo" +
-            " -pix_fmt rgba" +
-            " -s " + resolution +
-            " -i -" +
-            " -threads 4" +
-            " -preset " + recordingSettings.getCompressionPreset() +
-            " -y" +
-            " -pix_fmt yuv420p";
-
-        // Apply codec-specific parameters
         VideoCodec codec = recordingSettings.getVideoCodec();
-        if (codec == VideoCodec::H264) {
-            cmd += " -c:v libx264";
-            cmd += " -crf " + juce::String(recordingSettings.getCRF());
-        } else if (codec == VideoCodec::H265) {
-            cmd += " -c:v libx265";
-            cmd += " -crf " + juce::String(recordingSettings.getCRF());
-#if JUCE_MAC && JUCE_ARM
-            // use hardware encoding on Apple Silicon
-            cmd += " -c:v hevc_videotoolbox";
-            cmd += " -q:v " + juce::String(recordingSettings.getVideoToolboxQuality());
-            cmd += " -tag:v hvc1";
-#endif
-        } else if (codec == VideoCodec::VP9) {
-            cmd += " -c:v libvpx-vp9";
-            cmd += " -b:v 0";
-            cmd += " -crf " + juce::String(recordingSettings.getCRF());
-            cmd += " -deadline good -cpu-used 2";
-        }
-#if JUCE_MAC
-        else if (codec == VideoCodec::ProRes) {
-            cmd += " -c:v prores";
-            cmd += " -profile:v 3"; // ProRes 422 HQ
-        }
-#endif
-
-        cmd += " -vf vflip";
-        cmd += " \"" + tempVideoFile->getFile().getFullPathName() + "\"";
+        juce::String cmd = ffmpegEncoderManager.buildVideoEncodingCommand(
+            codec,
+            recordingSettings.getCRF(),
+            recordingSettings.getVideoToolboxQuality(),
+            renderTexture.width,
+            renderTexture.height,
+            recordingSettings.getFrameRate(),
+            recordingSettings.getCompressionPreset(),
+            tempVideoFile->getFile());

         ffmpegProcess.start(cmd);
         framePixels.resize(renderTexture.width * renderTexture.height * 4);
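Beyond tidying the call site, moving command construction into FFmpegEncoderManager is what makes the commit's wider hardware-acceleration support practical: the old inline code could only hard-code hevc_videotoolbox behind JUCE_MAC && JUCE_ARM, while a dedicated manager can ask the bundled ffmpeg binary which encoders it was actually built with. One plausible probe using only standard JUCE process APIs (this helper is not shown in the diff):

    // Hypothetical availability check: list ffmpeg's compiled-in encoders once and
    // search the output for a hardware encoder name such as "h264_videotoolbox",
    // "hevc_nvenc" or "h264_qsv", falling back to libx264/libx265 when absent.
    static bool isEncoderAvailable(const juce::File& ffmpegFile, const juce::String& encoderName) {
        juce::ChildProcess process;
        if (!process.start("\"" + ffmpegFile.getFullPathName() + "\" -hide_banner -encoders"))
            return false;
        return process.readAllProcessOutput().contains(encoderName);
    }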
@@ -516,7 +476,7 @@ void VisualiserComponent::setRecording(bool recording) {
     auto flags = juce::FileBrowserComponent::saveMode | juce::FileBrowserComponent::canSelectFiles | juce::FileBrowserComponent::warnAboutOverwriting;

 #if OSCI_PREMIUM
-    chooser->launchAsync(flags, [this, wasRecordingAudio, wasRecordingVideo](const juce::FileChooser& chooser) {
+    chooser->launchAsync(flags, [this, wasRecordingAudio, wasRecordingVideo](const juce::FileChooser &chooser) {
         auto file = chooser.getResult();
         if (file != juce::File()) {
             if (wasRecordingAudio && wasRecordingVideo) {

@@ -528,16 +488,14 @@ void VisualiserComponent::setRecording(bool recording) {
                 tempVideoFile->getFile().copyFileTo(file);
             }
             audioProcessor.setLastOpenedDirectory(file.getParentDirectory());
-        }
-    });
+        } });
 #else
-    chooser->launchAsync(flags, [this](const juce::FileChooser& chooser) {
+    chooser->launchAsync(flags, [this](const juce::FileChooser &chooser) {
         auto file = chooser.getResult();
         if (file != juce::File()) {
             tempAudioFile->getFile().copyFileTo(file);
             audioProcessor.setLastOpenedDirectory(file.getParentDirectory());
-        }
-    });
+        } });
 #endif
 }

@@ -625,8 +583,7 @@ void VisualiserComponent::popoutWindow() {
         settings,
         recordingSettings,
         this,
-        visualiserOnly
-    );
+        visualiserOnly);
     visualiser->settings.setLookAndFeel(&getLookAndFeel());
     visualiser->openSettings = openSettings;
     visualiser->closeSettings = closeSettings;

@@ -671,8 +628,7 @@ void VisualiserComponent::initialiseSharedTexture() {
     sharedTextureSender->setSharedTextureId(renderTexture.id);
     sharedTextureSender->setDrawFunction([this] {
         setShader(texturedShader.get());
-        drawTexture({renderTexture});
-    });
+        drawTexture({renderTexture}); });
 }

 void VisualiserComponent::closeSharedTexture() {

@@ -680,7 +636,6 @@ void VisualiserComponent::closeSharedTexture() {
         sharedTextureManager.removeSender(sharedTextureSender);
         sharedTextureSender = nullptr;
     }
-
 }
 #endif

@@ -695,7 +650,7 @@ void VisualiserComponent::newOpenGLContextCreated() {
     glEnable(GL_BLEND);
     glBlendEquation(GL_FUNC_ADD);

-    fullScreenQuad = { -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
+    fullScreenQuad = {-1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f};

     simpleShader = std::make_unique<juce::OpenGLShaderProgram>(openGLContext);
     simpleShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(simpleVertexShader));

@@ -921,7 +876,7 @@ void VisualiserComponent::setupTextures() {
     renderTexture = makeTexture(recordingSettings.getResolution(), recordingSettings.getResolution());

     screenOpenGLTexture.loadImage(emptyScreenImage);
-    screenTexture = { screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight() };
+    screenTexture = {screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight()};

 #if OSCI_PREMIUM
     glowTexture = makeTexture(512, 512);

@@ -946,7 +901,7 @@ Texture VisualiserComponent::makeTexture(int width, int height, GLuint textureID
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
-    float borderColor[] = { 0.0f, 0.0f, 0.0f, 1.0f };
+    float borderColor[] = {0.0f, 0.0f, 0.0f, 1.0f};
     glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, borderColor);

     glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID, 0);

@@ -958,7 +913,7 @@ Texture VisualiserComponent::makeTexture(int width, int height, GLuint textureID

     glBindTexture(GL_TEXTURE_2D, 0); // Unbind

-    return { textureID, width, height };
+    return {textureID, width, height};
 }

 void VisualiserComponent::setResolution(int width) {

@@ -975,7 +930,7 @@ void VisualiserComponent::setResolution(int width) {
     glBindFramebuffer(GL_FRAMEBUFFER, 0); // Unbind
 }

-void VisualiserComponent::drawLineTexture(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints) {
+void VisualiserComponent::drawLineTexture(const std::vector<float> &xPoints, const std::vector<float> &yPoints, const std::vector<float> &zPoints) {
     using namespace juce::gl;

     double persistence = std::pow(0.5, settings.getPersistence()) * 0.4;

@@ -988,7 +943,7 @@
     glBindTexture(GL_TEXTURE_2D, targetTexture.value().id);
 }

-void VisualiserComponent::saveTextureToPNG(Texture texture, const juce::File& file) {
+void VisualiserComponent::saveTextureToPNG(Texture texture, const juce::File &file) {
     using namespace juce::gl;
     GLuint textureID = texture.id;
     int width = texture.width;

@@ -1000,20 +955,20 @@ void VisualiserComponent::saveTextureToPNG(Texture texture, const juce::File& fi
     // Read the pixels from the texture
     glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());

-    juce::Image image = juce::Image (juce::Image::PixelFormat::ARGB, width, height, true);
+    juce::Image image = juce::Image(juce::Image::PixelFormat::ARGB, width, height, true);
     juce::Image::BitmapData bitmapData(image, juce::Image::BitmapData::writeOnly);

     // Copy the pixel data to the JUCE image (and swap R and B channels)
     for (int y = 0; y < height; ++y) {
         for (int x = 0; x < width; ++x) {
-            int srcIndex = (y * width + x) * 4; // RGBA format
+            int srcIndex = (y * width + x) * 4;  // RGBA format
             juce::uint8 r = (pixels)[srcIndex];      // Red
             juce::uint8 g = (pixels)[srcIndex + 1];  // Green
             juce::uint8 b = (pixels)[srcIndex + 2];  // Blue
             juce::uint8 a = (pixels)[srcIndex + 3];  // Alpha

             // This method uses colors in RGBA
-            bitmapData.setPixelColour(x, height-y-1, juce::Colour(r, g, b, a));
+            bitmapData.setPixelColour(x, height - y - 1, juce::Colour(r, g, b, a));
         }
     }

@@ -1029,7 +984,7 @@ void VisualiserComponent::saveTextureToPNG(Texture texture, const juce::File& fi
     }
 }

-void VisualiserComponent::saveTextureToQOI(Texture texture, const juce::File& file) {
+void VisualiserComponent::saveTextureToQOI(Texture texture, const juce::File &file) {
     using namespace juce::gl;
     GLuint textureID = texture.id;
     int width = texture.width;

@@ -1041,7 +996,7 @@ void VisualiserComponent::saveTextureToQOI(Texture texture, const juce::File& fi
     // Read the pixels from the texture
     glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());

-    const qoixx::qoi::desc imageFormat{ .width = (uint32_t) width, .height = (uint32_t) height, .channels = 4, .colorspace = qoixx::qoi::colorspace::srgb };
+    const qoixx::qoi::desc imageFormat{.width = (uint32_t)width, .height = (uint32_t)height, .channels = 4, .colorspace = qoixx::qoi::colorspace::srgb};
     std::vector<unsigned char> binaryData = qoixx::qoi::encode<std::vector<unsigned char>>(pixels, imageFormat);
     file.replaceWithData(binaryData.data(), binaryData.size());
 }

@@ -1060,7 +1015,7 @@ void VisualiserComponent::activateTargetTexture(std::optional<Texture> texture)
     targetTexture = texture;
 }

-void VisualiserComponent::setShader(juce::OpenGLShaderProgram* program) {
+void VisualiserComponent::setShader(juce::OpenGLShaderProgram *program) {
     currentShader = program;
     program->use();
 }

@@ -1103,7 +1058,7 @@ void VisualiserComponent::setNormalBlending() {
     glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
 }

-void VisualiserComponent::drawLine(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints) {
+void VisualiserComponent::drawLine(const std::vector<float> &xPoints, const std::vector<float> &yPoints, const std::vector<float> &zPoints) {
     using namespace juce::gl;

     setAdditiveBlending();

@@ -1111,11 +1066,12 @@ void VisualiserComponent::drawLine(const std::vector<float>& xPoints, const std:
     int nPoints = xPoints.size();

     // Without this, there's an access violation that seems to occur only on some systems
-    if (scratchVertices.size() != nPoints * 12) scratchVertices.resize(nPoints * 12);
+    if (scratchVertices.size() != nPoints * 12)
+        scratchVertices.resize(nPoints * 12);

     for (int i = 0; i < nPoints; ++i) {
         int p = i * 12;
         scratchVertices[p] = scratchVertices[p + 3] = scratchVertices[p + 6] = scratchVertices[p + 9] = xPoints[i];
         scratchVertices[p + 1] = scratchVertices[p + 4] = scratchVertices[p + 7] = scratchVertices[p + 10] = yPoints[i];
         scratchVertices[p + 2] = scratchVertices[p + 5] = scratchVertices[p + 8] = scratchVertices[p + 11] = zPoints[i];
     }

@@ -1131,25 +1087,25 @@ void VisualiserComponent::drawLine(const std::vector<float>& xPoints, const std:

     glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
     glVertexAttribPointer(glGetAttribLocation(lineShader->getProgramID(), "aStart"), 3, GL_FLOAT, GL_FALSE, 0, 0);
-    glVertexAttribPointer(glGetAttribLocation(lineShader->getProgramID(), "aEnd"), 3, GL_FLOAT, GL_FALSE, 0, (void*)(12 * sizeof(float)));
+    glVertexAttribPointer(glGetAttribLocation(lineShader->getProgramID(), "aEnd"), 3, GL_FLOAT, GL_FALSE, 0, (void *)(12 * sizeof(float)));
     glBindBuffer(GL_ARRAY_BUFFER, quadIndexBuffer);
     glVertexAttribPointer(glGetAttribLocation(lineShader->getProgramID(), "aIdx"), 1, GL_FLOAT, GL_FALSE, 0, 0);

     glActiveTexture(GL_TEXTURE0);
     glBindTexture(GL_TEXTURE_2D, screenTexture.id);
     lineShader->setUniform("uScreen", 0);
-    lineShader->setUniform("uSize", (GLfloat) settings.getFocus());
+    lineShader->setUniform("uSize", (GLfloat)settings.getFocus());
     lineShader->setUniform("uGain", 450.0f / 512.0f);
     lineShader->setUniform("uInvert", 1.0f);

     if (settings.getUpsamplingEnabled()) {
         lineShader->setUniform("uIntensity", intensity);
     } else {
-        lineShader->setUniform("uIntensity", (GLfloat) (intensity * RESAMPLE_RATIO * 1.5));
+        lineShader->setUniform("uIntensity", (GLfloat)(intensity * RESAMPLE_RATIO * 1.5));
     }

     lineShader->setUniform("uFadeAmount", fadeAmount);
-    lineShader->setUniform("uNEdges", (GLfloat) nEdges);
+    lineShader->setUniform("uNEdges", (GLfloat)nEdges);
     setOffsetAndScale(lineShader.get());

 #if OSCI_PREMIUM

@@ -1177,8 +1133,8 @@ void VisualiserComponent::fade() {
 #if OSCI_PREMIUM
     setShader(afterglowShader.get());
     afterglowShader->setUniform("fadeAmount", fadeAmount);
-    afterglowShader->setUniform("afterglowAmount", (float) settings.getAfterglow());
-    afterglowShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution());
+    afterglowShader->setUniform("afterglowAmount", (float)settings.getAfterglow());
+    afterglowShader->setUniform("uResizeForCanvas", lineTexture.width / (float)recordingSettings.getResolution());
     drawTexture({lineTexture});
 #else
     simpleShader->use();

@@ -1201,33 +1157,33 @@ void VisualiserComponent::drawCRT() {

     activateTargetTexture(blur1Texture);
     setShader(texturedShader.get());
-    texturedShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution());
+    texturedShader->setUniform("uResizeForCanvas", lineTexture.width / (float)recordingSettings.getResolution());
     drawTexture({lineTexture});

-    //horizontal blur 512x512
+    // horizontal blur 512x512
     activateTargetTexture(blur2Texture);
     setShader(blurShader.get());
     blurShader->setUniform("uOffset", 1.0f / 512.0f, 0.0f);
     drawTexture({blur1Texture});

-    //vertical blur 512x512
+    // vertical blur 512x512
     activateTargetTexture(blur1Texture);
     blurShader->setUniform("uOffset", 0.0f, 1.0f / 512.0f);
     drawTexture({blur2Texture});

-    //preserve blur1 for later
+    // preserve blur1 for later
     activateTargetTexture(blur3Texture);
     setShader(texturedShader.get());
     texturedShader->setUniform("uResizeForCanvas", 1.0f);
     drawTexture({blur1Texture});

-    //horizontal blur 128x128
+    // horizontal blur 128x128
     activateTargetTexture(blur4Texture);
     setShader(wideBlurShader.get());
     wideBlurShader->setUniform("uOffset", 1.0f / 128.0f, 0.0f);
     drawTexture({blur3Texture});

-    //vertical blur 128x128
+    // vertical blur 128x128
     activateTargetTexture(blur3Texture);
     wideBlurShader->setUniform("uOffset", 0.0f, 1.0f / 128.0f);
     drawTexture({blur4Texture});

@@ -1245,26 +1201,26 @@ void VisualiserComponent::drawCRT() {
     activateTargetTexture(renderTexture);
     setShader(outputShader.get());
     outputShader->setUniform("uExposure", 0.25f);
-    outputShader->setUniform("uLineSaturation", (float) settings.getLineSaturation());
+    outputShader->setUniform("uLineSaturation", (float)settings.getLineSaturation());
 #if OSCI_PREMIUM
-    outputShader->setUniform("uScreenSaturation", (float) settings.getScreenSaturation());
-    outputShader->setUniform("uHueShift", (float) settings.getScreenHue() / 360.0f);
-    outputShader->setUniform("uOverexposure", (float) settings.getOverexposure());
+    outputShader->setUniform("uScreenSaturation", (float)settings.getScreenSaturation());
+    outputShader->setUniform("uHueShift", (float)settings.getScreenHue() / 360.0f);
+    outputShader->setUniform("uOverexposure", (float)settings.getOverexposure());
 #else
     outputShader->setUniform("uScreenSaturation", 1.0f);
     outputShader->setUniform("uHueShift", 0.0f);
     outputShader->setUniform("uOverexposure", 0.5f);
 #endif
-    outputShader->setUniform("uNoise", (float) settings.getNoise());
+    outputShader->setUniform("uNoise", (float)settings.getNoise());
     outputShader->setUniform("uRandom", juce::Random::getSystemRandom().nextFloat());
-    outputShader->setUniform("uGlow", (float) settings.getGlow());
-    outputShader->setUniform("uAmbient", (float) settings.getAmbient());
+    outputShader->setUniform("uGlow", (float)settings.getGlow());
+    outputShader->setUniform("uAmbient", (float)settings.getAmbient());
     setOffsetAndScale(outputShader.get());
 #if OSCI_PREMIUM
     outputShader->setUniform("uFishEye", screenOverlay == ScreenOverlay::VectorDisplay ? VECTOR_DISPLAY_FISH_EYE : 0.0f);
     outputShader->setUniform("uRealScreen", settings.parameters.screenOverlay->isRealisticDisplay() ? 1.0f : 0.0f);
 #endif
-    outputShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution());
+    outputShader->setUniform("uResizeForCanvas", lineTexture.width / (float)recordingSettings.getResolution());
     juce::Colour colour = juce::Colour::fromHSV(settings.getHue() / 360.0f, 1.0, 1.0, 1.0);
     outputShader->setUniform("uColour", colour.getFloatRed(), colour.getFloatGreen(), colour.getFloatBlue());
     drawTexture({

@@ -1279,9 +1235,9 @@ void VisualiserComponent::drawCRT() {
     });
 }

-void VisualiserComponent::setOffsetAndScale(juce::OpenGLShaderProgram* shader) {
+void VisualiserComponent::setOffsetAndScale(juce::OpenGLShaderProgram *shader) {
     osci::Point offset;
-    osci::Point scale = { 1.0f };
+    osci::Point scale = {1.0f};
 #if OSCI_PREMIUM
     if (settings.getScreenOverlay() == ScreenOverlay::Real) {
         offset = REAL_SCREEN_OFFSET;

@@ -1291,8 +1247,8 @@ void VisualiserComponent::setOffsetAndScale(juce::OpenGLShaderProgram* shader) {
         scale = VECTOR_DISPLAY_SCALE;
     }
 #endif
-    shader->setUniform("uOffset", (float) offset.x, (float) offset.y);
-    shader->setUniform("uScale", (float) scale.x, (float) scale.y);
+    shader->setUniform("uOffset", (float)offset.x, (float)offset.y);
+    shader->setUniform("uScale", (float)scale.x, (float)scale.y);
 }

 #if OSCI_PREMIUM

@@ -1307,7 +1263,7 @@ Texture VisualiserComponent::createReflectionTexture() {
         reflectionOpenGLTexture.loadImage(emptyReflectionImage);
     }

-    Texture texture = { reflectionOpenGLTexture.getTextureID(), reflectionOpenGLTexture.getWidth(), reflectionOpenGLTexture.getHeight() };
+    Texture texture = {reflectionOpenGLTexture.getTextureID(), reflectionOpenGLTexture.getWidth(), reflectionOpenGLTexture.getHeight()};

     return texture;
 }

@@ -1328,7 +1284,7 @@ Texture VisualiserComponent::createScreenTexture() {
         screenOpenGLTexture.loadImage(emptyScreenImage);
     }
     checkGLErrors(__FILE__, __LINE__);
-    Texture texture = { screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight() };
+    Texture texture = {screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight()};

     if (screenOverlay == ScreenOverlay::Graticule || screenOverlay == ScreenOverlay::SmudgedGraticule) {
         activateTargetTexture(texture);

@@ -1366,7 +1322,8 @@ Texture VisualiserComponent::createScreenTexture() {

         for (int j = 0; j < 51; j++) {
             float t = j * step / 5;
-            if (static_cast<int>(t) % 5 == 0) continue;
+            if (static_cast<int>(t) % 5 == 0)
+                continue;

             data.insert(data.begin(), {t - 2, 2.5f * step, t + 2, 2.5f * step});
             data.insert(data.begin(), {t - 2, 7.5f * step, t + 2, 7.5f * step});

@@ -1399,21 +1356,36 @@ void VisualiserComponent::checkGLErrors(juce::String file, int line) {
     while ((error = glGetError()) != GL_NO_ERROR) {
         juce::String errorMessage;
         switch (error) {
-            case GL_INVALID_ENUM: errorMessage = "GL_INVALID_ENUM"; break;
-            case GL_INVALID_VALUE: errorMessage = "GL_INVALID_VALUE"; break;
-            case GL_INVALID_OPERATION: errorMessage = "GL_INVALID_OPERATION"; break;
-            case GL_STACK_OVERFLOW: errorMessage = "GL_STACK_OVERFLOW"; break;
-            case GL_STACK_UNDERFLOW: errorMessage = "GL_STACK_UNDERFLOW"; break;
-            case GL_OUT_OF_MEMORY: errorMessage = "GL_OUT_OF_MEMORY"; break;
-            case GL_INVALID_FRAMEBUFFER_OPERATION: errorMessage = "GL_INVALID_FRAMEBUFFER_OPERATION"; break;
-            default: errorMessage = "Unknown OpenGL error"; break;
+            case GL_INVALID_ENUM:
+                errorMessage = "GL_INVALID_ENUM";
+                break;
+            case GL_INVALID_VALUE:
+                errorMessage = "GL_INVALID_VALUE";
+                break;
+            case GL_INVALID_OPERATION:
+                errorMessage = "GL_INVALID_OPERATION";
+                break;
+            case GL_STACK_OVERFLOW:
+                errorMessage = "GL_STACK_OVERFLOW";
+                break;
+            case GL_STACK_UNDERFLOW:
+                errorMessage = "GL_STACK_UNDERFLOW";
+                break;
+            case GL_OUT_OF_MEMORY:
+                errorMessage = "GL_OUT_OF_MEMORY";
+                break;
+            case GL_INVALID_FRAMEBUFFER_OPERATION:
+                errorMessage = "GL_INVALID_FRAMEBUFFER_OPERATION";
+                break;
+            default:
+                errorMessage = "Unknown OpenGL error";
+                break;
         }
         DBG("OpenGL error at " + file + ":" + juce::String(line) + " - " + errorMessage);
     }
 }

-void VisualiserComponent::paint(juce::Graphics& g) {
+void VisualiserComponent::paint(juce::Graphics &g) {
     g.setColour(Colours::veryDark);
     g.fillRect(buttonRow);
     if (!active) {

@@ -1428,7 +1400,7 @@ void VisualiserComponent::paint(juce::Graphics& g) {
     }
 }

-void VisualiserComponent::renderScope(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints) {
+void VisualiserComponent::renderScope(const std::vector<float> &xPoints, const std::vector<float> &yPoints, const std::vector<float> &zPoints) {
     if (screenOverlay != settings.getScreenOverlay()) {
         screenOverlay = settings.getScreenOverlay();
 #if OSCI_PREMIUM
@@ -1,17 +1,20 @@
 #pragma once

-#include <algorithm>
 #include <JuceHeader.h>

+#include <algorithm>
+
 #include "../LookAndFeel.h"
-#include "../components/SvgButton.h"
-#include "VisualiserSettings.h"
-#include "RecordingSettings.h"
-#include "../components/StopwatchComponent.h"
-#include "../img/qoixx.hpp"
-#include "../components/DownloaderComponent.h"
 #include "../audio/AudioRecorder.h"
-#include "../wav/WavParser.h"
 #include "../components/AudioPlayerComponent.h"
+#include "../components/DownloaderComponent.h"
+#include "../components/StopwatchComponent.h"
+#include "../components/SvgButton.h"
+#include "../img/qoixx.hpp"
+#include "../video/FFmpegEncoderManager.h"
+#include "../wav/WavParser.h"
+#include "RecordingSettings.h"
+#include "VisualiserSettings.h"

 #define FILE_RENDER_DUMMY 0
 #define FILE_RENDER_PNG 1

@@ -44,8 +47,7 @@ public:
                         VisualiserSettings& settings,
                         RecordingSettings& recordingSettings,
                         VisualiserComponent* parent = nullptr,
-                        bool visualiserOnly = false
-    );
+                        bool visualiserOnly = false);
     ~VisualiserComponent() override;

     std::function<void()> openSettings;

@@ -87,13 +89,13 @@ private:
     bool visualiserOnly;
     AudioPlayerComponent audioPlayer{audioProcessor};

-    SvgButton fullScreenButton{ "fullScreen", BinaryData::fullscreen_svg, juce::Colours::white, juce::Colours::white };
-    SvgButton popOutButton{ "popOut", BinaryData::open_in_new_svg, juce::Colours::white, juce::Colours::white };
-    SvgButton settingsButton{ "settings", BinaryData::cog_svg, juce::Colours::white, juce::Colours::white };
-    SvgButton audioInputButton{ "audioInput", BinaryData::microphone_svg, juce::Colours::white, juce::Colours::red };
+    SvgButton fullScreenButton{"fullScreen", BinaryData::fullscreen_svg, juce::Colours::white, juce::Colours::white};
+    SvgButton popOutButton{"popOut", BinaryData::open_in_new_svg, juce::Colours::white, juce::Colours::white};
+    SvgButton settingsButton{"settings", BinaryData::cog_svg, juce::Colours::white, juce::Colours::white};
+    SvgButton audioInputButton{"audioInput", BinaryData::microphone_svg, juce::Colours::white, juce::Colours::red};

 #if OSCI_PREMIUM
-    SvgButton sharedTextureButton{ "sharedTexture", BinaryData::spout_svg, juce::Colours::white, juce::Colours::red };
+    SvgButton sharedTextureButton{"sharedTexture", BinaryData::spout_svg, juce::Colours::white, juce::Colours::red};
     SharedTextureManager& sharedTextureManager;
     SharedTextureSender* sharedTextureSender = nullptr;
 #endif

@@ -118,6 +120,7 @@ private:
     std::vector<unsigned char> framePixels;
     osci::WriteProcess ffmpegProcess;
     std::unique_ptr<juce::TemporaryFile> tempVideoFile;
+    FFmpegEncoderManager ffmpegEncoderManager;
 #endif

     StopwatchComponent stopwatch;

@@ -187,11 +190,11 @@ private:
     juce::Image oscilloscopeReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::real_reflection_png, BinaryData::real_reflection_pngSize);
     juce::Image vectorDisplayReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_reflection_png, BinaryData::vector_display_reflection_pngSize);

-    osci::Point REAL_SCREEN_OFFSET = { 0.02, -0.15 };
-    osci::Point REAL_SCREEN_SCALE = { 0.6 };
+    osci::Point REAL_SCREEN_OFFSET = {0.02, -0.15};
+    osci::Point REAL_SCREEN_SCALE = {0.6};

-    osci::Point VECTOR_DISPLAY_OFFSET = { 0.075, -0.045 };
-    osci::Point VECTOR_DISPLAY_SCALE = { 0.6 };
+    osci::Point VECTOR_DISPLAY_OFFSET = {0.075, -0.045};
+    osci::Point VECTOR_DISPLAY_SCALE = {0.6};
     float VECTOR_DISPLAY_FISH_EYE = 0.5;

     juce::OpenGLTexture reflectionOpenGLTexture;
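One detail worth noting in the header change: the new ffmpegEncoderManager member sits in the same OSCI_PREMIUM block as ffmpegProcess and tempVideoFile, and the constructor initialiser list shown earlier seeds it with the same ffmpegFile the component records with, so every visualiser instance (including popped-out child windows) owns an encoder manager bound to the ffmpeg binary it actually uses.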
@@ -1 +1 @@
-Subproject commit cf124cc5de4d9857c7633e9c03117f20e1550e81
+Subproject commit f8ac3007c25df061ca6e71ad2eaff4a5d01e2d7b
@@ -574,6 +574,16 @@
         <FILE id="mC1tUv" name="ugen_JuceUtility.h" compile="0" resource="0"
               file="Source/UGen/ugen_JuceUtility.h"/>
       </GROUP>
+      <GROUP id="{0F62E77C-5385-0C56-69A1-3C8866A6E6E3}" name="video">
+        <FILE id="DniMew" name="FFmpegEncoderManager.cpp" compile="1" resource="0"
+              file="Source/video/FFmpegEncoderManager.cpp"/>
+        <FILE id="t2oI5O" name="FFmpegEncoderManager.h" compile="0" resource="0"
+              file="Source/video/FFmpegEncoderManager.h"/>
+        <FILE id="xEIRCs" name="InvisibleOpenGLContextComponent.h" compile="0"
+              resource="0" file="Source/video/InvisibleOpenGLContextComponent.h"/>
+        <FILE id="OyC3qj" name="SyphonFrameGrabber.h" compile="0" resource="0"
+              file="Source/video/SyphonFrameGrabber.h"/>
+      </GROUP>
       <GROUP id="{16A8DC64-BA02-898D-4DBA-AA3DDF6F9297}" name="visualiser">
         <FILE id="DkDKBX" name="AfterglowFragmentShader.glsl" compile="0" resource="0"
               file="Source/visualiser/AfterglowFragmentShader.glsl"/>

@@ -644,8 +654,6 @@
               file="Source/FrameSettingsComponent.cpp"/>
         <FILE id="lzBNS1" name="FrameSettingsComponent.h" compile="0" resource="0"
               file="Source/FrameSettingsComponent.h"/>
-        <FILE id="nfoWJk" name="InvisibleOpenGLContextComponent.h" compile="0"
-              resource="0" file="Source/InvisibleOpenGLContextComponent.h"/>
         <FILE id="d2zFqF" name="LookAndFeel.cpp" compile="1" resource="0" file="Source/LookAndFeel.cpp"/>
         <FILE id="TJDqWs" name="LookAndFeel.h" compile="0" resource="0" file="Source/LookAndFeel.h"/>
         <FILE id="X26RjJ" name="LuaComponent.cpp" compile="1" resource="0"

@@ -673,8 +681,6 @@
               file="Source/SettingsComponent.cpp"/>
         <FILE id="Vlmozi" name="SettingsComponent.h" compile="0" resource="0"
               file="Source/SettingsComponent.h"/>
-        <FILE id="jyHVpz" name="SyphonFrameGrabber.h" compile="0" resource="0"
-              file="Source/SyphonFrameGrabber.h"/>
         <FILE id="UxZu4n" name="TxtComponent.cpp" compile="1" resource="0"
               file="Source/TxtComponent.cpp"/>
         <FILE id="kxPbsL" name="TxtComponent.h" compile="0" resource="0" file="Source/TxtComponent.h"/>

@@ -77,6 +77,12 @@
         </GROUP>
       </GROUP>
       <GROUP id="{75439074-E50C-362F-1EDF-8B4BE9011259}" name="Source">
+        <GROUP id="{34BCEBE9-062C-27E1-5661-B33652D8F4F5}" name="video">
+          <FILE id="pmHHqY" name="FFmpegEncoderManager.cpp" compile="1" resource="0"
+                file="Source/video/FFmpegEncoderManager.cpp"/>
+          <FILE id="oKPzgR" name="FFmpegEncoderManager.h" compile="0" resource="0"
+                file="Source/video/FFmpegEncoderManager.h"/>
+        </GROUP>
         <FILE id="fqqP0r" name="CustomStandalone.cpp" compile="1" resource="0"
               file="Source/CustomStandalone.cpp"/>
         <FILE id="TFmWW0" name="CustomStandaloneFilterWindow.h" compile="0"