Fix compilation issues on free version, add more extensive hardware acceleration support for video

visualiser-refactor
James H Ball 2025-04-27 11:36:37 +01:00
rodzic 231e1d8234
commit a125abfa7f
15 zmienionych plików z 962 dodań i 512 usunięć

Wyświetl plik

@ -158,11 +158,14 @@ void MainComponent::updateFileLabel() {
showRightArrow = audioProcessor.getCurrentFileIndex() < audioProcessor.numFiles() - 1; showRightArrow = audioProcessor.getCurrentFileIndex() < audioProcessor.numFiles() - 1;
{ {
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock); juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
if (audioProcessor.isSyphonInputActive()) {
fileLabel.setText(audioProcessor.getSyphonSourceName(), juce::dontSendNotification);
} else
#endif
if (audioProcessor.objectServerRendering) { if (audioProcessor.objectServerRendering) {
fileLabel.setText("Rendering from Blender", juce::dontSendNotification); fileLabel.setText("Rendering from Blender", juce::dontSendNotification);
} else if (audioProcessor.isSyphonInputActive()) {
fileLabel.setText(audioProcessor.getSyphonSourceName(), juce::dontSendNotification);
}else if (audioProcessor.getCurrentFileIndex() == -1) { }else if (audioProcessor.getCurrentFileIndex() == -1) {
fileLabel.setText("No file open", juce::dontSendNotification); fileLabel.setText("No file open", juce::dontSendNotification);
} else { } else {

Wyświetl plik

@ -526,8 +526,8 @@ void OscirenderAudioProcessorEditor::openVisualiserSettings() {
visualiserSettingsWindow.toFront(true); visualiserSettingsWindow.toFront(true);
} }
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
void OscirenderAudioProcessorEditor::openSyphonInputDialog() { void OscirenderAudioProcessorEditor::openSyphonInputDialog() {
#if JUCE_MAC || JUCE_WINDOWS
SyphonInputSelectorComponent* selector = nullptr; SyphonInputSelectorComponent* selector = nullptr;
{ {
juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock); juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
@ -548,7 +548,6 @@ void OscirenderAudioProcessorEditor::openSyphonInputDialog() {
options.useNativeTitleBar = true; options.useNativeTitleBar = true;
options.resizable = false; options.resizable = false;
options.launchAsync(); options.launchAsync();
#endif
} }
void OscirenderAudioProcessorEditor::onSyphonInputSelected(const juce::String& server, const juce::String& app) { void OscirenderAudioProcessorEditor::onSyphonInputSelected(const juce::String& server, const juce::String& app) {
@ -560,3 +559,4 @@ void OscirenderAudioProcessorEditor::onSyphonInputDisconnected() {
juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock); juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
audioProcessor.disconnectSyphonInput(); audioProcessor.disconnectSyphonInput();
} }
#endif

Wyświetl plik

@ -7,19 +7,18 @@
*/ */
#include "PluginProcessor.h" #include "PluginProcessor.h"
#include "PluginEditor.h" #include "PluginEditor.h"
#include "parser/FileParser.h"
#include "parser/FrameProducer.h"
#include "audio/VectorCancellingEffect.h"
#include "audio/DistortEffect.h"
#include "audio/SmoothEffect.h"
#include "audio/BitCrushEffect.h" #include "audio/BitCrushEffect.h"
#include "audio/BulgeEffect.h" #include "audio/BulgeEffect.h"
#include "audio/DistortEffect.h"
#include "audio/SmoothEffect.h"
#include "audio/VectorCancellingEffect.h"
#include "parser/FileParser.h"
#include "parser/FrameProducer.h"
#if JUCE_MAC || JUCE_WINDOWS #if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
#include "SyphonFrameGrabber.h"
#include "img/ImageParser.h" #include "img/ImageParser.h"
#include "../modules/juce_sharedtexture/SharedTexture.h"
#endif #endif
//============================================================================== //==============================================================================
@ -28,108 +27,100 @@ OscirenderAudioProcessor::OscirenderAudioProcessor() : CommonAudioProcessor(Buse
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
std::make_shared<BitCrushEffect>(), std::make_shared<BitCrushEffect>(),
new osci::EffectParameter("Bit Crush", "Limits the resolution of points drawn to the screen, making the object look pixelated, and making the audio sound more 'digital' and distorted.", "bitCrush", VERSION_HINT, 0.6, 0.0, 1.0) new osci::EffectParameter("Bit Crush", "Limits the resolution of points drawn to the screen, making the object look pixelated, and making the audio sound more 'digital' and distorted.", "bitCrush", VERSION_HINT, 0.6, 0.0, 1.0)));
));
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
std::make_shared<BulgeEffect>(), std::make_shared<BulgeEffect>(),
new osci::EffectParameter("Bulge", "Applies a bulge that makes the centre of the image larger, and squishes the edges of the image. This applies a distortion to the audio.", "bulge", VERSION_HINT, 0.5, 0.0, 1.0) new osci::EffectParameter("Bulge", "Applies a bulge that makes the centre of the image larger, and squishes the edges of the image. This applies a distortion to the audio.", "bulge", VERSION_HINT, 0.5, 0.0, 1.0)));
));
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
std::make_shared<VectorCancellingEffect>(), std::make_shared<VectorCancellingEffect>(),
new osci::EffectParameter("Vector Cancelling", "Inverts the audio and image every few samples to 'cancel out' the audio, making the audio quiet, and distorting the image.", "vectorCancelling", VERSION_HINT, 0.1111111, 0.0, 1.0) new osci::EffectParameter("Vector Cancelling", "Inverts the audio and image every few samples to 'cancel out' the audio, making the audio quiet, and distorting the image.", "vectorCancelling", VERSION_HINT, 0.1111111, 0.0, 1.0)));
));
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
return input * osci::Point(values[0], values[1], values[2]); return input * osci::Point(values[0], values[1], values[2]);
}, std::vector<osci::EffectParameter*>{ },
std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Scale X", "Scales the object in the horizontal direction.", "scaleX", VERSION_HINT, 1.0, -5.0, 5.0), new osci::EffectParameter("Scale X", "Scales the object in the horizontal direction.", "scaleX", VERSION_HINT, 1.0, -5.0, 5.0),
new osci::EffectParameter("Scale Y", "Scales the object in the vertical direction.", "scaleY", VERSION_HINT, 1.0, -5.0, 5.0), new osci::EffectParameter("Scale Y", "Scales the object in the vertical direction.", "scaleY", VERSION_HINT, 1.0, -5.0, 5.0),
new osci::EffectParameter("Scale Z", "Scales the depth of the object.", "scaleZ", VERSION_HINT, 1.0, -5.0, 5.0), new osci::EffectParameter("Scale Z", "Scales the depth of the object.", "scaleZ", VERSION_HINT, 1.0, -5.0, 5.0),
} }));
));
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
int flip = index % 2 == 0 ? 1 : -1; int flip = index % 2 == 0 ? 1 : -1;
osci::Point jitter = osci::Point(flip * values[0], flip * values[1], flip * values[2]); osci::Point jitter = osci::Point(flip * values[0], flip * values[1], flip * values[2]);
return input + jitter; return input + jitter;
}, std::vector<osci::EffectParameter*>{ },
std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Distort X", "Distorts the image in the horizontal direction by jittering the audio sample being drawn.", "distortX", VERSION_HINT, 0.0, 0.0, 1.0), new osci::EffectParameter("Distort X", "Distorts the image in the horizontal direction by jittering the audio sample being drawn.", "distortX", VERSION_HINT, 0.0, 0.0, 1.0),
new osci::EffectParameter("Distort Y", "Distorts the image in the vertical direction by jittering the audio sample being drawn.", "distortY", VERSION_HINT, 0.0, 0.0, 1.0), new osci::EffectParameter("Distort Y", "Distorts the image in the vertical direction by jittering the audio sample being drawn.", "distortY", VERSION_HINT, 0.0, 0.0, 1.0),
new osci::EffectParameter("Distort Z", "Distorts the depth of the image by jittering the audio sample being drawn.", "distortZ", VERSION_HINT, 0.1, 0.0, 1.0), new osci::EffectParameter("Distort Z", "Distorts the depth of the image by jittering the audio sample being drawn.", "distortZ", VERSION_HINT, 0.1, 0.0, 1.0),
} }));
));
auto rippleEffect = std::make_shared<osci::Effect>( auto rippleEffect = std::make_shared<osci::Effect>(
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
double phase = values[1] * std::numbers::pi; double phase = values[1] * std::numbers::pi;
double distance = 100 * values[2] * (input.x * input.x + input.y * input.y); double distance = 100 * values[2] * (input.x * input.x + input.y * input.y);
input.z += values[0] * std::sin(phase + distance); input.z += values[0] * std::sin(phase + distance);
return input; return input;
}, std::vector<osci::EffectParameter*>{ },
std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Ripple Depth", "Controls how large the ripples applied to the image are.", "rippleDepth", VERSION_HINT, 0.2, 0.0, 1.0), new osci::EffectParameter("Ripple Depth", "Controls how large the ripples applied to the image are.", "rippleDepth", VERSION_HINT, 0.2, 0.0, 1.0),
new osci::EffectParameter("Ripple Phase", "Controls the position of the ripple. Animate this to see a moving ripple effect.", "ripplePhase", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Ripple Phase", "Controls the position of the ripple. Animate this to see a moving ripple effect.", "ripplePhase", VERSION_HINT, 0.0, -1.0, 1.0),
new osci::EffectParameter("Ripple Amount", "Controls how many ripples are applied to the image.", "rippleAmount", VERSION_HINT, 0.1, 0.0, 1.0), new osci::EffectParameter("Ripple Amount", "Controls how many ripples are applied to the image.", "rippleAmount", VERSION_HINT, 0.1, 0.0, 1.0),
} });
);
rippleEffect->getParameter("ripplePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth); rippleEffect->getParameter("ripplePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
toggleableEffects.push_back(rippleEffect); toggleableEffects.push_back(rippleEffect);
auto rotateEffect = std::make_shared<osci::Effect>( auto rotateEffect = std::make_shared<osci::Effect>(
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
input.rotate(values[0] * std::numbers::pi, values[1] * std::numbers::pi, values[2] * std::numbers::pi); input.rotate(values[0] * std::numbers::pi, values[1] * std::numbers::pi, values[2] * std::numbers::pi);
return input; return input;
}, std::vector<osci::EffectParameter*>{ },
std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Rotate X", "Controls the rotation of the object in the X axis.", "rotateX", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Rotate X", "Controls the rotation of the object in the X axis.", "rotateX", VERSION_HINT, 0.0, -1.0, 1.0),
new osci::EffectParameter("Rotate Y", "Controls the rotation of the object in the Y axis.", "rotateY", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Rotate Y", "Controls the rotation of the object in the Y axis.", "rotateY", VERSION_HINT, 0.0, -1.0, 1.0),
new osci::EffectParameter("Rotate Z", "Controls the rotation of the object in the Z axis.", "rotateZ", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Rotate Z", "Controls the rotation of the object in the Z axis.", "rotateZ", VERSION_HINT, 0.0, -1.0, 1.0),
} });
);
rotateEffect->getParameter("rotateY")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth); rotateEffect->getParameter("rotateY")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
rotateEffect->getParameter("rotateY")->lfoRate->setUnnormalisedValueNotifyingHost(0.2); rotateEffect->getParameter("rotateY")->lfoRate->setUnnormalisedValueNotifyingHost(0.2);
toggleableEffects.push_back(rotateEffect); toggleableEffects.push_back(rotateEffect);
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
return input + osci::Point(values[0], values[1], values[2]); return input + osci::Point(values[0], values[1], values[2]);
}, std::vector<osci::EffectParameter*>{ },
std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Translate X", "Moves the object horizontally.", "translateX", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Translate X", "Moves the object horizontally.", "translateX", VERSION_HINT, 0.0, -1.0, 1.0),
new osci::EffectParameter("Translate Y", "Moves the object vertically.", "translateY", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Translate Y", "Moves the object vertically.", "translateY", VERSION_HINT, 0.0, -1.0, 1.0),
new osci::EffectParameter("Translate Z", "Moves the object away from the camera.", "translateZ", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Translate Z", "Moves the object away from the camera.", "translateZ", VERSION_HINT, 0.0, -1.0, 1.0),
} }));
));
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
double length = 10 * values[0] * input.magnitude(); double length = 10 * values[0] * input.magnitude();
double newX = input.x * std::cos(length) - input.y * std::sin(length); double newX = input.x * std::cos(length) - input.y * std::sin(length);
double newY = input.x * std::sin(length) + input.y * std::cos(length); double newY = input.x * std::sin(length) + input.y * std::cos(length);
return osci::Point(newX, newY, input.z); return osci::Point(newX, newY, input.z);
}, std::vector<osci::EffectParameter*>{ },
std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Swirl", "Swirls the image in a spiral pattern.", "swirl", VERSION_HINT, 0.3, -1.0, 1.0), new osci::EffectParameter("Swirl", "Swirls the image in a spiral pattern.", "swirl", VERSION_HINT, 0.3, -1.0, 1.0),
} }));
));
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
std::make_shared<SmoothEffect>(), std::make_shared<SmoothEffect>(),
new osci::EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0) new osci::EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0)));
));
std::shared_ptr<osci::Effect> wobble = std::make_shared<osci::Effect>( std::shared_ptr<osci::Effect> wobble = std::make_shared<osci::Effect>(
wobbleEffect, wobbleEffect,
std::vector<osci::EffectParameter*>{ std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Wobble Amount", "Adds a sine wave of the prominent frequency in the audio currently playing. The sine wave's frequency is slightly offset to create a subtle 'wobble' in the image. Increasing the slider increases the strength of the wobble.", "wobble", VERSION_HINT, 0.3, 0.0, 1.0), new osci::EffectParameter("Wobble Amount", "Adds a sine wave of the prominent frequency in the audio currently playing. The sine wave's frequency is slightly offset to create a subtle 'wobble' in the image. Increasing the slider increases the strength of the wobble.", "wobble", VERSION_HINT, 0.3, 0.0, 1.0),
new osci::EffectParameter("Wobble Phase", "Controls the phase of the wobble.", "wobblePhase", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Wobble Phase", "Controls the phase of the wobble.", "wobblePhase", VERSION_HINT, 0.0, -1.0, 1.0),
} });
);
wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth); wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
toggleableEffects.push_back(wobble); toggleableEffects.push_back(wobble);
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
delayEffect, delayEffect,
std::vector<osci::EffectParameter*>{ std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Delay Decay", "Adds repetitions, delays, or echos to the audio. This slider controls the volume of the echo.", "delayDecay", VERSION_HINT, 0.4, 0.0, 1.0), new osci::EffectParameter("Delay Decay", "Adds repetitions, delays, or echos to the audio. This slider controls the volume of the echo.", "delayDecay", VERSION_HINT, 0.4, 0.0, 1.0),
new osci::EffectParameter("Delay Length", "Controls the time in seconds between echos.", "delayLength", VERSION_HINT, 0.5, 0.0, 1.0) new osci::EffectParameter("Delay Length", "Controls the time in seconds between echos.", "delayLength", VERSION_HINT, 0.5, 0.0, 1.0)}));
}
));
toggleableEffects.push_back(std::make_shared<osci::Effect>( toggleableEffects.push_back(std::make_shared<osci::Effect>(
dashedLineEffect, dashedLineEffect,
std::vector<osci::EffectParameter*>{ std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Dash Length", "Controls the length of the dashed line.", "dashLength", VERSION_HINT, 0.2, 0.0, 1.0), new osci::EffectParameter("Dash Length", "Controls the length of the dashed line.", "dashLength", VERSION_HINT, 0.2, 0.0, 1.0),
} }));
));
toggleableEffects.push_back(custom); toggleableEffects.push_back(custom);
toggleableEffects.push_back(trace); toggleableEffects.push_back(trace);
trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth); trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
@ -228,13 +219,12 @@ void OscirenderAudioProcessor::addLuaSlider() {
[this, sliderIndex](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this, sliderIndex](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
luaValues[sliderIndex].store(values[0]); luaValues[sliderIndex].store(values[0]);
return input; return input;
}, new osci::EffectParameter( },
new osci::EffectParameter(
"Lua Slider " + sliderName, "Lua Slider " + sliderName,
"Controls the value of the Lua variable called slider_" + sliderName.toLowerCase() + ".", "Controls the value of the Lua variable called slider_" + sliderName.toLowerCase() + ".",
"lua" + sliderName, "lua" + sliderName,
VERSION_HINT, 0.0, 0.0, 1.0 VERSION_HINT, 0.0, 0.0, 1.0)));
)
));
} }
void OscirenderAudioProcessor::addErrorListener(ErrorListener* listener) { void OscirenderAudioProcessor::addErrorListener(ErrorListener* listener) {
@ -514,6 +504,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
outputBuffer3d.clear(); outputBuffer3d.clear();
{ {
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
juce::SpinLock::ScopedLockType sLock(syphonLock); juce::SpinLock::ScopedLockType sLock(syphonLock);
if (isSyphonInputActive()) { if (isSyphonInputActive()) {
for (int sample = 0; sample < outputBuffer3d.getNumSamples(); sample++) { for (int sample = 0; sample < outputBuffer3d.getNumSamples(); sample++) {
@ -521,7 +512,9 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
outputBuffer3d.setSample(0, sample, point.x); outputBuffer3d.setSample(0, sample, point.x);
outputBuffer3d.setSample(1, sample, point.y); outputBuffer3d.setSample(1, sample, point.y);
} }
} else if (usingInput && totalNumInputChannels >= 1) { } else
#endif
if (usingInput && totalNumInputChannels >= 1) {
if (totalNumInputChannels >= 2) { if (totalNumInputChannels >= 2) {
for (auto channel = 0; channel < juce::jmin(2, totalNumInputChannels); channel++) { for (auto channel = 0; channel < juce::jmin(2, totalNumInputChannels); channel++) {
outputBuffer3d.copyFrom(channel, 0, inputBuffer, channel, 0, buffer.getNumSamples()); outputBuffer3d.copyFrom(channel, 0, inputBuffer, channel, 0, buffer.getNumSamples());
@ -536,8 +529,9 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
auto midiIterator = midiMessages.cbegin(); auto midiIterator = midiMessages.cbegin();
std::for_each(midiIterator, std::for_each(midiIterator,
midiMessages.cend(), midiMessages.cend(),
[&] (const juce::MidiMessageMetadata& meta) { synth.publicHandleMidiEvent(meta.getMessage()); } [&](const juce::MidiMessageMetadata& meta) {
); synth.publicHandleMidiEvent(meta.getMessage());
});
} else { } else {
juce::SpinLock::ScopedLockType lock1(parsersLock); juce::SpinLock::ScopedLockType lock1(parsersLock);
juce::SpinLock::ScopedLockType lock2(effectsLock); juce::SpinLock::ScopedLockType lock2(effectsLock);
@ -904,7 +898,7 @@ void OscirenderAudioProcessor::envelopeChanged(EnvelopeComponent* changedEnvelop
} }
} }
#if JUCE_MAC || JUCE_WINDOWS #if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
// Syphon/Spout input management // Syphon/Spout input management
// syphonLock must be held when calling this function // syphonLock must be held when calling this function

Wyświetl plik

@ -11,24 +11,26 @@
#define VERSION_HINT 2 #define VERSION_HINT 2
#include <JuceHeader.h> #include <JuceHeader.h>
#include "audio/ShapeSound.h"
#include "audio/ShapeVoice.h"
#include "audio/PublicSynthesiser.h"
#include "audio/SampleRateManager.h"
#include <numbers> #include <numbers>
#include "audio/DelayEffect.h"
#include "audio/WobbleEffect.h" #include "CommonPluginProcessor.h"
#include "audio/PerspectiveEffect.h"
#include "obj/ObjectServer.h"
#include "UGen/Env.h" #include "UGen/Env.h"
#include "UGen/ugen_JuceEnvelopeComponent.h" #include "UGen/ugen_JuceEnvelopeComponent.h"
#include "audio/CustomEffect.h" #include "audio/CustomEffect.h"
#include "audio/DashedLineEffect.h" #include "audio/DashedLineEffect.h"
#include "CommonPluginProcessor.h" #include "audio/DelayEffect.h"
#include "SyphonFrameGrabber.h" #include "audio/PerspectiveEffect.h"
#include "audio/PublicSynthesiser.h"
#include "audio/SampleRateManager.h"
#include "audio/ShapeSound.h"
#include "audio/ShapeVoice.h"
#include "audio/WobbleEffect.h"
#include "obj/ObjectServer.h"
#if JUCE_MAC || JUCE_WINDOWS #if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
#include "../modules/juce_sharedtexture/SharedTexture.h" #include "../modules/juce_sharedtexture/SharedTexture.h"
#include "video/SyphonFrameGrabber.h"
#endif #endif
//============================================================================== //==============================================================================
@ -36,7 +38,8 @@
*/ */
class OscirenderAudioProcessor : public CommonAudioProcessor, juce::AudioProcessorParameter::Listener, public EnvelopeComponentListener class OscirenderAudioProcessor : public CommonAudioProcessor, juce::AudioProcessorParameter::Listener, public EnvelopeComponentListener
#if JucePlugin_Enable_ARA #if JucePlugin_Enable_ARA
, public juce::AudioProcessorARAExtension ,
public juce::AudioProcessorARAExtension
#endif #endif
{ {
public: public:
@ -64,13 +67,12 @@ public:
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
frequency = values[0].load(); frequency = values[0].load();
return input; return input;
}, new osci::EffectParameter( },
new osci::EffectParameter(
"Frequency", "Frequency",
"Controls how many times per second the image is drawn, thereby controlling the pitch of the sound. Lower frequencies result in more-accurately drawn images, but more flickering, and vice versa.", "Controls how many times per second the image is drawn, thereby controlling the pitch of the sound. Lower frequencies result in more-accurately drawn images, but more flickering, and vice versa.",
"frequency", "frequency",
VERSION_HINT, 220.0, 0.0, 4200.0 VERSION_HINT, 220.0, 0.0, 4200.0));
)
);
std::shared_ptr<osci::Effect> trace = std::make_shared<osci::Effect>( std::shared_ptr<osci::Effect> trace = std::make_shared<osci::Effect>(
std::vector<osci::EffectParameter*>{ std::vector<osci::EffectParameter*>{
@ -78,16 +80,13 @@ public:
"Trace Start", "Trace Start",
"Defines how far into the frame the drawing is started at. This has the effect of 'tracing' out the image from a single dot when animated. By default, we start drawing from the beginning of the frame, so this value is 0.0.", "Defines how far into the frame the drawing is started at. This has the effect of 'tracing' out the image from a single dot when animated. By default, we start drawing from the beginning of the frame, so this value is 0.0.",
"traceStart", "traceStart",
VERSION_HINT, 0.0, 0.0, 1.0, 0.001 VERSION_HINT, 0.0, 0.0, 1.0, 0.001),
),
new osci::EffectParameter( new osci::EffectParameter(
"Trace Length", "Trace Length",
"Defines how much of the frame is drawn per cycle. This has the effect of 'tracing' out the image from a single dot when animated. By default, we draw the whole frame, corresponding to a value of 1.0.", "Defines how much of the frame is drawn per cycle. This has the effect of 'tracing' out the image from a single dot when animated. By default, we draw the whole frame, corresponding to a value of 1.0.",
"traceLength", "traceLength",
VERSION_HINT, 1.0, 0.0, 1.0, 0.001 VERSION_HINT, 1.0, 0.0, 1.0, 0.001),
), });
}
);
std::shared_ptr<DelayEffect> delayEffect = std::make_shared<DelayEffect>(); std::shared_ptr<DelayEffect> delayEffect = std::make_shared<DelayEffect>();
@ -97,8 +96,7 @@ public:
std::shared_ptr<CustomEffect> customEffect = std::make_shared<CustomEffect>(errorCallback, luaValues); std::shared_ptr<CustomEffect> customEffect = std::make_shared<CustomEffect>(errorCallback, luaValues);
std::shared_ptr<osci::Effect> custom = std::make_shared<osci::Effect>( std::shared_ptr<osci::Effect> custom = std::make_shared<osci::Effect>(
customEffect, customEffect,
new osci::EffectParameter("Lua Effect", "Controls the strength of the custom Lua effect applied. You can write your own custom effect using Lua by pressing the edit button on the right.", "customEffectStrength", VERSION_HINT, 1.0, 0.0, 1.0) new osci::EffectParameter("Lua Effect", "Controls the strength of the custom Lua effect applied. You can write your own custom effect using Lua by pressing the edit button on the right.", "customEffectStrength", VERSION_HINT, 1.0, 0.0, 1.0));
);
std::shared_ptr<PerspectiveEffect> perspectiveEffect = std::make_shared<PerspectiveEffect>(); std::shared_ptr<PerspectiveEffect> perspectiveEffect = std::make_shared<PerspectiveEffect>();
std::shared_ptr<osci::Effect> perspective = std::make_shared<osci::Effect>( std::shared_ptr<osci::Effect> perspective = std::make_shared<osci::Effect>(
@ -106,8 +104,7 @@ public:
std::vector<osci::EffectParameter*>{ std::vector<osci::EffectParameter*>{
new osci::EffectParameter("Perspective", "Controls the strength of the 3D perspective projection.", "perspectiveStrength", VERSION_HINT, 1.0, 0.0, 1.0), new osci::EffectParameter("Perspective", "Controls the strength of the 3D perspective projection.", "perspectiveStrength", VERSION_HINT, 1.0, 0.0, 1.0),
new osci::EffectParameter("Focal Length", "Controls the focal length of the 3D perspective effect. A higher focal length makes the image look more flat, and a lower focal length makes the image look more 3D.", "perspectiveFocalLength", VERSION_HINT, 2.0, 0.0, 10.0), new osci::EffectParameter("Focal Length", "Controls the focal length of the 3D perspective effect. A higher focal length makes the image look more flat, and a lower focal length makes the image look more 3D.", "perspectiveFocalLength", VERSION_HINT, 2.0, 0.0, 10.0),
} });
);
osci::BooleanParameter* midiEnabled = new osci::BooleanParameter("MIDI Enabled", "midiEnabled", VERSION_HINT, false, "Enable MIDI input for the synth. If disabled, the synth will play a constant tone, as controlled by the frequency slider."); osci::BooleanParameter* midiEnabled = new osci::BooleanParameter("MIDI Enabled", "midiEnabled", VERSION_HINT, false, "Enable MIDI input for the synth. If disabled, the synth will play a constant tone, as controlled by the frequency slider.");
osci::BooleanParameter* inputEnabled = new osci::BooleanParameter("Audio Input Enabled", "inputEnabled", VERSION_HINT, false, "Enable to use input audio, instead of the generated audio."); osci::BooleanParameter* inputEnabled = new osci::BooleanParameter("Audio Input Enabled", "inputEnabled", VERSION_HINT, false, "Enable to use input audio, instead of the generated audio.");
@ -141,8 +138,7 @@ public:
sustainLevel->getValueUnnormalised(), sustainLevel->getValueUnnormalised(),
releaseTime->getValueUnnormalised(), releaseTime->getValueUnnormalised(),
1.0, 1.0,
std::vector<EnvCurve>{ attackShape->getValueUnnormalised(), decayShape->getValueUnnormalised(), releaseShape->getValueUnnormalised() } std::vector<EnvCurve>{attackShape->getValueUnnormalised(), decayShape->getValueUnnormalised(), releaseShape->getValueUnnormalised()});
);
juce::MidiKeyboardState keyboardState; juce::MidiKeyboardState keyboardState;
@ -158,23 +154,21 @@ public:
std::shared_ptr<osci::Effect> imageThreshold = std::make_shared<osci::Effect>( std::shared_ptr<osci::Effect> imageThreshold = std::make_shared<osci::Effect>(
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
return input; return input;
}, new osci::EffectParameter( },
new osci::EffectParameter(
"Image Threshold", "Image Threshold",
"Controls the probability of visiting a dark pixel versus a light pixel. Darker pixels are less likely to be visited, so turning the threshold to a lower value makes it more likely to visit dark pixels.", "Controls the probability of visiting a dark pixel versus a light pixel. Darker pixels are less likely to be visited, so turning the threshold to a lower value makes it more likely to visit dark pixels.",
"imageThreshold", "imageThreshold",
VERSION_HINT, 0.5, 0, 1 VERSION_HINT, 0.5, 0, 1));
)
);
std::shared_ptr<osci::Effect> imageStride = std::make_shared<osci::Effect>( std::shared_ptr<osci::Effect> imageStride = std::make_shared<osci::Effect>(
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) { [this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
return input; return input;
}, new osci::EffectParameter( },
new osci::EffectParameter(
"Image Stride", "Image Stride",
"Controls the spacing between pixels when drawing an image. Larger values mean more of the image can be drawn, but at a lower fidelity.", "Controls the spacing between pixels when drawing an image. Larger values mean more of the image can be drawn, but at a lower fidelity.",
"imageStride", "imageStride",
VERSION_HINT, 4, 1, 50, 1 VERSION_HINT, 4, 1, 50, 1));
)
);
std::atomic<double> animationFrame = 0.f; std::atomic<double> animationFrame = 0.f;
@ -240,7 +234,6 @@ public:
}; };
private: private:
std::atomic<bool> prevMidiEnabled = !midiEnabled->getBoolValue(); std::atomic<bool> prevMidiEnabled = !midiEnabled->getBoolValue();
juce::SpinLock audioThreadCallbackLock; juce::SpinLock audioThreadCallbackLock;
@ -287,8 +280,7 @@ private:
juce::AudioPlayHead* playHead; juce::AudioPlayHead* playHead;
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
#if JUCE_MAC || JUCE_WINDOWS
public: public:
bool isSyphonInputActive() const; bool isSyphonInputActive() const;
bool isSyphonInputStarted() const; bool isSyphonInputStarted() const;
@ -297,6 +289,7 @@ public:
juce::String getSyphonSourceName() const; juce::String getSyphonSourceName() const;
juce::SpinLock syphonLock; juce::SpinLock syphonLock;
private: private:
ImageParser syphonImageParser = ImageParser(*this); ImageParser syphonImageParser = ImageParser(*this);
std::unique_ptr<SyphonFrameGrabber> syphonFrameGrabber; std::unique_ptr<SyphonFrameGrabber> syphonFrameGrabber;

Wyświetl plik

@ -1,4 +1,5 @@
#include "SettingsComponent.h" #include "SettingsComponent.h"
#include "PluginEditor.h" #include "PluginEditor.h"
SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudioProcessorEditor& editor) : audioProcessor(p), pluginEditor(editor) { SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudioProcessorEditor& editor) : audioProcessor(p), pluginEditor(editor) {
@ -23,7 +24,6 @@ SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudi
mainLayout.setItemLayout(2, -0.1, -0.9, -(1.0 + mainLayoutPreferredSize)); mainLayout.setItemLayout(2, -0.1, -0.9, -(1.0 + mainLayoutPreferredSize));
} }
void SettingsComponent::resized() { void SettingsComponent::resized() {
auto area = getLocalBounds(); auto area = getLocalBounds();
area.removeFromLeft(5); area.removeFromLeft(5);
@ -79,8 +79,25 @@ void SettingsComponent::fileUpdated(juce::String fileName) {
juce::String extension = fileName.fromLastOccurrenceOf(".", true, false).toLowerCase(); juce::String extension = fileName.fromLastOccurrenceOf(".", true, false).toLowerCase();
txt.setVisible(false); txt.setVisible(false);
frame.setVisible(false); frame.setVisible(false);
bool isImage = extension == ".gif" || extension == ".png" || extension == ".jpg" || extension == ".jpeg" || extension == ".mov" || extension == ".mp4" || audioProcessor.isSyphonInputStarted();
if ((fileName.isEmpty() && !audioProcessor.isSyphonInputStarted()) || audioProcessor.objectServerRendering) { // Check if the file is an image based on extension or Syphon/Spout input
bool isSyphonActive = false;
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
isSyphonActive = audioProcessor.isSyphonInputStarted();
#endif
bool isImage = isSyphonActive ||
(extension == ".gif" ||
extension == ".png" ||
extension == ".jpg" ||
extension == ".jpeg" ||
extension == ".mov" ||
extension == ".mp4");
// Skip processing if object server is rendering or if no file is selected and no Syphon input
bool skipProcessing = audioProcessor.objectServerRendering || (fileName.isEmpty() && !isSyphonActive);
if (skipProcessing) {
// do nothing // do nothing
} else if (extension == ".txt") { } else if (extension == ".txt") {
txt.setVisible(true); txt.setVisible(true);

Wyświetl plik

@ -62,6 +62,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
editor.openRecordingSettings(); editor.openRecordingSettings();
}); });
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
// Add Syphon/Spout input menu item under Recording // Add Syphon/Spout input menu item under Recording
addMenuItem(2, audioProcessor.isSyphonInputActive() ? "Disconnect Syphon/Spout Input" : "Select Syphon/Spout Input...", [this] { addMenuItem(2, audioProcessor.isSyphonInputActive() ? "Disconnect Syphon/Spout Input" : "Select Syphon/Spout Input...", [this] {
if (audioProcessor.isSyphonInputActive()) if (audioProcessor.isSyphonInputActive())
@ -69,6 +70,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
else else
openSyphonInputDialog(); openSyphonInputDialog();
}); });
#endif
if (editor.processor.wrapperType == juce::AudioProcessor::WrapperType::wrapperType_Standalone) { if (editor.processor.wrapperType == juce::AudioProcessor::WrapperType::wrapperType_Standalone) {
addMenuItem(3, "Settings...", [this] { addMenuItem(3, "Settings...", [this] {
@ -77,6 +79,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
} }
} }
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
void OsciMainMenuBarModel::openSyphonInputDialog() { void OsciMainMenuBarModel::openSyphonInputDialog() {
editor.openSyphonInputDialog(); editor.openSyphonInputDialog();
} }
@ -84,3 +87,4 @@ void OsciMainMenuBarModel::openSyphonInputDialog() {
void OsciMainMenuBarModel::disconnectSyphonInput() { void OsciMainMenuBarModel::disconnectSyphonInput() {
audioProcessor.disconnectSyphonInput(); audioProcessor.disconnectSyphonInput();
} }
#endif

Wyświetl plik

@ -0,0 +1,343 @@
#include "FFmpegEncoderManager.h"
// Constructs the manager and immediately probes the given FFmpeg binary for
// its available encoders, caching the results per codec for later lookups.
// NOTE(review): the parameter is a non-const reference but is only copied into
// the `ffmpegExecutable` member — `const juce::File&` would be more accurate
// (requires a matching change in the header declaration).
FFmpegEncoderManager::FFmpegEncoderManager(juce::File& ffmpegExecutable)
    : ffmpegExecutable(ffmpegExecutable) {
    queryAvailableEncoders();
}
// Builds a complete FFmpeg command line for the requested codec by
// dispatching to the codec-specific builder.
//
// `crf` drives quality for most encoders; `videoToolboxQuality` is only used
// by the H.265 VideoToolbox path; `compressionPreset` is forwarded where the
// encoder accepts an FFmpeg preset name. Any unrecognised codec value falls
// back to the H.264 pipeline.
juce::String FFmpegEncoderManager::buildVideoEncodingCommand(
    VideoCodec codec,
    int crf,
    int videoToolboxQuality,
    int width,
    int height,
    double frameRate,
    const juce::String& compressionPreset,
    const juce::File& outputFile) {
    if (codec == VideoCodec::H265) {
        return buildH265EncodingCommand(crf, videoToolboxQuality, width, height, frameRate, compressionPreset, outputFile);
    }
    if (codec == VideoCodec::VP9) {
        return buildVP9EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile);
    }
#if JUCE_MAC
    if (codec == VideoCodec::ProRes) {
        return buildProResEncodingCommand(width, height, frameRate, outputFile);
    }
#endif
    // H.264, plus the fallback for any unknown codec value.
    return buildH264EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile);
}
// Returns the cached encoder list for `codec`, or an empty array when the
// codec was never populated by queryAvailableEncoders().
juce::Array<FFmpegEncoderManager::EncoderDetails> FFmpegEncoderManager::getAvailableEncodersForCodec(VideoCodec codec) {
    const auto entry = availableEncoders.find(codec);
    return entry == availableEncoders.end() ? juce::Array<EncoderDetails>{} : entry->second;
}
// Returns true if `encoderName` was reported by FFmpeg, is flagged as a
// usable video encoder, and was classified as hardware-accelerated.
bool FFmpegEncoderManager::isHardwareEncoderAvailable(const juce::String& encoderName) {
    // Scan every cached codec list for a matching, supported HW encoder.
    for (const auto& [codec, encoders] : availableEncoders) {
        for (const auto& details : encoders) {
            if (details.name == encoderName && details.isSupported && details.isHardwareAccelerated)
                return true;
        }
    }
    return false;
}
// Picks the preferred encoder for `codec`: hardware encoders are tried first
// in vendor-priority order, and the software encoder is returned when none of
// the probed encoders matched (even if the probe itself found nothing).
juce::String FFmpegEncoderManager::getBestEncoderForCodec(VideoCodec codec) {
    // Priority-ordered candidate names: hardware first, software last.
    const auto candidatesFor = [](VideoCodec c) -> juce::StringArray {
        switch (c) {
            case VideoCodec::H265:
                return {"hevc_nvenc", "hevc_amf", "hevc_qsv", "hevc_videotoolbox", "libx265"};
            case VideoCodec::VP9:
                return {"libvpx-vp9"};
#if JUCE_MAC
            case VideoCodec::ProRes:
                return {"prores_ks", "prores"};
#endif
            case VideoCodec::H264:
            default: // Unknown codecs are treated as H.264.
                return {"h264_nvenc", "h264_amf", "h264_qsv", "h264_videotoolbox", "libx264"};
        }
    };

    const auto available = getAvailableEncodersForCodec(codec);

    for (const auto& candidate : candidatesFor(codec)) {
        for (const auto& details : available) {
            if (details.name == candidate && details.isSupported)
                return candidate;
        }
    }

    // Nothing usable was detected — fall back to the software encoder.
    switch (codec) {
        case VideoCodec::H265:
            return "libx265";
        case VideoCodec::VP9:
            return "libvpx-vp9";
#if JUCE_MAC
        case VideoCodec::ProRes:
            return "prores";
#endif
        case VideoCodec::H264:
        default:
            return "libx264";
    }
}
// Runs `ffmpeg -encoders -hide_banner` once and caches the parsed results in
// `availableEncoders`. Called from the constructor; blocks until the FFmpeg
// process exits.
void FFmpegEncoderManager::queryAvailableEncoders() {
    // Query available encoders using ffmpeg -encoders
    juce::String output = runFFmpegCommand({"-encoders", "-hide_banner"});
    parseEncoderList(output);
}
// Parses the output of `ffmpeg -encoders` into per-codec encoder lists.
//
// Each table row has the shape:
//   " V....D h264_nvenc           NVIDIA NVENC H.264 encoder"
// where the first column is a flag string (a 'V' marks a video encoder) and
// the second column is the encoder name.
void FFmpegEncoderManager::parseEncoderList(const juce::String& output) {
    // Reset the cache and ensure every codec has an (initially empty) entry.
    availableEncoders.clear();
    availableEncoders[VideoCodec::H264] = {};
    availableEncoders[VideoCodec::H265] = {};
    availableEncoders[VideoCodec::VP9] = {};
#if JUCE_MAC
    availableEncoders[VideoCodec::ProRes] = {};
#endif

    juce::StringArray lines;
    lines.addLines(output);

    // The header/legend is terminated by a "------" separator line. Locate it
    // rather than assuming a fixed header length, which varies across FFmpeg
    // versions. If no separator is found, scan everything: legend lines are
    // harmless because their parsed "names" never match a known encoder.
    int firstEncoderLine = 0;
    for (int i = 0; i < lines.size(); ++i) {
        if (lines[i].contains("------")) {
            firstEncoderLine = i + 1;
            break;
        }
    }

    for (int i = firstEncoderLine; i < lines.size(); ++i) {
        const auto& line = lines[i];
        if (line.length() <= 8)
            continue; // Too short to hold the flags column plus a name.

        juce::String flags = line.substring(0, 6).trim();
        juce::String name = line.substring(8).upToFirstOccurrenceOf(" ", false, true);
        juce::String description = line.substring(8 + name.length()).trim();

        EncoderDetails encoder;
        encoder.name = name;
        encoder.description = description;
        // Hardware encoders are recognised by their vendor API name suffix.
        encoder.isHardwareAccelerated = name.contains("nvenc") || name.contains("amf") ||
                                        name.contains("qsv") || name.contains("videotoolbox");
        encoder.isSupported = flags.contains("V"); // 'V' flag => video encoder

        // File the encoder under the codec it implements.
        if (name == "libx264" || name.startsWith("h264_")) {
            availableEncoders[VideoCodec::H264].add(encoder);
        } else if (name == "libx265" || name.startsWith("hevc_")) {
            availableEncoders[VideoCodec::H265].add(encoder);
        } else if (name == "libvpx-vp9") {
            availableEncoders[VideoCodec::VP9].add(encoder);
        }
#if JUCE_MAC
        else if (name.startsWith("prores")) {
            availableEncoders[VideoCodec::ProRes].add(encoder);
        }
#endif
    }
}
// Launches the managed FFmpeg binary with the given arguments and returns
// everything it wrote to stdout. Blocks until the process exits. Returns an
// empty string if the process could not be started (e.g. missing or
// non-executable binary) — previously the failed start was silently ignored.
juce::String FFmpegEncoderManager::runFFmpegCommand(const juce::StringArray& args) {
    juce::StringArray command;
    command.add(ffmpegExecutable.getFullPathName());
    command.addArray(args);

    juce::ChildProcess process;
    if (!process.start(command, juce::ChildProcess::wantStdOut))
        return {};

    // readAllProcessOutput blocks until the child terminates.
    return process.readAllProcessOutput();
}
// Builds the shared input half of every encoding command: raw RGBA frames are
// piped in over stdin ("-i -") at the given size and frame rate, and the
// output is converted to yuv420p and vertically flipped ("-vf vflip").
// `outputFile` is currently unused here — callers append the destination path
// after their codec-specific options.
juce::String FFmpegEncoderManager::buildBaseEncodingCommand(
    int width,
    int height,
    double frameRate,
    const juce::File& outputFile) {
    juce::String cmd;
    cmd << "\"" << ffmpegExecutable.getFullPathName() << "\"";
    cmd << " -r " << juce::String(frameRate);
    cmd << " -f rawvideo";
    cmd << " -pix_fmt rgba";
    cmd << " -s " << juce::String(width) << "x" << juce::String(height);
    cmd << " -i -";
    cmd << " -threads 4";
    cmd << " -y";
    cmd << " -pix_fmt yuv420p";
    cmd << " -vf vflip";
    return cmd;
}
// Appends H.264 encoder-selection and rate-control flags to `cmd` for the
// chosen encoder, returning the extended command. Each hardware encoder uses
// its own quality knob fed from `crf`; `compressionPreset` is only consumed
// by encoders that accept an FFmpeg preset name (QSV and libx264).
juce::String FFmpegEncoderManager::addH264EncoderSettings(
    juce::String cmd,
    const juce::String& encoderName,
    int crf,
    const juce::String& compressionPreset) {
    if (encoderName == "h264_nvenc") {
        // NVIDIA NVENC: best-quality preset, VBR with a constant-quality target.
        cmd += " -c:v h264_nvenc";
        cmd += " -preset p7";
        cmd += " -profile:v high";
        cmd += " -rc vbr";
        cmd += " -cq " + juce::String(crf);
        cmd += " -b:v 0";
    } else if (encoderName == "h264_amf") {
        // AMD AMF: constant-QP mode, same QP for I and P frames.
        cmd += " -c:v h264_amf";
        cmd += " -quality quality";
        cmd += " -rc cqp";
        cmd += " -qp_i " + juce::String(crf);
        cmd += " -qp_p " + juce::String(crf);
    } else if (encoderName == "h264_qsv") {
        // Intel Quick Sync: global_quality approximates CRF.
        cmd += " -c:v h264_qsv";
        cmd += " -global_quality " + juce::String(crf);
        cmd += " -preset " + compressionPreset;
    } else if (encoderName == "h264_videotoolbox") {
        // Apple VideoToolbox: single quality parameter.
        cmd += " -c:v h264_videotoolbox";
        cmd += " -q " + juce::String(crf);
    } else { // libx264 (software)
        cmd += " -c:v libx264";
        cmd += " -preset " + compressionPreset;
        cmd += " -crf " + juce::String(crf);
    }

    return cmd;
}
// Appends H.265/HEVC encoder-selection and rate-control flags to `cmd` for
// the chosen encoder, returning the extended command. `crf` drives quality
// for every encoder except VideoToolbox, which uses its own
// `videoToolboxQuality` scale; `compressionPreset` is only consumed by QSV
// and libx265.
juce::String FFmpegEncoderManager::addH265EncoderSettings(
    juce::String cmd,
    const juce::String& encoderName,
    int crf,
    int videoToolboxQuality,
    const juce::String& compressionPreset) {
    if (encoderName == "hevc_nvenc") {
        // NVIDIA NVENC: best-quality preset, VBR with a constant-quality target.
        cmd += " -c:v hevc_nvenc";
        cmd += " -preset p7";
        cmd += " -profile:v main";
        cmd += " -rc vbr";
        cmd += " -cq " + juce::String(crf);
        cmd += " -b:v 0";
    } else if (encoderName == "hevc_amf") {
        // AMD AMF: constant-QP mode, same QP for I and P frames.
        cmd += " -c:v hevc_amf";
        cmd += " -quality quality";
        cmd += " -rc cqp";
        cmd += " -qp_i " + juce::String(crf);
        cmd += " -qp_p " + juce::String(crf);
    } else if (encoderName == "hevc_qsv") {
        // Intel Quick Sync: global_quality approximates CRF.
        cmd += " -c:v hevc_qsv";
        cmd += " -global_quality " + juce::String(crf);
        cmd += " -preset " + compressionPreset;
    } else if (encoderName == "hevc_videotoolbox") {
        // Apple VideoToolbox: dedicated quality scale; hvc1 tag — presumably
        // for QuickTime/Apple player compatibility (TODO confirm).
        cmd += " -c:v hevc_videotoolbox";
        cmd += " -q:v " + juce::String(videoToolboxQuality);
        cmd += " -tag:v hvc1";
    } else { // libx265 (software)
        cmd += " -c:v libx265";
        cmd += " -preset " + compressionPreset;
        cmd += " -crf " + juce::String(crf);
    }

    return cmd;
}
// Assembles the full H.264 command: shared raw-input options, the best
// available H.264 encoder's settings, then the quoted destination path.
juce::String FFmpegEncoderManager::buildH264EncodingCommand(
    int crf,
    int width,
    int height,
    double frameRate,
    const juce::String& compressionPreset,
    const juce::File& outputFile) {
    auto cmd = addH264EncoderSettings(
        buildBaseEncodingCommand(width, height, frameRate, outputFile),
        getBestEncoderForCodec(VideoCodec::H264),
        crf,
        compressionPreset);
    return cmd + " \"" + outputFile.getFullPathName() + "\"";
}
// Assembles the full H.265 command: shared raw-input options, the best
// available HEVC encoder's settings, then the quoted destination path.
juce::String FFmpegEncoderManager::buildH265EncodingCommand(
    int crf,
    int videoToolboxQuality,
    int width,
    int height,
    double frameRate,
    const juce::String& compressionPreset,
    const juce::File& outputFile) {
    auto cmd = addH265EncoderSettings(
        buildBaseEncodingCommand(width, height, frameRate, outputFile),
        getBestEncoderForCodec(VideoCodec::H265),
        crf,
        videoToolboxQuality,
        compressionPreset);
    return cmd + " \"" + outputFile.getFullPathName() + "\"";
}
// Assembles the full VP9 command. libvpx-vp9 is the only VP9 encoder used,
// in constant-quality mode (-b:v 0 with -crf) at the "good" deadline.
// `compressionPreset` is unused — libvpx has its own speed knob (-cpu-used).
juce::String FFmpegEncoderManager::buildVP9EncodingCommand(
    int crf,
    int width,
    int height,
    double frameRate,
    const juce::String& compressionPreset,
    const juce::File& outputFile) {
    juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile);
    cmd += " -c:v libvpx-vp9";
    cmd += " -b:v 0";
    cmd += " -crf " + juce::String(crf);
    cmd += " -deadline good -cpu-used 2";
    cmd += " \"" + outputFile.getFullPathName() + "\"";
    return cmd;
}
#if JUCE_MAC
// Assembles the full ProRes command (macOS only): best available prores
// encoder with profile 3, then the quoted destination path.
juce::String FFmpegEncoderManager::buildProResEncodingCommand(
    int width,
    int height,
    double frameRate,
    const juce::File& outputFile) {
    auto cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile);
    cmd += " -c:v " + getBestEncoderForCodec(VideoCodec::ProRes);
    cmd += " -profile:v 3"; // ProRes 422 HQ
    cmd += " \"" + outputFile.getFullPathName() + "\"";
    return cmd;
}
#endif

Wyświetl plik

@ -0,0 +1,112 @@
#pragma once
#include <JuceHeader.h>
#include "../visualiser/RecordingSettings.h"
// Probes the bundled FFmpeg binary for its available encoders (including
// hardware-accelerated ones such as NVENC, AMF, QSV and VideoToolbox) and
// builds complete FFmpeg command lines for recording video from raw RGBA
// frames piped over stdin.
class FFmpegEncoderManager {
public:
    // Runs the encoder probe immediately; `ffmpegExecutable` is copied.
    FFmpegEncoderManager(juce::File& ffmpegExecutable);
    ~FFmpegEncoderManager() = default;

    // Describes one encoder row reported by `ffmpeg -encoders`.
    struct EncoderDetails {
        juce::String name;               // e.g. "h264_nvenc", "libx264"
        juce::String description;        // human-readable description from FFmpeg
        bool isHardwareAccelerated;      // true for nvenc/amf/qsv/videotoolbox
        bool isSupported;                // true when flagged as a video encoder
    };

    // Builds the full FFmpeg command line for the given codec/quality/output.
    // `videoToolboxQuality` is only used by the H.265 VideoToolbox path.
    juce::String buildVideoEncodingCommand(
        VideoCodec codec,
        int crf,
        int videoToolboxQuality,
        int width,
        int height,
        double frameRate,
        const juce::String& compressionPreset,
        const juce::File& outputFile);

    // Returns the cached encoders detected for `codec` (empty if none).
    juce::Array<EncoderDetails> getAvailableEncodersForCodec(VideoCodec codec);

    // True if `encoderName` was detected, usable, and hardware-accelerated.
    bool isHardwareEncoderAvailable(const juce::String& encoderName);

    // Returns the preferred encoder name for `codec` (hardware first,
    // software fallback).
    juce::String getBestEncoderForCodec(VideoCodec codec);

private:
    juce::File ffmpegExecutable;                                  // path to the FFmpeg binary
    std::map<VideoCodec, juce::Array<EncoderDetails>> availableEncoders;  // probe results per codec

    // Runs `ffmpeg -encoders` and populates `availableEncoders`.
    void queryAvailableEncoders();

    // Parses the `ffmpeg -encoders` output into `availableEncoders`.
    void parseEncoderList(const juce::String& output);

    // Runs FFmpeg with `args`, blocking until exit; returns its stdout.
    juce::String runFFmpegCommand(const juce::StringArray& args);

    // Builds the shared stdin-rawvideo input portion of every command.
    juce::String buildBaseEncodingCommand(
        int width,
        int height,
        double frameRate,
        const juce::File& outputFile);

    // Appends H.264 encoder selection and rate-control flags to `cmd`.
    juce::String addH264EncoderSettings(
        juce::String cmd,
        const juce::String& encoderName,
        int crf,
        const juce::String& compressionPreset);

    // Appends H.265 encoder selection and rate-control flags to `cmd`.
    juce::String addH265EncoderSettings(
        juce::String cmd,
        const juce::String& encoderName,
        int crf,
        int videoToolboxQuality,
        const juce::String& compressionPreset);

    // Builds the complete H.264 command including the output path.
    juce::String buildH264EncodingCommand(
        int crf,
        int width,
        int height,
        double frameRate,
        const juce::String& compressionPreset,
        const juce::File& outputFile);

    // Builds the complete H.265 command including the output path.
    juce::String buildH265EncodingCommand(
        int crf,
        int videoToolboxQuality,
        int width,
        int height,
        double frameRate,
        const juce::String& compressionPreset,
        const juce::File& outputFile);

    // Builds the complete VP9 command including the output path.
    juce::String buildVP9EncodingCommand(
        int crf,
        int width,
        int height,
        double frameRate,
        const juce::String& compressionPreset,
        const juce::File& outputFile);

#if JUCE_MAC
    // Builds the complete ProRes command (macOS only).
    juce::String buildProResEncodingCommand(
        int width,
        int height,
        double frameRate,
        const juce::File& outputFile);
#endif

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(FFmpegEncoderManager)
};

Wyświetl plik

@ -1,13 +1,12 @@
#pragma once #pragma once
#include <JuceHeader.h> #include <JuceHeader.h>
#include "InvisibleOpenGLContextComponent.h" #include "InvisibleOpenGLContextComponent.h"
class SyphonFrameGrabber : private juce::Thread, public juce::Component class SyphonFrameGrabber : private juce::Thread, public juce::Component {
{
public: public:
SyphonFrameGrabber(SharedTextureManager& manager, juce::String server, juce::String app, ImageParser& parser, int pollMs = 16) SyphonFrameGrabber(SharedTextureManager& manager, juce::String server, juce::String app, ImageParser& parser, int pollMs = 16)
: juce::Thread("SyphonFrameGrabber"), pollIntervalMs(pollMs), manager(manager), parser(parser) : juce::Thread("SyphonFrameGrabber"), pollIntervalMs(pollMs), manager(manager), parser(parser) {
{
// Create the invisible OpenGL context component // Create the invisible OpenGL context component
glContextComponent = std::make_unique<InvisibleOpenGLContextComponent>(); glContextComponent = std::make_unique<InvisibleOpenGLContextComponent>();
receiver = manager.addReceiver(server, app); receiver = manager.addReceiver(server, app);
@ -45,13 +44,11 @@ public:
} }
} }
bool isActive() const bool isActive() const {
{
return receiver != nullptr && receiver->isInit && receiver->enabled; return receiver != nullptr && receiver->isInit && receiver->enabled;
} }
juce::String getSourceName() const juce::String getSourceName() const {
{
if (receiver) { if (receiver) {
return receiver->sharingName + " (" + receiver->sharingAppName + ")"; return receiver->sharingName + " (" + receiver->sharingAppName + ")";
} }

Wyświetl plik

@ -1,14 +1,12 @@
#include "../LookAndFeel.h"
#include "VisualiserComponent.h" #include "VisualiserComponent.h"
#include "../CommonPluginProcessor.h"
#include "../CommonPluginEditor.h"
#include "../CommonPluginEditor.h"
#include "../CommonPluginProcessor.h"
#include "../LookAndFeel.h"
#include "AfterglowFragmentShader.glsl" #include "AfterglowFragmentShader.glsl"
#include "AfterglowVertexShader.glsl" #include "AfterglowVertexShader.glsl"
#include "BlurFragmentShader.glsl" #include "BlurFragmentShader.glsl"
#include "BlurVertexShader.glsl" #include "BlurVertexShader.glsl"
#include "WideBlurFragmentShader.glsl"
#include "WideBlurVertexShader.glsl"
#include "GlowFragmentShader.glsl" #include "GlowFragmentShader.glsl"
#include "GlowVertexShader.glsl" #include "GlowVertexShader.glsl"
#include "LineFragmentShader.glsl" #include "LineFragmentShader.glsl"
@ -19,6 +17,8 @@
#include "SimpleVertexShader.glsl" #include "SimpleVertexShader.glsl"
#include "TexturedFragmentShader.glsl" #include "TexturedFragmentShader.glsl"
#include "TexturedVertexShader.glsl" #include "TexturedVertexShader.glsl"
#include "WideBlurFragmentShader.glsl"
#include "WideBlurVertexShader.glsl"
VisualiserComponent::VisualiserComponent( VisualiserComponent::VisualiserComponent(
CommonAudioProcessor &processor, CommonAudioProcessor &processor,
@ -30,11 +30,11 @@ VisualiserComponent::VisualiserComponent(
VisualiserSettings &settings, VisualiserSettings &settings,
RecordingSettings &recordingSettings, RecordingSettings &recordingSettings,
VisualiserComponent *parent, VisualiserComponent *parent,
bool visualiserOnly bool visualiserOnly) : audioProcessor(processor),
) : audioProcessor(processor),
ffmpegFile(ffmpegFile), ffmpegFile(ffmpegFile),
#if OSCI_PREMIUM #if OSCI_PREMIUM
sharedTextureManager(sharedTextureManager), sharedTextureManager(sharedTextureManager),
ffmpegEncoderManager(ffmpegFile),
#endif #endif
settings(settings), settings(settings),
recordingSettings(recordingSettings), recordingSettings(recordingSettings),
@ -82,13 +82,11 @@ VisualiserComponent::VisualiserComponent(
sharedTextureButton.setTooltip("Toggles sending the oscilloscope's visuals to a Syphon/Spout receiver."); sharedTextureButton.setTooltip("Toggles sending the oscilloscope's visuals to a Syphon/Spout receiver.");
sharedTextureButton.onClick = [this] { sharedTextureButton.onClick = [this] {
if (sharedTextureSender != nullptr) { if (sharedTextureSender != nullptr) {
openGLContext.executeOnGLThread([this](juce::OpenGLContext& context) { openGLContext.executeOnGLThread([this](juce::OpenGLContext &context) { closeSharedTexture(); },
closeSharedTexture(); false);
}, false);
} else { } else {
openGLContext.executeOnGLThread([this](juce::OpenGLContext& context) { openGLContext.executeOnGLThread([this](juce::OpenGLContext &context) { initialiseSharedTexture(); },
initialiseSharedTexture(); false);
}, false);
} }
}; };
#endif #endif
@ -113,9 +111,7 @@ VisualiserComponent::VisualiserComponent(
audioInputButton.setClickingTogglesState(false); audioInputButton.setClickingTogglesState(false);
audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification); audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification);
audioPlayer.onParserChanged = [this] { audioPlayer.onParserChanged = [this] {
juce::MessageManager::callAsync([this] { juce::MessageManager::callAsync([this] { audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification); });
audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification);
});
}; };
audioInputButton.onClick = [this] { audioInputButton.onClick = [this] {
audioProcessor.stopAudioFile(); audioProcessor.stopAudioFile();
@ -138,9 +134,7 @@ VisualiserComponent::~VisualiserComponent() {
audioProcessor.haltRecording = nullptr; audioProcessor.haltRecording = nullptr;
} }
openGLContext.detach(); openGLContext.detach();
setShouldBeRunning(false, [this] { setShouldBeRunning(false, [this] { renderingSemaphore.release(); });
renderingSemaphore.release();
});
} }
void VisualiserComponent::setFullScreen(bool fullScreen) { void VisualiserComponent::setFullScreen(bool fullScreen) {
@ -356,8 +350,7 @@ void VisualiserComponent::mouseMove(const juce::MouseEvent& event) {
resized(); resized();
} }
} }
} } });
});
} }
resized(); resized();
} }
@ -417,15 +410,13 @@ void VisualiserComponent::setRecording(bool recording) {
downloading = false; downloading = false;
resized(); resized();
}); });
}); }); });
});
}; };
auto onDownloadStart = [this] { auto onDownloadStart = [this] {
juce::MessageManager::callAsync([this] { juce::MessageManager::callAsync([this] {
record.setEnabled(false); record.setEnabled(false);
downloading = true; downloading = true;
resized(); resized(); });
});
}; };
if (!audioProcessor.ensureFFmpegExists(onDownloadStart, onDownloadSuccess)) { if (!audioProcessor.ensureFFmpegExists(onDownloadStart, onDownloadSuccess)) {
record.setToggleState(false, juce::NotificationType::dontSendNotification); record.setToggleState(false, juce::NotificationType::dontSendNotification);
@ -436,47 +427,16 @@ void VisualiserComponent::setRecording(bool recording) {
juce::String fileExtension = recordingSettings.getFileExtensionForCodec(); juce::String fileExtension = recordingSettings.getFileExtensionForCodec();
tempVideoFile = std::make_unique<juce::TemporaryFile>("." + fileExtension); tempVideoFile = std::make_unique<juce::TemporaryFile>("." + fileExtension);
juce::String resolution = std::to_string(renderTexture.width) + "x" + std::to_string(renderTexture.height);
juce::String cmd = "\"" + ffmpegFile.getFullPathName() + "\"" +
" -r " + juce::String(recordingSettings.getFrameRate()) +
" -f rawvideo" +
" -pix_fmt rgba" +
" -s " + resolution +
" -i -" +
" -threads 4" +
" -preset " + recordingSettings.getCompressionPreset() +
" -y" +
" -pix_fmt yuv420p";
// Apply codec-specific parameters
VideoCodec codec = recordingSettings.getVideoCodec(); VideoCodec codec = recordingSettings.getVideoCodec();
if (codec == VideoCodec::H264) { juce::String cmd = ffmpegEncoderManager.buildVideoEncodingCommand(
cmd += " -c:v libx264"; codec,
cmd += " -crf " + juce::String(recordingSettings.getCRF()); recordingSettings.getCRF(),
} else if (codec == VideoCodec::H265) { recordingSettings.getVideoToolboxQuality(),
cmd += " -c:v libx265"; renderTexture.width,
cmd += " -crf " + juce::String(recordingSettings.getCRF()); renderTexture.height,
#if JUCE_MAC && JUCE_ARM recordingSettings.getFrameRate(),
// use hardware encoding on Apple Silicon recordingSettings.getCompressionPreset(),
cmd += " -c:v hevc_videotoolbox"; tempVideoFile->getFile());
cmd += " -q:v " + juce::String(recordingSettings.getVideoToolboxQuality());
cmd += " -tag:v hvc1";
#endif
} else if (codec == VideoCodec::VP9) {
cmd += " -c:v libvpx-vp9";
cmd += " -b:v 0";
cmd += " -crf " + juce::String(recordingSettings.getCRF());
cmd += " -deadline good -cpu-used 2";
}
#if JUCE_MAC
else if (codec == VideoCodec::ProRes) {
cmd += " -c:v prores";
cmd += " -profile:v 3"; // ProRes 422 HQ
}
#endif
cmd += " -vf vflip";
cmd += " \"" + tempVideoFile->getFile().getFullPathName() + "\"";
ffmpegProcess.start(cmd); ffmpegProcess.start(cmd);
framePixels.resize(renderTexture.width * renderTexture.height * 4); framePixels.resize(renderTexture.width * renderTexture.height * 4);
@ -528,16 +488,14 @@ void VisualiserComponent::setRecording(bool recording) {
tempVideoFile->getFile().copyFileTo(file); tempVideoFile->getFile().copyFileTo(file);
} }
audioProcessor.setLastOpenedDirectory(file.getParentDirectory()); audioProcessor.setLastOpenedDirectory(file.getParentDirectory());
} } });
});
#else #else
chooser->launchAsync(flags, [this](const juce::FileChooser &chooser) { chooser->launchAsync(flags, [this](const juce::FileChooser &chooser) {
auto file = chooser.getResult(); auto file = chooser.getResult();
if (file != juce::File()) { if (file != juce::File()) {
tempAudioFile->getFile().copyFileTo(file); tempAudioFile->getFile().copyFileTo(file);
audioProcessor.setLastOpenedDirectory(file.getParentDirectory()); audioProcessor.setLastOpenedDirectory(file.getParentDirectory());
} } });
});
#endif #endif
} }
@ -625,8 +583,7 @@ void VisualiserComponent::popoutWindow() {
settings, settings,
recordingSettings, recordingSettings,
this, this,
visualiserOnly visualiserOnly);
);
visualiser->settings.setLookAndFeel(&getLookAndFeel()); visualiser->settings.setLookAndFeel(&getLookAndFeel());
visualiser->openSettings = openSettings; visualiser->openSettings = openSettings;
visualiser->closeSettings = closeSettings; visualiser->closeSettings = closeSettings;
@ -671,8 +628,7 @@ void VisualiserComponent::initialiseSharedTexture() {
sharedTextureSender->setSharedTextureId(renderTexture.id); sharedTextureSender->setSharedTextureId(renderTexture.id);
sharedTextureSender->setDrawFunction([this] { sharedTextureSender->setDrawFunction([this] {
setShader(texturedShader.get()); setShader(texturedShader.get());
drawTexture({renderTexture}); drawTexture({renderTexture}); });
});
} }
void VisualiserComponent::closeSharedTexture() { void VisualiserComponent::closeSharedTexture() {
@ -680,7 +636,6 @@ void VisualiserComponent::closeSharedTexture() {
sharedTextureManager.removeSender(sharedTextureSender); sharedTextureManager.removeSender(sharedTextureSender);
sharedTextureSender = nullptr; sharedTextureSender = nullptr;
} }
} }
#endif #endif
@ -1111,7 +1066,8 @@ void VisualiserComponent::drawLine(const std::vector<float>& xPoints, const std:
int nPoints = xPoints.size(); int nPoints = xPoints.size();
// Without this, there's an access violation that seems to occur only on some systems // Without this, there's an access violation that seems to occur only on some systems
if (scratchVertices.size() != nPoints * 12) scratchVertices.resize(nPoints * 12); if (scratchVertices.size() != nPoints * 12)
scratchVertices.resize(nPoints * 12);
for (int i = 0; i < nPoints; ++i) { for (int i = 0; i < nPoints; ++i) {
int p = i * 12; int p = i * 12;
@ -1366,7 +1322,8 @@ Texture VisualiserComponent::createScreenTexture() {
for (int j = 0; j < 51; j++) { for (int j = 0; j < 51; j++) {
float t = j * step / 5; float t = j * step / 5;
if (static_cast<int>(t) % 5 == 0) continue; if (static_cast<int>(t) % 5 == 0)
continue;
data.insert(data.begin(), {t - 2, 2.5f * step, t + 2, 2.5f * step}); data.insert(data.begin(), {t - 2, 2.5f * step, t + 2, 2.5f * step});
data.insert(data.begin(), {t - 2, 7.5f * step, t + 2, 7.5f * step}); data.insert(data.begin(), {t - 2, 7.5f * step, t + 2, 7.5f * step});
@ -1399,20 +1356,35 @@ void VisualiserComponent::checkGLErrors(juce::String file, int line) {
while ((error = glGetError()) != GL_NO_ERROR) { while ((error = glGetError()) != GL_NO_ERROR) {
juce::String errorMessage; juce::String errorMessage;
switch (error) { switch (error) {
case GL_INVALID_ENUM: errorMessage = "GL_INVALID_ENUM"; break; case GL_INVALID_ENUM:
case GL_INVALID_VALUE: errorMessage = "GL_INVALID_VALUE"; break; errorMessage = "GL_INVALID_ENUM";
case GL_INVALID_OPERATION: errorMessage = "GL_INVALID_OPERATION"; break; break;
case GL_STACK_OVERFLOW: errorMessage = "GL_STACK_OVERFLOW"; break; case GL_INVALID_VALUE:
case GL_STACK_UNDERFLOW: errorMessage = "GL_STACK_UNDERFLOW"; break; errorMessage = "GL_INVALID_VALUE";
case GL_OUT_OF_MEMORY: errorMessage = "GL_OUT_OF_MEMORY"; break; break;
case GL_INVALID_FRAMEBUFFER_OPERATION: errorMessage = "GL_INVALID_FRAMEBUFFER_OPERATION"; break; case GL_INVALID_OPERATION:
default: errorMessage = "Unknown OpenGL error"; break; errorMessage = "GL_INVALID_OPERATION";
break;
case GL_STACK_OVERFLOW:
errorMessage = "GL_STACK_OVERFLOW";
break;
case GL_STACK_UNDERFLOW:
errorMessage = "GL_STACK_UNDERFLOW";
break;
case GL_OUT_OF_MEMORY:
errorMessage = "GL_OUT_OF_MEMORY";
break;
case GL_INVALID_FRAMEBUFFER_OPERATION:
errorMessage = "GL_INVALID_FRAMEBUFFER_OPERATION";
break;
default:
errorMessage = "Unknown OpenGL error";
break;
} }
DBG("OpenGL error at " + file + ":" + juce::String(line) + " - " + errorMessage); DBG("OpenGL error at " + file + ":" + juce::String(line) + " - " + errorMessage);
} }
} }
void VisualiserComponent::paint(juce::Graphics &g) { void VisualiserComponent::paint(juce::Graphics &g) {
g.setColour(Colours::veryDark); g.setColour(Colours::veryDark);
g.fillRect(buttonRow); g.fillRect(buttonRow);

Wyświetl plik

@ -1,17 +1,20 @@
#pragma once #pragma once
#include <algorithm>
#include <JuceHeader.h> #include <JuceHeader.h>
#include <algorithm>
#include "../LookAndFeel.h" #include "../LookAndFeel.h"
#include "../components/SvgButton.h"
#include "VisualiserSettings.h"
#include "RecordingSettings.h"
#include "../components/StopwatchComponent.h"
#include "../img/qoixx.hpp"
#include "../components/DownloaderComponent.h"
#include "../audio/AudioRecorder.h" #include "../audio/AudioRecorder.h"
#include "../wav/WavParser.h"
#include "../components/AudioPlayerComponent.h" #include "../components/AudioPlayerComponent.h"
#include "../components/DownloaderComponent.h"
#include "../components/StopwatchComponent.h"
#include "../components/SvgButton.h"
#include "../img/qoixx.hpp"
#include "../video/FFmpegEncoderManager.h"
#include "../wav/WavParser.h"
#include "RecordingSettings.h"
#include "VisualiserSettings.h"
#define FILE_RENDER_DUMMY 0 #define FILE_RENDER_DUMMY 0
#define FILE_RENDER_PNG 1 #define FILE_RENDER_PNG 1
@ -44,8 +47,7 @@ public:
VisualiserSettings& settings, VisualiserSettings& settings,
RecordingSettings& recordingSettings, RecordingSettings& recordingSettings,
VisualiserComponent* parent = nullptr, VisualiserComponent* parent = nullptr,
bool visualiserOnly = false bool visualiserOnly = false);
);
~VisualiserComponent() override; ~VisualiserComponent() override;
std::function<void()> openSettings; std::function<void()> openSettings;
@ -118,6 +120,7 @@ private:
std::vector<unsigned char> framePixels; std::vector<unsigned char> framePixels;
osci::WriteProcess ffmpegProcess; osci::WriteProcess ffmpegProcess;
std::unique_ptr<juce::TemporaryFile> tempVideoFile; std::unique_ptr<juce::TemporaryFile> tempVideoFile;
FFmpegEncoderManager ffmpegEncoderManager;
#endif #endif
StopwatchComponent stopwatch; StopwatchComponent stopwatch;

@ -1 +1 @@
Subproject commit cf124cc5de4d9857c7633e9c03117f20e1550e81 Subproject commit f8ac3007c25df061ca6e71ad2eaff4a5d01e2d7b

Wyświetl plik

@ -574,6 +574,16 @@
<FILE id="mC1tUv" name="ugen_JuceUtility.h" compile="0" resource="0" <FILE id="mC1tUv" name="ugen_JuceUtility.h" compile="0" resource="0"
file="Source/UGen/ugen_JuceUtility.h"/> file="Source/UGen/ugen_JuceUtility.h"/>
</GROUP> </GROUP>
<GROUP id="{0F62E77C-5385-0C56-69A1-3C8866A6E6E3}" name="video">
<FILE id="DniMew" name="FFmpegEncoderManager.cpp" compile="1" resource="0"
file="Source/video/FFmpegEncoderManager.cpp"/>
<FILE id="t2oI5O" name="FFmpegEncoderManager.h" compile="0" resource="0"
file="Source/video/FFmpegEncoderManager.h"/>
<FILE id="xEIRCs" name="InvisibleOpenGLContextComponent.h" compile="0"
resource="0" file="Source/video/InvisibleOpenGLContextComponent.h"/>
<FILE id="OyC3qj" name="SyphonFrameGrabber.h" compile="0" resource="0"
file="Source/video/SyphonFrameGrabber.h"/>
</GROUP>
<GROUP id="{16A8DC64-BA02-898D-4DBA-AA3DDF6F9297}" name="visualiser"> <GROUP id="{16A8DC64-BA02-898D-4DBA-AA3DDF6F9297}" name="visualiser">
<FILE id="DkDKBX" name="AfterglowFragmentShader.glsl" compile="0" resource="0" <FILE id="DkDKBX" name="AfterglowFragmentShader.glsl" compile="0" resource="0"
file="Source/visualiser/AfterglowFragmentShader.glsl"/> file="Source/visualiser/AfterglowFragmentShader.glsl"/>
@ -644,8 +654,6 @@
file="Source/FrameSettingsComponent.cpp"/> file="Source/FrameSettingsComponent.cpp"/>
<FILE id="lzBNS1" name="FrameSettingsComponent.h" compile="0" resource="0" <FILE id="lzBNS1" name="FrameSettingsComponent.h" compile="0" resource="0"
file="Source/FrameSettingsComponent.h"/> file="Source/FrameSettingsComponent.h"/>
<FILE id="nfoWJk" name="InvisibleOpenGLContextComponent.h" compile="0"
resource="0" file="Source/InvisibleOpenGLContextComponent.h"/>
<FILE id="d2zFqF" name="LookAndFeel.cpp" compile="1" resource="0" file="Source/LookAndFeel.cpp"/> <FILE id="d2zFqF" name="LookAndFeel.cpp" compile="1" resource="0" file="Source/LookAndFeel.cpp"/>
<FILE id="TJDqWs" name="LookAndFeel.h" compile="0" resource="0" file="Source/LookAndFeel.h"/> <FILE id="TJDqWs" name="LookAndFeel.h" compile="0" resource="0" file="Source/LookAndFeel.h"/>
<FILE id="X26RjJ" name="LuaComponent.cpp" compile="1" resource="0" <FILE id="X26RjJ" name="LuaComponent.cpp" compile="1" resource="0"
@ -673,8 +681,6 @@
file="Source/SettingsComponent.cpp"/> file="Source/SettingsComponent.cpp"/>
<FILE id="Vlmozi" name="SettingsComponent.h" compile="0" resource="0" <FILE id="Vlmozi" name="SettingsComponent.h" compile="0" resource="0"
file="Source/SettingsComponent.h"/> file="Source/SettingsComponent.h"/>
<FILE id="jyHVpz" name="SyphonFrameGrabber.h" compile="0" resource="0"
file="Source/SyphonFrameGrabber.h"/>
<FILE id="UxZu4n" name="TxtComponent.cpp" compile="1" resource="0" <FILE id="UxZu4n" name="TxtComponent.cpp" compile="1" resource="0"
file="Source/TxtComponent.cpp"/> file="Source/TxtComponent.cpp"/>
<FILE id="kxPbsL" name="TxtComponent.h" compile="0" resource="0" file="Source/TxtComponent.h"/> <FILE id="kxPbsL" name="TxtComponent.h" compile="0" resource="0" file="Source/TxtComponent.h"/>

Wyświetl plik

@ -77,6 +77,12 @@
</GROUP> </GROUP>
</GROUP> </GROUP>
<GROUP id="{75439074-E50C-362F-1EDF-8B4BE9011259}" name="Source"> <GROUP id="{75439074-E50C-362F-1EDF-8B4BE9011259}" name="Source">
<GROUP id="{34BCEBE9-062C-27E1-5661-B33652D8F4F5}" name="video">
<FILE id="pmHHqY" name="FFmpegEncoderManager.cpp" compile="1" resource="0"
file="Source/video/FFmpegEncoderManager.cpp"/>
<FILE id="oKPzgR" name="FFmpegEncoderManager.h" compile="0" resource="0"
file="Source/video/FFmpegEncoderManager.h"/>
</GROUP>
<FILE id="fqqP0r" name="CustomStandalone.cpp" compile="1" resource="0" <FILE id="fqqP0r" name="CustomStandalone.cpp" compile="1" resource="0"
file="Source/CustomStandalone.cpp"/> file="Source/CustomStandalone.cpp"/>
<FILE id="TFmWW0" name="CustomStandaloneFilterWindow.h" compile="0" <FILE id="TFmWW0" name="CustomStandaloneFilterWindow.h" compile="0"