From a125abfa7f32ea200d46a1f081bcbc33dbb8782c Mon Sep 17 00:00:00 2001
From: James H Ball
Date: Sun, 27 Apr 2025 11:36:37 +0100
Subject: [PATCH] Fix compilation issues on free version, add more extensive hardware acceleration support for video

---
 Source/MainComponent.cpp | 11 +-
 Source/PluginEditor.cpp | 4 +-
 Source/PluginProcessor.cpp | 212 ++++----
 Source/PluginProcessor.h | 109 ++--
 Source/SettingsComponent.cpp | 35 +-
 Source/components/OsciMainMenuBarModel.cpp | 4 +
 Source/video/FFmpegEncoderManager.cpp | 343 ++++++++++++
 Source/video/FFmpegEncoderManager.h | 112 ++++
 .../InvisibleOpenGLContextComponent.h | 0
 Source/{ => video}/SyphonFrameGrabber.h | 15 +-
 Source/visualiser/VisualiserComponent.cpp | 508 +++++++++---------
 Source/visualiser/VisualiserComponent.h | 99 ++--
 modules/osci_render_core | 2 +-
 osci-render.jucer | 14 +-
 sosci.jucer | 6 +
 15 files changed, 962 insertions(+), 512 deletions(-)
 create mode 100644 Source/video/FFmpegEncoderManager.cpp
 create mode 100644 Source/video/FFmpegEncoderManager.h
 rename Source/{ => video}/InvisibleOpenGLContextComponent.h (100%)
 rename Source/{ => video}/SyphonFrameGrabber.h (93%)

diff --git a/Source/MainComponent.cpp b/Source/MainComponent.cpp
index e0d734e..f27f8bc 100644
--- a/Source/MainComponent.cpp
+++ b/Source/MainComponent.cpp
@@ -158,12 +158,15 @@ void MainComponent::updateFileLabel() {
 showRightArrow = audioProcessor.getCurrentFileIndex() < audioProcessor.numFiles() - 1;
 {
+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
 juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
- if (audioProcessor.objectServerRendering) {
- fileLabel.setText("Rendering from Blender", juce::dontSendNotification);
- } else if (audioProcessor.isSyphonInputActive()) {
+ if (audioProcessor.isSyphonInputActive()) {
 fileLabel.setText(audioProcessor.getSyphonSourceName(), juce::dontSendNotification);
- } else if (audioProcessor.getCurrentFileIndex() == -1) {
+ } else
+#endif
+ if (audioProcessor.objectServerRendering) {
+ fileLabel.setText("Rendering from Blender", juce::dontSendNotification);
+ } else if (audioProcessor.getCurrentFileIndex() == -1) {
 fileLabel.setText("No file open", juce::dontSendNotification);
 } else {
 fileLabel.setText(audioProcessor.getCurrentFileName(), juce::dontSendNotification);
diff --git a/Source/PluginEditor.cpp b/Source/PluginEditor.cpp
index 6a35896..1f63e97 100644
--- a/Source/PluginEditor.cpp
+++ b/Source/PluginEditor.cpp
@@ -526,8 +526,8 @@ void OscirenderAudioProcessorEditor::openVisualiserSettings() {
 visualiserSettingsWindow.toFront(true);
 }
+#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
 void OscirenderAudioProcessorEditor::openSyphonInputDialog() {
-#if JUCE_MAC || JUCE_WINDOWS
 SyphonInputSelectorComponent* selector = nullptr;
 {
 juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
@@ -548,7 +548,6 @@ void OscirenderAudioProcessorEditor::openSyphonInputDialog() {
 options.useNativeTitleBar = true;
 options.resizable = false;
 options.launchAsync();
-#endif
 }
 void OscirenderAudioProcessorEditor::onSyphonInputSelected(const juce::String& server, const juce::String& app) {
@@ -560,3 +559,4 @@ void OscirenderAudioProcessorEditor::onSyphonInputDisconnected() {
 juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
 audioProcessor.disconnectSyphonInput();
 }
+#endif
diff --git a/Source/PluginProcessor.cpp b/Source/PluginProcessor.cpp
index ce07697..1a17e44 100644
--- a/Source/PluginProcessor.cpp
+++ b/Source/PluginProcessor.cpp
@@ -7,19 +7,18 @@ */
 #include "PluginProcessor.h"
+
 #include 
"PluginEditor.h" -#include "parser/FileParser.h" -#include "parser/FrameProducer.h" -#include "audio/VectorCancellingEffect.h" -#include "audio/DistortEffect.h" -#include "audio/SmoothEffect.h" #include "audio/BitCrushEffect.h" #include "audio/BulgeEffect.h" +#include "audio/DistortEffect.h" +#include "audio/SmoothEffect.h" +#include "audio/VectorCancellingEffect.h" +#include "parser/FileParser.h" +#include "parser/FrameProducer.h" -#if JUCE_MAC || JUCE_WINDOWS - #include "SyphonFrameGrabber.h" - #include "img/ImageParser.h" - #include "../modules/juce_sharedtexture/SharedTexture.h" +#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM +#include "img/ImageParser.h" #endif //============================================================================== @@ -28,111 +27,103 @@ OscirenderAudioProcessor::OscirenderAudioProcessor() : CommonAudioProcessor(Buse toggleableEffects.push_back(std::make_shared( std::make_shared(), - new osci::EffectParameter("Bit Crush", "Limits the resolution of points drawn to the screen, making the object look pixelated, and making the audio sound more 'digital' and distorted.", "bitCrush", VERSION_HINT, 0.6, 0.0, 1.0) - )); + new osci::EffectParameter("Bit Crush", "Limits the resolution of points drawn to the screen, making the object look pixelated, and making the audio sound more 'digital' and distorted.", "bitCrush", VERSION_HINT, 0.6, 0.0, 1.0))); toggleableEffects.push_back(std::make_shared( std::make_shared(), - new osci::EffectParameter("Bulge", "Applies a bulge that makes the centre of the image larger, and squishes the edges of the image. This applies a distortion to the audio.", "bulge", VERSION_HINT, 0.5, 0.0, 1.0) - )); + new osci::EffectParameter("Bulge", "Applies a bulge that makes the centre of the image larger, and squishes the edges of the image. This applies a distortion to the audio.", "bulge", VERSION_HINT, 0.5, 0.0, 1.0))); toggleableEffects.push_back(std::make_shared( std::make_shared(), - new osci::EffectParameter("Vector Cancelling", "Inverts the audio and image every few samples to 'cancel out' the audio, making the audio quiet, and distorting the image.", "vectorCancelling", VERSION_HINT, 0.1111111, 0.0, 1.0) - )); - toggleableEffects.push_back(std::make_shared( - [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { - return input * osci::Point(values[0], values[1], values[2]); - }, std::vector{ - new osci::EffectParameter("Scale X", "Scales the object in the horizontal direction.", "scaleX", VERSION_HINT, 1.0, -5.0, 5.0), - new osci::EffectParameter("Scale Y", "Scales the object in the vertical direction.", "scaleY", VERSION_HINT, 1.0, -5.0, 5.0), - new osci::EffectParameter("Scale Z", "Scales the depth of the object.", "scaleZ", VERSION_HINT, 1.0, -5.0, 5.0), - } - )); + new osci::EffectParameter("Vector Cancelling", "Inverts the audio and image every few samples to 'cancel out' the audio, making the audio quiet, and distorting the image.", "vectorCancelling", VERSION_HINT, 0.1111111, 0.0, 1.0))); toggleableEffects.push_back(std::make_shared( [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { - int flip = index % 2 == 0 ? 
1 : -1; - osci::Point jitter = osci::Point(flip * values[0], flip * values[1], flip * values[2]); - return input + jitter; - }, std::vector{ + return input * osci::Point(values[0], values[1], values[2]); + }, + std::vector{ + new osci::EffectParameter("Scale X", "Scales the object in the horizontal direction.", "scaleX", VERSION_HINT, 1.0, -5.0, 5.0), + new osci::EffectParameter("Scale Y", "Scales the object in the vertical direction.", "scaleY", VERSION_HINT, 1.0, -5.0, 5.0), + new osci::EffectParameter("Scale Z", "Scales the depth of the object.", "scaleZ", VERSION_HINT, 1.0, -5.0, 5.0), + })); + toggleableEffects.push_back(std::make_shared( + [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { + int flip = index % 2 == 0 ? 1 : -1; + osci::Point jitter = osci::Point(flip * values[0], flip * values[1], flip * values[2]); + return input + jitter; + }, + std::vector{ new osci::EffectParameter("Distort X", "Distorts the image in the horizontal direction by jittering the audio sample being drawn.", "distortX", VERSION_HINT, 0.0, 0.0, 1.0), new osci::EffectParameter("Distort Y", "Distorts the image in the vertical direction by jittering the audio sample being drawn.", "distortY", VERSION_HINT, 0.0, 0.0, 1.0), new osci::EffectParameter("Distort Z", "Distorts the depth of the image by jittering the audio sample being drawn.", "distortZ", VERSION_HINT, 0.1, 0.0, 1.0), - } - )); + })); auto rippleEffect = std::make_shared( [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { double phase = values[1] * std::numbers::pi; double distance = 100 * values[2] * (input.x * input.x + input.y * input.y); input.z += values[0] * std::sin(phase + distance); return input; - }, std::vector{ + }, + std::vector{ new osci::EffectParameter("Ripple Depth", "Controls how large the ripples applied to the image are.", "rippleDepth", VERSION_HINT, 0.2, 0.0, 1.0), new osci::EffectParameter("Ripple Phase", "Controls the position of the ripple. 
Animate this to see a moving ripple effect.", "ripplePhase", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Ripple Amount", "Controls how many ripples are applied to the image.", "rippleAmount", VERSION_HINT, 0.1, 0.0, 1.0), - } - ); - rippleEffect->getParameter("ripplePhase")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth); + }); + rippleEffect->getParameter("ripplePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth); toggleableEffects.push_back(rippleEffect); auto rotateEffect = std::make_shared( [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { input.rotate(values[0] * std::numbers::pi, values[1] * std::numbers::pi, values[2] * std::numbers::pi); return input; - }, std::vector{ + }, + std::vector{ new osci::EffectParameter("Rotate X", "Controls the rotation of the object in the X axis.", "rotateX", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Rotate Y", "Controls the rotation of the object in the Y axis.", "rotateY", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Rotate Z", "Controls the rotation of the object in the Z axis.", "rotateZ", VERSION_HINT, 0.0, -1.0, 1.0), - } - ); - rotateEffect->getParameter("rotateY")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth); + }); + rotateEffect->getParameter("rotateY")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth); rotateEffect->getParameter("rotateY")->lfoRate->setUnnormalisedValueNotifyingHost(0.2); toggleableEffects.push_back(rotateEffect); toggleableEffects.push_back(std::make_shared( [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { return input + osci::Point(values[0], values[1], values[2]); - }, std::vector{ + }, + std::vector{ new osci::EffectParameter("Translate X", "Moves the object horizontally.", "translateX", VERSION_HINT, 0.0, -1.0, 1.0), new osci::EffectParameter("Translate Y", "Moves the object vertically.", "translateY", VERSION_HINT, 0.0, -1.0, 1.0), - new osci::EffectParameter("Translate Z", "Moves the object away from the camera.", "translateZ", VERSION_HINT, 0.0, -1.0, 1.0), - } - )); + new osci::EffectParameter("Translate Z", "Moves the object away from the camera.", "translateZ", VERSION_HINT, 0.0, -1.0, 1.0), + })); toggleableEffects.push_back(std::make_shared( [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { double length = 10 * values[0] * input.magnitude(); double newX = input.x * std::cos(length) - input.y * std::sin(length); double newY = input.x * std::sin(length) + input.y * std::cos(length); return osci::Point(newX, newY, input.z); - }, std::vector{ + }, + std::vector{ new osci::EffectParameter("Swirl", "Swirls the image in a spiral pattern.", "swirl", VERSION_HINT, 0.3, -1.0, 1.0), - } - )); + })); toggleableEffects.push_back(std::make_shared( std::make_shared(), - new osci::EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0) - )); + new osci::EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0))); std::shared_ptr wobble = std::make_shared( wobbleEffect, std::vector{ new osci::EffectParameter("Wobble Amount", "Adds a sine wave of the prominent frequency in the audio 
currently playing. The sine wave's frequency is slightly offset to create a subtle 'wobble' in the image. Increasing the slider increases the strength of the wobble.", "wobble", VERSION_HINT, 0.3, 0.0, 1.0), new osci::EffectParameter("Wobble Phase", "Controls the phase of the wobble.", "wobblePhase", VERSION_HINT, 0.0, -1.0, 1.0), - } - ); - wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth); + }); + wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth); toggleableEffects.push_back(wobble); toggleableEffects.push_back(std::make_shared( delayEffect, std::vector{ new osci::EffectParameter("Delay Decay", "Adds repetitions, delays, or echos to the audio. This slider controls the volume of the echo.", "delayDecay", VERSION_HINT, 0.4, 0.0, 1.0), - new osci::EffectParameter("Delay Length", "Controls the time in seconds between echos.", "delayLength", VERSION_HINT, 0.5, 0.0, 1.0) - } - )); + new osci::EffectParameter("Delay Length", "Controls the time in seconds between echos.", "delayLength", VERSION_HINT, 0.5, 0.0, 1.0)})); toggleableEffects.push_back(std::make_shared( dashedLineEffect, std::vector{ new osci::EffectParameter("Dash Length", "Controls the length of the dashed line.", "dashLength", VERSION_HINT, 0.2, 0.0, 1.0), - } - )); + })); toggleableEffects.push_back(custom); toggleableEffects.push_back(trace); - trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth); + trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth); for (int i = 0; i < toggleableEffects.size(); i++) { auto effect = toggleableEffects[i]; @@ -186,7 +177,7 @@ OscirenderAudioProcessor::OscirenderAudioProcessor() : CommonAudioProcessor(Buse for (int i = 0; i < luaEffects.size(); i++) { luaEffects[i]->parameters[0]->addListener(this); } - + synth.addSound(defaultSound); addAllParameters(); @@ -228,13 +219,12 @@ void OscirenderAudioProcessor::addLuaSlider() { [this, sliderIndex](int index, osci::Point input, const std::vector>& values, double sampleRate) { luaValues[sliderIndex].store(values[0]); return input; - }, new osci::EffectParameter( + }, + new osci::EffectParameter( "Lua Slider " + sliderName, "Controls the value of the Lua variable called slider_" + sliderName.toLowerCase() + ".", "lua" + sliderName, - VERSION_HINT, 0.0, 0.0, 1.0 - ) - )); + VERSION_HINT, 0.0, 0.0, 1.0))); } void OscirenderAudioProcessor::addErrorListener(ErrorListener* listener) { @@ -258,10 +248,10 @@ void OscirenderAudioProcessor::updateEffectPrecedence() { // parsersLock AND effectsLock must be locked before calling this function void OscirenderAudioProcessor::updateFileBlock(int index, std::shared_ptr block) { if (index < 0 || index >= fileBlocks.size()) { - return; - } - fileBlocks[index] = block; - openFile(index); + return; + } + fileBlocks[index] = block; + openFile(index); } // parsersLock AND effectsLock must be locked before calling this function @@ -269,7 +259,7 @@ void OscirenderAudioProcessor::addFile(juce::File file) { fileBlocks.push_back(std::make_shared()); fileNames.push_back(file.getFileName()); fileIds.push_back(currentFileId++); - parsers.push_back(std::make_shared(*this, errorCallback)); + parsers.push_back(std::make_shared(*this, errorCallback)); sounds.push_back(new ShapeSound(*this, parsers.back())); file.createInputStream()->readIntoMemoryBlock(*fileBlocks.back()); @@ -306,9 +296,9 @@ void 
OscirenderAudioProcessor::setFileRemovedCallback(std::function c // parsersLock AND effectsLock must be locked before calling this function void OscirenderAudioProcessor::removeFile(int index) { - if (index < 0 || index >= fileBlocks.size()) { - return; - } + if (index < 0 || index >= fileBlocks.size()) { + return; + } fileBlocks.erase(fileBlocks.begin() + index); fileNames.erase(fileNames.begin() + index); fileIds.erase(fileIds.begin() + index); @@ -350,9 +340,9 @@ int OscirenderAudioProcessor::numFiles() { // it will reparse any existing files, so it is safer. // parsersLock AND effectsLock must be locked before calling this function void OscirenderAudioProcessor::openFile(int index) { - if (index < 0 || index >= fileBlocks.size()) { - return; - } + if (index < 0 || index >= fileBlocks.size()) { + return; + } parsers[index]->parse(juce::String(fileIds[index]), fileNames[index], fileNames[index].fromLastOccurrenceOf(".", true, false).toLowerCase(), std::make_unique(*fileBlocks[index], false), font); changeCurrentFile(index); } @@ -365,9 +355,9 @@ void OscirenderAudioProcessor::changeCurrentFile(int index) { currentFile = -1; changeSound(defaultSound); } - if (index < 0 || index >= fileBlocks.size()) { - return; - } + if (index < 0 || index >= fileBlocks.size()) { + return; + } currentFile = index; changeSound(sounds[index]); } @@ -402,7 +392,7 @@ std::shared_ptr OscirenderAudioProcessor::getCurrentFileParser() { juce::String OscirenderAudioProcessor::getCurrentFileName() { if (objectServerRendering || currentFile == -1) { - return ""; + return ""; } else { return fileNames[currentFile]; } @@ -446,7 +436,7 @@ void OscirenderAudioProcessor::setObjectServerPort(int port) { void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, juce::MidiBuffer& midiMessages) { juce::ScopedNoDenormals noDenormals; // Audio info variables - int totalNumInputChannels = getTotalNumInputChannels(); + int totalNumInputChannels = getTotalNumInputChannels(); int totalNumOutputChannels = getTotalNumOutputChannels(); double sampleRate = getSampleRate(); @@ -473,7 +463,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju // TODO: To make this more resilient to changing BPMs, we should change how this is calculated // or use another property of the AudioPlayHead::PositionInfo double playTimeBeats = bpm * playTimeSeconds / 60; - + // Calculated time per sample in seconds and beats double sTimeSec = 1.f / sampleRate; double sTimeBeats = bpm * sTimeSec / 60; @@ -487,20 +477,20 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju if (!usingMidi) { midiMessages.clear(); } - + // if midi enabled has changed state if (prevMidiEnabled != usingMidi) { for (int i = 1; i <= 16; i++) { midiMessages.addEvent(juce::MidiMessage::allNotesOff(i), i); } } - + // if midi has just been disabled or we need to retrigger if (!usingMidi && (retriggerMidi || prevMidiEnabled)) { midiMessages.addEvent(juce::MidiMessage::noteOn(1, 60, 1.0f), 17); retriggerMidi = false; } - + prevMidiEnabled = usingMidi; const double EPSILON = 0.00001; @@ -512,8 +502,9 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju juce::AudioBuffer outputBuffer3d = juce::AudioBuffer(3, buffer.getNumSamples()); outputBuffer3d.clear(); - + { +#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM juce::SpinLock::ScopedLockType sLock(syphonLock); if (isSyphonInputActive()) { for (int sample = 0; sample < outputBuffer3d.getNumSamples(); sample++) { @@ -521,7 +512,9 @@ void 
OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju outputBuffer3d.setSample(0, sample, point.x); outputBuffer3d.setSample(1, sample, point.y); } - } else if (usingInput && totalNumInputChannels >= 1) { + } else +#endif + if (usingInput && totalNumInputChannels >= 1) { if (totalNumInputChannels >= 2) { for (auto channel = 0; channel < juce::jmin(2, totalNumInputChannels); channel++) { outputBuffer3d.copyFrom(channel, 0, inputBuffer, channel, 0, buffer.getNumSamples()); @@ -535,9 +528,10 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju // handle all midi messages auto midiIterator = midiMessages.cbegin(); std::for_each(midiIterator, - midiMessages.cend(), - [&] (const juce::MidiMessageMetadata& meta) { synth.publicHandleMidiEvent(meta.getMessage()); } - ); + midiMessages.cend(), + [&](const juce::MidiMessageMetadata& meta) { + synth.publicHandleMidiEvent(meta.getMessage()); + }); } else { juce::SpinLock::ScopedLockType lock1(parsersLock); juce::SpinLock::ScopedLockType lock2(effectsLock); @@ -551,12 +545,12 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju } } } - + midiMessages.clear(); - + auto* channelData = buffer.getArrayOfWritePointers(); - - for (int sample = 0; sample < buffer.getNumSamples(); ++sample) { + + for (int sample = 0; sample < buffer.getNumSamples(); ++sample) { if (animateFrames->getBoolValue()) { if (juce::JUCEApplicationBase::isStandaloneApp()) { animationFrame = animationFrame + sTimeSec * animationRate->getValueUnnormalised(); @@ -573,7 +567,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju if (loopAnimation->getBoolValue()) { animationFrame = std::fmod(animationFrame, totalFrames); } else { - animationFrame = juce::jlimit(0.0, (double) totalFrames - 1, animationFrame.load()); + animationFrame = juce::jlimit(0.0, (double)totalFrames - 1, animationFrame.load()); } sounds[currentFile]->parser->setFrame(animationFrame); } @@ -598,7 +592,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju currentVolume = std::sqrt(squaredVolume); currentVolume = juce::jlimit(0.0, 1.0, currentVolume); - osci::Point channels = { outputBuffer3d.getSample(0, sample), outputBuffer3d.getSample(1, sample), outputBuffer3d.getSample(2, sample) }; + osci::Point channels = {outputBuffer3d.getSample(0, sample), outputBuffer3d.getSample(1, sample), outputBuffer3d.getSample(2, sample)}; { juce::SpinLock::ScopedLockType lock1(parsersLock); @@ -621,8 +615,8 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju } } - double x = channels.x; - double y = channels.y; + double x = channels.x; + double y = channels.y; x *= volume; y *= volume; @@ -630,19 +624,19 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju // clip x = juce::jmax(-threshold, juce::jmin(threshold.load(), x)); y = juce::jmax(-threshold, juce::jmin(threshold.load(), y)); - + threadManager.write(osci::Point(x, y, 1)); - + // Apply mute if active if (muteParameter->getBoolValue()) { x = 0.0; y = 0.0; } - + if (totalNumOutputChannels >= 2) { - channelData[0][sample] = x; - channelData[1][sample] = y; - } else if (totalNumOutputChannels == 1) { + channelData[0][sample] = x; + channelData[1][sample] = y; + } else if (totalNumOutputChannels == 1) { channelData[0][sample] = x; } @@ -650,7 +644,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer& buffer, ju playTimeSeconds += sTimeSec; playTimeBeats += sTimeBeats; } - } + } // used for any callback that must 
guarantee all audio is recieved (e.g. when recording to a file) juce::SpinLock::ScopedLockType lock(audioThreadCallbackLock); @@ -669,11 +663,11 @@ void OscirenderAudioProcessor::getStateInformation(juce::MemoryBlock& destData) // we need to stop recording the visualiser when saving the state, otherwise // there are issues. This is the only place we can do this because there is // no callback when closing the standalone app except for this. - + if (haltRecording != nullptr && juce::JUCEApplicationBase::isStandaloneApp()) { haltRecording(); } - + juce::SpinLock::ScopedLockType lock1(parsersLock); juce::SpinLock::ScopedLockType lock2(effectsLock); @@ -711,7 +705,7 @@ void OscirenderAudioProcessor::getStateInformation(juce::MemoryBlock& destData) fontXml->setAttribute("italic", font.isItalic()); auto filesXml = xml->createNewChildElement("files"); - + for (int i = 0; i < fileBlocks.size(); i++) { auto fileXml = filesXml->createNewChildElement("file"); fileXml->setAttribute("name", fileNames[i]); @@ -721,7 +715,7 @@ void OscirenderAudioProcessor::getStateInformation(juce::MemoryBlock& destData) xml->setAttribute("currentFile", currentFile); recordingParameters.save(xml.get()); - + saveProperties(*xml); copyXmlToBinary(*xml, destData); @@ -731,7 +725,7 @@ void OscirenderAudioProcessor::setStateInformation(const void* data, int sizeInB if (juce::JUCEApplicationBase::isStandaloneApp() && programCrashedAndUserWantsToReset()) { return; } - + std::unique_ptr xml; const uint32_t magicXmlNumber = 0x21324356; @@ -835,14 +829,14 @@ void OscirenderAudioProcessor::setStateInformation(const void* data, int sizeInB fileBlock = std::make_shared(); fileBlock->fromBase64Encoding(text); } - + addFile(fileName, fileBlock); } } changeCurrentFile(xml->getIntAttribute("currentFile", -1)); recordingParameters.load(xml.get()); - + loadProperties(*xml); objectServer.reload(); @@ -904,7 +898,7 @@ void OscirenderAudioProcessor::envelopeChanged(EnvelopeComponent* changedEnvelop } } -#if JUCE_MAC || JUCE_WINDOWS +#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM // Syphon/Spout input management // syphonLock must be held when calling this function diff --git a/Source/PluginProcessor.h b/Source/PluginProcessor.h index 6ddd427..1ea8fb6 100644 --- a/Source/PluginProcessor.h +++ b/Source/PluginProcessor.h @@ -11,40 +11,43 @@ #define VERSION_HINT 2 #include -#include "audio/ShapeSound.h" -#include "audio/ShapeVoice.h" -#include "audio/PublicSynthesiser.h" -#include "audio/SampleRateManager.h" + #include -#include "audio/DelayEffect.h" -#include "audio/WobbleEffect.h" -#include "audio/PerspectiveEffect.h" -#include "obj/ObjectServer.h" + +#include "CommonPluginProcessor.h" #include "UGen/Env.h" #include "UGen/ugen_JuceEnvelopeComponent.h" #include "audio/CustomEffect.h" #include "audio/DashedLineEffect.h" -#include "CommonPluginProcessor.h" -#include "SyphonFrameGrabber.h" +#include "audio/DelayEffect.h" +#include "audio/PerspectiveEffect.h" +#include "audio/PublicSynthesiser.h" +#include "audio/SampleRateManager.h" +#include "audio/ShapeSound.h" +#include "audio/ShapeVoice.h" +#include "audio/WobbleEffect.h" +#include "obj/ObjectServer.h" -#if JUCE_MAC || JUCE_WINDOWS - #include "../modules/juce_sharedtexture/SharedTexture.h" +#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM +#include "../modules/juce_sharedtexture/SharedTexture.h" +#include "video/SyphonFrameGrabber.h" #endif //============================================================================== /** -*/ -class OscirenderAudioProcessor : public CommonAudioProcessor, 
juce::AudioProcessorParameter::Listener, public EnvelopeComponentListener - #if JucePlugin_Enable_ARA - , public juce::AudioProcessorARAExtension - #endif + */ +class OscirenderAudioProcessor : public CommonAudioProcessor, juce::AudioProcessorParameter::Listener, public EnvelopeComponentListener +#if JucePlugin_Enable_ARA + , + public juce::AudioProcessorARAExtension +#endif { public: OscirenderAudioProcessor(); ~OscirenderAudioProcessor() override; - void prepareToPlay (double sampleRate, int samplesPerBlock) override; - void processBlock (juce::AudioBuffer&, juce::MidiBuffer&) override; + void prepareToPlay(double sampleRate, int samplesPerBlock) override; + void processBlock(juce::AudioBuffer&, juce::MidiBuffer&) override; juce::AudioProcessorEditor* createEditor() override; @@ -56,38 +59,34 @@ public: void parameterGestureChanged(int parameterIndex, bool gestureIsStarting) override; void envelopeChanged(EnvelopeComponent* changedEnvelope) override; - std::vector> toggleableEffects; + std::vector> toggleableEffects; std::vector> luaEffects; - std::atomic luaValues[26] = { 0.0 }; + std::atomic luaValues[26] = {0.0}; std::shared_ptr frequencyEffect = std::make_shared( [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { frequency = values[0].load(); return input; - }, new osci::EffectParameter( + }, + new osci::EffectParameter( "Frequency", "Controls how many times per second the image is drawn, thereby controlling the pitch of the sound. Lower frequencies result in more-accurately drawn images, but more flickering, and vice versa.", "frequency", - VERSION_HINT, 220.0, 0.0, 4200.0 - ) - ); - + VERSION_HINT, 220.0, 0.0, 4200.0)); + std::shared_ptr trace = std::make_shared( std::vector{ new osci::EffectParameter( "Trace Start", "Defines how far into the frame the drawing is started at. This has the effect of 'tracing' out the image from a single dot when animated. By default, we start drawing from the beginning of the frame, so this value is 0.0.", "traceStart", - VERSION_HINT, 0.0, 0.0, 1.0, 0.001 - ), + VERSION_HINT, 0.0, 0.0, 1.0, 0.001), new osci::EffectParameter( "Trace Length", "Defines how much of the frame is drawn per cycle. This has the effect of 'tracing' out the image from a single dot when animated. By default, we draw the whole frame, corresponding to a value of 1.0.", "traceLength", - VERSION_HINT, 1.0, 0.0, 1.0, 0.001 - ), - } - ); + VERSION_HINT, 1.0, 0.0, 1.0, 0.001), + }); std::shared_ptr delayEffect = std::make_shared(); @@ -97,8 +96,7 @@ public: std::shared_ptr customEffect = std::make_shared(errorCallback, luaValues); std::shared_ptr custom = std::make_shared( customEffect, - new osci::EffectParameter("Lua Effect", "Controls the strength of the custom Lua effect applied. You can write your own custom effect using Lua by pressing the edit button on the right.", "customEffectStrength", VERSION_HINT, 1.0, 0.0, 1.0) - ); + new osci::EffectParameter("Lua Effect", "Controls the strength of the custom Lua effect applied. 
You can write your own custom effect using Lua by pressing the edit button on the right.", "customEffectStrength", VERSION_HINT, 1.0, 0.0, 1.0)); std::shared_ptr perspectiveEffect = std::make_shared(); std::shared_ptr perspective = std::make_shared( @@ -106,13 +104,12 @@ public: std::vector{ new osci::EffectParameter("Perspective", "Controls the strength of the 3D perspective projection.", "perspectiveStrength", VERSION_HINT, 1.0, 0.0, 1.0), new osci::EffectParameter("Focal Length", "Controls the focal length of the 3D perspective effect. A higher focal length makes the image look more flat, and a lower focal length makes the image look more 3D.", "perspectiveFocalLength", VERSION_HINT, 2.0, 0.0, 10.0), - } - ); - + }); + osci::BooleanParameter* midiEnabled = new osci::BooleanParameter("MIDI Enabled", "midiEnabled", VERSION_HINT, false, "Enable MIDI input for the synth. If disabled, the synth will play a constant tone, as controlled by the frequency slider."); osci::BooleanParameter* inputEnabled = new osci::BooleanParameter("Audio Input Enabled", "inputEnabled", VERSION_HINT, false, "Enable to use input audio, instead of the generated audio."); std::atomic frequency = 220.0; - + juce::SpinLock parsersLock; std::vector> parsers; std::vector sounds; @@ -133,7 +130,7 @@ public: osci::FloatParameter* releaseTime = new osci::FloatParameter("Release Time", "releaseTime", VERSION_HINT, 0.4, 0.0, 1.0); osci::FloatParameter* attackShape = new osci::FloatParameter("Attack Shape", "attackShape", VERSION_HINT, 5, -50, 50); osci::FloatParameter* decayShape = new osci::FloatParameter("Decay osci::Shape", "decayShape", VERSION_HINT, -20, -50, 50); - osci::FloatParameter* releaseShape = new osci::FloatParameter("Release Shape", "releaseShape", VERSION_HINT, -5,-50, 50); + osci::FloatParameter* releaseShape = new osci::FloatParameter("Release Shape", "releaseShape", VERSION_HINT, -5, -50, 50); Env adsrEnv = Env::adsr( attackTime->getValueUnnormalised(), @@ -141,8 +138,7 @@ public: sustainLevel->getValueUnnormalised(), releaseTime->getValueUnnormalised(), 1.0, - std::vector{ attackShape->getValueUnnormalised(), decayShape->getValueUnnormalised(), releaseShape->getValueUnnormalised() } - ); + std::vector{attackShape->getValueUnnormalised(), decayShape->getValueUnnormalised(), releaseShape->getValueUnnormalised()}); juce::MidiKeyboardState keyboardState; @@ -158,33 +154,31 @@ public: std::shared_ptr imageThreshold = std::make_shared( [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { return input; - }, new osci::EffectParameter( + }, + new osci::EffectParameter( "Image Threshold", "Controls the probability of visiting a dark pixel versus a light pixel. Darker pixels are less likely to be visited, so turning the threshold to a lower value makes it more likely to visit dark pixels.", "imageThreshold", - VERSION_HINT, 0.5, 0, 1 - ) - ); + VERSION_HINT, 0.5, 0, 1)); std::shared_ptr imageStride = std::make_shared( [this](int index, osci::Point input, const std::vector>& values, double sampleRate) { return input; - }, new osci::EffectParameter( + }, + new osci::EffectParameter( "Image Stride", "Controls the spacing between pixels when drawing an image. 
Larger values mean more of the image can be drawn, but at a lower fidelity.", "imageStride", - VERSION_HINT, 4, 1, 50, 1 - ) - ); + VERSION_HINT, 4, 1, 50, 1)); std::atomic animationFrame = 0.f; - + std::shared_ptr wobbleEffect = std::make_shared(*this); const double FONT_SIZE = 1.0f; juce::Font font = juce::Font(juce::Font::getDefaultSansSerifFontName(), FONT_SIZE, juce::Font::plain); ShapeSound::Ptr objectServerSound = new ShapeSound(); - + std::function haltRecording; // Add a callback to notify the editor when a file is removed @@ -202,10 +196,10 @@ public: void openFile(int index); int getCurrentFileIndex(); std::shared_ptr getCurrentFileParser(); - juce::String getCurrentFileName(); + juce::String getCurrentFileName(); juce::String getFileName(int index); juce::String getFileId(int index); - std::shared_ptr getFileBlock(int index); + std::shared_ptr getFileBlock(int index); void setObjectServerRendering(bool enabled); void setObjectServerPort(int port); void addErrorListener(ErrorListener* listener); @@ -240,7 +234,6 @@ public: }; private: - std::atomic prevMidiEnabled = !midiEnabled->getBoolValue(); juce::SpinLock audioThreadCallbackLock; @@ -271,7 +264,7 @@ private: std::istringstream parser(input.toStdString()); parser >> result[0]; for (int idx = 1; idx < 3; idx++) { - parser.get(); //Skip period + parser.get(); // Skip period parser >> result[idx]; } } @@ -287,8 +280,7 @@ private: juce::AudioPlayHead* playHead; - -#if JUCE_MAC || JUCE_WINDOWS +#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM public: bool isSyphonInputActive() const; bool isSyphonInputStarted() const; @@ -297,11 +289,12 @@ public: juce::String getSyphonSourceName() const; juce::SpinLock syphonLock; + private: ImageParser syphonImageParser = ImageParser(*this); std::unique_ptr syphonFrameGrabber; #endif //============================================================================== - JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OscirenderAudioProcessor) + JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(OscirenderAudioProcessor) }; diff --git a/Source/SettingsComponent.cpp b/Source/SettingsComponent.cpp index b7ca804..02a28cb 100644 --- a/Source/SettingsComponent.cpp +++ b/Source/SettingsComponent.cpp @@ -1,4 +1,5 @@ #include "SettingsComponent.h" + #include "PluginEditor.h" SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudioProcessorEditor& editor) : audioProcessor(p), pluginEditor(editor) { @@ -10,27 +11,26 @@ SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudi addAndMakeVisible(midi); addChildComponent(txt); addChildComponent(frame); - + double midiLayoutPreferredSize = std::any_cast(audioProcessor.getProperty("midiLayoutPreferredSize", pluginEditor.CLOSED_PREF_SIZE)); double mainLayoutPreferredSize = std::any_cast(audioProcessor.getProperty("mainLayoutPreferredSize", -0.4)); midiLayout.setItemLayout(0, -0.1, -1.0, -(1.0 + midiLayoutPreferredSize)); midiLayout.setItemLayout(1, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE); midiLayout.setItemLayout(2, pluginEditor.CLOSED_PREF_SIZE, -0.9, midiLayoutPreferredSize); - + mainLayout.setItemLayout(0, -0.1, -0.9, mainLayoutPreferredSize); mainLayout.setItemLayout(1, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE); mainLayout.setItemLayout(2, -0.1, -0.9, -(1.0 + mainLayoutPreferredSize)); } - void SettingsComponent::resized() { auto area = getLocalBounds(); area.removeFromLeft(5); area.removeFromRight(5); 
area.removeFromTop(5); area.removeFromBottom(5); - + if (area.getWidth() <= 0 || area.getHeight() <= 0) { return; } @@ -38,11 +38,11 @@ void SettingsComponent::resized() { juce::Component dummy; juce::Component dummy2; - juce::Component* midiComponents[] = { &dummy, &midiResizerBar, &midi }; + juce::Component* midiComponents[] = {&dummy, &midiResizerBar, &midi}; midiLayout.layOutComponents(midiComponents, 3, area.getX(), area.getY(), area.getWidth(), area.getHeight(), true, true); midi.setBounds(midi.getBounds()); - juce::Component* columns[] = { &dummy2, &mainResizerBar, &dummy }; + juce::Component* columns[] = {&dummy2, &mainResizerBar, &dummy}; mainLayout.layOutComponents(columns, 3, dummy.getX(), dummy.getY(), dummy.getWidth(), dummy.getHeight(), false, true); auto bounds = dummy2.getBounds(); @@ -66,7 +66,7 @@ void SettingsComponent::resized() { } effects.setBounds(dummyBounds); - + if (isVisible() && getWidth() > 0 && getHeight() > 0) { audioProcessor.setProperty("midiLayoutPreferredSize", midiLayout.getItemCurrentRelativeSize(2)); audioProcessor.setProperty("mainLayoutPreferredSize", mainLayout.getItemCurrentRelativeSize(0)); @@ -79,8 +79,25 @@ void SettingsComponent::fileUpdated(juce::String fileName) { juce::String extension = fileName.fromLastOccurrenceOf(".", true, false).toLowerCase(); txt.setVisible(false); frame.setVisible(false); - bool isImage = extension == ".gif" || extension == ".png" || extension == ".jpg" || extension == ".jpeg" || extension == ".mov" || extension == ".mp4" || audioProcessor.isSyphonInputStarted(); - if ((fileName.isEmpty() && !audioProcessor.isSyphonInputStarted()) || audioProcessor.objectServerRendering) { + + // Check if the file is an image based on extension or Syphon/Spout input + bool isSyphonActive = false; +#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM + isSyphonActive = audioProcessor.isSyphonInputStarted(); +#endif + + bool isImage = isSyphonActive || + (extension == ".gif" || + extension == ".png" || + extension == ".jpg" || + extension == ".jpeg" || + extension == ".mov" || + extension == ".mp4"); + + // Skip processing if object server is rendering or if no file is selected and no Syphon input + bool skipProcessing = audioProcessor.objectServerRendering || (fileName.isEmpty() && !isSyphonActive); + + if (skipProcessing) { // do nothing } else if (extension == ".txt") { txt.setVisible(true); diff --git a/Source/components/OsciMainMenuBarModel.cpp b/Source/components/OsciMainMenuBarModel.cpp index e126a5f..65adbc9 100644 --- a/Source/components/OsciMainMenuBarModel.cpp +++ b/Source/components/OsciMainMenuBarModel.cpp @@ -62,6 +62,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend editor.openRecordingSettings(); }); +#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM // Add Syphon/Spout input menu item under Recording addMenuItem(2, audioProcessor.isSyphonInputActive() ? 
"Disconnect Syphon/Spout Input" : "Select Syphon/Spout Input...", [this] { if (audioProcessor.isSyphonInputActive()) @@ -69,6 +70,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend else openSyphonInputDialog(); }); +#endif if (editor.processor.wrapperType == juce::AudioProcessor::WrapperType::wrapperType_Standalone) { addMenuItem(3, "Settings...", [this] { @@ -77,6 +79,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend } } +#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM void OsciMainMenuBarModel::openSyphonInputDialog() { editor.openSyphonInputDialog(); } @@ -84,3 +87,4 @@ void OsciMainMenuBarModel::openSyphonInputDialog() { void OsciMainMenuBarModel::disconnectSyphonInput() { audioProcessor.disconnectSyphonInput(); } +#endif diff --git a/Source/video/FFmpegEncoderManager.cpp b/Source/video/FFmpegEncoderManager.cpp new file mode 100644 index 0000000..6c7c2e2 --- /dev/null +++ b/Source/video/FFmpegEncoderManager.cpp @@ -0,0 +1,343 @@ +#include "FFmpegEncoderManager.h" + +FFmpegEncoderManager::FFmpegEncoderManager(juce::File& ffmpegExecutable) + : ffmpegExecutable(ffmpegExecutable) { + queryAvailableEncoders(); +} + +juce::String FFmpegEncoderManager::buildVideoEncodingCommand( + VideoCodec codec, + int crf, + int videoToolboxQuality, + int width, + int height, + double frameRate, + const juce::String& compressionPreset, + const juce::File& outputFile) { + switch (codec) { + case VideoCodec::H264: + return buildH264EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile); + case VideoCodec::H265: + return buildH265EncodingCommand(crf, videoToolboxQuality, width, height, frameRate, compressionPreset, outputFile); + case VideoCodec::VP9: + return buildVP9EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile); +#if JUCE_MAC + case VideoCodec::ProRes: + return buildProResEncodingCommand(width, height, frameRate, outputFile); +#endif + default: + // Default to H.264 if unknown codec + return buildH264EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile); + } +} + +juce::Array FFmpegEncoderManager::getAvailableEncodersForCodec(VideoCodec codec) { + // Return cached list of encoders if available + auto it = availableEncoders.find(codec); + if (it != availableEncoders.end()) { + return it->second; + } + + return {}; +} + +bool FFmpegEncoderManager::isHardwareEncoderAvailable(const juce::String& encoderName) { + // Check if the encoder is available and supported + for (auto& pair : availableEncoders) { + for (auto& encoder : pair.second) { + if (encoder.name == encoderName && encoder.isSupported && encoder.isHardwareAccelerated) { + return true; + } + } + } + return false; +} + +juce::String FFmpegEncoderManager::getBestEncoderForCodec(VideoCodec codec) { + auto encoders = getAvailableEncodersForCodec(codec); + + // Define priority lists for each codec type + juce::StringArray h264Encoders = {"h264_nvenc", "h264_amf", "h264_qsv", "h264_videotoolbox", "libx264"}; + juce::StringArray h265Encoders = {"hevc_nvenc", "hevc_amf", "hevc_qsv", "hevc_videotoolbox", "libx265"}; + juce::StringArray vp9Encoders = {"libvpx-vp9"}; +#if JUCE_MAC + juce::StringArray proResEncoders = {"prores_ks", "prores"}; +#endif + + // Select the appropriate priority list based on codec + juce::StringArray* priorityList = nullptr; + switch (codec) { + case VideoCodec::H264: + priorityList = &h264Encoders; + break; + case VideoCodec::H265: + priorityList = &h265Encoders; + break; + case 
VideoCodec::VP9: + priorityList = &vp9Encoders; + break; +#if JUCE_MAC + case VideoCodec::ProRes: + priorityList = &proResEncoders; + break; +#endif + default: + priorityList = &h264Encoders; // Default to H.264 + } + + // Find the highest priority encoder that is available + for (const auto& encoderName : *priorityList) { + for (const auto& encoder : encoders) { + if (encoder.name == encoderName && encoder.isSupported) { + return encoderName; + } + } + } + + // Return default software encoder if no hardware encoder is available + switch (codec) { + case VideoCodec::H264: + return "libx264"; + case VideoCodec::H265: + return "libx265"; + case VideoCodec::VP9: + return "libvpx-vp9"; +#if JUCE_MAC + case VideoCodec::ProRes: + return "prores"; +#endif + default: + return "libx264"; + } +} + +void FFmpegEncoderManager::queryAvailableEncoders() { + // Query available encoders using ffmpeg -encoders + juce::String output = runFFmpegCommand({"-encoders", "-hide_banner"}); + parseEncoderList(output); +} + +void FFmpegEncoderManager::parseEncoderList(const juce::String& output) { + // Clear current encoders + availableEncoders.clear(); + + // Initialize codec-specific encoder arrays + availableEncoders[VideoCodec::H264] = {}; + availableEncoders[VideoCodec::H265] = {}; + availableEncoders[VideoCodec::VP9] = {}; +#if JUCE_MAC + availableEncoders[VideoCodec::ProRes] = {}; +#endif + + // Split the output into lines + juce::StringArray lines; + lines.addLines(output); + + // Skip the first 10 lines (header information from ffmpeg -encoders) + int linesToSkip = juce::jmin(10, lines.size()); + + // Parse each line to find encoder information + for (int i = linesToSkip; i < lines.size(); ++i) { + const auto& line = lines[i]; + + // Format: V..... libx264 H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 + juce::String flags = line.substring(0, 6).trim(); + juce::String name = line.substring(8).upToFirstOccurrenceOf(" ", false, true); + juce::String description = line.substring(8 + name.length()).trim(); + + EncoderDetails encoder; + encoder.name = name; + encoder.description = description; + encoder.isHardwareAccelerated = name.contains("nvenc") || name.contains("amf") || + name.contains("qsv") || name.contains("videotoolbox"); + encoder.isSupported = flags.contains("V"); // Video encoder + + // Add encoder to appropriate codec list + if (name == "libx264" || name.startsWith("h264_")) { + availableEncoders[VideoCodec::H264].add(encoder); + } else if (name == "libx265" || name.startsWith("hevc_")) { + availableEncoders[VideoCodec::H265].add(encoder); + } else if (name == "libvpx-vp9") { + availableEncoders[VideoCodec::VP9].add(encoder); + } +#if JUCE_MAC + else if (name.startsWith("prores")) { + availableEncoders[VideoCodec::ProRes].add(encoder); + } +#endif + } +} + +juce::String FFmpegEncoderManager::runFFmpegCommand(const juce::StringArray& args) { + juce::ChildProcess process; + juce::StringArray command; + + command.add(ffmpegExecutable.getFullPathName()); + command.addArray(args); + + process.start(command, juce::ChildProcess::wantStdOut); + + juce::String output = process.readAllProcessOutput(); + + return output; +} + +juce::String FFmpegEncoderManager::buildBaseEncodingCommand( + int width, + int height, + double frameRate, + const juce::File& outputFile) { + juce::String resolution = juce::String(width) + "x" + juce::String(height); + juce::String cmd = "\"" + ffmpegExecutable.getFullPathName() + "\"" + + " -r " + juce::String(frameRate) + + " -f rawvideo" + + " -pix_fmt rgba" + + " -s " + resolution + + 
" -i -" + + " -threads 4" + + " -y" + + " -pix_fmt yuv420p" + + " -vf vflip"; + + return cmd; +} + +juce::String FFmpegEncoderManager::addH264EncoderSettings( + juce::String cmd, + const juce::String& encoderName, + int crf, + const juce::String& compressionPreset) { + if (encoderName == "h264_nvenc") { + cmd += " -c:v h264_nvenc"; + cmd += " -preset p7"; + cmd += " -profile:v high"; + cmd += " -rc vbr"; + cmd += " -cq " + juce::String(crf); + cmd += " -b:v 0"; + } else if (encoderName == "h264_amf") { + cmd += " -c:v h264_amf"; + cmd += " -quality quality"; + cmd += " -rc cqp"; + cmd += " -qp_i " + juce::String(crf); + cmd += " -qp_p " + juce::String(crf); + } else if (encoderName == "h264_qsv") { + cmd += " -c:v h264_qsv"; + cmd += " -global_quality " + juce::String(crf); + cmd += " -preset " + compressionPreset; + } else if (encoderName == "h264_videotoolbox") { + cmd += " -c:v h264_videotoolbox"; + cmd += " -q " + juce::String(crf); + } else { // libx264 (software) + cmd += " -c:v libx264"; + cmd += " -preset " + compressionPreset; + cmd += " -crf " + juce::String(crf); + } + + return cmd; +} + +juce::String FFmpegEncoderManager::addH265EncoderSettings( + juce::String cmd, + const juce::String& encoderName, + int crf, + int videoToolboxQuality, + const juce::String& compressionPreset) { + if (encoderName == "hevc_nvenc") { + cmd += " -c:v hevc_nvenc"; + cmd += " -preset p7"; + cmd += " -profile:v main"; + cmd += " -rc vbr"; + cmd += " -cq " + juce::String(crf); + cmd += " -b:v 0"; + } else if (encoderName == "hevc_amf") { + cmd += " -c:v hevc_amf"; + cmd += " -quality quality"; + cmd += " -rc cqp"; + cmd += " -qp_i " + juce::String(crf); + cmd += " -qp_p " + juce::String(crf); + } else if (encoderName == "hevc_qsv") { + cmd += " -c:v hevc_qsv"; + cmd += " -global_quality " + juce::String(crf); + cmd += " -preset " + compressionPreset; + } else if (encoderName == "hevc_videotoolbox") { + cmd += " -c:v hevc_videotoolbox"; + cmd += " -q:v " + juce::String(videoToolboxQuality); + cmd += " -tag:v hvc1"; + } else { // libx265 (software) + cmd += " -c:v libx265"; + cmd += " -preset " + compressionPreset; + cmd += " -crf " + juce::String(crf); + } + + return cmd; +} + +juce::String FFmpegEncoderManager::buildH264EncodingCommand( + int crf, + int width, + int height, + double frameRate, + const juce::String& compressionPreset, + const juce::File& outputFile) { + juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile); + juce::String bestEncoder = getBestEncoderForCodec(VideoCodec::H264); + + cmd = addH264EncoderSettings(cmd, bestEncoder, crf, compressionPreset); + cmd += " \"" + outputFile.getFullPathName() + "\""; + + return cmd; +} + +juce::String FFmpegEncoderManager::buildH265EncodingCommand( + int crf, + int videoToolboxQuality, + int width, + int height, + double frameRate, + const juce::String& compressionPreset, + const juce::File& outputFile) { + juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile); + juce::String bestEncoder = getBestEncoderForCodec(VideoCodec::H265); + + cmd = addH265EncoderSettings(cmd, bestEncoder, crf, videoToolboxQuality, compressionPreset); + cmd += " \"" + outputFile.getFullPathName() + "\""; + + return cmd; +} + +juce::String FFmpegEncoderManager::buildVP9EncodingCommand( + int crf, + int width, + int height, + double frameRate, + const juce::String& compressionPreset, + const juce::File& outputFile) { + juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile); + + cmd += 
juce::String(" -c:v libvpx-vp9") + + " -b:v 0" + + " -crf " + juce::String(crf) + + " -deadline good -cpu-used 2"; + + cmd += " \"" + outputFile.getFullPathName() + "\""; + + return cmd; +} + +#if JUCE_MAC +juce::String FFmpegEncoderManager::buildProResEncodingCommand( + int width, + int height, + double frameRate, + const juce::File& outputFile) { + juce::String cmd = buildBaseEncodingCommand(width, height, frameRate, outputFile); + juce::String bestEncoder = getBestEncoderForCodec(VideoCodec::ProRes); + + cmd += " -c:v " + bestEncoder + + " -profile:v 3"; // ProRes 422 HQ + + cmd += " \"" + outputFile.getFullPathName() + "\""; + + return cmd; +} +#endif \ No newline at end of file diff --git a/Source/video/FFmpegEncoderManager.h b/Source/video/FFmpegEncoderManager.h new file mode 100644 index 0000000..3a74a41 --- /dev/null +++ b/Source/video/FFmpegEncoderManager.h @@ -0,0 +1,112 @@ +#pragma once + +#include + +#include "../visualiser/RecordingSettings.h" + +class FFmpegEncoderManager { +public: + FFmpegEncoderManager(juce::File& ffmpegExecutable); + ~FFmpegEncoderManager() = default; + + struct EncoderDetails { + juce::String name; + juce::String description; + bool isHardwareAccelerated; + bool isSupported; + }; + + // FFMPEG command builder + juce::String buildVideoEncodingCommand( + VideoCodec codec, + int crf, + int videoToolboxQuality, + int width, + int height, + double frameRate, + const juce::String& compressionPreset, + const juce::File& outputFile); + + // Get available encoders for a given codec + juce::Array getAvailableEncodersForCodec(VideoCodec codec); + + // Check if a hardware encoder is available + bool isHardwareEncoderAvailable(const juce::String& encoderName); + + // Get the best encoder for a given codec + juce::String getBestEncoderForCodec(VideoCodec codec); + +private: + juce::File ffmpegExecutable; + std::map> availableEncoders; + + // Query available encoders from FFmpeg + void queryAvailableEncoders(); + + // Parse encoder output from FFmpeg + void parseEncoderList(const juce::String& output); + + // Run FFmpeg with given arguments and return output + juce::String runFFmpegCommand(const juce::StringArray& args); + + // Common base command builder to reduce duplication + juce::String buildBaseEncodingCommand( + int width, + int height, + double frameRate, + const juce::File& outputFile); + + // H.264 encoder settings helper + juce::String addH264EncoderSettings( + juce::String cmd, + const juce::String& encoderName, + int crf, + const juce::String& compressionPreset); + + // H.265 encoder settings helper + juce::String addH265EncoderSettings( + juce::String cmd, + const juce::String& encoderName, + int crf, + int videoToolboxQuality, + const juce::String& compressionPreset); + + // Build H.264 encoding command + juce::String buildH264EncodingCommand( + int crf, + int width, + int height, + double frameRate, + const juce::String& compressionPreset, + const juce::File& outputFile); + + // Build H.265 encoding command + juce::String buildH265EncodingCommand( + int crf, + int videoToolboxQuality, + int width, + int height, + double frameRate, + const juce::String& compressionPreset, + const juce::File& outputFile); + + // Build VP9 encoding command + juce::String buildVP9EncodingCommand( + int crf, + int width, + int height, + double frameRate, + const juce::String& compressionPreset, + const juce::File& outputFile); + +#if JUCE_MAC + // Build ProRes encoding command + juce::String buildProResEncodingCommand( + int width, + int height, + double frameRate, + const 
juce::File& outputFile); +#endif + + JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(FFmpegEncoderManager) +}; \ No newline at end of file diff --git a/Source/InvisibleOpenGLContextComponent.h b/Source/video/InvisibleOpenGLContextComponent.h similarity index 100% rename from Source/InvisibleOpenGLContextComponent.h rename to Source/video/InvisibleOpenGLContextComponent.h diff --git a/Source/SyphonFrameGrabber.h b/Source/video/SyphonFrameGrabber.h similarity index 93% rename from Source/SyphonFrameGrabber.h rename to Source/video/SyphonFrameGrabber.h index 4731861..28bd88f 100644 --- a/Source/SyphonFrameGrabber.h +++ b/Source/video/SyphonFrameGrabber.h @@ -1,13 +1,12 @@ #pragma once #include + #include "InvisibleOpenGLContextComponent.h" -class SyphonFrameGrabber : private juce::Thread, public juce::Component -{ +class SyphonFrameGrabber : private juce::Thread, public juce::Component { public: SyphonFrameGrabber(SharedTextureManager& manager, juce::String server, juce::String app, ImageParser& parser, int pollMs = 16) - : juce::Thread("SyphonFrameGrabber"), pollIntervalMs(pollMs), manager(manager), parser(parser) - { + : juce::Thread("SyphonFrameGrabber"), pollIntervalMs(pollMs), manager(manager), parser(parser) { // Create the invisible OpenGL context component glContextComponent = std::make_unique(); receiver = manager.addReceiver(server, app); @@ -45,13 +44,11 @@ public: } } - bool isActive() const - { + bool isActive() const { return receiver != nullptr && receiver->isInit && receiver->enabled; } - juce::String getSourceName() const - { + juce::String getSourceName() const { if (receiver) { return receiver->sharingName + " (" + receiver->sharingAppName + ")"; } @@ -64,6 +61,6 @@ private: SharedTextureReceiver* receiver = nullptr; ImageParser& parser; std::unique_ptr glContextComponent; - + JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(SyphonFrameGrabber) }; diff --git a/Source/visualiser/VisualiserComponent.cpp b/Source/visualiser/VisualiserComponent.cpp index c1d71fc..4068c00 100644 --- a/Source/visualiser/VisualiserComponent.cpp +++ b/Source/visualiser/VisualiserComponent.cpp @@ -1,14 +1,12 @@ -#include "../LookAndFeel.h" #include "VisualiserComponent.h" -#include "../CommonPluginProcessor.h" -#include "../CommonPluginEditor.h" +#include "../CommonPluginEditor.h" +#include "../CommonPluginProcessor.h" +#include "../LookAndFeel.h" #include "AfterglowFragmentShader.glsl" #include "AfterglowVertexShader.glsl" #include "BlurFragmentShader.glsl" #include "BlurVertexShader.glsl" -#include "WideBlurFragmentShader.glsl" -#include "WideBlurVertexShader.glsl" #include "GlowFragmentShader.glsl" #include "GlowVertexShader.glsl" #include "LineFragmentShader.glsl" @@ -19,37 +17,39 @@ #include "SimpleVertexShader.glsl" #include "TexturedFragmentShader.glsl" #include "TexturedVertexShader.glsl" +#include "WideBlurFragmentShader.glsl" +#include "WideBlurVertexShader.glsl" VisualiserComponent::VisualiserComponent( - CommonAudioProcessor& processor, - CommonPluginEditor& pluginEditor, + CommonAudioProcessor &processor, + CommonPluginEditor &pluginEditor, #if OSCI_PREMIUM - SharedTextureManager& sharedTextureManager, + SharedTextureManager &sharedTextureManager, #endif juce::File ffmpegFile, - VisualiserSettings& settings, - RecordingSettings& recordingSettings, - VisualiserComponent* parent, - bool visualiserOnly -) : audioProcessor(processor), - ffmpegFile(ffmpegFile), + VisualiserSettings &settings, + RecordingSettings &recordingSettings, + VisualiserComponent *parent, + bool visualiserOnly) : 
audioProcessor(processor), + ffmpegFile(ffmpegFile), #if OSCI_PREMIUM - sharedTextureManager(sharedTextureManager), + sharedTextureManager(sharedTextureManager), + ffmpegEncoderManager(ffmpegFile), #endif - settings(settings), - recordingSettings(recordingSettings), - visualiserOnly(visualiserOnly), - osci::AudioBackgroundThread("VisualiserComponent" + juce::String(parent != nullptr ? " Child" : ""), processor.threadManager), - parent(parent), - editor(pluginEditor) { + settings(settings), + recordingSettings(recordingSettings), + visualiserOnly(visualiserOnly), + osci::AudioBackgroundThread("VisualiserComponent" + juce::String(parent != nullptr ? " Child" : ""), processor.threadManager), + parent(parent), + editor(pluginEditor) { #if OSCI_PREMIUM addAndMakeVisible(editor.ffmpegDownloader); #endif - + audioProcessor.haltRecording = [this] { setRecording(false); }; - + addAndMakeVisible(record); #if OSCI_PREMIUM record.setTooltip("Toggles recording of the oscilloscope's visuals and audio."); @@ -60,12 +60,12 @@ VisualiserComponent::VisualiserComponent( record.onClick = [this] { setRecording(record.getToggleState()); }; - + addAndMakeVisible(stopwatch); - + setMouseCursor(juce::MouseCursor::PointingHandCursor); setWantsKeyboardFocus(true); - + if (parent == nullptr) { addAndMakeVisible(fullScreenButton); fullScreenButton.setTooltip("Toggles fullscreen mode."); @@ -82,27 +82,25 @@ VisualiserComponent::VisualiserComponent( sharedTextureButton.setTooltip("Toggles sending the oscilloscope's visuals to a Syphon/Spout receiver."); sharedTextureButton.onClick = [this] { if (sharedTextureSender != nullptr) { - openGLContext.executeOnGLThread([this](juce::OpenGLContext& context) { - closeSharedTexture(); - }, false); + openGLContext.executeOnGLThread([this](juce::OpenGLContext &context) { closeSharedTexture(); }, + false); } else { - openGLContext.executeOnGLThread([this](juce::OpenGLContext& context) { - initialiseSharedTexture(); - }, false); + openGLContext.executeOnGLThread([this](juce::OpenGLContext &context) { initialiseSharedTexture(); }, + false); } }; #endif - + fullScreenButton.onClick = [this]() { enableFullScreen(); }; - + settingsButton.onClick = [this]() { if (openSettings != nullptr) { openSettings(); } }; - + popOutButton.onClick = [this]() { popoutWindow(); }; @@ -113,9 +111,7 @@ VisualiserComponent::VisualiserComponent( audioInputButton.setClickingTogglesState(false); audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification); audioPlayer.onParserChanged = [this] { - juce::MessageManager::callAsync([this] { - audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification); - }); + juce::MessageManager::callAsync([this] { audioInputButton.setToggleState(!audioPlayer.isInitialised(), juce::NotificationType::dontSendNotification); }); }; audioInputButton.onClick = [this] { audioProcessor.stopAudioFile(); @@ -124,7 +120,7 @@ VisualiserComponent::VisualiserComponent( addChildComponent(audioPlayer); audioPlayer.setVisible(visualiserOnly); - audioPlayer.addMouseListener(static_cast(this), true); + audioPlayer.addMouseListener(static_cast(this), true); openGLContext.setRenderer(this); openGLContext.attachTo(*this); @@ -138,19 +134,17 @@ VisualiserComponent::~VisualiserComponent() { audioProcessor.haltRecording = nullptr; } openGLContext.detach(); - setShouldBeRunning(false, [this] { - renderingSemaphore.release(); - }); + setShouldBeRunning(false, [this] { renderingSemaphore.release(); }); } void 
VisualiserComponent::setFullScreen(bool fullScreen) { this->fullScreen = fullScreen; hideButtonRow = false; setMouseCursor(juce::MouseCursor::PointingHandCursor); - + // Release renderingSemaphore to prevent deadlocks during layout changes renderingSemaphore.release(); - + resized(); } @@ -165,29 +159,29 @@ void VisualiserComponent::enableFullScreen() { grabKeyboardFocus(); } -void VisualiserComponent::mouseDoubleClick(const juce::MouseEvent& event) { +void VisualiserComponent::mouseDoubleClick(const juce::MouseEvent &event) { if (event.originalComponent == this) { enableFullScreen(); } } -void VisualiserComponent::runTask(const std::vector<osci::Point>& points) { +void VisualiserComponent::runTask(const std::vector<osci::Point> &points) { { juce::CriticalSection::ScopedLockType lock(samplesLock); - + // copy the points before applying effects audioOutputBuffer.setSize(2, points.size(), false, true, true); for (int i = 0; i < points.size(); ++i) { audioOutputBuffer.setSample(0, i, points[i].x); audioOutputBuffer.setSample(1, i, points[i].y); } - + xSamples.clear(); ySamples.clear(); zSamples.clear(); - + auto applyEffects = [&](osci::Point point) { - for (auto& effect : settings.parameters.audioEffects) { + for (auto &effect : settings.parameters.audioEffects) { point = effect->apply(0, point); } #if OSCI_PREMIUM @@ -200,40 +194,40 @@ void VisualiserComponent::runTask(const std::vector<osci::Point>& points) { #endif return point; }; - + if (settings.isSweepEnabled()) { double sweepIncrement = getSweepIncrement(); long samplesPerSweep = sampleRate * settings.getSweepSeconds(); - + double triggerValue = settings.getTriggerValue(); bool belowTrigger = false; - - for (const osci::Point& point : points) { + + for (const osci::Point &point : points) { long samplePosition = sampleCount - lastTriggerPosition; double startPoint = 1.135; double sweep = samplePosition * sweepIncrement * 2 * startPoint - startPoint; - + double value = point.x; - + if (sweep > startPoint && belowTrigger && value >= triggerValue) { lastTriggerPosition = sampleCount; } - + belowTrigger = value < triggerValue; - + osci::Point sweepPoint = {sweep, value, 1}; sweepPoint = applyEffects(sweepPoint); - + xSamples.push_back(sweepPoint.x); ySamples.push_back(sweepPoint.y); zSamples.push_back(1); - + sampleCount++; } } else { - for (const osci::Point& rawPoint : points) { + for (const osci::Point &rawPoint : points) { osci::Point point = applyEffects(rawPoint); - + #if OSCI_PREMIUM if (settings.isGoniometer()) { // x and y go to a diagonal currently, so we need to scale them down, and rotate them @@ -241,23 +235,23 @@ void VisualiserComponent::runTask(const std::vector<osci::Point>& points) { point.rotate(0, 0, -juce::MathConstants<double>::pi / 4); } #endif - + xSamples.push_back(point.x); ySamples.push_back(point.y); zSamples.push_back(point.z); } } - + sampleBufferCount++; - + if (settings.parameters.upsamplingEnabled->getBoolValue()) { int newResampledSize = xSamples.size() * RESAMPLE_RATIO; - + smoothedXSamples.resize(newResampledSize); smoothedYSamples.resize(newResampledSize); smoothedZSamples.resize(newResampledSize); smoothedZSamples.resize(newResampledSize); - + if (settings.isSweepEnabled()) { // interpolate between sweep values to avoid any artifacts from quickly going from one sweep to the next for (int i = 0; i < newResampledSize; ++i) { @@ -266,7 +260,7 @@ void VisualiserComponent::runTask(const std::vector<osci::Point>& points) { double thisSample = xSamples[index]; double nextSample = xSamples[index + 1]; if (nextSample > thisSample) { - smoothedXSamples[i] = xSamples[index] + (i % 
(int) RESAMPLE_RATIO) * (nextSample - thisSample) / RESAMPLE_RATIO; + smoothedXSamples[i] = xSamples[index] + (i % (int)RESAMPLE_RATIO) * (nextSample - thisSample) / RESAMPLE_RATIO; } else { smoothedXSamples[i] = xSamples[index]; } @@ -281,7 +275,7 @@ void VisualiserComponent::runTask(const std::vector<osci::Point>& points) { zResampler.process(zSamples.data(), smoothedZSamples.data(), zSamples.size()); } } - + // this just triggers a repaint triggerAsyncUpdate(); // wait for rendering on the OpenGLRenderer thread to complete @@ -298,9 +292,9 @@ int VisualiserComponent::prepareTask(double sampleRate, int bufferSize) { zResampler.prepare(sampleRate, RESAMPLE_RATIO); audioRecorder.setSampleRate(sampleRate); - + int desiredBufferSize = sampleRate / recordingSettings.getFrameRate(); - + return desiredBufferSize; } @@ -323,28 +317,28 @@ void VisualiserComponent::setPaused(bool paused, bool affectAudio) { repaint(); } -void VisualiserComponent::mouseDrag(const juce::MouseEvent& event) { +void VisualiserComponent::mouseDrag(const juce::MouseEvent &event) { timerId = -1; } -void VisualiserComponent::mouseMove(const juce::MouseEvent& event) { +void VisualiserComponent::mouseMove(const juce::MouseEvent &event) { if (event.getScreenX() == lastMouseX && event.getScreenY() == lastMouseY) { return; } hideButtonRow = false; setMouseCursor(juce::MouseCursor::PointingHandCursor); - + // Treat both fullScreen mode and pop-out mode (parent != nullptr) as needing auto-hide controls if (fullScreen || parent != nullptr) { if (!getScreenBounds().removeFromBottom(25).contains(event.getScreenX(), event.getScreenY()) && !event.mods.isLeftButtonDown()) { lastMouseX = event.getScreenX(); lastMouseY = event.getScreenY(); - + int newTimerId = juce::Random::getSystemRandom().nextInt(); timerId = newTimerId; auto pos = event.getScreenPosition(); auto parent = this->parent; - + juce::WeakReference<VisualiserComponent> weakRef = this; juce::Timer::callAfterDelay(1000, [this, weakRef, newTimerId, pos, parent]() { if (weakRef) { @@ -356,14 +350,13 @@ void VisualiserComponent::mouseMove(const juce::MouseEvent& event) { resized(); } } - } - }); + } }); } resized(); } } -void VisualiserComponent::mouseDown(const juce::MouseEvent& event) { +void VisualiserComponent::mouseDown(const juce::MouseEvent &event) { if (event.originalComponent == this) { if (event.mods.isLeftButtonDown() && child == nullptr && !record.getToggleState()) { setPaused(active); } } } -bool VisualiserComponent::keyPressed(const juce::KeyPress& key) { +bool VisualiserComponent::keyPressed(const juce::KeyPress &key) { if (key.isKeyCode(juce::KeyPress::escapeKey)) { if (fullScreenCallback) { fullScreenCallback(FullScreenMode::MAIN_COMPONENT); } return true; } return false; } void VisualiserComponent::setRecording(bool recording) { stopwatch.stop(); stopwatch.reset(); - + #if OSCI_PREMIUM bool stillRecording = ffmpegProcess.isRunning() || audioRecorder.isRecording(); #else @@ -417,66 +410,33 @@ void VisualiserComponent::setRecording(bool recording) { downloading = false; resized(); }); - }); - }); + }); }); }; auto onDownloadStart = [this] { juce::MessageManager::callAsync([this] { record.setEnabled(false); downloading = true; - resized(); - }); + resized(); }); }; if (!audioProcessor.ensureFFmpegExists(onDownloadStart, onDownloadSuccess)) { record.setToggleState(false, juce::NotificationType::dontSendNotification); return; } - + // Get the appropriate file 
extension based on codec juce::String fileExtension = recordingSettings.getFileExtensionForCodec(); tempVideoFile = std::make_unique<juce::TemporaryFile>("." + fileExtension); - - juce::String resolution = std::to_string(renderTexture.width) + "x" + std::to_string(renderTexture.height); - juce::String cmd = "\"" + ffmpegFile.getFullPathName() + "\"" + - " -r " + juce::String(recordingSettings.getFrameRate()) + - " -f rawvideo" + - " -pix_fmt rgba" + - " -s " + resolution + - " -i -" + - " -threads 4" + - " -preset " + recordingSettings.getCompressionPreset() + - " -y" + - " -pix_fmt yuv420p"; - - // Apply codec-specific parameters + VideoCodec codec = recordingSettings.getVideoCodec(); - if (codec == VideoCodec::H264) { - cmd += " -c:v libx264"; - cmd += " -crf " + juce::String(recordingSettings.getCRF()); - } else if (codec == VideoCodec::H265) { - cmd += " -c:v libx265"; - cmd += " -crf " + juce::String(recordingSettings.getCRF()); -#if JUCE_MAC && JUCE_ARM - // use hardware encoding on Apple Silicon - cmd += " -c:v hevc_videotoolbox"; - cmd += " -q:v " + juce::String(recordingSettings.getVideoToolboxQuality()); - cmd += " -tag:v hvc1"; -#endif - } else if (codec == VideoCodec::VP9) { - cmd += " -c:v libvpx-vp9"; - cmd += " -b:v 0"; - cmd += " -crf " + juce::String(recordingSettings.getCRF()); - cmd += " -deadline good -cpu-used 2"; - } -#if JUCE_MAC - else if (codec == VideoCodec::ProRes) { - cmd += " -c:v prores"; - cmd += " -profile:v 3"; // ProRes 422 HQ - } -#endif - - cmd += " -vf vflip"; - cmd += " \"" + tempVideoFile->getFile().getFullPathName() + "\""; + juce::String cmd = ffmpegEncoderManager.buildVideoEncodingCommand( + codec, + recordingSettings.getCRF(), + recordingSettings.getVideoToolboxQuality(), + renderTexture.width, + renderTexture.height, + recordingSettings.getFrameRate(), + recordingSettings.getCompressionPreset(), + tempVideoFile->getFile()); ffmpegProcess.start(cmd); framePixels.resize(renderTexture.width * renderTexture.height * 4); @@ -516,7 +476,7 @@ void VisualiserComponent::setRecording(bool recording) { auto flags = juce::FileBrowserComponent::saveMode | juce::FileBrowserComponent::canSelectFiles | juce::FileBrowserComponent::warnAboutOverwriting; #if OSCI_PREMIUM - chooser->launchAsync(flags, [this, wasRecordingAudio, wasRecordingVideo](const juce::FileChooser& chooser) { + chooser->launchAsync(flags, [this, wasRecordingAudio, wasRecordingVideo](const juce::FileChooser &chooser) { auto file = chooser.getResult(); if (file != juce::File()) { if (wasRecordingAudio && wasRecordingVideo) { @@ -528,19 +488,17 @@ void VisualiserComponent::setRecording(bool recording) { tempVideoFile->getFile().copyFileTo(file); } audioProcessor.setLastOpenedDirectory(file.getParentDirectory()); - } - }); + } }); #else - chooser->launchAsync(flags, [this](const juce::FileChooser& chooser) { + chooser->launchAsync(flags, [this](const juce::FileChooser &chooser) { auto file = chooser.getResult(); if (file != juce::File()) { tempAudioFile->getFile().copyFileTo(file); audioProcessor.setLastOpenedDirectory(file.getParentDirectory()); - } - }); + } }); #endif } - + setBlockOnAudioThread(recording); #if OSCI_PREMIUM numFrames = 0; @@ -570,15 +528,15 @@ void VisualiserComponent::resized() { } else { settingsButton.setVisible(false); } - + if (visualiserOnly && juce::JUCEApplication::isStandaloneApp() && child == nullptr) { audioInputButton.setBounds(buttons.removeFromRight(30)); } - + #if OSCI_PREMIUM sharedTextureButton.setBounds(buttons.removeFromRight(30)); #endif - + 
record.setBounds(buttons.removeFromRight(25)); if (record.getToggleState()) { stopwatch.setVisible(true); @@ -586,20 +544,20 @@ void VisualiserComponent::resized() { } else { stopwatch.setVisible(false); } - + #if OSCI_PREMIUM if (child == nullptr && downloading) { auto bounds = buttons.removeFromRight(160); editor.ffmpegDownloader.setBounds(bounds.withSizeKeepingCentre(bounds.getWidth() - 10, bounds.getHeight() - 10)); } #endif - + buttons.removeFromRight(10); // padding - + if (child == nullptr) { audioPlayer.setBounds(buttons); } - + viewportArea = area; viewportChanged(viewportArea); } @@ -611,10 +569,10 @@ void VisualiserComponent::popoutWindow() { } #endif setRecording(false); - + // Release renderingSemaphore to prevent deadlock when creating a child visualizer renderingSemaphore.release(); - + auto visualiser = new VisualiserComponent( audioProcessor, editor, @@ -625,8 +583,7 @@ void VisualiserComponent::popoutWindow() { settings, recordingSettings, this, - visualiserOnly - ); + visualiserOnly); visualiser->settings.setLookAndFeel(&getLookAndFeel()); visualiser->openSettings = openSettings; visualiser->closeSettings = closeSettings; @@ -671,8 +628,7 @@ void VisualiserComponent::initialiseSharedTexture() { sharedTextureSender->setSharedTextureId(renderTexture.id); sharedTextureSender->setDrawFunction([this] { setShader(texturedShader.get()); - drawTexture({renderTexture}); - }); + drawTexture({renderTexture}); }); } void VisualiserComponent::closeSharedTexture() { @@ -680,43 +636,42 @@ void VisualiserComponent::closeSharedTexture() { sharedTextureManager.removeSender(sharedTextureSender); sharedTextureSender = nullptr; } - } #endif void VisualiserComponent::newOpenGLContextCreated() { using namespace juce::gl; - + juce::CriticalSection::ScopedLockType lock(samplesLock); - + glColorMask(true, true, true, true); viewportChanged(viewportArea); glEnable(GL_BLEND); glBlendEquation(GL_FUNC_ADD); - - fullScreenQuad = { -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f }; - + + fullScreenQuad = {-1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f}; + simpleShader = std::make_unique<juce::OpenGLShaderProgram>(openGLContext); simpleShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(simpleVertexShader)); simpleShader->addFragmentShader(simpleFragmentShader); simpleShader->link(); - + lineShader = std::make_unique<juce::OpenGLShaderProgram>(openGLContext); lineShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(lineVertexShader)); lineShader->addFragmentShader(lineFragmentShader); lineShader->link(); - + outputShader = std::make_unique<juce::OpenGLShaderProgram>(openGLContext); outputShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(outputVertexShader)); outputShader->addFragmentShader(outputFragmentShader); outputShader->link(); - + texturedShader = std::make_unique<juce::OpenGLShaderProgram>(openGLContext); texturedShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(texturedVertexShader)); texturedShader->addFragmentShader(texturedFragmentShader); texturedShader->link(); - + blurShader = std::make_unique<juce::OpenGLShaderProgram>(openGLContext); blurShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(blurVertexShader)); blurShader->addFragmentShader(blurFragmentShader); @@ -726,29 +681,29 @@ wideBlurShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(wideBlurVertexShader)); wideBlurShader->addFragmentShader(wideBlurFragmentShader); wideBlurShader->link(); - + #if OSCI_PREMIUM glowShader = 
std::make_unique<juce::OpenGLShaderProgram>(openGLContext); glowShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(glowVertexShader)); glowShader->addFragmentShader(glowFragmentShader); glowShader->link(); - + afterglowShader = std::make_unique<juce::OpenGLShaderProgram>(openGLContext); afterglowShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(afterglowVertexShader)); afterglowShader->addFragmentShader(afterglowFragmentShader); afterglowShader->link(); #endif - + glGenBuffers(1, &vertexBuffer); glGenBuffers(1, &quadIndexBuffer); glGenBuffers(1, &vertexIndexBuffer); - + setupTextures(); } void VisualiserComponent::openGLContextClosing() { using namespace juce::gl; - + #if OSCI_PREMIUM closeSharedTexture(); #endif @@ -764,14 +719,14 @@ void VisualiserComponent::openGLContextClosing() { glDeleteTextures(1, &blur4Texture.id); glDeleteTextures(1, &renderTexture.id); screenOpenGLTexture.release(); - + #if OSCI_PREMIUM glDeleteTextures(1, &glowTexture.id); reflectionOpenGLTexture.release(); glowShader.reset(); afterglowShader.reset(); #endif - + simpleShader.reset(); texturedShader.reset(); blurShader.reset(); @@ -789,14 +744,14 @@ void VisualiserComponent::handleAsyncUpdate() { void VisualiserComponent::renderOpenGL() { using namespace juce::gl; - + if (openGLContext.isActive()) { juce::OpenGLHelpers::clear(juce::Colours::black); - + // we have a new buffer to render if (sampleBufferCount != prevSampleBufferCount) { prevSampleBufferCount = sampleBufferCount; - + if (!record.getToggleState()) { // don't change resolution or framerate if recording if (recordingSettings.getResolution() != renderTexture.width) { @@ -808,9 +763,9 @@ void VisualiserComponent::renderOpenGL() { setupArrays(RESAMPLE_RATIO * sampleRate / recordingSettings.getFrameRate()); } } - + juce::CriticalSection::ScopedLockType lock(samplesLock); - + if (settings.parameters.upsamplingEnabled->getBoolValue()) { renderScope(smoothedXSamples, smoothedYSamples, smoothedZSamples); } else { @@ -822,7 +777,7 @@ void VisualiserComponent::renderOpenGL() { sharedTextureSender->renderGL(); } #endif - + if (record.getToggleState()) { #if OSCI_PREMIUM if (recordingVideo) { @@ -836,11 +791,11 @@ void VisualiserComponent::renderOpenGL() { audioRecorder.audioThreadCallback(audioOutputBuffer); } } - + renderingSemaphore.release(); stopwatch.addTime(juce::RelativeTime::seconds(1.0 / recordingSettings.getFrameRate())); } - + // render texture to screen activateTargetTexture(std::nullopt); setShader(texturedShader.get()); @@ -854,25 +809,25 @@ void VisualiserComponent::viewportChanged(juce::Rectangle<int> area) { if (openGLContext.isAttached()) { float realWidth = area.getWidth() * renderScale; float realHeight = area.getHeight() * renderScale; - + float xOffset = getWidth() * renderScale - realWidth; float yOffset = getHeight() * renderScale - realHeight; - + float minDim = juce::jmin(realWidth, realHeight); float x = (realWidth - minDim) / 2 + area.getX() * renderScale + xOffset; float y = (realHeight - minDim) / 2 - area.getY() * renderScale + yOffset; - + glViewport(juce::roundToInt(x), juce::roundToInt(y), juce::roundToInt(minDim), juce::roundToInt(minDim)); } } void VisualiserComponent::setupArrays(int nPoints) { using namespace juce::gl; - + if (nPoints == 0) { return; } - + nEdges = nPoints - 1; std::vector<float> indices(4 * nEdges); @@ -883,7 +838,7 @@ void VisualiserComponent::setupArrays(int nPoints) { glBindBuffer(GL_ARRAY_BUFFER, quadIndexBuffer); glBufferData(GL_ARRAY_BUFFER, indices.size() * sizeof(float), indices.data(), GL_STATIC_DRAW); 
glBindBuffer(GL_ARRAY_BUFFER, 0); // Unbind - + int len = nEdges * 2 * 3; std::vector<GLuint> vertexIndices(len); @@ -907,7 +862,7 @@ void VisualiserComponent::setupTextures() { using namespace juce::gl; - + // Create the framebuffer glGenFramebuffers(1, &frameBuffer); glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer); @@ -919,10 +874,10 @@ void VisualiserComponent::setupTextures() { blur3Texture = makeTexture(128, 128); blur4Texture = makeTexture(128, 128); renderTexture = makeTexture(recordingSettings.getResolution(), recordingSettings.getResolution()); - + screenOpenGLTexture.loadImage(emptyScreenImage); - screenTexture = { screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight() }; - + screenTexture = {screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight()}; + #if OSCI_PREMIUM glowTexture = makeTexture(512, 512); reflectionTexture = createReflectionTexture(); @@ -946,7 +901,7 @@ Texture VisualiserComponent::makeTexture(int width, int height, GLuint textureID glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - float borderColor[] = { 0.0f, 0.0f, 0.0f, 1.0f }; + float borderColor[] = {0.0f, 0.0f, 0.0f, 1.0f}; glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, borderColor); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID, 0); @@ -958,7 +913,7 @@ Texture VisualiserComponent::makeTexture(int width, int height, GLuint textureID glBindTexture(GL_TEXTURE_2D, 0); // Unbind - return { textureID, width, height }; + return {textureID, width, height}; } void VisualiserComponent::setResolution(int width) { @@ -975,45 +930,45 @@ void VisualiserComponent::setResolution(int width) { glBindFramebuffer(GL_FRAMEBUFFER, 0); // Unbind } -void VisualiserComponent::drawLineTexture(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints) { +void VisualiserComponent::drawLineTexture(const std::vector<float> &xPoints, const std::vector<float> &yPoints, const std::vector<float> &zPoints) { using namespace juce::gl; - + double persistence = std::pow(0.5, settings.getPersistence()) * 0.4; persistence *= 60.0 / recordingSettings.getFrameRate(); fadeAmount = juce::jmin(1.0, persistence); - + activateTargetTexture(lineTexture); fade(); drawLine(xPoints, yPoints, zPoints); glBindTexture(GL_TEXTURE_2D, targetTexture.value().id); } -void VisualiserComponent::saveTextureToPNG(Texture texture, const juce::File& file) { +void VisualiserComponent::saveTextureToPNG(Texture texture, const juce::File &file) { using namespace juce::gl; GLuint textureID = texture.id; int width = texture.width; int height = texture.height; - + // Bind the texture to read its data glBindTexture(GL_TEXTURE_2D, textureID); std::vector<unsigned char> pixels = std::vector<unsigned char>(width * height * 4); // Read the pixels from the texture glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data()); - juce::Image image = juce::Image (juce::Image::PixelFormat::ARGB, width, height, true); + juce::Image image = juce::Image(juce::Image::PixelFormat::ARGB, width, height, true); juce::Image::BitmapData bitmapData(image, juce::Image::BitmapData::writeOnly); // Copy the pixel data to the JUCE image (and swap R and B channels) for (int y = 0; y < height; ++y) { for (int x = 0; x < width; ++x) { - int srcIndex = (y * width + x) * 4; // RGBA format 
+ int srcIndex = (y * width + x) * 4; // RGBA format juce::uint8 r = (pixels)[srcIndex]; // Red juce::uint8 g = (pixels)[srcIndex + 1]; // Green juce::uint8 b = (pixels)[srcIndex + 2]; // Blue juce::uint8 a = (pixels)[srcIndex + 3]; // Alpha // This method uses colors in RGBA - bitmapData.setPixelColour(x, height-y-1, juce::Colour(r, g, b, a)); + bitmapData.setPixelColour(x, height - y - 1, juce::Colour(r, g, b, a)); } } @@ -1029,7 +984,7 @@ void VisualiserComponent::saveTextureToPNG(Texture texture, const juce::File& file) { } } -void VisualiserComponent::saveTextureToQOI(Texture texture, const juce::File& file) { +void VisualiserComponent::saveTextureToQOI(Texture texture, const juce::File &file) { using namespace juce::gl; GLuint textureID = texture.id; int width = texture.width; @@ -1041,14 +996,14 @@ void VisualiserComponent::saveTextureToQOI(Texture texture, const juce::File& file) { // Read the pixels from the texture glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data()); - const qoixx::qoi::desc imageFormat{ .width = (uint32_t) width, .height = (uint32_t) height, .channels = 4, .colorspace = qoixx::qoi::colorspace::srgb }; + const qoixx::qoi::desc imageFormat{.width = (uint32_t)width, .height = (uint32_t)height, .channels = 4, .colorspace = qoixx::qoi::colorspace::srgb}; std::vector<unsigned char> binaryData = qoixx::qoi::encode<std::vector<unsigned char>>(pixels, imageFormat); file.replaceWithData(binaryData.data(), binaryData.size()); } void VisualiserComponent::activateTargetTexture(std::optional<Texture> texture) { using namespace juce::gl; - + if (texture.has_value()) { glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture.value().id, 0); @@ -1060,14 +1015,14 @@ void VisualiserComponent::activateTargetTexture(std::optional<Texture> texture) targetTexture = texture; } -void VisualiserComponent::setShader(juce::OpenGLShaderProgram* program) { +void VisualiserComponent::setShader(juce::OpenGLShaderProgram *program) { currentShader = program; program->use(); } void VisualiserComponent::drawTexture(std::vector<std::optional<Texture>> textures) { using namespace juce::gl; - + glEnableVertexAttribArray(glGetAttribLocation(currentShader->getProgramID(), "aPos")); for (int i = 0; i < textures.size(); ++i) { @@ -1093,29 +1048,30 @@ void VisualiserComponent::drawTexture(std::vector<std::optional<Texture>> textures) { void VisualiserComponent::setAdditiveBlending() { using namespace juce::gl; - + glBlendFunc(GL_ONE, GL_ONE); } void VisualiserComponent::setNormalBlending() { using namespace juce::gl; - + glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); } -void VisualiserComponent::drawLine(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints) { +void VisualiserComponent::drawLine(const std::vector<float> &xPoints, const std::vector<float> &yPoints, const std::vector<float> &zPoints) { using namespace juce::gl; - + setAdditiveBlending(); - + int nPoints = xPoints.size(); // Without this, there's an access violation that seems to occur only on some systems - if (scratchVertices.size() != nPoints * 12) scratchVertices.resize(nPoints * 12); - + if (scratchVertices.size() != nPoints * 12) + scratchVertices.resize(nPoints * 12); + for (int i = 0; i < nPoints; ++i) { int p = i * 12; - scratchVertices[p] = scratchVertices[p + 3] = scratchVertices[p + 6] = scratchVertices[p + 9] = xPoints[i]; + scratchVertices[p] = scratchVertices[p + 3] = scratchVertices[p + 6] = scratchVertices[p + 9] = xPoints[i]; scratchVertices[p + 1] = scratchVertices[p + 4] = scratchVertices[p + 7] = scratchVertices[p + 10] = 
yPoints[i]; scratchVertices[p + 2] = scratchVertices[p + 5] = scratchVertices[p + 8] = scratchVertices[p + 11] = zPoints[i]; } @@ -1131,27 +1087,27 @@ void VisualiserComponent::drawLine(const std::vector& xPoints, const std: glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer); glVertexAttribPointer(glGetAttribLocation(lineShader->getProgramID(), "aStart"), 3, GL_FLOAT, GL_FALSE, 0, 0); - glVertexAttribPointer(glGetAttribLocation(lineShader->getProgramID(), "aEnd"), 3, GL_FLOAT, GL_FALSE, 0, (void*)(12 * sizeof(float))); + glVertexAttribPointer(glGetAttribLocation(lineShader->getProgramID(), "aEnd"), 3, GL_FLOAT, GL_FALSE, 0, (void *)(12 * sizeof(float))); glBindBuffer(GL_ARRAY_BUFFER, quadIndexBuffer); glVertexAttribPointer(glGetAttribLocation(lineShader->getProgramID(), "aIdx"), 1, GL_FLOAT, GL_FALSE, 0, 0); glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, screenTexture.id); lineShader->setUniform("uScreen", 0); - lineShader->setUniform("uSize", (GLfloat) settings.getFocus()); + lineShader->setUniform("uSize", (GLfloat)settings.getFocus()); lineShader->setUniform("uGain", 450.0f / 512.0f); lineShader->setUniform("uInvert", 1.0f); if (settings.getUpsamplingEnabled()) { lineShader->setUniform("uIntensity", intensity); } else { - lineShader->setUniform("uIntensity", (GLfloat) (intensity * RESAMPLE_RATIO * 1.5)); + lineShader->setUniform("uIntensity", (GLfloat)(intensity * RESAMPLE_RATIO * 1.5)); } - + lineShader->setUniform("uFadeAmount", fadeAmount); - lineShader->setUniform("uNEdges", (GLfloat) nEdges); + lineShader->setUniform("uNEdges", (GLfloat)nEdges); setOffsetAndScale(lineShader.get()); - + #if OSCI_PREMIUM lineShader->setUniform("uFishEye", screenOverlay == ScreenOverlay::VectorDisplay ? VECTOR_DISPLAY_FISH_EYE : 0.0f); lineShader->setUniform("uShutterSync", settings.getShutterSync()); @@ -1171,14 +1127,14 @@ void VisualiserComponent::drawLine(const std::vector& xPoints, const std: void VisualiserComponent::fade() { using namespace juce::gl; - + setNormalBlending(); - + #if OSCI_PREMIUM setShader(afterglowShader.get()); afterglowShader->setUniform("fadeAmount", fadeAmount); - afterglowShader->setUniform("afterglowAmount", (float) settings.getAfterglow()); - afterglowShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution()); + afterglowShader->setUniform("afterglowAmount", (float)settings.getAfterglow()); + afterglowShader->setUniform("uResizeForCanvas", lineTexture.width / (float)recordingSettings.getResolution()); drawTexture({lineTexture}); #else simpleShader->use(); @@ -1187,7 +1143,7 @@ void VisualiserComponent::fade() { glBufferData(GL_ARRAY_BUFFER, sizeof(float) * fullScreenQuad.size(), fullScreenQuad.data(), GL_STATIC_DRAW); glVertexAttribPointer(glGetAttribLocation(simpleShader->getProgramID(), "vertexPosition"), 2, GL_FLOAT, GL_FALSE, 0, 0); glBindBuffer(GL_ARRAY_BUFFER, 0); - + simpleShader->setUniform("colour", 0.0f, 0.0f, 0.0f, fadeAmount); glDrawArrays(GL_TRIANGLES, 0, 6); glDisableVertexAttribArray(glGetAttribLocation(simpleShader->getProgramID(), "vertexPosition")); @@ -1196,42 +1152,42 @@ void VisualiserComponent::fade() { void VisualiserComponent::drawCRT() { using namespace juce::gl; - + setNormalBlending(); activateTargetTexture(blur1Texture); setShader(texturedShader.get()); - texturedShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution()); + texturedShader->setUniform("uResizeForCanvas", lineTexture.width / (float)recordingSettings.getResolution()); 
drawTexture({lineTexture}); - //horizontal blur 512x512 + // horizontal blur 512x512 activateTargetTexture(blur2Texture); setShader(blurShader.get()); blurShader->setUniform("uOffset", 1.0f / 512.0f, 0.0f); drawTexture({blur1Texture}); - //vertical blur 512x512 + // vertical blur 512x512 activateTargetTexture(blur1Texture); blurShader->setUniform("uOffset", 0.0f, 1.0f / 512.0f); drawTexture({blur2Texture}); - //preserve blur1 for later + // preserve blur1 for later activateTargetTexture(blur3Texture); setShader(texturedShader.get()); texturedShader->setUniform("uResizeForCanvas", 1.0f); drawTexture({blur1Texture}); - //horizontal blur 128x128 + // horizontal blur 128x128 activateTargetTexture(blur4Texture); setShader(wideBlurShader.get()); wideBlurShader->setUniform("uOffset", 1.0f / 128.0f, 0.0f); drawTexture({blur3Texture}); - //vertical blur 128x128 + // vertical blur 128x128 activateTargetTexture(blur3Texture); wideBlurShader->setUniform("uOffset", 0.0f, 1.0f / 128.0f); drawTexture({blur4Texture}); - + #if OSCI_PREMIUM if (settings.parameters.screenOverlay->isRealisticDisplay()) { // create glow texture @@ -1245,26 +1201,26 @@ void VisualiserComponent::drawCRT() { activateTargetTexture(renderTexture); setShader(outputShader.get()); outputShader->setUniform("uExposure", 0.25f); - outputShader->setUniform("uLineSaturation", (float) settings.getLineSaturation()); + outputShader->setUniform("uLineSaturation", (float)settings.getLineSaturation()); #if OSCI_PREMIUM - outputShader->setUniform("uScreenSaturation", (float) settings.getScreenSaturation()); - outputShader->setUniform("uHueShift", (float) settings.getScreenHue() / 360.0f); - outputShader->setUniform("uOverexposure", (float) settings.getOverexposure()); + outputShader->setUniform("uScreenSaturation", (float)settings.getScreenSaturation()); + outputShader->setUniform("uHueShift", (float)settings.getScreenHue() / 360.0f); + outputShader->setUniform("uOverexposure", (float)settings.getOverexposure()); #else outputShader->setUniform("uScreenSaturation", 1.0f); outputShader->setUniform("uHueShift", 0.0f); outputShader->setUniform("uOverexposure", 0.5f); #endif - outputShader->setUniform("uNoise", (float) settings.getNoise()); + outputShader->setUniform("uNoise", (float)settings.getNoise()); outputShader->setUniform("uRandom", juce::Random::getSystemRandom().nextFloat()); - outputShader->setUniform("uGlow", (float) settings.getGlow()); - outputShader->setUniform("uAmbient", (float) settings.getAmbient()); + outputShader->setUniform("uGlow", (float)settings.getGlow()); + outputShader->setUniform("uAmbient", (float)settings.getAmbient()); setOffsetAndScale(outputShader.get()); #if OSCI_PREMIUM outputShader->setUniform("uFishEye", screenOverlay == ScreenOverlay::VectorDisplay ? VECTOR_DISPLAY_FISH_EYE : 0.0f); outputShader->setUniform("uRealScreen", settings.parameters.screenOverlay->isRealisticDisplay() ? 
1.0f : 0.0f); #endif - outputShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution()); + outputShader->setUniform("uResizeForCanvas", lineTexture.width / (float)recordingSettings.getResolution()); juce::Colour colour = juce::Colour::fromHSV(settings.getHue() / 360.0f, 1.0, 1.0, 1.0); outputShader->setUniform("uColour", colour.getFloatRed(), colour.getFloatGreen(), colour.getFloatBlue()); drawTexture({ @@ -1279,9 +1235,9 @@ void VisualiserComponent::drawCRT() { }); } -void VisualiserComponent::setOffsetAndScale(juce::OpenGLShaderProgram* shader) { +void VisualiserComponent::setOffsetAndScale(juce::OpenGLShaderProgram *shader) { osci::Point offset; - osci::Point scale = { 1.0f }; + osci::Point scale = {1.0f}; #if OSCI_PREMIUM if (settings.getScreenOverlay() == ScreenOverlay::Real) { offset = REAL_SCREEN_OFFSET; @@ -1291,14 +1247,14 @@ void VisualiserComponent::setOffsetAndScale(juce::OpenGLShaderProgram* shader) { scale = VECTOR_DISPLAY_SCALE; } #endif - shader->setUniform("uOffset", (float) offset.x, (float) offset.y); - shader->setUniform("uScale", (float) scale.x, (float) scale.y); + shader->setUniform("uOffset", (float)offset.x, (float)offset.y); + shader->setUniform("uScale", (float)scale.x, (float)scale.y); } #if OSCI_PREMIUM Texture VisualiserComponent::createReflectionTexture() { using namespace juce::gl; - + if (settings.getScreenOverlay() == ScreenOverlay::VectorDisplay) { reflectionOpenGLTexture.loadImage(vectorDisplayReflectionImage); } else if (settings.getScreenOverlay() == ScreenOverlay::Real) { @@ -1306,16 +1262,16 @@ Texture VisualiserComponent::createReflectionTexture() { } else { reflectionOpenGLTexture.loadImage(emptyReflectionImage); } - - Texture texture = { reflectionOpenGLTexture.getTextureID(), reflectionOpenGLTexture.getWidth(), reflectionOpenGLTexture.getHeight() }; - + + Texture texture = {reflectionOpenGLTexture.getTextureID(), reflectionOpenGLTexture.getWidth(), reflectionOpenGLTexture.getHeight()}; + return texture; } #endif Texture VisualiserComponent::createScreenTexture() { using namespace juce::gl; - + if (screenOverlay == ScreenOverlay::Smudged || screenOverlay == ScreenOverlay::SmudgedGraticule) { screenOpenGLTexture.loadImage(screenTextureImage); #if OSCI_PREMIUM @@ -1328,8 +1284,8 @@ Texture VisualiserComponent::createScreenTexture() { screenOpenGLTexture.loadImage(emptyScreenImage); } checkGLErrors(__FILE__, __LINE__); - Texture texture = { screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight() }; - + Texture texture = {screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight()}; + if (screenOverlay == ScreenOverlay::Graticule || screenOverlay == ScreenOverlay::SmudgedGraticule) { activateTargetTexture(texture); checkGLErrors(__FILE__, __LINE__); @@ -1338,18 +1294,18 @@ Texture VisualiserComponent::createScreenTexture() { setShader(simpleShader.get()); checkGLErrors(__FILE__, __LINE__); glColorMask(true, false, false, true); - + std::vector<float> data; - + int step = 45; - + for (int i = 0; i < 11; i++) { float s = i * step; - + // Inserting at the beginning of the vector (equivalent to splice(0,0,...)) data.insert(data.begin(), {0, s, 10.0f * step, s}); data.insert(data.begin(), {s, 0, s, 10.0f * step}); - + if (i != 0 && i != 10) { for (int j = 0; j < 51; j++) { float t = j * step / 5; @@ -1363,20 +1319,21 @@ Texture VisualiserComponent::createScreenTexture() { } } } - + for (int j = 0; j < 51; j++) { float t = j * step / 5; - if 
(static_cast<int>(t) % 5 == 0) continue; - + if (static_cast<int>(t) % 5 == 0) + continue; + data.insert(data.begin(), {t - 2, 2.5f * step, t + 2, 2.5f * step}); data.insert(data.begin(), {t - 2, 7.5f * step, t + 2, 7.5f * step}); } - + // Normalize the data for (size_t i = 0; i < data.size(); i++) { data[i] = (data[i] + 31.0f) / 256.0f - 1; } - + glEnableVertexAttribArray(glGetAttribLocation(simpleShader->getProgramID(), "vertexPosition")); glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer); glBufferData(GL_ARRAY_BUFFER, sizeof(float) * data.size(), data.data(), GL_STATIC_DRAW); @@ -1388,39 +1345,54 @@ Texture VisualiserComponent::createScreenTexture() { glBindTexture(GL_TEXTURE_2D, targetTexture.value().id); glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE); } - + return texture; } void VisualiserComponent::checkGLErrors(juce::String file, int line) { using namespace juce::gl; - + GLenum error; while ((error = glGetError()) != GL_NO_ERROR) { juce::String errorMessage; switch (error) { - case GL_INVALID_ENUM: errorMessage = "GL_INVALID_ENUM"; break; - case GL_INVALID_VALUE: errorMessage = "GL_INVALID_VALUE"; break; - case GL_INVALID_OPERATION: errorMessage = "GL_INVALID_OPERATION"; break; - case GL_STACK_OVERFLOW: errorMessage = "GL_STACK_OVERFLOW"; break; - case GL_STACK_UNDERFLOW: errorMessage = "GL_STACK_UNDERFLOW"; break; - case GL_OUT_OF_MEMORY: errorMessage = "GL_OUT_OF_MEMORY"; break; - case GL_INVALID_FRAMEBUFFER_OPERATION: errorMessage = "GL_INVALID_FRAMEBUFFER_OPERATION"; break; - default: errorMessage = "Unknown OpenGL error"; break; + case GL_INVALID_ENUM: + errorMessage = "GL_INVALID_ENUM"; + break; + case GL_INVALID_VALUE: + errorMessage = "GL_INVALID_VALUE"; + break; + case GL_INVALID_OPERATION: + errorMessage = "GL_INVALID_OPERATION"; + break; + case GL_STACK_OVERFLOW: + errorMessage = "GL_STACK_OVERFLOW"; + break; + case GL_STACK_UNDERFLOW: + errorMessage = "GL_STACK_UNDERFLOW"; + break; + case GL_OUT_OF_MEMORY: + errorMessage = "GL_OUT_OF_MEMORY"; + break; + case GL_INVALID_FRAMEBUFFER_OPERATION: + errorMessage = "GL_INVALID_FRAMEBUFFER_OPERATION"; + break; + default: + errorMessage = "Unknown OpenGL error"; + break; } DBG("OpenGL error at " + file + ":" + juce::String(line) + " - " + errorMessage); } } -void VisualiserComponent::paint(juce::Graphics& g) { +void VisualiserComponent::paint(juce::Graphics &g) { g.setColour(Colours::veryDark); g.fillRect(buttonRow); if (!active) { // draw a translucent overlay g.setColour(juce::Colours::black.withAlpha(0.5f)); g.fillRect(viewportArea); - + g.setColour(juce::Colours::white); g.setFont(30.0f); juce::String text = child == nullptr ? 
"Paused" : "Open in another window"; @@ -1428,7 +1400,7 @@ void VisualiserComponent::paint(juce::Graphics& g) { } } -void VisualiserComponent::renderScope(const std::vector& xPoints, const std::vector& yPoints, const std::vector& zPoints) { +void VisualiserComponent::renderScope(const std::vector &xPoints, const std::vector &yPoints, const std::vector &zPoints) { if (screenOverlay != settings.getScreenOverlay()) { screenOverlay = settings.getScreenOverlay(); #if OSCI_PREMIUM @@ -1436,7 +1408,7 @@ void VisualiserComponent::renderScope(const std::vector& xPoints, const s #endif screenTexture = createScreenTexture(); } - + if (sampleRate != oldSampleRate || scratchVertices.empty()) { oldSampleRate = sampleRate; setupArrays(RESAMPLE_RATIO * sampleRate / recordingSettings.getFrameRate()); diff --git a/Source/visualiser/VisualiserComponent.h b/Source/visualiser/VisualiserComponent.h index 86a15ee..1eb380a 100644 --- a/Source/visualiser/VisualiserComponent.h +++ b/Source/visualiser/VisualiserComponent.h @@ -1,17 +1,20 @@ #pragma once -#include #include + +#include + #include "../LookAndFeel.h" -#include "../components/SvgButton.h" -#include "VisualiserSettings.h" -#include "RecordingSettings.h" -#include "../components/StopwatchComponent.h" -#include "../img/qoixx.hpp" -#include "../components/DownloaderComponent.h" #include "../audio/AudioRecorder.h" -#include "../wav/WavParser.h" #include "../components/AudioPlayerComponent.h" +#include "../components/DownloaderComponent.h" +#include "../components/StopwatchComponent.h" +#include "../components/SvgButton.h" +#include "../img/qoixx.hpp" +#include "../video/FFmpegEncoderManager.h" +#include "../wav/WavParser.h" +#include "RecordingSettings.h" +#include "VisualiserSettings.h" #define FILE_RENDER_DUMMY 0 #define FILE_RENDER_PNG 1 @@ -44,8 +47,7 @@ public: VisualiserSettings& settings, RecordingSettings& recordingSettings, VisualiserComponent* parent = nullptr, - bool visualiserOnly = false - ); + bool visualiserOnly = false); ~VisualiserComponent() override; std::function openSettings; @@ -75,7 +77,7 @@ public: VisualiserComponent* parent = nullptr; VisualiserComponent* child = nullptr; std::unique_ptr popout = nullptr; - + std::atomic active = true; private: @@ -83,17 +85,17 @@ private: CommonPluginEditor& editor; float intensity; - + bool visualiserOnly; AudioPlayerComponent audioPlayer{audioProcessor}; - - SvgButton fullScreenButton{ "fullScreen", BinaryData::fullscreen_svg, juce::Colours::white, juce::Colours::white }; - SvgButton popOutButton{ "popOut", BinaryData::open_in_new_svg, juce::Colours::white, juce::Colours::white }; - SvgButton settingsButton{ "settings", BinaryData::cog_svg, juce::Colours::white, juce::Colours::white }; - SvgButton audioInputButton{ "audioInput", BinaryData::microphone_svg, juce::Colours::white, juce::Colours::red }; - + + SvgButton fullScreenButton{"fullScreen", BinaryData::fullscreen_svg, juce::Colours::white, juce::Colours::white}; + SvgButton popOutButton{"popOut", BinaryData::open_in_new_svg, juce::Colours::white, juce::Colours::white}; + SvgButton settingsButton{"settings", BinaryData::cog_svg, juce::Colours::white, juce::Colours::white}; + SvgButton audioInputButton{"audioInput", BinaryData::microphone_svg, juce::Colours::white, juce::Colours::red}; + #if OSCI_PREMIUM - SvgButton sharedTextureButton{ "sharedTexture", BinaryData::spout_svg, juce::Colours::white, juce::Colours::red }; + SvgButton sharedTextureButton{"sharedTexture", BinaryData::spout_svg, juce::Colours::white, juce::Colours::red}; 
SharedTextureManager& sharedTextureManager; SharedTextureSender* sharedTextureSender = nullptr; #endif @@ -109,37 +111,38 @@ private: RecordingSettings& recordingSettings; juce::File ffmpegFile; bool recordingAudio = true; - + #if OSCI_PREMIUM bool recordingVideo = true; bool downloading = false; - + long numFrames = 0; std::vector<unsigned char> framePixels; osci::WriteProcess ffmpegProcess; std::unique_ptr<juce::TemporaryFile> tempVideoFile; + FFmpegEncoderManager ffmpegEncoderManager; #endif - + StopwatchComponent stopwatch; SvgButton record{"Record", BinaryData::record_svg, juce::Colours::red, juce::Colours::red.withAlpha(0.01f)}; - + std::unique_ptr<juce::FileChooser> chooser; std::unique_ptr<juce::TemporaryFile> tempAudioFile; AudioRecorder audioRecorder; - + osci::Semaphore renderingSemaphore{0}; - + void popoutWindow(); - + // OPENGL - + juce::OpenGLContext openGLContext; - + juce::Rectangle<int> buttonRow; juce::Rectangle<int> viewportArea; - + float renderScale = 1.0f; - + GLuint quadIndexBuffer = 0; GLuint vertexIndexBuffer = 0; GLuint vertexBuffer = 0; @@ -158,10 +161,10 @@ private: std::atomic<int> sampleBufferCount = 0; int prevSampleBufferCount = 0; long lastTriggerPosition = 0; - + std::vector<float> scratchVertices; std::vector<float> fullScreenQuad; - + GLuint frameBuffer = 0; double currentFrameRate = 60.0; @@ -175,32 +178,32 @@ private: Texture screenTexture; juce::OpenGLTexture screenOpenGLTexture; std::optional<Texture> targetTexture = std::nullopt; - + juce::Image screenTextureImage = juce::ImageFileFormat::loadFrom(BinaryData::noise_jpg, BinaryData::noise_jpgSize); juce::Image emptyScreenImage = juce::ImageFileFormat::loadFrom(BinaryData::empty_jpg, BinaryData::empty_jpgSize); - + #if OSCI_PREMIUM juce::Image oscilloscopeImage = juce::ImageFileFormat::loadFrom(BinaryData::real_png, BinaryData::real_pngSize); juce::Image vectorDisplayImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_png, BinaryData::vector_display_pngSize); - + juce::Image emptyReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::no_reflection_jpg, BinaryData::no_reflection_jpgSize); juce::Image oscilloscopeReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::real_reflection_png, BinaryData::real_reflection_pngSize); juce::Image vectorDisplayReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_reflection_png, BinaryData::vector_display_reflection_pngSize); - - osci::Point REAL_SCREEN_OFFSET = { 0.02, -0.15 }; - osci::Point REAL_SCREEN_SCALE = { 0.6 }; - - osci::Point VECTOR_DISPLAY_OFFSET = { 0.075, -0.045 }; - osci::Point VECTOR_DISPLAY_SCALE = { 0.6 }; + + osci::Point REAL_SCREEN_OFFSET = {0.02, -0.15}; + osci::Point REAL_SCREEN_SCALE = {0.6}; + + osci::Point VECTOR_DISPLAY_OFFSET = {0.075, -0.045}; + osci::Point VECTOR_DISPLAY_SCALE = {0.6}; float VECTOR_DISPLAY_FISH_EYE = 0.5; - + juce::OpenGLTexture reflectionOpenGLTexture; Texture reflectionTexture; - + std::unique_ptr<juce::OpenGLShaderProgram> glowShader; std::unique_ptr<juce::OpenGLShaderProgram> afterglowShader; #endif - + std::unique_ptr<juce::OpenGLShaderProgram> simpleShader; std::unique_ptr<juce::OpenGLShaderProgram> texturedShader; std::unique_ptr<juce::OpenGLShaderProgram> blurShader; @@ -208,10 +211,10 @@ private: std::unique_ptr<juce::OpenGLShaderProgram> lineShader; std::unique_ptr<juce::OpenGLShaderProgram> outputShader; juce::OpenGLShaderProgram* currentShader; - + float fadeAmount; ScreenOverlay screenOverlay = ScreenOverlay::INVALID; - + const double RESAMPLE_RATIO = 6.0; double sampleRate = -1; double oldSampleRate = -1; @@ -243,7 +246,7 @@ private: void viewportChanged(juce::Rectangle<int> area); void renderScope(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints); - + double getSweepIncrement(); Texture createScreenTexture(); @@ -260,7 
+263,7 @@ public: VisualiserWindow(juce::String name, VisualiserComponent* parent) : parent(parent), wasPaused(!parent->active), juce::DocumentWindow(name, juce::Colours::black, juce::DocumentWindow::TitleBarButtons::allButtons) { setAlwaysOnTop(true); } - + void closeButtonPressed() override { // local copy of parent so that we can safely delete the child VisualiserComponent* parent = this->parent; diff --git a/modules/osci_render_core b/modules/osci_render_core index cf124cc..f8ac300 160000 --- a/modules/osci_render_core +++ b/modules/osci_render_core @@ -1 +1 @@ -Subproject commit cf124cc5de4d9857c7633e9c03117f20e1550e81 +Subproject commit f8ac3007c25df061ca6e71ad2eaff4a5d01e2d7b diff --git a/osci-render.jucer b/osci-render.jucer index 7172d59..9b053ee 100644 --- a/osci-render.jucer +++ b/osci-render.jucer @@ -574,6 +574,16 @@ + + + + + + @@ -644,8 +654,6 @@ file="Source/FrameSettingsComponent.cpp"/> - - diff --git a/sosci.jucer b/sosci.jucer index a3d75f7..c3419b2 100644 --- a/sosci.jucer +++ b/sosci.jucer @@ -77,6 +77,12 @@ + + + +
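
Note on the new encoder path: the call site in VisualiserComponent::setRecording above shows the full signature of FFmpegEncoderManager::buildVideoEncodingCommand(codec, crf, videoToolboxQuality, width, height, frameRate, compressionPreset, outputFile), but the manager's implementation in Source/video/FFmpegEncoderManager.cpp is not included in this excerpt. For context, the inline code this patch removes appended a second -c:v flag (hevc_videotoolbox) after -c:v libx265 on Apple Silicon, relying on ffmpeg honouring the last codec flag it sees; building the command in one place avoids duplicated options. The following is a minimal standalone sketch of that selection pattern only: pickVideoEncoder and the compile-time platform checks are illustrative assumptions, not the plugin's actual API, which may instead probe the bundled ffmpeg binary for encoder support at runtime.

#include <cstdio>
#include <string>

// Stand-in for the plugin's RecordingSettings codec choice.
enum class VideoCodec { H264, H265, VP9, ProRes };

// Pick one encoder per codec, preferring hardware where it is commonly
// available. (A runtime approach could parse `ffmpeg -encoders` output and
// fall back through candidates such as h264_nvenc before libx264.)
static std::string pickVideoEncoder(VideoCodec codec) {
    switch (codec) {
        case VideoCodec::H264:
#if defined(__APPLE__)
            return "h264_videotoolbox"; // hardware via VideoToolbox
#else
            return "libx264"; // software fallback
#endif
        case VideoCodec::H265:
#if defined(__APPLE__)
            return "hevc_videotoolbox";
#else
            return "libx265";
#endif
        case VideoCodec::VP9:
            return "libvpx-vp9"; // no widely deployed hardware VP9 encoder
        case VideoCodec::ProRes:
            return "prores";
    }
    return "libx264";
}

// Same input arrangement as the removed code: raw RGBA frames on stdin,
// vertical flip (OpenGL's origin is bottom-left), yuv420p output.
static std::string buildVideoEncodingCommand(VideoCodec codec, int crf, int quality,
                                             int width, int height, int frameRate,
                                             const std::string& preset,
                                             const std::string& outputFile) {
    const std::string encoder = pickVideoEncoder(codec);
    std::string cmd = "ffmpeg -r " + std::to_string(frameRate) +
                      " -f rawvideo -pix_fmt rgba -s " + std::to_string(width) +
                      "x" + std::to_string(height) +
                      " -i - -threads 4 -y -pix_fmt yuv420p -c:v " + encoder;
    if (encoder == "libx264" || encoder == "libx265") {
        // CRF and x264/x265-style presets only apply to the software encoders.
        cmd += " -preset " + preset + " -crf " + std::to_string(crf);
    } else if (encoder == "libvpx-vp9") {
        cmd += " -b:v 0 -crf " + std::to_string(crf) + " -deadline good -cpu-used 2";
    } else if (encoder.find("videotoolbox") != std::string::npos) {
        cmd += " -q:v " + std::to_string(quality); // VideoToolbox quality scale
        if (codec == VideoCodec::H265) cmd += " -tag:v hvc1"; // QuickTime-friendly tag
    } else if (encoder == "prores") {
        cmd += " -profile:v 3"; // ProRes 422 HQ
    }
    return cmd + " -vf vflip \"" + outputFile + "\"";
}

int main() {
    std::puts(buildVideoEncodingCommand(VideoCodec::H265, 25, 65, 1024, 1024, 60,
                                        "fast", "out.mp4").c_str());
}

Compiled on its own (e.g. c++ -std=c++17 encoder_sketch.cpp), this prints a command of the same shape as the one the removed inline code assembled, with the codec-specific flags chosen exactly once.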