kopia lustrzana https://github.com/jameshball/osci-render
Fix compilation issues on free version, add more extensive hardware accelleration support for video
rodzic
231e1d8234
commit
a125abfa7f
|
@ -158,12 +158,15 @@ void MainComponent::updateFileLabel() {
|
|||
showRightArrow = audioProcessor.getCurrentFileIndex() < audioProcessor.numFiles() - 1;
|
||||
|
||||
{
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
|
||||
if (audioProcessor.objectServerRendering) {
|
||||
fileLabel.setText("Rendering from Blender", juce::dontSendNotification);
|
||||
} else if (audioProcessor.isSyphonInputActive()) {
|
||||
if (audioProcessor.isSyphonInputActive()) {
|
||||
fileLabel.setText(audioProcessor.getSyphonSourceName(), juce::dontSendNotification);
|
||||
} else if (audioProcessor.getCurrentFileIndex() == -1) {
|
||||
} else
|
||||
#endif
|
||||
if (audioProcessor.objectServerRendering) {
|
||||
fileLabel.setText("Rendering from Blender", juce::dontSendNotification);
|
||||
}else if (audioProcessor.getCurrentFileIndex() == -1) {
|
||||
fileLabel.setText("No file open", juce::dontSendNotification);
|
||||
} else {
|
||||
fileLabel.setText(audioProcessor.getCurrentFileName(), juce::dontSendNotification);
|
||||
|
|
|
@ -526,8 +526,8 @@ void OscirenderAudioProcessorEditor::openVisualiserSettings() {
|
|||
visualiserSettingsWindow.toFront(true);
|
||||
}
|
||||
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
void OscirenderAudioProcessorEditor::openSyphonInputDialog() {
|
||||
#if JUCE_MAC || JUCE_WINDOWS
|
||||
SyphonInputSelectorComponent* selector = nullptr;
|
||||
{
|
||||
juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
|
||||
|
@ -548,7 +548,6 @@ void OscirenderAudioProcessorEditor::openSyphonInputDialog() {
|
|||
options.useNativeTitleBar = true;
|
||||
options.resizable = false;
|
||||
options.launchAsync();
|
||||
#endif
|
||||
}
|
||||
|
||||
void OscirenderAudioProcessorEditor::onSyphonInputSelected(const juce::String& server, const juce::String& app) {
|
||||
|
@ -560,3 +559,4 @@ void OscirenderAudioProcessorEditor::onSyphonInputDisconnected() {
|
|||
juce::SpinLock::ScopedLockType lock(audioProcessor.syphonLock);
|
||||
audioProcessor.disconnectSyphonInput();
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -7,19 +7,18 @@
|
|||
*/
|
||||
|
||||
#include "PluginProcessor.h"
|
||||
|
||||
#include "PluginEditor.h"
|
||||
#include "parser/FileParser.h"
|
||||
#include "parser/FrameProducer.h"
|
||||
#include "audio/VectorCancellingEffect.h"
|
||||
#include "audio/DistortEffect.h"
|
||||
#include "audio/SmoothEffect.h"
|
||||
#include "audio/BitCrushEffect.h"
|
||||
#include "audio/BulgeEffect.h"
|
||||
#include "audio/DistortEffect.h"
|
||||
#include "audio/SmoothEffect.h"
|
||||
#include "audio/VectorCancellingEffect.h"
|
||||
#include "parser/FileParser.h"
|
||||
#include "parser/FrameProducer.h"
|
||||
|
||||
#if JUCE_MAC || JUCE_WINDOWS
|
||||
#include "SyphonFrameGrabber.h"
|
||||
#include "img/ImageParser.h"
|
||||
#include "../modules/juce_sharedtexture/SharedTexture.h"
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
#include "img/ImageParser.h"
|
||||
#endif
|
||||
|
||||
//==============================================================================
|
||||
|
@ -28,111 +27,103 @@ OscirenderAudioProcessor::OscirenderAudioProcessor() : CommonAudioProcessor(Buse
|
|||
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
std::make_shared<BitCrushEffect>(),
|
||||
new osci::EffectParameter("Bit Crush", "Limits the resolution of points drawn to the screen, making the object look pixelated, and making the audio sound more 'digital' and distorted.", "bitCrush", VERSION_HINT, 0.6, 0.0, 1.0)
|
||||
));
|
||||
new osci::EffectParameter("Bit Crush", "Limits the resolution of points drawn to the screen, making the object look pixelated, and making the audio sound more 'digital' and distorted.", "bitCrush", VERSION_HINT, 0.6, 0.0, 1.0)));
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
std::make_shared<BulgeEffect>(),
|
||||
new osci::EffectParameter("Bulge", "Applies a bulge that makes the centre of the image larger, and squishes the edges of the image. This applies a distortion to the audio.", "bulge", VERSION_HINT, 0.5, 0.0, 1.0)
|
||||
));
|
||||
new osci::EffectParameter("Bulge", "Applies a bulge that makes the centre of the image larger, and squishes the edges of the image. This applies a distortion to the audio.", "bulge", VERSION_HINT, 0.5, 0.0, 1.0)));
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
std::make_shared<VectorCancellingEffect>(),
|
||||
new osci::EffectParameter("Vector Cancelling", "Inverts the audio and image every few samples to 'cancel out' the audio, making the audio quiet, and distorting the image.", "vectorCancelling", VERSION_HINT, 0.1111111, 0.0, 1.0)
|
||||
));
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
return input * osci::Point(values[0], values[1], values[2]);
|
||||
}, std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Scale X", "Scales the object in the horizontal direction.", "scaleX", VERSION_HINT, 1.0, -5.0, 5.0),
|
||||
new osci::EffectParameter("Scale Y", "Scales the object in the vertical direction.", "scaleY", VERSION_HINT, 1.0, -5.0, 5.0),
|
||||
new osci::EffectParameter("Scale Z", "Scales the depth of the object.", "scaleZ", VERSION_HINT, 1.0, -5.0, 5.0),
|
||||
}
|
||||
));
|
||||
new osci::EffectParameter("Vector Cancelling", "Inverts the audio and image every few samples to 'cancel out' the audio, making the audio quiet, and distorting the image.", "vectorCancelling", VERSION_HINT, 0.1111111, 0.0, 1.0)));
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
int flip = index % 2 == 0 ? 1 : -1;
|
||||
osci::Point jitter = osci::Point(flip * values[0], flip * values[1], flip * values[2]);
|
||||
return input + jitter;
|
||||
}, std::vector<osci::EffectParameter*>{
|
||||
return input * osci::Point(values[0], values[1], values[2]);
|
||||
},
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Scale X", "Scales the object in the horizontal direction.", "scaleX", VERSION_HINT, 1.0, -5.0, 5.0),
|
||||
new osci::EffectParameter("Scale Y", "Scales the object in the vertical direction.", "scaleY", VERSION_HINT, 1.0, -5.0, 5.0),
|
||||
new osci::EffectParameter("Scale Z", "Scales the depth of the object.", "scaleZ", VERSION_HINT, 1.0, -5.0, 5.0),
|
||||
}));
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
int flip = index % 2 == 0 ? 1 : -1;
|
||||
osci::Point jitter = osci::Point(flip * values[0], flip * values[1], flip * values[2]);
|
||||
return input + jitter;
|
||||
},
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Distort X", "Distorts the image in the horizontal direction by jittering the audio sample being drawn.", "distortX", VERSION_HINT, 0.0, 0.0, 1.0),
|
||||
new osci::EffectParameter("Distort Y", "Distorts the image in the vertical direction by jittering the audio sample being drawn.", "distortY", VERSION_HINT, 0.0, 0.0, 1.0),
|
||||
new osci::EffectParameter("Distort Z", "Distorts the depth of the image by jittering the audio sample being drawn.", "distortZ", VERSION_HINT, 0.1, 0.0, 1.0),
|
||||
}
|
||||
));
|
||||
}));
|
||||
auto rippleEffect = std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
double phase = values[1] * std::numbers::pi;
|
||||
double distance = 100 * values[2] * (input.x * input.x + input.y * input.y);
|
||||
input.z += values[0] * std::sin(phase + distance);
|
||||
return input;
|
||||
}, std::vector<osci::EffectParameter*>{
|
||||
},
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Ripple Depth", "Controls how large the ripples applied to the image are.", "rippleDepth", VERSION_HINT, 0.2, 0.0, 1.0),
|
||||
new osci::EffectParameter("Ripple Phase", "Controls the position of the ripple. Animate this to see a moving ripple effect.", "ripplePhase", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
new osci::EffectParameter("Ripple Amount", "Controls how many ripples are applied to the image.", "rippleAmount", VERSION_HINT, 0.1, 0.0, 1.0),
|
||||
}
|
||||
);
|
||||
rippleEffect->getParameter("ripplePhase")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth);
|
||||
});
|
||||
rippleEffect->getParameter("ripplePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
|
||||
toggleableEffects.push_back(rippleEffect);
|
||||
auto rotateEffect = std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
input.rotate(values[0] * std::numbers::pi, values[1] * std::numbers::pi, values[2] * std::numbers::pi);
|
||||
return input;
|
||||
}, std::vector<osci::EffectParameter*>{
|
||||
},
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Rotate X", "Controls the rotation of the object in the X axis.", "rotateX", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
new osci::EffectParameter("Rotate Y", "Controls the rotation of the object in the Y axis.", "rotateY", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
new osci::EffectParameter("Rotate Z", "Controls the rotation of the object in the Z axis.", "rotateZ", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
}
|
||||
);
|
||||
rotateEffect->getParameter("rotateY")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth);
|
||||
});
|
||||
rotateEffect->getParameter("rotateY")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
|
||||
rotateEffect->getParameter("rotateY")->lfoRate->setUnnormalisedValueNotifyingHost(0.2);
|
||||
toggleableEffects.push_back(rotateEffect);
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
return input + osci::Point(values[0], values[1], values[2]);
|
||||
}, std::vector<osci::EffectParameter*>{
|
||||
},
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Translate X", "Moves the object horizontally.", "translateX", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
new osci::EffectParameter("Translate Y", "Moves the object vertically.", "translateY", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
new osci::EffectParameter("Translate Z", "Moves the object away from the camera.", "translateZ", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
}
|
||||
));
|
||||
new osci::EffectParameter("Translate Z", "Moves the object away from the camera.", "translateZ", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
}));
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
double length = 10 * values[0] * input.magnitude();
|
||||
double newX = input.x * std::cos(length) - input.y * std::sin(length);
|
||||
double newY = input.x * std::sin(length) + input.y * std::cos(length);
|
||||
return osci::Point(newX, newY, input.z);
|
||||
}, std::vector<osci::EffectParameter*>{
|
||||
},
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Swirl", "Swirls the image in a spiral pattern.", "swirl", VERSION_HINT, 0.3, -1.0, 1.0),
|
||||
}
|
||||
));
|
||||
}));
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
std::make_shared<SmoothEffect>(),
|
||||
new osci::EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0)
|
||||
));
|
||||
new osci::EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0)));
|
||||
std::shared_ptr<osci::Effect> wobble = std::make_shared<osci::Effect>(
|
||||
wobbleEffect,
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Wobble Amount", "Adds a sine wave of the prominent frequency in the audio currently playing. The sine wave's frequency is slightly offset to create a subtle 'wobble' in the image. Increasing the slider increases the strength of the wobble.", "wobble", VERSION_HINT, 0.3, 0.0, 1.0),
|
||||
new osci::EffectParameter("Wobble Phase", "Controls the phase of the wobble.", "wobblePhase", VERSION_HINT, 0.0, -1.0, 1.0),
|
||||
}
|
||||
);
|
||||
wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth);
|
||||
});
|
||||
wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
|
||||
toggleableEffects.push_back(wobble);
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
delayEffect,
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Delay Decay", "Adds repetitions, delays, or echos to the audio. This slider controls the volume of the echo.", "delayDecay", VERSION_HINT, 0.4, 0.0, 1.0),
|
||||
new osci::EffectParameter("Delay Length", "Controls the time in seconds between echos.", "delayLength", VERSION_HINT, 0.5, 0.0, 1.0)
|
||||
}
|
||||
));
|
||||
new osci::EffectParameter("Delay Length", "Controls the time in seconds between echos.", "delayLength", VERSION_HINT, 0.5, 0.0, 1.0)}));
|
||||
toggleableEffects.push_back(std::make_shared<osci::Effect>(
|
||||
dashedLineEffect,
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Dash Length", "Controls the length of the dashed line.", "dashLength", VERSION_HINT, 0.2, 0.0, 1.0),
|
||||
}
|
||||
));
|
||||
}));
|
||||
toggleableEffects.push_back(custom);
|
||||
toggleableEffects.push_back(trace);
|
||||
trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int) osci::LfoType::Sawtooth);
|
||||
trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int)osci::LfoType::Sawtooth);
|
||||
|
||||
for (int i = 0; i < toggleableEffects.size(); i++) {
|
||||
auto effect = toggleableEffects[i];
|
||||
|
@ -186,7 +177,7 @@ OscirenderAudioProcessor::OscirenderAudioProcessor() : CommonAudioProcessor(Buse
|
|||
for (int i = 0; i < luaEffects.size(); i++) {
|
||||
luaEffects[i]->parameters[0]->addListener(this);
|
||||
}
|
||||
|
||||
|
||||
synth.addSound(defaultSound);
|
||||
|
||||
addAllParameters();
|
||||
|
@ -228,13 +219,12 @@ void OscirenderAudioProcessor::addLuaSlider() {
|
|||
[this, sliderIndex](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
luaValues[sliderIndex].store(values[0]);
|
||||
return input;
|
||||
}, new osci::EffectParameter(
|
||||
},
|
||||
new osci::EffectParameter(
|
||||
"Lua Slider " + sliderName,
|
||||
"Controls the value of the Lua variable called slider_" + sliderName.toLowerCase() + ".",
|
||||
"lua" + sliderName,
|
||||
VERSION_HINT, 0.0, 0.0, 1.0
|
||||
)
|
||||
));
|
||||
VERSION_HINT, 0.0, 0.0, 1.0)));
|
||||
}
|
||||
|
||||
void OscirenderAudioProcessor::addErrorListener(ErrorListener* listener) {
|
||||
|
@ -258,10 +248,10 @@ void OscirenderAudioProcessor::updateEffectPrecedence() {
|
|||
// parsersLock AND effectsLock must be locked before calling this function
|
||||
void OscirenderAudioProcessor::updateFileBlock(int index, std::shared_ptr<juce::MemoryBlock> block) {
|
||||
if (index < 0 || index >= fileBlocks.size()) {
|
||||
return;
|
||||
}
|
||||
fileBlocks[index] = block;
|
||||
openFile(index);
|
||||
return;
|
||||
}
|
||||
fileBlocks[index] = block;
|
||||
openFile(index);
|
||||
}
|
||||
|
||||
// parsersLock AND effectsLock must be locked before calling this function
|
||||
|
@ -269,7 +259,7 @@ void OscirenderAudioProcessor::addFile(juce::File file) {
|
|||
fileBlocks.push_back(std::make_shared<juce::MemoryBlock>());
|
||||
fileNames.push_back(file.getFileName());
|
||||
fileIds.push_back(currentFileId++);
|
||||
parsers.push_back(std::make_shared<FileParser>(*this, errorCallback));
|
||||
parsers.push_back(std::make_shared<FileParser>(*this, errorCallback));
|
||||
sounds.push_back(new ShapeSound(*this, parsers.back()));
|
||||
file.createInputStream()->readIntoMemoryBlock(*fileBlocks.back());
|
||||
|
||||
|
@ -306,9 +296,9 @@ void OscirenderAudioProcessor::setFileRemovedCallback(std::function<void(int)> c
|
|||
|
||||
// parsersLock AND effectsLock must be locked before calling this function
|
||||
void OscirenderAudioProcessor::removeFile(int index) {
|
||||
if (index < 0 || index >= fileBlocks.size()) {
|
||||
return;
|
||||
}
|
||||
if (index < 0 || index >= fileBlocks.size()) {
|
||||
return;
|
||||
}
|
||||
fileBlocks.erase(fileBlocks.begin() + index);
|
||||
fileNames.erase(fileNames.begin() + index);
|
||||
fileIds.erase(fileIds.begin() + index);
|
||||
|
@ -350,9 +340,9 @@ int OscirenderAudioProcessor::numFiles() {
|
|||
// it will reparse any existing files, so it is safer.
|
||||
// parsersLock AND effectsLock must be locked before calling this function
|
||||
void OscirenderAudioProcessor::openFile(int index) {
|
||||
if (index < 0 || index >= fileBlocks.size()) {
|
||||
return;
|
||||
}
|
||||
if (index < 0 || index >= fileBlocks.size()) {
|
||||
return;
|
||||
}
|
||||
parsers[index]->parse(juce::String(fileIds[index]), fileNames[index], fileNames[index].fromLastOccurrenceOf(".", true, false).toLowerCase(), std::make_unique<juce::MemoryInputStream>(*fileBlocks[index], false), font);
|
||||
changeCurrentFile(index);
|
||||
}
|
||||
|
@ -365,9 +355,9 @@ void OscirenderAudioProcessor::changeCurrentFile(int index) {
|
|||
currentFile = -1;
|
||||
changeSound(defaultSound);
|
||||
}
|
||||
if (index < 0 || index >= fileBlocks.size()) {
|
||||
return;
|
||||
}
|
||||
if (index < 0 || index >= fileBlocks.size()) {
|
||||
return;
|
||||
}
|
||||
currentFile = index;
|
||||
changeSound(sounds[index]);
|
||||
}
|
||||
|
@ -402,7 +392,7 @@ std::shared_ptr<FileParser> OscirenderAudioProcessor::getCurrentFileParser() {
|
|||
|
||||
juce::String OscirenderAudioProcessor::getCurrentFileName() {
|
||||
if (objectServerRendering || currentFile == -1) {
|
||||
return "";
|
||||
return "";
|
||||
} else {
|
||||
return fileNames[currentFile];
|
||||
}
|
||||
|
@ -446,7 +436,7 @@ void OscirenderAudioProcessor::setObjectServerPort(int port) {
|
|||
void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, juce::MidiBuffer& midiMessages) {
|
||||
juce::ScopedNoDenormals noDenormals;
|
||||
// Audio info variables
|
||||
int totalNumInputChannels = getTotalNumInputChannels();
|
||||
int totalNumInputChannels = getTotalNumInputChannels();
|
||||
int totalNumOutputChannels = getTotalNumOutputChannels();
|
||||
double sampleRate = getSampleRate();
|
||||
|
||||
|
@ -473,7 +463,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
// TODO: To make this more resilient to changing BPMs, we should change how this is calculated
|
||||
// or use another property of the AudioPlayHead::PositionInfo
|
||||
double playTimeBeats = bpm * playTimeSeconds / 60;
|
||||
|
||||
|
||||
// Calculated time per sample in seconds and beats
|
||||
double sTimeSec = 1.f / sampleRate;
|
||||
double sTimeBeats = bpm * sTimeSec / 60;
|
||||
|
@ -487,20 +477,20 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
if (!usingMidi) {
|
||||
midiMessages.clear();
|
||||
}
|
||||
|
||||
|
||||
// if midi enabled has changed state
|
||||
if (prevMidiEnabled != usingMidi) {
|
||||
for (int i = 1; i <= 16; i++) {
|
||||
midiMessages.addEvent(juce::MidiMessage::allNotesOff(i), i);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// if midi has just been disabled or we need to retrigger
|
||||
if (!usingMidi && (retriggerMidi || prevMidiEnabled)) {
|
||||
midiMessages.addEvent(juce::MidiMessage::noteOn(1, 60, 1.0f), 17);
|
||||
retriggerMidi = false;
|
||||
}
|
||||
|
||||
|
||||
prevMidiEnabled = usingMidi;
|
||||
|
||||
const double EPSILON = 0.00001;
|
||||
|
@ -512,8 +502,9 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
|
||||
juce::AudioBuffer<float> outputBuffer3d = juce::AudioBuffer<float>(3, buffer.getNumSamples());
|
||||
outputBuffer3d.clear();
|
||||
|
||||
|
||||
{
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
juce::SpinLock::ScopedLockType sLock(syphonLock);
|
||||
if (isSyphonInputActive()) {
|
||||
for (int sample = 0; sample < outputBuffer3d.getNumSamples(); sample++) {
|
||||
|
@ -521,7 +512,9 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
outputBuffer3d.setSample(0, sample, point.x);
|
||||
outputBuffer3d.setSample(1, sample, point.y);
|
||||
}
|
||||
} else if (usingInput && totalNumInputChannels >= 1) {
|
||||
} else
|
||||
#endif
|
||||
if (usingInput && totalNumInputChannels >= 1) {
|
||||
if (totalNumInputChannels >= 2) {
|
||||
for (auto channel = 0; channel < juce::jmin(2, totalNumInputChannels); channel++) {
|
||||
outputBuffer3d.copyFrom(channel, 0, inputBuffer, channel, 0, buffer.getNumSamples());
|
||||
|
@ -535,9 +528,10 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
// handle all midi messages
|
||||
auto midiIterator = midiMessages.cbegin();
|
||||
std::for_each(midiIterator,
|
||||
midiMessages.cend(),
|
||||
[&] (const juce::MidiMessageMetadata& meta) { synth.publicHandleMidiEvent(meta.getMessage()); }
|
||||
);
|
||||
midiMessages.cend(),
|
||||
[&](const juce::MidiMessageMetadata& meta) {
|
||||
synth.publicHandleMidiEvent(meta.getMessage());
|
||||
});
|
||||
} else {
|
||||
juce::SpinLock::ScopedLockType lock1(parsersLock);
|
||||
juce::SpinLock::ScopedLockType lock2(effectsLock);
|
||||
|
@ -551,12 +545,12 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
midiMessages.clear();
|
||||
|
||||
|
||||
auto* channelData = buffer.getArrayOfWritePointers();
|
||||
|
||||
for (int sample = 0; sample < buffer.getNumSamples(); ++sample) {
|
||||
|
||||
for (int sample = 0; sample < buffer.getNumSamples(); ++sample) {
|
||||
if (animateFrames->getBoolValue()) {
|
||||
if (juce::JUCEApplicationBase::isStandaloneApp()) {
|
||||
animationFrame = animationFrame + sTimeSec * animationRate->getValueUnnormalised();
|
||||
|
@ -573,7 +567,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
if (loopAnimation->getBoolValue()) {
|
||||
animationFrame = std::fmod(animationFrame, totalFrames);
|
||||
} else {
|
||||
animationFrame = juce::jlimit(0.0, (double) totalFrames - 1, animationFrame.load());
|
||||
animationFrame = juce::jlimit(0.0, (double)totalFrames - 1, animationFrame.load());
|
||||
}
|
||||
sounds[currentFile]->parser->setFrame(animationFrame);
|
||||
}
|
||||
|
@ -598,7 +592,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
currentVolume = std::sqrt(squaredVolume);
|
||||
currentVolume = juce::jlimit(0.0, 1.0, currentVolume);
|
||||
|
||||
osci::Point channels = { outputBuffer3d.getSample(0, sample), outputBuffer3d.getSample(1, sample), outputBuffer3d.getSample(2, sample) };
|
||||
osci::Point channels = {outputBuffer3d.getSample(0, sample), outputBuffer3d.getSample(1, sample), outputBuffer3d.getSample(2, sample)};
|
||||
|
||||
{
|
||||
juce::SpinLock::ScopedLockType lock1(parsersLock);
|
||||
|
@ -621,8 +615,8 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
}
|
||||
}
|
||||
|
||||
double x = channels.x;
|
||||
double y = channels.y;
|
||||
double x = channels.x;
|
||||
double y = channels.y;
|
||||
|
||||
x *= volume;
|
||||
y *= volume;
|
||||
|
@ -630,19 +624,19 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
// clip
|
||||
x = juce::jmax(-threshold, juce::jmin(threshold.load(), x));
|
||||
y = juce::jmax(-threshold, juce::jmin(threshold.load(), y));
|
||||
|
||||
|
||||
threadManager.write(osci::Point(x, y, 1));
|
||||
|
||||
|
||||
// Apply mute if active
|
||||
if (muteParameter->getBoolValue()) {
|
||||
x = 0.0;
|
||||
y = 0.0;
|
||||
}
|
||||
|
||||
|
||||
if (totalNumOutputChannels >= 2) {
|
||||
channelData[0][sample] = x;
|
||||
channelData[1][sample] = y;
|
||||
} else if (totalNumOutputChannels == 1) {
|
||||
channelData[0][sample] = x;
|
||||
channelData[1][sample] = y;
|
||||
} else if (totalNumOutputChannels == 1) {
|
||||
channelData[0][sample] = x;
|
||||
}
|
||||
|
||||
|
@ -650,7 +644,7 @@ void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, ju
|
|||
playTimeSeconds += sTimeSec;
|
||||
playTimeBeats += sTimeBeats;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// used for any callback that must guarantee all audio is recieved (e.g. when recording to a file)
|
||||
juce::SpinLock::ScopedLockType lock(audioThreadCallbackLock);
|
||||
|
@ -669,11 +663,11 @@ void OscirenderAudioProcessor::getStateInformation(juce::MemoryBlock& destData)
|
|||
// we need to stop recording the visualiser when saving the state, otherwise
|
||||
// there are issues. This is the only place we can do this because there is
|
||||
// no callback when closing the standalone app except for this.
|
||||
|
||||
|
||||
if (haltRecording != nullptr && juce::JUCEApplicationBase::isStandaloneApp()) {
|
||||
haltRecording();
|
||||
}
|
||||
|
||||
|
||||
juce::SpinLock::ScopedLockType lock1(parsersLock);
|
||||
juce::SpinLock::ScopedLockType lock2(effectsLock);
|
||||
|
||||
|
@ -711,7 +705,7 @@ void OscirenderAudioProcessor::getStateInformation(juce::MemoryBlock& destData)
|
|||
fontXml->setAttribute("italic", font.isItalic());
|
||||
|
||||
auto filesXml = xml->createNewChildElement("files");
|
||||
|
||||
|
||||
for (int i = 0; i < fileBlocks.size(); i++) {
|
||||
auto fileXml = filesXml->createNewChildElement("file");
|
||||
fileXml->setAttribute("name", fileNames[i]);
|
||||
|
@ -721,7 +715,7 @@ void OscirenderAudioProcessor::getStateInformation(juce::MemoryBlock& destData)
|
|||
xml->setAttribute("currentFile", currentFile);
|
||||
|
||||
recordingParameters.save(xml.get());
|
||||
|
||||
|
||||
saveProperties(*xml);
|
||||
|
||||
copyXmlToBinary(*xml, destData);
|
||||
|
@ -731,7 +725,7 @@ void OscirenderAudioProcessor::setStateInformation(const void* data, int sizeInB
|
|||
if (juce::JUCEApplicationBase::isStandaloneApp() && programCrashedAndUserWantsToReset()) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
std::unique_ptr<juce::XmlElement> xml;
|
||||
|
||||
const uint32_t magicXmlNumber = 0x21324356;
|
||||
|
@ -835,14 +829,14 @@ void OscirenderAudioProcessor::setStateInformation(const void* data, int sizeInB
|
|||
fileBlock = std::make_shared<juce::MemoryBlock>();
|
||||
fileBlock->fromBase64Encoding(text);
|
||||
}
|
||||
|
||||
|
||||
addFile(fileName, fileBlock);
|
||||
}
|
||||
}
|
||||
changeCurrentFile(xml->getIntAttribute("currentFile", -1));
|
||||
|
||||
recordingParameters.load(xml.get());
|
||||
|
||||
|
||||
loadProperties(*xml);
|
||||
objectServer.reload();
|
||||
|
||||
|
@ -904,7 +898,7 @@ void OscirenderAudioProcessor::envelopeChanged(EnvelopeComponent* changedEnvelop
|
|||
}
|
||||
}
|
||||
|
||||
#if JUCE_MAC || JUCE_WINDOWS
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
// Syphon/Spout input management
|
||||
|
||||
// syphonLock must be held when calling this function
|
||||
|
|
|
@ -11,40 +11,43 @@
|
|||
#define VERSION_HINT 2
|
||||
|
||||
#include <JuceHeader.h>
|
||||
#include "audio/ShapeSound.h"
|
||||
#include "audio/ShapeVoice.h"
|
||||
#include "audio/PublicSynthesiser.h"
|
||||
#include "audio/SampleRateManager.h"
|
||||
|
||||
#include <numbers>
|
||||
#include "audio/DelayEffect.h"
|
||||
#include "audio/WobbleEffect.h"
|
||||
#include "audio/PerspectiveEffect.h"
|
||||
#include "obj/ObjectServer.h"
|
||||
|
||||
#include "CommonPluginProcessor.h"
|
||||
#include "UGen/Env.h"
|
||||
#include "UGen/ugen_JuceEnvelopeComponent.h"
|
||||
#include "audio/CustomEffect.h"
|
||||
#include "audio/DashedLineEffect.h"
|
||||
#include "CommonPluginProcessor.h"
|
||||
#include "SyphonFrameGrabber.h"
|
||||
#include "audio/DelayEffect.h"
|
||||
#include "audio/PerspectiveEffect.h"
|
||||
#include "audio/PublicSynthesiser.h"
|
||||
#include "audio/SampleRateManager.h"
|
||||
#include "audio/ShapeSound.h"
|
||||
#include "audio/ShapeVoice.h"
|
||||
#include "audio/WobbleEffect.h"
|
||||
#include "obj/ObjectServer.h"
|
||||
|
||||
#if JUCE_MAC || JUCE_WINDOWS
|
||||
#include "../modules/juce_sharedtexture/SharedTexture.h"
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
#include "../modules/juce_sharedtexture/SharedTexture.h"
|
||||
#include "video/SyphonFrameGrabber.h"
|
||||
#endif
|
||||
|
||||
//==============================================================================
|
||||
/**
|
||||
*/
|
||||
class OscirenderAudioProcessor : public CommonAudioProcessor, juce::AudioProcessorParameter::Listener, public EnvelopeComponentListener
|
||||
#if JucePlugin_Enable_ARA
|
||||
, public juce::AudioProcessorARAExtension
|
||||
#endif
|
||||
*/
|
||||
class OscirenderAudioProcessor : public CommonAudioProcessor, juce::AudioProcessorParameter::Listener, public EnvelopeComponentListener
|
||||
#if JucePlugin_Enable_ARA
|
||||
,
|
||||
public juce::AudioProcessorARAExtension
|
||||
#endif
|
||||
{
|
||||
public:
|
||||
OscirenderAudioProcessor();
|
||||
~OscirenderAudioProcessor() override;
|
||||
|
||||
void prepareToPlay (double sampleRate, int samplesPerBlock) override;
|
||||
void processBlock (juce::AudioBuffer<float>&, juce::MidiBuffer&) override;
|
||||
void prepareToPlay(double sampleRate, int samplesPerBlock) override;
|
||||
void processBlock(juce::AudioBuffer<float>&, juce::MidiBuffer&) override;
|
||||
|
||||
juce::AudioProcessorEditor* createEditor() override;
|
||||
|
||||
|
@ -56,38 +59,34 @@ public:
|
|||
void parameterGestureChanged(int parameterIndex, bool gestureIsStarting) override;
|
||||
void envelopeChanged(EnvelopeComponent* changedEnvelope) override;
|
||||
|
||||
std::vector<std::shared_ptr<osci::Effect>> toggleableEffects;
|
||||
std::vector<std::shared_ptr<osci::Effect>> toggleableEffects;
|
||||
std::vector<std::shared_ptr<osci::Effect>> luaEffects;
|
||||
std::atomic<double> luaValues[26] = { 0.0 };
|
||||
std::atomic<double> luaValues[26] = {0.0};
|
||||
|
||||
std::shared_ptr<osci::Effect> frequencyEffect = std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
frequency = values[0].load();
|
||||
return input;
|
||||
}, new osci::EffectParameter(
|
||||
},
|
||||
new osci::EffectParameter(
|
||||
"Frequency",
|
||||
"Controls how many times per second the image is drawn, thereby controlling the pitch of the sound. Lower frequencies result in more-accurately drawn images, but more flickering, and vice versa.",
|
||||
"frequency",
|
||||
VERSION_HINT, 220.0, 0.0, 4200.0
|
||||
)
|
||||
);
|
||||
|
||||
VERSION_HINT, 220.0, 0.0, 4200.0));
|
||||
|
||||
std::shared_ptr<osci::Effect> trace = std::make_shared<osci::Effect>(
|
||||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter(
|
||||
"Trace Start",
|
||||
"Defines how far into the frame the drawing is started at. This has the effect of 'tracing' out the image from a single dot when animated. By default, we start drawing from the beginning of the frame, so this value is 0.0.",
|
||||
"traceStart",
|
||||
VERSION_HINT, 0.0, 0.0, 1.0, 0.001
|
||||
),
|
||||
VERSION_HINT, 0.0, 0.0, 1.0, 0.001),
|
||||
new osci::EffectParameter(
|
||||
"Trace Length",
|
||||
"Defines how much of the frame is drawn per cycle. This has the effect of 'tracing' out the image from a single dot when animated. By default, we draw the whole frame, corresponding to a value of 1.0.",
|
||||
"traceLength",
|
||||
VERSION_HINT, 1.0, 0.0, 1.0, 0.001
|
||||
),
|
||||
}
|
||||
);
|
||||
VERSION_HINT, 1.0, 0.0, 1.0, 0.001),
|
||||
});
|
||||
|
||||
std::shared_ptr<DelayEffect> delayEffect = std::make_shared<DelayEffect>();
|
||||
|
||||
|
@ -97,8 +96,7 @@ public:
|
|||
std::shared_ptr<CustomEffect> customEffect = std::make_shared<CustomEffect>(errorCallback, luaValues);
|
||||
std::shared_ptr<osci::Effect> custom = std::make_shared<osci::Effect>(
|
||||
customEffect,
|
||||
new osci::EffectParameter("Lua Effect", "Controls the strength of the custom Lua effect applied. You can write your own custom effect using Lua by pressing the edit button on the right.", "customEffectStrength", VERSION_HINT, 1.0, 0.0, 1.0)
|
||||
);
|
||||
new osci::EffectParameter("Lua Effect", "Controls the strength of the custom Lua effect applied. You can write your own custom effect using Lua by pressing the edit button on the right.", "customEffectStrength", VERSION_HINT, 1.0, 0.0, 1.0));
|
||||
|
||||
std::shared_ptr<PerspectiveEffect> perspectiveEffect = std::make_shared<PerspectiveEffect>();
|
||||
std::shared_ptr<osci::Effect> perspective = std::make_shared<osci::Effect>(
|
||||
|
@ -106,13 +104,12 @@ public:
|
|||
std::vector<osci::EffectParameter*>{
|
||||
new osci::EffectParameter("Perspective", "Controls the strength of the 3D perspective projection.", "perspectiveStrength", VERSION_HINT, 1.0, 0.0, 1.0),
|
||||
new osci::EffectParameter("Focal Length", "Controls the focal length of the 3D perspective effect. A higher focal length makes the image look more flat, and a lower focal length makes the image look more 3D.", "perspectiveFocalLength", VERSION_HINT, 2.0, 0.0, 10.0),
|
||||
}
|
||||
);
|
||||
|
||||
});
|
||||
|
||||
osci::BooleanParameter* midiEnabled = new osci::BooleanParameter("MIDI Enabled", "midiEnabled", VERSION_HINT, false, "Enable MIDI input for the synth. If disabled, the synth will play a constant tone, as controlled by the frequency slider.");
|
||||
osci::BooleanParameter* inputEnabled = new osci::BooleanParameter("Audio Input Enabled", "inputEnabled", VERSION_HINT, false, "Enable to use input audio, instead of the generated audio.");
|
||||
std::atomic<double> frequency = 220.0;
|
||||
|
||||
|
||||
juce::SpinLock parsersLock;
|
||||
std::vector<std::shared_ptr<FileParser>> parsers;
|
||||
std::vector<ShapeSound::Ptr> sounds;
|
||||
|
@ -133,7 +130,7 @@ public:
|
|||
osci::FloatParameter* releaseTime = new osci::FloatParameter("Release Time", "releaseTime", VERSION_HINT, 0.4, 0.0, 1.0);
|
||||
osci::FloatParameter* attackShape = new osci::FloatParameter("Attack Shape", "attackShape", VERSION_HINT, 5, -50, 50);
|
||||
osci::FloatParameter* decayShape = new osci::FloatParameter("Decay osci::Shape", "decayShape", VERSION_HINT, -20, -50, 50);
|
||||
osci::FloatParameter* releaseShape = new osci::FloatParameter("Release Shape", "releaseShape", VERSION_HINT, -5,-50, 50);
|
||||
osci::FloatParameter* releaseShape = new osci::FloatParameter("Release Shape", "releaseShape", VERSION_HINT, -5, -50, 50);
|
||||
|
||||
Env adsrEnv = Env::adsr(
|
||||
attackTime->getValueUnnormalised(),
|
||||
|
@ -141,8 +138,7 @@ public:
|
|||
sustainLevel->getValueUnnormalised(),
|
||||
releaseTime->getValueUnnormalised(),
|
||||
1.0,
|
||||
std::vector<EnvCurve>{ attackShape->getValueUnnormalised(), decayShape->getValueUnnormalised(), releaseShape->getValueUnnormalised() }
|
||||
);
|
||||
std::vector<EnvCurve>{attackShape->getValueUnnormalised(), decayShape->getValueUnnormalised(), releaseShape->getValueUnnormalised()});
|
||||
|
||||
juce::MidiKeyboardState keyboardState;
|
||||
|
||||
|
@ -158,33 +154,31 @@ public:
|
|||
std::shared_ptr<osci::Effect> imageThreshold = std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
return input;
|
||||
}, new osci::EffectParameter(
|
||||
},
|
||||
new osci::EffectParameter(
|
||||
"Image Threshold",
|
||||
"Controls the probability of visiting a dark pixel versus a light pixel. Darker pixels are less likely to be visited, so turning the threshold to a lower value makes it more likely to visit dark pixels.",
|
||||
"imageThreshold",
|
||||
VERSION_HINT, 0.5, 0, 1
|
||||
)
|
||||
);
|
||||
VERSION_HINT, 0.5, 0, 1));
|
||||
std::shared_ptr<osci::Effect> imageStride = std::make_shared<osci::Effect>(
|
||||
[this](int index, osci::Point input, const std::vector<std::atomic<double>>& values, double sampleRate) {
|
||||
return input;
|
||||
}, new osci::EffectParameter(
|
||||
},
|
||||
new osci::EffectParameter(
|
||||
"Image Stride",
|
||||
"Controls the spacing between pixels when drawing an image. Larger values mean more of the image can be drawn, but at a lower fidelity.",
|
||||
"imageStride",
|
||||
VERSION_HINT, 4, 1, 50, 1
|
||||
)
|
||||
);
|
||||
VERSION_HINT, 4, 1, 50, 1));
|
||||
|
||||
std::atomic<double> animationFrame = 0.f;
|
||||
|
||||
|
||||
std::shared_ptr<WobbleEffect> wobbleEffect = std::make_shared<WobbleEffect>(*this);
|
||||
|
||||
const double FONT_SIZE = 1.0f;
|
||||
juce::Font font = juce::Font(juce::Font::getDefaultSansSerifFontName(), FONT_SIZE, juce::Font::plain);
|
||||
|
||||
ShapeSound::Ptr objectServerSound = new ShapeSound();
|
||||
|
||||
|
||||
std::function<void()> haltRecording;
|
||||
|
||||
// Add a callback to notify the editor when a file is removed
|
||||
|
@ -202,10 +196,10 @@ public:
|
|||
void openFile(int index);
|
||||
int getCurrentFileIndex();
|
||||
std::shared_ptr<FileParser> getCurrentFileParser();
|
||||
juce::String getCurrentFileName();
|
||||
juce::String getCurrentFileName();
|
||||
juce::String getFileName(int index);
|
||||
juce::String getFileId(int index);
|
||||
std::shared_ptr<juce::MemoryBlock> getFileBlock(int index);
|
||||
std::shared_ptr<juce::MemoryBlock> getFileBlock(int index);
|
||||
void setObjectServerRendering(bool enabled);
|
||||
void setObjectServerPort(int port);
|
||||
void addErrorListener(ErrorListener* listener);
|
||||
|
@ -240,7 +234,6 @@ public:
|
|||
};
|
||||
|
||||
private:
|
||||
|
||||
std::atomic<bool> prevMidiEnabled = !midiEnabled->getBoolValue();
|
||||
|
||||
juce::SpinLock audioThreadCallbackLock;
|
||||
|
@ -271,7 +264,7 @@ private:
|
|||
std::istringstream parser(input.toStdString());
|
||||
parser >> result[0];
|
||||
for (int idx = 1; idx < 3; idx++) {
|
||||
parser.get(); //Skip period
|
||||
parser.get(); // Skip period
|
||||
parser >> result[idx];
|
||||
}
|
||||
}
|
||||
|
@ -287,8 +280,7 @@ private:
|
|||
|
||||
juce::AudioPlayHead* playHead;
|
||||
|
||||
|
||||
#if JUCE_MAC || JUCE_WINDOWS
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
public:
|
||||
bool isSyphonInputActive() const;
|
||||
bool isSyphonInputStarted() const;
|
||||
|
@ -297,11 +289,12 @@ public:
|
|||
juce::String getSyphonSourceName() const;
|
||||
|
||||
juce::SpinLock syphonLock;
|
||||
|
||||
private:
|
||||
ImageParser syphonImageParser = ImageParser(*this);
|
||||
std::unique_ptr<SyphonFrameGrabber> syphonFrameGrabber;
|
||||
#endif
|
||||
|
||||
//==============================================================================
|
||||
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (OscirenderAudioProcessor)
|
||||
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(OscirenderAudioProcessor)
|
||||
};
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
#include "SettingsComponent.h"
|
||||
|
||||
#include "PluginEditor.h"
|
||||
|
||||
SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudioProcessorEditor& editor) : audioProcessor(p), pluginEditor(editor) {
|
||||
|
@ -10,27 +11,26 @@ SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudi
|
|||
addAndMakeVisible(midi);
|
||||
addChildComponent(txt);
|
||||
addChildComponent(frame);
|
||||
|
||||
|
||||
double midiLayoutPreferredSize = std::any_cast<double>(audioProcessor.getProperty("midiLayoutPreferredSize", pluginEditor.CLOSED_PREF_SIZE));
|
||||
double mainLayoutPreferredSize = std::any_cast<double>(audioProcessor.getProperty("mainLayoutPreferredSize", -0.4));
|
||||
|
||||
midiLayout.setItemLayout(0, -0.1, -1.0, -(1.0 + midiLayoutPreferredSize));
|
||||
midiLayout.setItemLayout(1, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE);
|
||||
midiLayout.setItemLayout(2, pluginEditor.CLOSED_PREF_SIZE, -0.9, midiLayoutPreferredSize);
|
||||
|
||||
|
||||
mainLayout.setItemLayout(0, -0.1, -0.9, mainLayoutPreferredSize);
|
||||
mainLayout.setItemLayout(1, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE);
|
||||
mainLayout.setItemLayout(2, -0.1, -0.9, -(1.0 + mainLayoutPreferredSize));
|
||||
}
|
||||
|
||||
|
||||
void SettingsComponent::resized() {
|
||||
auto area = getLocalBounds();
|
||||
area.removeFromLeft(5);
|
||||
area.removeFromRight(5);
|
||||
area.removeFromTop(5);
|
||||
area.removeFromBottom(5);
|
||||
|
||||
|
||||
if (area.getWidth() <= 0 || area.getHeight() <= 0) {
|
||||
return;
|
||||
}
|
||||
|
@ -38,11 +38,11 @@ void SettingsComponent::resized() {
|
|||
juce::Component dummy;
|
||||
juce::Component dummy2;
|
||||
|
||||
juce::Component* midiComponents[] = { &dummy, &midiResizerBar, &midi };
|
||||
juce::Component* midiComponents[] = {&dummy, &midiResizerBar, &midi};
|
||||
midiLayout.layOutComponents(midiComponents, 3, area.getX(), area.getY(), area.getWidth(), area.getHeight(), true, true);
|
||||
midi.setBounds(midi.getBounds());
|
||||
|
||||
juce::Component* columns[] = { &dummy2, &mainResizerBar, &dummy };
|
||||
juce::Component* columns[] = {&dummy2, &mainResizerBar, &dummy};
|
||||
mainLayout.layOutComponents(columns, 3, dummy.getX(), dummy.getY(), dummy.getWidth(), dummy.getHeight(), false, true);
|
||||
|
||||
auto bounds = dummy2.getBounds();
|
||||
|
@ -66,7 +66,7 @@ void SettingsComponent::resized() {
|
|||
}
|
||||
|
||||
effects.setBounds(dummyBounds);
|
||||
|
||||
|
||||
if (isVisible() && getWidth() > 0 && getHeight() > 0) {
|
||||
audioProcessor.setProperty("midiLayoutPreferredSize", midiLayout.getItemCurrentRelativeSize(2));
|
||||
audioProcessor.setProperty("mainLayoutPreferredSize", mainLayout.getItemCurrentRelativeSize(0));
|
||||
|
@ -79,8 +79,25 @@ void SettingsComponent::fileUpdated(juce::String fileName) {
|
|||
juce::String extension = fileName.fromLastOccurrenceOf(".", true, false).toLowerCase();
|
||||
txt.setVisible(false);
|
||||
frame.setVisible(false);
|
||||
bool isImage = extension == ".gif" || extension == ".png" || extension == ".jpg" || extension == ".jpeg" || extension == ".mov" || extension == ".mp4" || audioProcessor.isSyphonInputStarted();
|
||||
if ((fileName.isEmpty() && !audioProcessor.isSyphonInputStarted()) || audioProcessor.objectServerRendering) {
|
||||
|
||||
// Check if the file is an image based on extension or Syphon/Spout input
|
||||
bool isSyphonActive = false;
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
isSyphonActive = audioProcessor.isSyphonInputStarted();
|
||||
#endif
|
||||
|
||||
bool isImage = isSyphonActive ||
|
||||
(extension == ".gif" ||
|
||||
extension == ".png" ||
|
||||
extension == ".jpg" ||
|
||||
extension == ".jpeg" ||
|
||||
extension == ".mov" ||
|
||||
extension == ".mp4");
|
||||
|
||||
// Skip processing if object server is rendering or if no file is selected and no Syphon input
|
||||
bool skipProcessing = audioProcessor.objectServerRendering || (fileName.isEmpty() && !isSyphonActive);
|
||||
|
||||
if (skipProcessing) {
|
||||
// do nothing
|
||||
} else if (extension == ".txt") {
|
||||
txt.setVisible(true);
|
||||
|
|
|
@ -62,6 +62,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
|
|||
editor.openRecordingSettings();
|
||||
});
|
||||
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
// Add Syphon/Spout input menu item under Recording
|
||||
addMenuItem(2, audioProcessor.isSyphonInputActive() ? "Disconnect Syphon/Spout Input" : "Select Syphon/Spout Input...", [this] {
|
||||
if (audioProcessor.isSyphonInputActive())
|
||||
|
@ -69,6 +70,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
|
|||
else
|
||||
openSyphonInputDialog();
|
||||
});
|
||||
#endif
|
||||
|
||||
if (editor.processor.wrapperType == juce::AudioProcessor::WrapperType::wrapperType_Standalone) {
|
||||
addMenuItem(3, "Settings...", [this] {
|
||||
|
@ -77,6 +79,7 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
|
|||
}
|
||||
}
|
||||
|
||||
#if (JUCE_MAC || JUCE_WINDOWS) && OSCI_PREMIUM
|
||||
void OsciMainMenuBarModel::openSyphonInputDialog() {
|
||||
editor.openSyphonInputDialog();
|
||||
}
|
||||
|
@ -84,3 +87,4 @@ void OsciMainMenuBarModel::openSyphonInputDialog() {
|
|||
void OsciMainMenuBarModel::disconnectSyphonInput() {
|
||||
audioProcessor.disconnectSyphonInput();
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -0,0 +1,343 @@
|
|||
#include "FFmpegEncoderManager.h"
|
||||
|
||||
// Stores the path to the ffmpeg binary and immediately probes it for the
// set of encoders it was built with, so later per-codec queries are cheap
// cache lookups.
FFmpegEncoderManager::FFmpegEncoderManager(juce::File& ffmpegExecutable)
    : ffmpegExecutable(ffmpegExecutable) {
    queryAvailableEncoders();
}
|
||||
|
||||
// Builds a complete ffmpeg command line for the requested codec by delegating
// to the codec-specific builder.
//   crf                 - quality value used by the H.264 / H.265 / VP9 paths.
//   videoToolboxQuality - only consulted by the H.265 path (see
//                         addH265EncoderSettings); ignored otherwise.
//   compressionPreset   - encoder preset string, forwarded where applicable.
// Unknown codec values fall back to H.264.
juce::String FFmpegEncoderManager::buildVideoEncodingCommand(
    VideoCodec codec,
    int crf,
    int videoToolboxQuality,
    int width,
    int height,
    double frameRate,
    const juce::String& compressionPreset,
    const juce::File& outputFile) {
    switch (codec) {
        case VideoCodec::H264:
            return buildH264EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile);
        case VideoCodec::H265:
            return buildH265EncodingCommand(crf, videoToolboxQuality, width, height, frameRate, compressionPreset, outputFile);
        case VideoCodec::VP9:
            return buildVP9EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile);
#if JUCE_MAC
        case VideoCodec::ProRes:
            return buildProResEncodingCommand(width, height, frameRate, outputFile);
#endif
        default:
            // Default to H.264 if unknown codec
            return buildH264EncodingCommand(crf, width, height, frameRate, compressionPreset, outputFile);
    }
}
|
||||
|
||||
// Returns the cached encoder list for the given codec, or an empty array if
// the codec is unknown. The cache is populated once by queryAvailableEncoders()
// in the constructor.
juce::Array<FFmpegEncoderManager::EncoderDetails> FFmpegEncoderManager::getAvailableEncodersForCodec(VideoCodec codec) {
    const auto cached = availableEncoders.find(codec);
    if (cached == availableEncoders.end())
        return {};

    return cached->second;
}
|
||||
|
||||
// Reports whether the named encoder was listed by ffmpeg, is a usable video
// encoder, and belongs to one of the recognised hardware-accelerated families.
bool FFmpegEncoderManager::isHardwareEncoderAvailable(const juce::String& encoderName) {
    for (const auto& entry : availableEncoders) {
        for (const auto& details : entry.second) {
            const bool nameMatches = details.name == encoderName;
            if (nameMatches && details.isSupported && details.isHardwareAccelerated)
                return true;
        }
    }

    return false;
}
|
||||
|
||||
// Picks the preferred encoder name for a codec. Each codec has a priority
// list with hardware encoders first and the software encoder last; the first
// list entry that ffmpeg reports as a supported encoder wins. When none of
// them is available, the software encoder (the last list entry) is returned
// as the fallback, matching ffmpeg's stock build.
juce::String FFmpegEncoderManager::getBestEncoderForCodec(VideoCodec codec) {
    juce::StringArray priorities;
    switch (codec) {
        case VideoCodec::H265:
            priorities = {"hevc_nvenc", "hevc_amf", "hevc_qsv", "hevc_videotoolbox", "libx265"};
            break;
        case VideoCodec::VP9:
            priorities = {"libvpx-vp9"};
            break;
#if JUCE_MAC
        case VideoCodec::ProRes:
            priorities = {"prores_ks", "prores"};
            break;
#endif
        case VideoCodec::H264:
        default: // Unknown codecs fall back to H.264.
            priorities = {"h264_nvenc", "h264_amf", "h264_qsv", "h264_videotoolbox", "libx264"};
            break;
    }

    const auto encoders = getAvailableEncodersForCodec(codec);
    for (const auto& candidate : priorities) {
        for (const auto& details : encoders) {
            if (details.name == candidate && details.isSupported)
                return candidate;
        }
    }

    // Nothing usable was reported: fall back to the software encoder.
    return priorities[priorities.size() - 1];
}
|
||||
|
||||
// Runs `ffmpeg -encoders -hide_banner` and feeds the textual output to
// parseEncoderList, which fills the per-codec availableEncoders cache.
// Called once from the constructor.
void FFmpegEncoderManager::queryAvailableEncoders() {
    // Query available encoders using ffmpeg -encoders
    juce::String output = runFFmpegCommand({"-encoders", "-hide_banner"});
    parseEncoderList(output);
}
|
||||
|
||||
// Parses the output of `ffmpeg -encoders` and rebuilds the availableEncoders
// cache, bucketing each encoder row under the codec family its name implies.
// An empty or unparseable output simply leaves all codec lists empty.
void FFmpegEncoderManager::parseEncoderList(const juce::String& output) {
    // Clear current encoders
    availableEncoders.clear();

    // Initialize codec-specific encoder arrays
    availableEncoders[VideoCodec::H264] = {};
    availableEncoders[VideoCodec::H265] = {};
    availableEncoders[VideoCodec::VP9] = {};
#if JUCE_MAC
    availableEncoders[VideoCodec::ProRes] = {};
#endif

    // Split the output into lines
    juce::StringArray lines;
    lines.addLines(output);

    // `ffmpeg -encoders` prints a flag legend terminated by a " ------"
    // separator line before the encoder table. Skip to that marker rather
    // than assuming a fixed header length, which varies between ffmpeg
    // builds; if the marker is missing, fall back to skipping 10 lines.
    int firstEncoderLine = -1;
    for (int i = 0; i < lines.size(); ++i) {
        if (lines[i].trim().startsWith("------")) {
            firstEncoderLine = i + 1;
            break;
        }
    }
    if (firstEncoderLine < 0) {
        firstEncoderLine = juce::jmin(10, lines.size());
    }

    // Parse each line to find encoder information
    for (int i = firstEncoderLine; i < lines.size(); ++i) {
        const auto& line = lines[i];

        // Row format: " V..... libx264   H.264 / AVC / MPEG-4 AVC / ..."
        // i.e. a flag field, a space, the encoder name, then a description.
        juce::String flags = line.substring(0, 6).trim();
        juce::String name = line.substring(8).upToFirstOccurrenceOf(" ", false, true);
        juce::String description = line.substring(8 + name.length()).trim();

        EncoderDetails encoder;
        encoder.name = name;
        encoder.description = description;
        encoder.isHardwareAccelerated = name.contains("nvenc") || name.contains("amf") ||
                                        name.contains("qsv") || name.contains("videotoolbox");
        encoder.isSupported = flags.contains("V"); // 'V' flag marks a video encoder

        // Add encoder to appropriate codec list
        if (name == "libx264" || name.startsWith("h264_")) {
            availableEncoders[VideoCodec::H264].add(encoder);
        } else if (name == "libx265" || name.startsWith("hevc_")) {
            availableEncoders[VideoCodec::H265].add(encoder);
        } else if (name == "libvpx-vp9") {
            availableEncoders[VideoCodec::VP9].add(encoder);
        }
#if JUCE_MAC
        else if (name.startsWith("prores")) {
            availableEncoders[VideoCodec::ProRes].add(encoder);
        }
#endif
    }
}
|
||||
|
||||
// Launches the ffmpeg executable with the given arguments and returns its
// captured stdout as a string. Returns an empty string when the process
// cannot be started (e.g. the ffmpeg binary is missing or not executable) —
// previously the start result was ignored and the output of a never-started
// process was read.
juce::String FFmpegEncoderManager::runFFmpegCommand(const juce::StringArray& args) {
    juce::StringArray command;
    command.add(ffmpegExecutable.getFullPathName());
    command.addArray(args);

    juce::ChildProcess process;
    if (!process.start(command, juce::ChildProcess::wantStdOut)) {
        return {};
    }

    // Blocks until ffmpeg exits; the commands issued here (-encoders etc.)
    // terminate immediately.
    return process.readAllProcessOutput();
}
|
||||
|
||||
// Builds the input half of every encoding command: raw RGBA frames of the
// given resolution are piped to ffmpeg on stdin ("-i -") at the given frame
// rate, converted to yuv420p, and flipped vertically ("-vf vflip" — the
// producer apparently supplies frames bottom-up; confirm against the frame
// writer). Codec-specific output options and the output path are appended by
// the callers.
// NOTE(review): outputFile is currently unused here (callers append it after
// the codec settings); the parameter is kept to match the header declaration.
juce::String FFmpegEncoderManager::buildBaseEncodingCommand(
    int width,
    int height,
    double frameRate,
    const juce::File& outputFile) {
    juce::String resolution = juce::String(width) + "x" + juce::String(height);
    juce::String cmd = "\"" + ffmpegExecutable.getFullPathName() + "\"" +
                       " -r " + juce::String(frameRate) +
                       " -f rawvideo" +
                       " -pix_fmt rgba" +
                       " -s " + resolution +
                       " -i -" +
                       " -threads 4" +
                       " -y" +
                       " -pix_fmt yuv420p" +
                       " -vf vflip";

    return cmd;
}
|
||||
|
||||
// Appends the H.264 encoder selection and its quality/preset options to cmd
// and returns the extended command. Each hardware encoder family takes its
// quality in a different form (NVENC -cq, AMF -qp_i/-qp_p, QSV
// -global_quality, VideoToolbox -q); unrecognised names fall back to the
// libx264 software encoder with a standard -preset/-crf pair.
juce::String FFmpegEncoderManager::addH264EncoderSettings(
    juce::String cmd,
    const juce::String& encoderName,
    int crf,
    const juce::String& compressionPreset) {
    if (encoderName == "h264_nvenc") {
        cmd += " -c:v h264_nvenc";
        cmd += " -preset p7"; // slowest/highest-quality NVENC preset
        cmd += " -profile:v high";
        cmd += " -rc vbr";
        cmd += " -cq " + juce::String(crf);
        cmd += " -b:v 0"; // let -cq drive quality instead of a bitrate target
    } else if (encoderName == "h264_amf") {
        cmd += " -c:v h264_amf";
        cmd += " -quality quality";
        cmd += " -rc cqp";
        cmd += " -qp_i " + juce::String(crf);
        cmd += " -qp_p " + juce::String(crf);
    } else if (encoderName == "h264_qsv") {
        cmd += " -c:v h264_qsv";
        cmd += " -global_quality " + juce::String(crf);
        cmd += " -preset " + compressionPreset;
    } else if (encoderName == "h264_videotoolbox") {
        cmd += " -c:v h264_videotoolbox";
        // NOTE(review): this reuses the CRF value for VideoToolbox's -q,
        // while the H.265 path uses a dedicated videoToolboxQuality value;
        // the two scales differ (CRF: lower = better), so verify this is
        // intentional.
        cmd += " -q " + juce::String(crf);
    } else { // libx264 (software)
        cmd += " -c:v libx264";
        cmd += " -preset " + compressionPreset;
        cmd += " -crf " + juce::String(crf);
    }

    return cmd;
}
|
||||
|
||||
// Appends the H.265/HEVC encoder selection and its quality/preset options to
// cmd and returns the extended command. Hardware encoders each take quality
// in their own form (NVENC -cq, AMF -qp_i/-qp_p, QSV -global_quality,
// VideoToolbox -q:v with its own 0-100 style value); unrecognised names fall
// back to the libx265 software encoder with -preset/-crf.
juce::String FFmpegEncoderManager::addH265EncoderSettings(
    juce::String cmd,
    const juce::String& encoderName,
    int crf,
    int videoToolboxQuality,
    const juce::String& compressionPreset) {
    juce::StringArray options;

    if (encoderName == "hevc_nvenc") {
        options.add("-c:v hevc_nvenc");
        options.add("-preset p7");
        options.add("-profile:v main");
        options.add("-rc vbr");
        options.add("-cq " + juce::String(crf));
        options.add("-b:v 0"); // quality-driven: no bitrate target
    } else if (encoderName == "hevc_amf") {
        options.add("-c:v hevc_amf");
        options.add("-quality quality");
        options.add("-rc cqp");
        options.add("-qp_i " + juce::String(crf));
        options.add("-qp_p " + juce::String(crf));
    } else if (encoderName == "hevc_qsv") {
        options.add("-c:v hevc_qsv");
        options.add("-global_quality " + juce::String(crf));
        options.add("-preset " + compressionPreset);
    } else if (encoderName == "hevc_videotoolbox") {
        options.add("-c:v hevc_videotoolbox");
        options.add("-q:v " + juce::String(videoToolboxQuality));
        options.add("-tag:v hvc1"); // hvc1 tag for QuickTime compatibility
    } else { // libx265 (software)
        options.add("-c:v libx265");
        options.add("-preset " + compressionPreset);
        options.add("-crf " + juce::String(crf));
    }

    for (const auto& option : options)
        cmd += " " + option;

    return cmd;
}
|
||||
|
||||
// Builds the full H.264 encoding command: common raw-RGBA input options,
// the best available H.264 encoder's settings, then the output path.
juce::String FFmpegEncoderManager::buildH264EncodingCommand(
    int crf,
    int width,
    int height,
    double frameRate,
    const juce::String& compressionPreset,
    const juce::File& outputFile) {
    const juce::String encoder = getBestEncoderForCodec(VideoCodec::H264);

    juce::String command = buildBaseEncodingCommand(width, height, frameRate, outputFile);
    command = addH264EncoderSettings(command, encoder, crf, compressionPreset);
    command += " \"" + outputFile.getFullPathName() + "\"";

    return command;
}
|
||||
|
||||
// Builds the full H.265 encoding command: common raw-RGBA input options,
// the best available HEVC encoder's settings, then the output path.
// videoToolboxQuality is only used when the chosen encoder is
// hevc_videotoolbox.
juce::String FFmpegEncoderManager::buildH265EncodingCommand(
    int crf,
    int videoToolboxQuality,
    int width,
    int height,
    double frameRate,
    const juce::String& compressionPreset,
    const juce::File& outputFile) {
    const juce::String encoder = getBestEncoderForCodec(VideoCodec::H265);

    juce::String command = buildBaseEncodingCommand(width, height, frameRate, outputFile);
    command = addH265EncoderSettings(command, encoder, crf, videoToolboxQuality, compressionPreset);
    command += " \"" + outputFile.getFullPathName() + "\"";

    return command;
}
|
||||
|
||||
// Builds the full VP9 encoding command. There is no hardware path here: the
// libvpx-vp9 software encoder is always used, in constant-quality mode
// (-b:v 0 with -crf). compressionPreset is unused because libvpx is tuned
// via -deadline/-cpu-used rather than x264-style presets.
juce::String FFmpegEncoderManager::buildVP9EncodingCommand(
    int crf,
    int width,
    int height,
    double frameRate,
    const juce::String& compressionPreset,
    const juce::File& outputFile) {
    juce::String command = buildBaseEncodingCommand(width, height, frameRate, outputFile);

    command += " -c:v libvpx-vp9";
    command += " -b:v 0";
    command += " -crf " + juce::String(crf);
    command += " -deadline good -cpu-used 2";

    command += " \"" + outputFile.getFullPathName() + "\"";

    return command;
}
|
||||
|
||||
#if JUCE_MAC
// Builds the full ProRes encoding command (macOS only), using the best
// available ProRes encoder with the 422 HQ profile. ProRes has no CRF-style
// quality knob here, so no quality parameters are taken.
juce::String FFmpegEncoderManager::buildProResEncodingCommand(
    int width,
    int height,
    double frameRate,
    const juce::File& outputFile) {
    const juce::String encoder = getBestEncoderForCodec(VideoCodec::ProRes);

    juce::String command = buildBaseEncodingCommand(width, height, frameRate, outputFile);
    command += " -c:v " + encoder +
               " -profile:v 3"; // ProRes 422 HQ
    command += " \"" + outputFile.getFullPathName() + "\"";

    return command;
}
#endif
|
|
@ -0,0 +1,112 @@
|
|||
#pragma once
|
||||
|
||||
#include <JuceHeader.h>
|
||||
|
||||
#include "../visualiser/RecordingSettings.h"
|
||||
|
||||
// Discovers which video encoders the configured ffmpeg binary supports and
// builds complete ffmpeg command lines for video recording, preferring
// hardware-accelerated encoders (NVENC, AMF, QSV, VideoToolbox) over
// software ones when they are available.
class FFmpegEncoderManager {
public:
    // Probes `ffmpegExecutable -encoders` once at construction to fill the
    // per-codec encoder cache.
    FFmpegEncoderManager(juce::File& ffmpegExecutable);
    ~FFmpegEncoderManager() = default;

    // One row of `ffmpeg -encoders` output.
    struct EncoderDetails {
        juce::String name;            // encoder name, e.g. "h264_nvenc"
        juce::String description;     // human-readable description from ffmpeg
        bool isHardwareAccelerated;   // name matches a known HW encoder family
        bool isSupported;             // flagged 'V' (video encoder) by ffmpeg
    };

    // FFMPEG command builder: returns the complete command line for encoding
    // raw RGBA frames (piped on stdin) into outputFile with the given codec.
    // videoToolboxQuality is only consulted on the H.265/VideoToolbox path.
    juce::String buildVideoEncodingCommand(
        VideoCodec codec,
        int crf,
        int videoToolboxQuality,
        int width,
        int height,
        double frameRate,
        const juce::String& compressionPreset,
        const juce::File& outputFile);

    // Get available encoders for a given codec (empty array if none cached)
    juce::Array<EncoderDetails> getAvailableEncodersForCodec(VideoCodec codec);

    // Check if a hardware encoder with the given name is available and usable
    bool isHardwareEncoderAvailable(const juce::String& encoderName);

    // Get the best encoder for a given codec (hardware preferred, software
    // fallback)
    juce::String getBestEncoderForCodec(VideoCodec codec);

private:
    juce::File ffmpegExecutable; // copy of the ffmpeg binary's location
    // Cache of encoders reported by ffmpeg, bucketed by codec family.
    std::map<VideoCodec, juce::Array<EncoderDetails>> availableEncoders;

    // Query available encoders from FFmpeg (runs `ffmpeg -encoders`)
    void queryAvailableEncoders();

    // Parse encoder output from FFmpeg into availableEncoders
    void parseEncoderList(const juce::String& output);

    // Run FFmpeg with given arguments and return its stdout output
    juce::String runFFmpegCommand(const juce::StringArray& args);

    // Common base command builder to reduce duplication: shared raw-RGBA
    // stdin input options; codec settings and output path appended by callers.
    juce::String buildBaseEncodingCommand(
        int width,
        int height,
        double frameRate,
        const juce::File& outputFile);

    // H.264 encoder settings helper (appends -c:v and quality options)
    juce::String addH264EncoderSettings(
        juce::String cmd,
        const juce::String& encoderName,
        int crf,
        const juce::String& compressionPreset);

    // H.265 encoder settings helper (appends -c:v and quality options)
    juce::String addH265EncoderSettings(
        juce::String cmd,
        const juce::String& encoderName,
        int crf,
        int videoToolboxQuality,
        const juce::String& compressionPreset);

    // Build H.264 encoding command
    juce::String buildH264EncodingCommand(
        int crf,
        int width,
        int height,
        double frameRate,
        const juce::String& compressionPreset,
        const juce::File& outputFile);

    // Build H.265 encoding command
    juce::String buildH265EncodingCommand(
        int crf,
        int videoToolboxQuality,
        int width,
        int height,
        double frameRate,
        const juce::String& compressionPreset,
        const juce::File& outputFile);

    // Build VP9 encoding command
    juce::String buildVP9EncodingCommand(
        int crf,
        int width,
        int height,
        double frameRate,
        const juce::String& compressionPreset,
        const juce::File& outputFile);

#if JUCE_MAC
    // Build ProRes encoding command (macOS only)
    juce::String buildProResEncodingCommand(
        int width,
        int height,
        double frameRate,
        const juce::File& outputFile);
#endif

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(FFmpegEncoderManager)
};
|
|
@ -1,13 +1,12 @@
|
|||
#pragma once
|
||||
#include <JuceHeader.h>
|
||||
|
||||
#include "InvisibleOpenGLContextComponent.h"
|
||||
|
||||
class SyphonFrameGrabber : private juce::Thread, public juce::Component
|
||||
{
|
||||
class SyphonFrameGrabber : private juce::Thread, public juce::Component {
|
||||
public:
|
||||
SyphonFrameGrabber(SharedTextureManager& manager, juce::String server, juce::String app, ImageParser& parser, int pollMs = 16)
|
||||
: juce::Thread("SyphonFrameGrabber"), pollIntervalMs(pollMs), manager(manager), parser(parser)
|
||||
{
|
||||
: juce::Thread("SyphonFrameGrabber"), pollIntervalMs(pollMs), manager(manager), parser(parser) {
|
||||
// Create the invisible OpenGL context component
|
||||
glContextComponent = std::make_unique<InvisibleOpenGLContextComponent>();
|
||||
receiver = manager.addReceiver(server, app);
|
||||
|
@ -45,13 +44,11 @@ public:
|
|||
}
|
||||
}
|
||||
|
||||
bool isActive() const
|
||||
{
|
||||
bool isActive() const {
|
||||
return receiver != nullptr && receiver->isInit && receiver->enabled;
|
||||
}
|
||||
|
||||
juce::String getSourceName() const
|
||||
{
|
||||
juce::String getSourceName() const {
|
||||
if (receiver) {
|
||||
return receiver->sharingName + " (" + receiver->sharingAppName + ")";
|
||||
}
|
||||
|
@ -64,6 +61,6 @@ private:
|
|||
SharedTextureReceiver* receiver = nullptr;
|
||||
ImageParser& parser;
|
||||
std::unique_ptr<InvisibleOpenGLContextComponent> glContextComponent;
|
||||
|
||||
|
||||
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(SyphonFrameGrabber)
|
||||
};
|
Plik diff jest za duży
Load Diff
|
@ -1,17 +1,20 @@
|
|||
#pragma once
|
||||
|
||||
#include <algorithm>
|
||||
#include <JuceHeader.h>
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include "../LookAndFeel.h"
|
||||
#include "../components/SvgButton.h"
|
||||
#include "VisualiserSettings.h"
|
||||
#include "RecordingSettings.h"
|
||||
#include "../components/StopwatchComponent.h"
|
||||
#include "../img/qoixx.hpp"
|
||||
#include "../components/DownloaderComponent.h"
|
||||
#include "../audio/AudioRecorder.h"
|
||||
#include "../wav/WavParser.h"
|
||||
#include "../components/AudioPlayerComponent.h"
|
||||
#include "../components/DownloaderComponent.h"
|
||||
#include "../components/StopwatchComponent.h"
|
||||
#include "../components/SvgButton.h"
|
||||
#include "../img/qoixx.hpp"
|
||||
#include "../video/FFmpegEncoderManager.h"
|
||||
#include "../wav/WavParser.h"
|
||||
#include "RecordingSettings.h"
|
||||
#include "VisualiserSettings.h"
|
||||
|
||||
#define FILE_RENDER_DUMMY 0
|
||||
#define FILE_RENDER_PNG 1
|
||||
|
@ -44,8 +47,7 @@ public:
|
|||
VisualiserSettings& settings,
|
||||
RecordingSettings& recordingSettings,
|
||||
VisualiserComponent* parent = nullptr,
|
||||
bool visualiserOnly = false
|
||||
);
|
||||
bool visualiserOnly = false);
|
||||
~VisualiserComponent() override;
|
||||
|
||||
std::function<void()> openSettings;
|
||||
|
@ -75,7 +77,7 @@ public:
|
|||
VisualiserComponent* parent = nullptr;
|
||||
VisualiserComponent* child = nullptr;
|
||||
std::unique_ptr<VisualiserWindow> popout = nullptr;
|
||||
|
||||
|
||||
std::atomic<bool> active = true;
|
||||
|
||||
private:
|
||||
|
@ -83,17 +85,17 @@ private:
|
|||
CommonPluginEditor& editor;
|
||||
|
||||
float intensity;
|
||||
|
||||
|
||||
bool visualiserOnly;
|
||||
AudioPlayerComponent audioPlayer{audioProcessor};
|
||||
|
||||
SvgButton fullScreenButton{ "fullScreen", BinaryData::fullscreen_svg, juce::Colours::white, juce::Colours::white };
|
||||
SvgButton popOutButton{ "popOut", BinaryData::open_in_new_svg, juce::Colours::white, juce::Colours::white };
|
||||
SvgButton settingsButton{ "settings", BinaryData::cog_svg, juce::Colours::white, juce::Colours::white };
|
||||
SvgButton audioInputButton{ "audioInput", BinaryData::microphone_svg, juce::Colours::white, juce::Colours::red };
|
||||
|
||||
|
||||
SvgButton fullScreenButton{"fullScreen", BinaryData::fullscreen_svg, juce::Colours::white, juce::Colours::white};
|
||||
SvgButton popOutButton{"popOut", BinaryData::open_in_new_svg, juce::Colours::white, juce::Colours::white};
|
||||
SvgButton settingsButton{"settings", BinaryData::cog_svg, juce::Colours::white, juce::Colours::white};
|
||||
SvgButton audioInputButton{"audioInput", BinaryData::microphone_svg, juce::Colours::white, juce::Colours::red};
|
||||
|
||||
#if OSCI_PREMIUM
|
||||
SvgButton sharedTextureButton{ "sharedTexture", BinaryData::spout_svg, juce::Colours::white, juce::Colours::red };
|
||||
SvgButton sharedTextureButton{"sharedTexture", BinaryData::spout_svg, juce::Colours::white, juce::Colours::red};
|
||||
SharedTextureManager& sharedTextureManager;
|
||||
SharedTextureSender* sharedTextureSender = nullptr;
|
||||
#endif
|
||||
|
@ -109,37 +111,38 @@ private:
|
|||
RecordingSettings& recordingSettings;
|
||||
juce::File ffmpegFile;
|
||||
bool recordingAudio = true;
|
||||
|
||||
|
||||
#if OSCI_PREMIUM
|
||||
bool recordingVideo = true;
|
||||
bool downloading = false;
|
||||
|
||||
|
||||
long numFrames = 0;
|
||||
std::vector<unsigned char> framePixels;
|
||||
osci::WriteProcess ffmpegProcess;
|
||||
std::unique_ptr<juce::TemporaryFile> tempVideoFile;
|
||||
FFmpegEncoderManager ffmpegEncoderManager;
|
||||
#endif
|
||||
|
||||
|
||||
StopwatchComponent stopwatch;
|
||||
SvgButton record{"Record", BinaryData::record_svg, juce::Colours::red, juce::Colours::red.withAlpha(0.01f)};
|
||||
|
||||
|
||||
std::unique_ptr<juce::FileChooser> chooser;
|
||||
std::unique_ptr<juce::TemporaryFile> tempAudioFile;
|
||||
AudioRecorder audioRecorder;
|
||||
|
||||
|
||||
osci::Semaphore renderingSemaphore{0};
|
||||
|
||||
|
||||
void popoutWindow();
|
||||
|
||||
|
||||
// OPENGL
|
||||
|
||||
|
||||
juce::OpenGLContext openGLContext;
|
||||
|
||||
|
||||
juce::Rectangle<int> buttonRow;
|
||||
juce::Rectangle<int> viewportArea;
|
||||
|
||||
|
||||
float renderScale = 1.0f;
|
||||
|
||||
|
||||
GLuint quadIndexBuffer = 0;
|
||||
GLuint vertexIndexBuffer = 0;
|
||||
GLuint vertexBuffer = 0;
|
||||
|
@ -158,10 +161,10 @@ private:
|
|||
std::atomic<int> sampleBufferCount = 0;
|
||||
int prevSampleBufferCount = 0;
|
||||
long lastTriggerPosition = 0;
|
||||
|
||||
|
||||
std::vector<float> scratchVertices;
|
||||
std::vector<float> fullScreenQuad;
|
||||
|
||||
|
||||
GLuint frameBuffer = 0;
|
||||
|
||||
double currentFrameRate = 60.0;
|
||||
|
@ -175,32 +178,32 @@ private:
|
|||
Texture screenTexture;
|
||||
juce::OpenGLTexture screenOpenGLTexture;
|
||||
std::optional<Texture> targetTexture = std::nullopt;
|
||||
|
||||
|
||||
juce::Image screenTextureImage = juce::ImageFileFormat::loadFrom(BinaryData::noise_jpg, BinaryData::noise_jpgSize);
|
||||
juce::Image emptyScreenImage = juce::ImageFileFormat::loadFrom(BinaryData::empty_jpg, BinaryData::empty_jpgSize);
|
||||
|
||||
|
||||
#if OSCI_PREMIUM
|
||||
juce::Image oscilloscopeImage = juce::ImageFileFormat::loadFrom(BinaryData::real_png, BinaryData::real_pngSize);
|
||||
juce::Image vectorDisplayImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_png, BinaryData::vector_display_pngSize);
|
||||
|
||||
|
||||
juce::Image emptyReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::no_reflection_jpg, BinaryData::no_reflection_jpgSize);
|
||||
juce::Image oscilloscopeReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::real_reflection_png, BinaryData::real_reflection_pngSize);
|
||||
juce::Image vectorDisplayReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_reflection_png, BinaryData::vector_display_reflection_pngSize);
|
||||
|
||||
osci::Point REAL_SCREEN_OFFSET = { 0.02, -0.15 };
|
||||
osci::Point REAL_SCREEN_SCALE = { 0.6 };
|
||||
|
||||
osci::Point VECTOR_DISPLAY_OFFSET = { 0.075, -0.045 };
|
||||
osci::Point VECTOR_DISPLAY_SCALE = { 0.6 };
|
||||
|
||||
osci::Point REAL_SCREEN_OFFSET = {0.02, -0.15};
|
||||
osci::Point REAL_SCREEN_SCALE = {0.6};
|
||||
|
||||
osci::Point VECTOR_DISPLAY_OFFSET = {0.075, -0.045};
|
||||
osci::Point VECTOR_DISPLAY_SCALE = {0.6};
|
||||
float VECTOR_DISPLAY_FISH_EYE = 0.5;
|
||||
|
||||
|
||||
juce::OpenGLTexture reflectionOpenGLTexture;
|
||||
Texture reflectionTexture;
|
||||
|
||||
|
||||
std::unique_ptr<juce::OpenGLShaderProgram> glowShader;
|
||||
std::unique_ptr<juce::OpenGLShaderProgram> afterglowShader;
|
||||
#endif
|
||||
|
||||
|
||||
std::unique_ptr<juce::OpenGLShaderProgram> simpleShader;
|
||||
std::unique_ptr<juce::OpenGLShaderProgram> texturedShader;
|
||||
std::unique_ptr<juce::OpenGLShaderProgram> blurShader;
|
||||
|
@ -208,10 +211,10 @@ private:
|
|||
std::unique_ptr<juce::OpenGLShaderProgram> lineShader;
|
||||
std::unique_ptr<juce::OpenGLShaderProgram> outputShader;
|
||||
juce::OpenGLShaderProgram* currentShader;
|
||||
|
||||
|
||||
float fadeAmount;
|
||||
ScreenOverlay screenOverlay = ScreenOverlay::INVALID;
|
||||
|
||||
|
||||
const double RESAMPLE_RATIO = 6.0;
|
||||
double sampleRate = -1;
|
||||
double oldSampleRate = -1;
|
||||
|
@ -243,7 +246,7 @@ private:
|
|||
void viewportChanged(juce::Rectangle<int> area);
|
||||
|
||||
void renderScope(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints);
|
||||
|
||||
|
||||
double getSweepIncrement();
|
||||
|
||||
Texture createScreenTexture();
|
||||
|
@ -260,7 +263,7 @@ public:
|
|||
VisualiserWindow(juce::String name, VisualiserComponent* parent) : parent(parent), wasPaused(!parent->active), juce::DocumentWindow(name, juce::Colours::black, juce::DocumentWindow::TitleBarButtons::allButtons) {
|
||||
setAlwaysOnTop(true);
|
||||
}
|
||||
|
||||
|
||||
void closeButtonPressed() override {
|
||||
// local copy of parent so that we can safely delete the child
|
||||
VisualiserComponent* parent = this->parent;
|
||||
|
|
|
@ -1 +1 @@
|
|||
Subproject commit cf124cc5de4d9857c7633e9c03117f20e1550e81
|
||||
Subproject commit f8ac3007c25df061ca6e71ad2eaff4a5d01e2d7b
|
|
@ -574,6 +574,16 @@
|
|||
<FILE id="mC1tUv" name="ugen_JuceUtility.h" compile="0" resource="0"
|
||||
file="Source/UGen/ugen_JuceUtility.h"/>
|
||||
</GROUP>
|
||||
<GROUP id="{0F62E77C-5385-0C56-69A1-3C8866A6E6E3}" name="video">
|
||||
<FILE id="DniMew" name="FFmpegEncoderManager.cpp" compile="1" resource="0"
|
||||
file="Source/video/FFmpegEncoderManager.cpp"/>
|
||||
<FILE id="t2oI5O" name="FFmpegEncoderManager.h" compile="0" resource="0"
|
||||
file="Source/video/FFmpegEncoderManager.h"/>
|
||||
<FILE id="xEIRCs" name="InvisibleOpenGLContextComponent.h" compile="0"
|
||||
resource="0" file="Source/video/InvisibleOpenGLContextComponent.h"/>
|
||||
<FILE id="OyC3qj" name="SyphonFrameGrabber.h" compile="0" resource="0"
|
||||
file="Source/video/SyphonFrameGrabber.h"/>
|
||||
</GROUP>
|
||||
<GROUP id="{16A8DC64-BA02-898D-4DBA-AA3DDF6F9297}" name="visualiser">
|
||||
<FILE id="DkDKBX" name="AfterglowFragmentShader.glsl" compile="0" resource="0"
|
||||
file="Source/visualiser/AfterglowFragmentShader.glsl"/>
|
||||
|
@ -644,8 +654,6 @@
|
|||
file="Source/FrameSettingsComponent.cpp"/>
|
||||
<FILE id="lzBNS1" name="FrameSettingsComponent.h" compile="0" resource="0"
|
||||
file="Source/FrameSettingsComponent.h"/>
|
||||
<FILE id="nfoWJk" name="InvisibleOpenGLContextComponent.h" compile="0"
|
||||
resource="0" file="Source/InvisibleOpenGLContextComponent.h"/>
|
||||
<FILE id="d2zFqF" name="LookAndFeel.cpp" compile="1" resource="0" file="Source/LookAndFeel.cpp"/>
|
||||
<FILE id="TJDqWs" name="LookAndFeel.h" compile="0" resource="0" file="Source/LookAndFeel.h"/>
|
||||
<FILE id="X26RjJ" name="LuaComponent.cpp" compile="1" resource="0"
|
||||
|
@ -673,8 +681,6 @@
|
|||
file="Source/SettingsComponent.cpp"/>
|
||||
<FILE id="Vlmozi" name="SettingsComponent.h" compile="0" resource="0"
|
||||
file="Source/SettingsComponent.h"/>
|
||||
<FILE id="jyHVpz" name="SyphonFrameGrabber.h" compile="0" resource="0"
|
||||
file="Source/SyphonFrameGrabber.h"/>
|
||||
<FILE id="UxZu4n" name="TxtComponent.cpp" compile="1" resource="0"
|
||||
file="Source/TxtComponent.cpp"/>
|
||||
<FILE id="kxPbsL" name="TxtComponent.h" compile="0" resource="0" file="Source/TxtComponent.h"/>
|
||||
|
|
|
@ -77,6 +77,12 @@
|
|||
</GROUP>
|
||||
</GROUP>
|
||||
<GROUP id="{75439074-E50C-362F-1EDF-8B4BE9011259}" name="Source">
|
||||
<GROUP id="{34BCEBE9-062C-27E1-5661-B33652D8F4F5}" name="video">
|
||||
<FILE id="pmHHqY" name="FFmpegEncoderManager.cpp" compile="1" resource="0"
|
||||
file="Source/video/FFmpegEncoderManager.cpp"/>
|
||||
<FILE id="oKPzgR" name="FFmpegEncoderManager.h" compile="0" resource="0"
|
||||
file="Source/video/FFmpegEncoderManager.h"/>
|
||||
</GROUP>
|
||||
<FILE id="fqqP0r" name="CustomStandalone.cpp" compile="1" resource="0"
|
||||
file="Source/CustomStandalone.cpp"/>
|
||||
<FILE id="TFmWW0" name="CustomStandaloneFilterWindow.h" compile="0"
|
||||
|
|
Ładowanie…
Reference in New Issue