Merge pull request #283 from jameshball/develop

Next major version
pull/296/head v2.4.8.0
James H Ball 2025-02-04 20:16:27 +00:00 committed by GitHub
commit f2f5059e47
No key found in the database for this signature
GPG key ID: B5690EEEBB952194
64 changed files with 1357 additions and 520 deletions

View file

@ -37,9 +37,9 @@ jobs:
shell: bash
- name: Upload Artifact
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: Binaries
name: "${{ matrix.project }}-${{ matrix.version }}-linux"
path: bin
retention-days: 7
build-macos:
@ -128,9 +128,9 @@ jobs:
run: spctl -a -vvv -t install "bin/${{ matrix.project }}-${{ matrix.version }}.pkg"
- name: Upload Artifact
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: Binaries
name: "${{ matrix.project }}-${{ matrix.version }}-macos"
path: bin
retention-days: 7
build-windows:
@ -178,8 +178,8 @@ jobs:
run: mv "packaging/build/${{ matrix.project }}.exe" "bin/${{ matrix.project }}-${{ matrix.version }}.exe"
- name: Upload Artifact
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: Binaries
name: "${{ matrix.project }}-${{ matrix.version }}-windows"
path: bin
retention-days: 7

Binary file not shown. Before: 21 KiB → After: 25 KiB

Binary file not shown. Before: 6.3 KiB → After: 25 KiB

Binary file not shown. Before: 84 KiB → After: 465 KiB

Binary file not shown. Before: 79 KiB

Binary file not shown. After: 4.7 MiB

Binary file not shown. Before: 33 KiB

Binary file not shown. After: 1.5 MiB

Binary file not shown. Before: 56 KiB

Binary file not shown. After: 4.1 MiB

Binary file not shown. Before: 64 KiB

Binary file not shown. After: 3.6 MiB

View file

@ -2,7 +2,7 @@
#include "CommonPluginEditor.h"
#include <juce_audio_plugin_client/Standalone/juce_StandaloneFilterWindow.h>
CommonPluginEditor::CommonPluginEditor(CommonAudioProcessor& p, juce::String appName, juce::String projectFileType, int width, int height)
CommonPluginEditor::CommonPluginEditor(CommonAudioProcessor& p, juce::String appName, juce::String projectFileType, int defaultWidth, int defaultHeight)
: AudioProcessorEditor(&p), audioProcessor(p), appName(appName), projectFileType(projectFileType)
{
if (!applicationFolder.exists()) {
@ -37,14 +37,17 @@ CommonPluginEditor::CommonPluginEditor(CommonAudioProcessor& p, juce::String app
}
addAndMakeVisible(visualiser);
int width = std::any_cast<int>(audioProcessor.getProperty("appWidth", defaultWidth));
int height = std::any_cast<int>(audioProcessor.getProperty("appHeight", defaultHeight));
visualiserSettings.setLookAndFeel(&getLookAndFeel());
visualiserSettings.setSize(550, VISUALISER_SETTINGS_HEIGHT);
visualiserSettings.setColour(juce::ResizableWindow::backgroundColourId, Colours::dark);
recordingSettings.setLookAndFeel(&getLookAndFeel());
recordingSettings.setSize(350, 230);
recordingSettingsWindow.centreWithSize(350, 260);
recordingSettings.setSize(350, 280);
recordingSettingsWindow.centreWithSize(350, 320);
#if JUCE_WINDOWS
// if not standalone, use native title bar for compatibility with DAWs
recordingSettingsWindow.setUsingNativeTitleBar(processor.wrapperType == juce::AudioProcessor::WrapperType::wrapperType_Standalone);
@ -65,6 +68,11 @@ CommonPluginEditor::CommonPluginEditor(CommonAudioProcessor& p, juce::String app
#endif
}
void CommonPluginEditor::resized() {
audioProcessor.setProperty("appWidth", getWidth());
audioProcessor.setProperty("appHeight", getHeight());
}
void CommonPluginEditor::initialiseMenuBar(juce::MenuBarModel& menuBarModel) {
menuBar.setModel(&menuBarModel);
}
@ -95,22 +103,25 @@ bool CommonPluginEditor::keyPressed(const juce::KeyPress& key) {
return false;
}
void CommonPluginEditor::openProject(const juce::File& file) {
if (file != juce::File()) {
auto data = juce::MemoryBlock();
if (file.loadFileAsData(data)) {
audioProcessor.setStateInformation(data.getData(), data.getSize());
}
audioProcessor.currentProjectFile = file.getFullPathName();
audioProcessor.lastOpenedDirectory = file.getParentDirectory();
updateTitle();
}
}
void CommonPluginEditor::openProject() {
chooser = std::make_unique<juce::FileChooser>("Load " + appName + " Project", audioProcessor.lastOpenedDirectory, "*." + projectFileType);
auto flags = juce::FileBrowserComponent::openMode |
juce::FileBrowserComponent::canSelectFiles;
chooser->launchAsync(flags, [this](const juce::FileChooser& chooser) {
auto file = chooser.getResult();
if (file != juce::File()) {
auto data = juce::MemoryBlock();
if (file.loadFileAsData(data)) {
audioProcessor.setStateInformation(data.getData(), data.getSize());
}
audioProcessor.currentProjectFile = file.getFullPathName();
audioProcessor.lastOpenedDirectory = file.getParentDirectory();
updateTitle();
}
openProject(chooser.getResult());
});
}
@ -143,7 +154,7 @@ void CommonPluginEditor::saveProjectAs() {
void CommonPluginEditor::updateTitle() {
juce::String title = appName;
if (!audioProcessor.currentProjectFile.isEmpty()) {
appName += " - " + audioProcessor.currentProjectFile;
title += " - " + audioProcessor.currentProjectFile;
}
getTopLevelComponent()->setName(title);
}

View file

@ -15,6 +15,7 @@ public:
~CommonPluginEditor() override;
void initialiseMenuBar(juce::MenuBarModel& menuBarModel);
void openProject(const juce::File& file);
void openProject();
void saveProject();
void saveProjectAs();
@ -22,6 +23,7 @@ public:
void openAudioSettings();
void openRecordingSettings();
void resetToDefault();
void resized() override;
private:
CommonAudioProcessor& audioProcessor;
@ -49,7 +51,11 @@ public:
SharedTextureManager sharedTextureManager;
#endif
int VISUALISER_SETTINGS_HEIGHT = 750;
#if SOSCI_FEATURES
int VISUALISER_SETTINGS_HEIGHT = 1100;
#else
int VISUALISER_SETTINGS_HEIGHT = 700;
#endif
VisualiserSettings visualiserSettings = VisualiserSettings(audioProcessor.visualiserParameters, 3);
RecordingSettings recordingSettings = RecordingSettings(audioProcessor.recordingParameters);

View file

@ -225,3 +225,79 @@ void CommonAudioProcessor::removeAudioPlayerListener(AudioPlayerListener* listen
juce::SpinLock::ScopedLockType lock(audioPlayerListenersLock);
audioPlayerListeners.erase(std::remove(audioPlayerListeners.begin(), audioPlayerListeners.end(), listener), audioPlayerListeners.end());
}
std::any CommonAudioProcessor::getProperty(const std::string& key) {
juce::SpinLock::ScopedLockType lock(propertiesLock);
return properties[key];
}
std::any CommonAudioProcessor::getProperty(const std::string& key, std::any defaultValue) {
juce::SpinLock::ScopedLockType lock(propertiesLock);
auto it = properties.find(key);
if (it == properties.end()) {
properties[key] = defaultValue;
return defaultValue;
}
return it->second;
}
void CommonAudioProcessor::setProperty(const std::string& key, std::any value) {
juce::SpinLock::ScopedLockType lock(propertiesLock);
properties[key] = value;
}
void CommonAudioProcessor::saveProperties(juce::XmlElement& xml) {
juce::SpinLock::ScopedLockType lock(propertiesLock);
auto propertiesXml = xml.createNewChildElement("properties");
for (auto& property : properties) {
auto element = propertiesXml->createNewChildElement("property");
element->setAttribute("key", property.first);
if (std::any_cast<int>(&property.second) != nullptr) {
element->setAttribute("type", "int");
element->setAttribute("value", std::any_cast<int>(property.second));
} else if (std::any_cast<float>(&property.second) != nullptr) {
element->setAttribute("type", "float");
element->setAttribute("value", std::any_cast<float>(property.second));
} else if (std::any_cast<double>(&property.second) != nullptr) {
element->setAttribute("type", "double");
element->setAttribute("value", std::any_cast<double>(property.second));
} else if (std::any_cast<bool>(&property.second) != nullptr) {
element->setAttribute("type", "bool");
element->setAttribute("value", std::any_cast<bool>(property.second));
} else if (std::any_cast<juce::String>(&property.second) != nullptr) {
element->setAttribute("type", "string");
element->setAttribute("value", std::any_cast<juce::String>(property.second));
} else {
jassertfalse;
}
}
}
void CommonAudioProcessor::loadProperties(juce::XmlElement& xml) {
juce::SpinLock::ScopedLockType lock(propertiesLock);
auto propertiesXml = xml.getChildByName("properties");
if (propertiesXml != nullptr) {
for (auto property : propertiesXml->getChildIterator()) {
auto key = property->getStringAttribute("key").toStdString();
auto type = property->getStringAttribute("type");
if (type == "int") {
properties[key] = property->getIntAttribute("value");
} else if (type == "float") {
properties[key] = property->getDoubleAttribute("value");
} else if (type == "double") {
properties[key] = property->getDoubleAttribute("value");
} else if (type == "bool") {
properties[key] = property->getBoolAttribute("value");
} else if (type == "string") {
properties[key] = property->getStringAttribute("value");
} else {
jassertfalse;
}
}
}
}
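
The new property store keeps arbitrary std::any values behind a SpinLock and round-trips them through the plugin's XML state; note that only int, float, double, bool and juce::String values are handled by saveProperties. A minimal usage sketch, assuming only the getProperty/setProperty API added above (the helper functions are illustrative, not part of the PR):

#include <any>
// assumes the CommonAudioProcessor declaration from this PR is in scope

// Remember the editor size, exactly as CommonPluginEditor::resized() does above.
void rememberEditorSize(CommonAudioProcessor& p, int width, int height) {
    p.setProperty("appWidth", width);   // stored as std::any holding int
    p.setProperty("appHeight", height);
}

// Restore it later; getProperty(key, default) inserts the default on first use,
// so the any_cast is safe as long as the stored type matches the default's type.
int restoredWidth(CommonAudioProcessor& p, int defaultWidth) {
    return std::any_cast<int>(p.getProperty("appWidth", defaultWidth));
}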

View file

@ -9,6 +9,7 @@
#pragma once
#include <JuceHeader.h>
#include <any>
#include "concurrency/AudioBackgroundThread.h"
#include "concurrency/AudioBackgroundThreadManager.h"
#include "audio/SampleRateManager.h"
@ -59,6 +60,9 @@ public:
void stopAudioFile();
void addAudioPlayerListener(AudioPlayerListener* listener);
void removeAudioPlayerListener(AudioPlayerListener* listener);
std::any getProperty(const std::string& key);
std::any getProperty(const std::string& key, std::any defaultValue);
void setProperty(const std::string& key, std::any value);
juce::SpinLock audioPlayerListenersLock;
std::vector<AudioPlayerListener*> audioPlayerListeners;
@ -122,6 +126,12 @@ protected:
BooleanParameter* getBooleanParameter(juce::String id);
FloatParameter* getFloatParameter(juce::String id);
IntParameter* getIntParameter(juce::String id);
void saveProperties(juce::XmlElement& xml);
void loadProperties(juce::XmlElement& xml);
juce::SpinLock propertiesLock;
std::unordered_map<std::string, std::any> properties;
//==============================================================================
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (CommonAudioProcessor)

View file

@ -12,7 +12,7 @@ EffectsComponent::EffectsComponent(OscirenderAudioProcessor& p, OscirenderAudioP
frequency.slider.setValue(audioProcessor.frequencyEffect->getValue(), juce::dontSendNotification);
frequency.slider.onValueChange = [this] {
audioProcessor.frequencyEffect->setValue(frequency.slider.getValue());
audioProcessor.frequencyEffect->parameters[0]->setUnnormalisedValueNotifyingHost(frequency.slider.getValue());
};
/*addBtn.setButtonText("Add Item...");

View file

@ -28,6 +28,7 @@ OscirenderLookAndFeel::OscirenderLookAndFeel() {
setColour(juce::TextButton::buttonOnColourId, Colours::darker);
setColour(juce::AlertWindow::outlineColourId, Colours::darker);
setColour(juce::AlertWindow::backgroundColourId, Colours::darker);
setColour(juce::ColourSelector::backgroundColourId, Colours::darker);
// combo box
setColour(juce::ComboBox::backgroundColourId, Colours::veryDark);

View file

@ -17,9 +17,8 @@ MainComponent::MainComponent(OscirenderAudioProcessor& p, OscirenderAudioProcess
chooser->launchAsync(flags, [this](const juce::FileChooser& chooser) {
juce::SpinLock::ScopedLockType lock(audioProcessor.parsersLock);
bool fileAdded = false;
for (auto& url : chooser.getURLResults()) {
if (url.isLocalFile()) {
juce::File file = url.getLocalFile();
for (auto& file : chooser.getResults()) {
if (file != juce::File()) {
audioProcessor.lastOpenedDirectory = file.getParentDirectory();
audioProcessor.addFile(file);
pluginEditor.addCodeEditor(audioProcessor.getCurrentFileIndex());

View file

@ -5,7 +5,6 @@
#include "parser/FileParser.h"
#include "parser/FrameProducer.h"
#include "visualiser/VisualiserComponent.h"
#include "audio/PitchDetector.h"
#include "UGen/ugen_JuceEnvelopeComponent.h"
#include "components/SvgButton.h"

View file

@ -2,8 +2,7 @@
#include "PluginEditor.h"
#include <juce_audio_plugin_client/Standalone/juce_StandaloneFilterWindow.h>
OscirenderAudioProcessorEditor::OscirenderAudioProcessorEditor(OscirenderAudioProcessor& p)
: CommonPluginEditor(p, "osci-render", "osci", 1100, 750), audioProcessor(p), collapseButton("Collapse", juce::Colours::white, juce::Colours::white, juce::Colours::white) {
OscirenderAudioProcessorEditor::OscirenderAudioProcessorEditor(OscirenderAudioProcessor& p) : CommonPluginEditor(p, "osci-render", "osci", 1100, 750), audioProcessor(p), collapseButton("Collapse", juce::Colours::white, juce::Colours::white, juce::Colours::white) {
#if !SOSCI_FEATURES
addAndMakeVisible(upgradeButton);
upgradeButton.onClick = [this] {
@ -28,16 +27,9 @@ OscirenderAudioProcessorEditor::OscirenderAudioProcessorEditor(OscirenderAudioPr
addAndMakeVisible(collapseButton);
collapseButton.onClick = [this] {
{
juce::SpinLock::ScopedLockType lock(audioProcessor.parsersLock);
int originalIndex = audioProcessor.getCurrentFileIndex();
int index = editingCustomFunction ? 0 : audioProcessor.getCurrentFileIndex() + 1;
if (originalIndex != -1 || editingCustomFunction) {
codeEditors[index]->setVisible(!codeEditors[index]->isVisible());
updateCodeEditor(!editingCustomFunction && isBinaryFile(audioProcessor.getCurrentFileName()));
}
}
setCodeEditorVisible(std::nullopt);
};
juce::Path path;
path.addTriangle(0.0f, 0.5f, 1.0f, 1.0f, 1.0f, 0.0f);
collapseButton.setShape(path, false, true, true);
@ -55,17 +47,20 @@ OscirenderAudioProcessorEditor::OscirenderAudioProcessorEditor(OscirenderAudioPr
audioProcessor.fileChangeBroadcaster.addChangeListener(this);
audioProcessor.broadcaster.addChangeListener(this);
}
double codeEditorLayoutPreferredSize = std::any_cast<double>(audioProcessor.getProperty("codeEditorLayoutPreferredSize", -0.7));
double luaLayoutPreferredSize = std::any_cast<double>(audioProcessor.getProperty("luaLayoutPreferredSize", -0.7));
layout.setItemLayout(0, -0.3, -1.0, -0.7);
layout.setItemLayout(0, -0.3, -1.0, codeEditorLayoutPreferredSize);
layout.setItemLayout(1, RESIZER_BAR_SIZE, RESIZER_BAR_SIZE, RESIZER_BAR_SIZE);
layout.setItemLayout(2, -0.0, -1.0, -0.3);
layout.setItemLayout(2, -0.0, -1.0, -(1.0 + codeEditorLayoutPreferredSize));
addAndMakeVisible(settings);
addAndMakeVisible(resizerBar);
luaLayout.setItemLayout(0, -0.3, -1.0, -0.7);
luaLayout.setItemLayout(0, -0.3, -1.0, luaLayoutPreferredSize);
luaLayout.setItemLayout(1, RESIZER_BAR_SIZE, RESIZER_BAR_SIZE, RESIZER_BAR_SIZE);
luaLayout.setItemLayout(2, -0.1, -1.0, -0.3);
luaLayout.setItemLayout(2, -0.1, -1.0, -(1.0 + luaLayoutPreferredSize));
addAndMakeVisible(lua);
addAndMakeVisible(luaResizerBar);
@ -79,7 +74,6 @@ OscirenderAudioProcessorEditor::OscirenderAudioProcessorEditor(OscirenderAudioPr
visualiserSettingsWindow.setVisible(false);
};
visualiserSettingsWindow.centreWithSize(550, 400);
#if JUCE_WINDOWS
// if not standalone, use native title bar for compatibility with DAWs
visualiserSettingsWindow.setUsingNativeTitleBar(processor.wrapperType == juce::AudioProcessor::WrapperType::wrapperType_Standalone);
@ -97,6 +91,53 @@ OscirenderAudioProcessorEditor::~OscirenderAudioProcessorEditor() {
audioProcessor.fileChangeBroadcaster.removeChangeListener(this);
}
void OscirenderAudioProcessorEditor::setCodeEditorVisible(std::optional<bool> visible) {
juce::SpinLock::ScopedLockType lock(audioProcessor.parsersLock);
int originalIndex = audioProcessor.getCurrentFileIndex();
int index = editingCustomFunction ? 0 : audioProcessor.getCurrentFileIndex() + 1;
if (originalIndex != -1 || editingCustomFunction) {
codeEditors[index]->setVisible(visible.has_value() ? visible.value() : !codeEditors[index]->isVisible());
updateCodeEditor(!editingCustomFunction && isBinaryFile(audioProcessor.getCurrentFileName()));
}
}
bool OscirenderAudioProcessorEditor::isInterestedInFileDrag(const juce::StringArray& files) {
if (files.size() != 1) {
return false;
}
juce::File file(files[0]);
return
file.hasFileExtension("wav") ||
file.hasFileExtension("aiff") ||
file.hasFileExtension("osci") ||
file.hasFileExtension("txt") ||
file.hasFileExtension("lua") ||
file.hasFileExtension("svg") ||
file.hasFileExtension("obj") ||
file.hasFileExtension("gif") ||
file.hasFileExtension("png") ||
file.hasFileExtension("jpg") ||
file.hasFileExtension("gpla");
}
void OscirenderAudioProcessorEditor::filesDropped(const juce::StringArray& files, int x, int y) {
if (files.size() != 1) {
return;
}
juce::File file(files[0]);
if (file.hasFileExtension("osci")) {
openProject(file);
} else {
juce::SpinLock::ScopedLockType lock1(audioProcessor.parsersLock);
juce::SpinLock::ScopedLockType lock2(audioProcessor.effectsLock);
audioProcessor.addFile(file);
addCodeEditor(audioProcessor.getCurrentFileIndex());
fileUpdated(audioProcessor.getCurrentFileName());
}
}
bool OscirenderAudioProcessorEditor::isBinaryFile(juce::String name) {
return name.endsWith(".gpla") || name.endsWith(".gif") || name.endsWith(".png") || name.endsWith(".jpg") || name.endsWith(".jpeg") || name.endsWith(".wav") || name.endsWith(".aiff");
}
@ -110,7 +151,8 @@ void OscirenderAudioProcessorEditor::initialiseCodeEditors() {
for (int i = 0; i < audioProcessor.numFiles(); i++) {
addCodeEditor(i);
}
fileUpdated(audioProcessor.getCurrentFileName());
bool codeEditorVisible = std::any_cast<bool>(audioProcessor.getProperty("codeEditorVisible", false));
fileUpdated(audioProcessor.getCurrentFileName(), codeEditorVisible);
}
void OscirenderAudioProcessorEditor::paint(juce::Graphics& g) {
@ -118,6 +160,8 @@ void OscirenderAudioProcessorEditor::paint(juce::Graphics& g) {
}
void OscirenderAudioProcessorEditor::resized() {
CommonPluginEditor::resized();
auto area = getLocalBounds();
if (audioProcessor.visualiserParameters.visualiserFullScreen->getBoolValue()) {
@ -218,6 +262,10 @@ void OscirenderAudioProcessorEditor::resized() {
}
settings.setBounds(area);
audioProcessor.setProperty("codeEditorLayoutPreferredSize", layout.getItemCurrentRelativeSize(0));
audioProcessor.setProperty("luaLayoutPreferredSize", luaLayout.getItemCurrentRelativeSize(0));
repaint();
}
@ -299,6 +347,9 @@ void OscirenderAudioProcessorEditor::updateCodeEditor(bool binaryFile, bool shou
updatingDocumentsWithParserLock = false;
}
}
audioProcessor.setProperty("codeEditorVisible", visible);
triggerAsyncUpdate();
}
@ -335,7 +386,7 @@ void OscirenderAudioProcessorEditor::toggleLayout(juce::StretchableLayoutManager
layout.getItemLayout(2, minSize, maxSize, preferredSize);
layout.getItemLayout(0, otherMinSize, otherMaxSize, otherPreferredSize);
if (preferredSize == CLOSED_PREF_SIZE) {
if (layout.getItemCurrentAbsoluteSize(2) <= CLOSED_PREF_SIZE) {
double otherPrefSize = -(1 + prefSize);
if (prefSize > 0) {
otherPrefSize = -1.0;

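In juce::StretchableLayoutManager a negative size is a proportion of the total, so saving getItemCurrentRelativeSize(0) and restoring the opposite pane as -(1.0 + preferredSize) keeps the two panes complementary: -0.7 for the code editor leaves -(1.0 + (-0.7)) = -0.3, i.e. 30%, for the other pane. A small sketch of that restore step, with the RESIZER_BAR_SIZE constant assumed rather than taken from the PR:

#include <JuceHeader.h>

// Sketch: restore two complementary panes from one saved proportion (e.g. -0.7 = 70%).
void restoreSplit(juce::StretchableLayoutManager& layout, double savedPreferredSize) {
    const int RESIZER_BAR_SIZE = 7; // assumption: whatever the editor uses for its resizer bar
    layout.setItemLayout(0, -0.3, -1.0, savedPreferredSize);           // min 30%, max 100%
    layout.setItemLayout(1, RESIZER_BAR_SIZE, RESIZER_BAR_SIZE, RESIZER_BAR_SIZE);
    layout.setItemLayout(2, -0.0, -1.0, -(1.0 + savedPreferredSize));  // the remaining share
}
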
View file

@ -11,7 +11,7 @@
#include "visualiser/VisualiserSettings.h"
#include "CommonPluginEditor.h"
class OscirenderAudioProcessorEditor : public CommonPluginEditor, private juce::CodeDocument::Listener, public juce::AsyncUpdater, public juce::ChangeListener {
class OscirenderAudioProcessorEditor : public CommonPluginEditor, private juce::CodeDocument::Listener, public juce::AsyncUpdater, public juce::ChangeListener, public juce::FileDragAndDropTarget {
public:
OscirenderAudioProcessorEditor(OscirenderAudioProcessor&);
~OscirenderAudioProcessorEditor() override;
@ -28,6 +28,8 @@ public:
void changeListenerCallback(juce::ChangeBroadcaster* source) override;
void toggleLayout(juce::StretchableLayoutManager& layout, double prefSize);
void openVisualiserSettings();
bool isInterestedInFileDrag(const juce::StringArray& files) override;
void filesDropped(const juce::StringArray& files, int x, int y) override;
void editCustomFunction(bool enabled);
@ -76,6 +78,7 @@ public:
void codeDocumentTextDeleted(int startIndex, int endIndex) override;
void updateCodeDocument();
void updateCodeEditor(bool binaryFile, bool shouldOpenEditor = false);
void setCodeEditorVisible(std::optional<bool> visible);
bool keyPressed(const juce::KeyPress& key) override;
void mouseDown(const juce::MouseEvent& event) override;

View file

@ -103,10 +103,15 @@ OscirenderAudioProcessor::OscirenderAudioProcessor() : CommonAudioProcessor(Buse
std::make_shared<SmoothEffect>(),
new EffectParameter("Smoothing", "This works as a low-pass frequency filter that removes high frequencies, making the image look smoother, and audio sound less harsh.", "smoothing", VERSION_HINT, 0.75, 0.0, 1.0)
));
toggleableEffects.push_back(std::make_shared<Effect>(
std::shared_ptr<Effect> wobble = std::make_shared<Effect>(
wobbleEffect,
new EffectParameter("Wobble", "Adds a sine wave of the prominent frequency in the audio currently playing. The sine wave's frequency is slightly offset to create a subtle 'wobble' in the image. Increasing the slider increases the strength of the wobble.", "wobble", VERSION_HINT, 0.3, 0.0, 1.0)
));
std::vector<EffectParameter*>{
new EffectParameter("Wobble Amount", "Adds a sine wave of the prominent frequency in the audio currently playing. The sine wave's frequency is slightly offset to create a subtle 'wobble' in the image. Increasing the slider increases the strength of the wobble.", "wobble", VERSION_HINT, 0.3, 0.0, 1.0),
new EffectParameter("Wobble Phase", "Controls the phase of the wobble.", "wobblePhase", VERSION_HINT, 0.0, -1.0, 1.0),
}
);
wobble->getParameter("wobblePhase")->lfo->setUnnormalisedValueNotifyingHost((int) LfoType::Sawtooth);
toggleableEffects.push_back(wobble);
toggleableEffects.push_back(std::make_shared<Effect>(
delayEffect,
std::vector<EffectParameter*>{
@ -121,8 +126,8 @@ OscirenderAudioProcessor::OscirenderAudioProcessor() : CommonAudioProcessor(Buse
}
));
toggleableEffects.push_back(custom);
toggleableEffects.push_back(traceMax);
toggleableEffects.push_back(traceMin);
toggleableEffects.push_back(trace);
trace->getParameter("traceLength")->lfo->setUnnormalisedValueNotifyingHost((int) LfoType::Sawtooth);
for (int i = 0; i < toggleableEffects.size(); i++) {
auto effect = toggleableEffects[i];
@ -316,7 +321,6 @@ void OscirenderAudioProcessor::openFile(int index) {
if (index < 0 || index >= fileBlocks.size()) {
return;
}
juce::SpinLock::ScopedLockType lock(fontLock);
parsers[index]->parse(juce::String(fileIds[index]), fileNames[index].fromLastOccurrenceOf(".", true, false), std::make_unique<juce::MemoryInputStream>(*fileBlocks[index], false), font);
changeCurrentFile(index);
}
@ -402,6 +406,11 @@ void OscirenderAudioProcessor::setObjectServerRendering(bool enabled) {
}
}
void OscirenderAudioProcessor::setObjectServerPort(int port) {
setProperty("objectServerPort", port);
objectServer.reload();
}
void OscirenderAudioProcessor::processBlock(juce::AudioBuffer<float>& buffer, juce::MidiBuffer& midiMessages) {
juce::ScopedNoDenormals noDenormals;
// Audio info variables
@ -661,6 +670,8 @@ void OscirenderAudioProcessor::getStateInformation(juce::MemoryBlock& destData)
xml->setAttribute("currentFile", currentFile);
recordingParameters.save(xml.get());
saveProperties(*xml);
copyXmlToBinary(*xml, destData);
}
@ -744,7 +755,6 @@ void OscirenderAudioProcessor::setStateInformation(const void* data, int sizeInB
auto family = fontXml->getStringAttribute("family");
auto bold = fontXml->getBoolAttribute("bold");
auto italic = fontXml->getBoolAttribute("italic");
juce::SpinLock::ScopedLockType lock(fontLock);
font = juce::Font(family, 1.0, (bold ? juce::Font::bold : 0) | (italic ? juce::Font::italic : 0));
}
@ -777,6 +787,9 @@ void OscirenderAudioProcessor::setStateInformation(const void* data, int sizeInB
changeCurrentFile(xml->getIntAttribute("currentFile", -1));
recordingParameters.load(xml.get());
loadProperties(*xml);
objectServer.reload();
broadcaster.sendChangeMessage();
prevMidiEnabled = !midiEnabled->getBoolValue();

View file

@ -21,7 +21,6 @@
#include "audio/SampleRateManager.h"
#include <numbers>
#include "audio/DelayEffect.h"
#include "audio/PitchDetector.h"
#include "audio/WobbleEffect.h"
#include "audio/PerspectiveEffect.h"
#include "obj/ObjectServer.h"
@ -68,25 +67,25 @@ public:
"Frequency",
"Controls how many times per second the image is drawn, thereby controlling the pitch of the sound. Lower frequencies result in more-accurately drawn images, but more flickering, and vice versa.",
"frequency",
VERSION_HINT, 220.0, 0.0, 12000.0
VERSION_HINT, 220.0, 0.0, 4200.0
)
);
std::shared_ptr<Effect> traceMax = std::make_shared<Effect>(
new EffectParameter(
"Trace max",
"Defines the maximum proportion of the image that is drawn before skipping to the next frame. This has the effect of 'tracing' out the image from a single dot when animated. By default, we draw until the end of the frame, so this value is 1.0.",
"traceMax",
VERSION_HINT, 0.75, 0.0, 1.0
)
);
std::shared_ptr<Effect> traceMin = std::make_shared<Effect>(
new EffectParameter(
"Trace min",
"Defines the proportion of the image that drawing starts from. This has the effect of 'tracing' out the image from a single dot when animated. By default, we start drawing from the beginning of the frame, so this value is 0.0.",
"traceMin",
VERSION_HINT, 0.25, 0.0, 1.0
)
std::shared_ptr<Effect> trace = std::make_shared<Effect>(
std::vector<EffectParameter*>{
new EffectParameter(
"Trace Start",
"Defines how far into the frame the drawing is started at. This has the effect of 'tracing' out the image from a single dot when animated. By default, we start drawing from the beginning of the frame, so this value is 0.0.",
"traceStart",
VERSION_HINT, 0.0, 0.0, 1.0, 0.001, 0.001
),
new EffectParameter(
"Trace Length",
"Defines how much of the frame is drawn per cycle. This has the effect of 'tracing' out the image from a single dot when animated. By default, we draw the whole frame, corresponding to a value of 1.0.",
"traceLength",
VERSION_HINT, 1.0, 0.0, 1.0, 0.001, 0.001
),
}
);
std::shared_ptr<DelayEffect> delayEffect = std::make_shared<DelayEffect>();
@ -111,7 +110,7 @@ public:
BooleanParameter* midiEnabled = new BooleanParameter("MIDI Enabled", "midiEnabled", VERSION_HINT, false, "Enable MIDI input for the synth. If disabled, the synth will play a constant tone, as controlled by the frequency slider.");
BooleanParameter* inputEnabled = new BooleanParameter("Audio Input Enabled", "inputEnabled", VERSION_HINT, false, "Enable to use input audio, instead of the generated audio.");
std::atomic<float> frequency = 220.0f;
std::atomic<double> frequency = 220.0;
juce::SpinLock parsersLock;
std::vector<std::shared_ptr<FileParser>> parsers;
@ -177,10 +176,8 @@ public:
double animationTime = 0.f;
PitchDetector pitchDetector{*this};
std::shared_ptr<WobbleEffect> wobbleEffect = std::make_shared<WobbleEffect>(pitchDetector);
std::shared_ptr<WobbleEffect> wobbleEffect = std::make_shared<WobbleEffect>(*this);
juce::SpinLock fontLock;
juce::Font font = juce::Font(juce::Font::getDefaultSansSerifFontName(), 1.0f, juce::Font::plain);
ShapeSound::Ptr objectServerSound = new ShapeSound();
@ -204,12 +201,13 @@ public:
juce::String getFileId(int index);
std::shared_ptr<juce::MemoryBlock> getFileBlock(int index);
void setObjectServerRendering(bool enabled);
void setObjectServerPort(int port);
void addErrorListener(ErrorListener* listener);
void removeErrorListener(ErrorListener* listener);
void notifyErrorListeners(int lineNumber, juce::String id, juce::String error);
private:
bool prevMidiEnabled = !midiEnabled->getBoolValue();
std::atomic<bool> prevMidiEnabled = !midiEnabled->getBoolValue();
juce::SpinLock audioThreadCallbackLock;
std::function<void(const juce::AudioBuffer<float>&)> audioThreadCallback;

View file

@ -10,14 +10,17 @@ SettingsComponent::SettingsComponent(OscirenderAudioProcessor& p, OscirenderAudi
addAndMakeVisible(midi);
addChildComponent(txt);
addChildComponent(frame);
double midiLayoutPreferredSize = std::any_cast<double>(audioProcessor.getProperty("midiLayoutPreferredSize", pluginEditor.CLOSED_PREF_SIZE));
double mainLayoutPreferredSize = std::any_cast<double>(audioProcessor.getProperty("mainLayoutPreferredSize", -0.4));
midiLayout.setItemLayout(0, -0.1, -1.0, -1.0);
midiLayout.setItemLayout(0, -0.1, -1.0, -(1.0 + midiLayoutPreferredSize));
midiLayout.setItemLayout(1, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE);
midiLayout.setItemLayout(2, pluginEditor.CLOSED_PREF_SIZE, -0.9, pluginEditor.CLOSED_PREF_SIZE);
mainLayout.setItemLayout(0, -0.1, -0.9, -0.4);
midiLayout.setItemLayout(2, pluginEditor.CLOSED_PREF_SIZE, -0.9, midiLayoutPreferredSize);
mainLayout.setItemLayout(0, -0.1, -0.9, mainLayoutPreferredSize);
mainLayout.setItemLayout(1, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE, pluginEditor.RESIZER_BAR_SIZE);
mainLayout.setItemLayout(2, -0.1, -0.9, -0.6);
mainLayout.setItemLayout(2, -0.1, -0.9, -(1.0 + mainLayoutPreferredSize));
}
@ -59,6 +62,11 @@ void SettingsComponent::resized() {
}
effects.setBounds(dummyBounds);
if (isVisible() && getWidth() > 0 && getHeight() > 0) {
audioProcessor.setProperty("midiLayoutPreferredSize", midiLayout.getItemCurrentRelativeSize(2));
audioProcessor.setProperty("mainLayoutPreferredSize", mainLayout.getItemCurrentRelativeSize(0));
}
repaint();
}

View file

@ -50,6 +50,7 @@ void SosciPluginEditor::paint(juce::Graphics& g) {
}
void SosciPluginEditor::resized() {
CommonPluginEditor::resized();
auto area = getLocalBounds();
if (audioProcessor.visualiserParameters.visualiserFullScreen->getBoolValue()) {
@ -86,7 +87,8 @@ bool SosciPluginEditor::isInterestedInFileDrag(const juce::StringArray& files) {
file.hasFileExtension("mp3") ||
file.hasFileExtension("aiff") ||
file.hasFileExtension("flac") ||
file.hasFileExtension("ogg");
file.hasFileExtension("ogg") ||
file.hasFileExtension("sosci");
}
void SosciPluginEditor::filesDropped(const juce::StringArray& files, int x, int y) {
@ -94,7 +96,12 @@ void SosciPluginEditor::filesDropped(const juce::StringArray& files, int x, int
return;
}
juce::File file(files[0]);
audioProcessor.loadAudioFile(file);
if (file.hasFileExtension("sosci")) {
openProject(file);
} else {
audioProcessor.loadAudioFile(file);
}
}
void SosciPluginEditor::visualiserFullScreenChanged() {

View file

@ -122,6 +122,8 @@ void SosciAudioProcessor::getStateInformation(juce::MemoryBlock& destData) {
}
recordingParameters.save(xml.get());
saveProperties(*xml);
copyXmlToBinary(*xml, destData);
}
@ -182,6 +184,8 @@ void SosciAudioProcessor::setStateInformation(const void* data, int sizeInBytes)
}
recordingParameters.load(xml.get());
loadProperties(*xml);
}
}

View file

@ -17,14 +17,7 @@ TxtComponent::TxtComponent(OscirenderAudioProcessor& p, OscirenderAudioProcessor
auto updateFont = [this]() {
juce::SpinLock::ScopedLockType lock1(audioProcessor.parsersLock);
juce::SpinLock::ScopedLockType lock2(audioProcessor.effectsLock);
{
juce::SpinLock::ScopedLockType lock3(audioProcessor.fontLock);
audioProcessor.font.setTypefaceName(installedFonts[font.getSelectedItemIndex()]);
audioProcessor.font.setBold(bold.getToggleState());
audioProcessor.font.setItalic(italic.getToggleState());
}
audioProcessor.openFile(audioProcessor.currentFile);
audioProcessor.font = juce::Font(installedFonts[font.getSelectedItemIndex()], 1.0, (bold.getToggleState() ? juce::Font::bold : 0) | (italic.getToggleState() ? juce::Font::italic : 0));
};
font.onChange = updateFont;
@ -41,7 +34,6 @@ void TxtComponent::resized() {
}
void TxtComponent::update() {
juce::SpinLock::ScopedLockType lock(audioProcessor.fontLock);
juce::String defaultFont = audioProcessor.font.getTypefaceName();
int index = installedFonts.indexOf(defaultFont);
if (index == -1) {

View file

@ -69,7 +69,10 @@ void Effect::animateValues(double volume) {
actualValues[i] = ((float)rand() / RAND_MAX) * (maxValue - minValue) + minValue;
break;
default:
double weight = parameter->smoothValueChange ? 0.0005 : 1.0;
double weight = 1.0;
if (parameter->smoothValueChange < 1.0 && parameter->smoothValueChange > SMOOTHING_SPEED_MIN) {
weight = parameter->smoothValueChange.load() * 192000 / sampleRate;
}
double newValue;
if (parameter->sidechain != nullptr && parameter->sidechain->getBoolValue()) {
newValue = volume * (maxValue - minValue) + minValue;

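The smoothing weight is now scaled by 192000 / sampleRate, so a parameter takes the same wall-clock time to glide to its target at any sample rate: 0.0003 per sample at 192 kHz becomes 0.0012 per sample at 48 kHz. A rough sketch of the weight calculation, assuming the surrounding code then applies it as a one-pole smoother:

#define SMOOTHING_SPEED_CONSTANT 0.0003
#define SMOOTHING_SPEED_MIN 0.0001

double smoothingWeight(double smoothValueChange, double sampleRate) {
    double weight = 1.0; // 1.0 means no smoothing: jump straight to the target
    if (smoothValueChange < 1.0 && smoothValueChange > SMOOTHING_SPEED_MIN) {
        weight = smoothValueChange * 192000.0 / sampleRate; // e.g. 0.0003 * 4 = 0.0012 at 48 kHz
    }
    return weight;
}

// assumed one-pole update, per sample:
// actualValue = weight * targetValue + (1.0 - weight) * actualValue;
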
View file

@ -3,13 +3,18 @@
#include <JuceHeader.h>
#include "BooleanParameter.h"
#define SMOOTHING_SPEED_CONSTANT 0.0003
#define SMOOTHING_SPEED_MIN 0.0001
class FloatParameter : public juce::AudioProcessorParameterWithID {
public:
std::atomic<float> min = 0.0;
std::atomic<float> max = 0.0;
std::atomic<float> step = 0.0;
std::atomic<float> defaultValue = 0.0;
FloatParameter(juce::String name, juce::String id, int versionHint, float value, float min, float max, float step = 0.69, juce::String label = "") : juce::AudioProcessorParameterWithID(juce::ParameterID(id, versionHint), name), step(step), value(value), label(label) {
FloatParameter(juce::String name, juce::String id, int versionHint, float value, float min, float max, float step = 0.69, juce::String label = "") : juce::AudioProcessorParameterWithID(juce::ParameterID(id, versionHint), name), step(step), value(value), label(label), defaultValue(value) {
// need to initialise here because of naming conflicts on Windows
this->min = min;
this->max = max;
@ -23,7 +28,8 @@ public:
return label;
}
// returns value in range [0, 1]
// returns value in
// [0, 1]
float getNormalisedValue(float value) const {
// clip value to valid range
auto min = this->min.load();
@ -61,7 +67,7 @@ public:
}
float getDefaultValue() const override {
return 0.0f;
return getNormalisedValue(defaultValue.load());
}
int getNumSteps() const override {
@ -135,8 +141,10 @@ class IntParameter : public juce::AudioProcessorParameterWithID {
public:
std::atomic<int> min = 0;
std::atomic<int> max = 10;
std::atomic<int> defaultValue = 0;
IntParameter(juce::String name, juce::String id, int versionHint, int value, int min, int max) : AudioProcessorParameterWithID(juce::ParameterID(id, versionHint), name), value(value) {
IntParameter(juce::String name, juce::String id, int versionHint, int value, int min, int max) : AudioProcessorParameterWithID(juce::ParameterID(id, versionHint), name), value(value), defaultValue(value) {
// need to initialise here because of naming conflicts on Windows
this->min = min;
this->max = max;
@ -188,7 +196,7 @@ public:
}
float getDefaultValue() const override {
return 0;
return getNormalisedValue(defaultValue.load());
}
int getNumSteps() const override {
@ -326,13 +334,11 @@ public:
class EffectParameter : public FloatParameter {
public:
std::atomic<bool> smoothValueChange = true;
std::atomic<double> smoothValueChange = SMOOTHING_SPEED_CONSTANT;
LfoTypeParameter* lfo = new LfoTypeParameter(name + " LFO", paramID + "Lfo", getVersionHint(), 1);
FloatParameter* lfoRate = new FloatParameter(name + " LFO Rate", paramID + "LfoRate", getVersionHint(), 1.0f, 0.0f, 10000.0f, 0.001f, "Hz");
BooleanParameter* sidechain = new BooleanParameter(name + " Sidechain Enabled", paramID + "Sidechain", getVersionHint(), false, "Toggles " + name + " Sidechain.");
std::atomic<float> phase = 0.0f;
// this is what the value will get reset to on double-click.
std::atomic<float> defaultValue;
juce::String description;
std::vector<juce::AudioProcessorParameter*> getParameters() {
@ -401,5 +407,5 @@ public:
}
}
EffectParameter(juce::String name, juce::String description, juce::String id, int versionHint, float value, float min, float max, float step = 0.0001, bool smoothValueChange = true) : FloatParameter(name, id, versionHint, value, min, max, step), smoothValueChange(smoothValueChange), description(description), defaultValue(value) {}
EffectParameter(juce::String name, juce::String description, juce::String id, int versionHint, float value, float min, float max, float step = 0.0001, double smoothValueChange = SMOOTHING_SPEED_CONSTANT) : FloatParameter(name, id, versionHint, value, min, max, step), smoothValueChange(smoothValueChange), description(description) {}
};

View file

@ -1,59 +0,0 @@
#include "PitchDetector.h"
#include "../PluginProcessor.h"
PitchDetector::PitchDetector(OscirenderAudioProcessor& audioProcessor) : AudioBackgroundThread("PitchDetector", audioProcessor.threadManager), audioProcessor(audioProcessor) {}
void PitchDetector::runTask(const std::vector<OsciPoint>& points) {
// buffer is for 2 channels, so we need to only use one
for (int i = 0; i < fftSize; i++) {
fftData[i] = points[i].x;
}
forwardFFT.performFrequencyOnlyForwardTransform(fftData.data());
// get frequency of the peak
int maxIndex = 0;
for (int i = 0; i < fftSize / 2; ++i) {
if (frequencyFromIndex(i) < 20 || frequencyFromIndex(i) > 20000) {
continue;
}
auto current = fftData[i];
if (current > fftData[maxIndex]) {
maxIndex = i;
}
}
frequency = frequencyFromIndex(maxIndex);
triggerAsyncUpdate();
}
int PitchDetector::prepareTask(double sampleRate, int samplesPerBlock) {
this->sampleRate = sampleRate;
return fftSize;
}
void PitchDetector::stopTask() {}
void PitchDetector::handleAsyncUpdate() {
juce::SpinLock::ScopedLockType scope(lock);
for (auto& callback : callbacks) {
callback(frequency);
}
}
int PitchDetector::addCallback(std::function<void(float)> callback) {
juce::SpinLock::ScopedLockType scope(lock);
callbacks.push_back(callback);
return callbacks.size() - 1;
}
void PitchDetector::removeCallback(int index) {
juce::SpinLock::ScopedLockType scope(lock);
callbacks.erase(callbacks.begin() + index);
}
float PitchDetector::frequencyFromIndex(int index) {
auto binWidth = sampleRate / fftSize;
return index * binWidth;
}

View file

@ -1,33 +0,0 @@
#pragma once
#include <JuceHeader.h>
#include "../concurrency/AudioBackgroundThread.h"
class OscirenderAudioProcessor;
class PitchDetector : public AudioBackgroundThread, public juce::AsyncUpdater {
public:
PitchDetector(OscirenderAudioProcessor& audioProcessor);
int prepareTask(double sampleRate, int samplesPerBlock) override;
void runTask(const std::vector<OsciPoint>& points) override;
void stopTask() override;
void handleAsyncUpdate() override;
int addCallback(std::function<void(float)> callback);
void removeCallback(int index);
std::atomic<float> frequency = 0.0f;
private:
static constexpr int fftOrder = 15;
static constexpr int fftSize = 1 << fftOrder;
juce::dsp::FFT forwardFFT{fftOrder};
std::array<float, fftSize * 2> fftData;
OscirenderAudioProcessor& audioProcessor;
std::vector<std::function<void(float)>> callbacks;
juce::SpinLock lock;
float sampleRate = 192000.0f;
float frequencyFromIndex(int index);
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(PitchDetector)
};

View file

@ -2,8 +2,8 @@
#include "../PluginProcessor.h"
ShapeVoice::ShapeVoice(OscirenderAudioProcessor& p) : audioProcessor(p) {
actualTraceMin = audioProcessor.traceMin->getValue();
actualTraceMax = audioProcessor.traceMax->getValue();
actualTraceStart = audioProcessor.trace->getValue(0);
actualTraceLength = audioProcessor.trace->getValue(1);
}
bool ShapeVoice::canPlaySound(juce::SynthesiserSound* sound) {
@ -43,6 +43,7 @@ void ShapeVoice::startNote(int midiNoteNumber, float velocity, juce::Synthesiser
// TODO this is the slowest part of the program - any way to improve this would help!
void ShapeVoice::incrementShapeDrawing() {
if (frame.size() <= 0) return;
double length = currentShape < frame.size() ? frame[currentShape]->len : 0.0;
frameDrawn += lengthIncrement;
shapeDrawn += lengthIncrement;
@ -55,7 +56,6 @@ void ShapeVoice::incrementShapeDrawing() {
currentShape++;
if (currentShape >= frame.size()) {
currentShape = 0;
break;
}
// POTENTIAL TODO: Think of a way to make this more efficient when iterating
// this loop many times
@ -83,17 +83,16 @@ void ShapeVoice::renderNextBlock(juce::AudioSampleBuffer& outputBuffer, int star
if (audioProcessor.midiEnabled->getBoolValue()) {
actualFrequency = frequency * pitchWheelAdjustment;
} else {
actualFrequency = audioProcessor.frequency;
actualFrequency = audioProcessor.frequency.load();
}
for (auto sample = startSample; sample < startSample + numSamples; ++sample) {
bool traceMinEnabled = audioProcessor.traceMin->enabled->getBoolValue();
bool traceMaxEnabled = audioProcessor.traceMax->enabled->getBoolValue();
bool traceEnabled = audioProcessor.trace->enabled->getBoolValue();
// update length increment
double traceMax = traceMaxEnabled ? actualTraceMax : 1.0;
double traceMin = traceMinEnabled ? actualTraceMin : 0.0;
double proportionalLength = (traceMax - traceMin) * frameLength;
double traceLen = traceEnabled ? actualTraceLength : 1.0;
double traceMin = traceEnabled ? actualTraceStart : 0.0;
double proportionalLength = std::max(0.001, traceLen) * frameLength;
lengthIncrement = juce::jmax(proportionalLength / (audioProcessor.currentSampleRate / actualFrequency), MIN_LENGTH_INCREMENT);
OsciPoint channels;
@ -148,33 +147,47 @@ void ShapeVoice::renderNextBlock(juce::AudioSampleBuffer& outputBuffer, int star
outputBuffer.addSample(0, sample, x * gain);
}
double traceMinValue = audioProcessor.traceMin->getActualValue();
double traceMaxValue = audioProcessor.traceMax->getActualValue();
traceMaxValue = traceMaxEnabled ? traceMaxValue : 1.0;
traceMinValue = traceMinEnabled ? traceMinValue : 0.0;
actualTraceMax = juce::jmax(actualTraceMin, juce::jmin(traceMaxValue, 1.0));
actualTraceMin = juce::jmax(MIN_TRACE, juce::jmin(traceMinValue, actualTraceMax - MIN_TRACE));
double traceStartValue = audioProcessor.trace->getActualValue(0);
double traceLengthValue = audioProcessor.trace->getActualValue(1);
traceLengthValue = traceEnabled ? traceLengthValue : 1.0;
traceStartValue = traceEnabled ? traceStartValue : 0.0;
actualTraceLength = std::max(0.01, traceLengthValue);
actualTraceStart = traceStartValue;
if (actualTraceStart < 0) {
actualTraceStart = 0;
}
if (!renderingSample) {
incrementShapeDrawing();
}
double drawnFrameLength = traceMaxEnabled ? actualTraceMax * frameLength : frameLength;
double drawnFrameLength = frameLength;
bool willLoopOver = false;
if (traceEnabled) {
drawnFrameLength *= actualTraceLength + actualTraceStart;
}
if (!renderingSample && frameDrawn >= drawnFrameLength) {
double currentShapeLength = 0;
if (currentShape < frame.size()) {
currentShapeLength = frame[currentShape]->len;
}
if (sound.load() != nullptr && currentlyPlaying) {
frameLength = sound.load()->updateFrame(frame);
}
frameDrawn -= drawnFrameLength;
if (traceEnabled) {
shapeDrawn = juce::jlimit(0.0, currentShapeLength, frameDrawn);
}
currentShape = 0;
// TODO: updateFrame already iterates over all the shapes,
// so we can improve performance by calculating frameDrawn
// and shapeDrawn directly. frameDrawn is simply actualTraceMin * frameLength
// and shapeDrawn directly. frameDrawn is simply actualTraceStart * frameLength
// but shapeDrawn is the amount of the current shape that has been drawn so
// we need to iterate over all the shapes to calculate it.
if (traceMinEnabled) {
while (frameDrawn < actualTraceMin * frameLength) {
if (traceEnabled) {
while (frameDrawn < actualTraceStart * frameLength) {
incrementShapeDrawing();
}
}

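The old traceMin/traceMax window is replaced by traceStart/traceLength, so the drawn portion of each frame now runs from start to start + length. A worked example of the code above, with illustrative values:

// With traceStart = 0.25 and traceLength = 0.5:
double frameLength        = 1000.0;  // total path length of the current frame
double actualTraceStart   = 0.25;    // begin a quarter of the way into the frame
double actualTraceLength  = 0.5;     // draw half of the frame per cycle

// drawnFrameLength *= actualTraceLength + actualTraceStart  (from renderNextBlock above)
double drawnFrameLength = frameLength * (actualTraceLength + actualTraceStart); // 750.0

// Drawing therefore skips ahead to 0.25 * frameLength = 250 and stops at 750,
// tracing out the middle half of the image on every cycle.
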
View file

@ -27,8 +27,8 @@ private:
OscirenderAudioProcessor& audioProcessor;
std::vector<std::unique_ptr<Shape>> frame;
std::atomic<ShapeSound*> sound = nullptr;
double actualTraceMin;
double actualTraceMax;
double actualTraceStart;
double actualTraceLength;
double frameLength = 0.0;
int currentShape = 0;

View file

@ -1,14 +1,14 @@
#include "WobbleEffect.h"
#include "../PluginProcessor.h"
WobbleEffect::WobbleEffect(PitchDetector& pitchDetector) : pitchDetector(pitchDetector) {}
WobbleEffect::WobbleEffect(OscirenderAudioProcessor& p) : audioProcessor(p) {}
WobbleEffect::~WobbleEffect() {}
OsciPoint WobbleEffect::apply(int index, OsciPoint input, const std::vector<std::atomic<double>>& values, double sampleRate) {
// TODO: this doesn't consider sample rate
smoothedFrequency = smoothedFrequency * 0.99995 + pitchDetector.frequency * 0.00005;
double theta = nextPhase(smoothedFrequency, sampleRate);
double delta = 0.5 * values[0] * std::sin(theta);
double wobblePhase = values[1] * std::numbers::pi;
double theta = nextPhase(audioProcessor.frequency, sampleRate) + wobblePhase;
double delta = 0.5 * values[0] * std::sin(theta);
return input + delta;
}

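The wobble now tracks the processor's own frequency instead of a pitch detector, and values[1] maps the new Wobble Phase parameter from [-1, 1] to [-π, π]. A minimal sketch of the phase accumulation this relies on, assuming nextPhase wraps an internal accumulator at 2π (the PR does not show its implementation):

#include <cmath>
#include <numbers>

struct WobbleSketch {
    double phase = 0.0;

    // assumed behaviour of EffectApplication::nextPhase
    double nextPhase(double frequency, double sampleRate) {
        phase += 2.0 * std::numbers::pi * frequency / sampleRate;
        if (phase > 2.0 * std::numbers::pi) phase -= 2.0 * std::numbers::pi;
        return phase;
    }

    // mirrors WobbleEffect::apply above: amount in [0, 1], phaseParam in [-1, 1]
    double wobbleOffset(double amount, double phaseParam, double frequency, double sampleRate) {
        double wobblePhase = phaseParam * std::numbers::pi;
        double theta = nextPhase(frequency, sampleRate) + wobblePhase;
        return 0.5 * amount * std::sin(theta);
    }
};
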
View file

@ -1,16 +1,16 @@
#pragma once
#include "EffectApplication.h"
#include "../shape/OsciPoint.h"
#include "PitchDetector.h"
class OscirenderAudioProcessor;
class WobbleEffect : public EffectApplication {
public:
WobbleEffect(PitchDetector& pitchDetector);
WobbleEffect(OscirenderAudioProcessor& p);
~WobbleEffect();
OsciPoint apply(int index, OsciPoint input, const std::vector<std::atomic<double>>& values, double sampleRate) override;
private:
PitchDetector& pitchDetector;
OscirenderAudioProcessor& audioProcessor;
double smoothedFrequency = 0;
};

View file

@ -1,6 +1,6 @@
#include "AboutComponent.h"
AboutComponent::AboutComponent(const void *image, size_t imageSize, juce::String sectionText) {
AboutComponent::AboutComponent(const void *image, size_t imageSize, juce::String sectionText, int port) {
addAndMakeVisible(logoComponent);
addAndMakeVisible(text);
@ -16,11 +16,26 @@ AboutComponent::AboutComponent(const void *image, size_t imageSize, juce::String
text.setColour(juce::TextEditor::outlineColourId, juce::Colours::transparentBlack);
text.setJustification(juce::Justification(juce::Justification::centred));
text.setText(sectionText);
if (port > 0) {
addAndMakeVisible(portText);
// TODO: Integrate this better
portText.setMultiLine(false);
portText.setReadOnly(true);
portText.setInterceptsMouseClicks(false, false);
portText.setOpaque(false);
portText.setColour(juce::TextEditor::backgroundColourId, juce::Colours::transparentBlack);
portText.setColour(juce::TextEditor::outlineColourId, juce::Colours::transparentBlack);
portText.setJustification(juce::Justification(juce::Justification::centred));
portText.setText(juce::String("Blender Port: ") + juce::String(port));
}
}
void AboutComponent::resized() {
auto area = getLocalBounds();
area.removeFromTop(10);
logoComponent.setBounds(area.removeFromTop(110));
portText.setBounds(area.removeFromBottom(20).removeFromTop(15));
text.setBounds(area);
}

View file

@ -4,15 +4,16 @@
class AboutComponent : public juce::Component {
public:
AboutComponent(const void *image, size_t imageSize, juce::String sectionText);
AboutComponent(const void *image, size_t imageSize, juce::String sectionText, int port = -1);
void resized() override;
private:
juce::Image logo;
juce::ImageComponent logoComponent;
juce::TextEditor text;
juce::TextEditor portText;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (AboutComponent)
};

View file

@ -17,7 +17,11 @@ EffectComponent::EffectComponent(Effect& effect, int index) : effect(effect), in
slider.setSliderStyle(juce::Slider::LinearHorizontal);
slider.setTextBoxStyle(juce::Slider::TextBoxRight, false, TEXT_BOX_WIDTH, slider.getTextBoxHeight());
slider.setNumDecimalPlacesToDisplay(4);
if (effect.parameters[index]->step == 1.0) {
slider.setNumDecimalPlacesToDisplay(0);
} else {
slider.setNumDecimalPlacesToDisplay(4);
}
lfoSlider.setSliderStyle(juce::Slider::LinearHorizontal);
lfoSlider.setTextBoxStyle(juce::Slider::TextBoxRight, false, TEXT_BOX_WIDTH, lfoSlider.getTextBoxHeight());
@ -50,17 +54,32 @@ EffectComponent::EffectComponent(Effect& effect, int index) : effect(effect), in
EffectComponent::EffectComponent(Effect& effect) : EffectComponent(effect, 0) {}
void EffectComponent::setSliderValueIfChanged(FloatParameter* parameter, juce::Slider& slider) {
juce::String newSliderValue = juce::String(parameter->getValueUnnormalised(), 3);
juce::String oldSliderValue = juce::String((float) slider.getValue(), 3);
// only set the slider value if the parameter value is different so that we prefer the more
// precise slider value.
if (newSliderValue != oldSliderValue) {
slider.setValue(parameter->getValueUnnormalised(), juce::dontSendNotification);
}
}
void EffectComponent::setupComponent() {
EffectParameter* parameter = effect.parameters[index];
setEnabled(effect.enabled == nullptr || effect.enabled->getBoolValue());
if (updateToggleState != nullptr) {
updateToggleState();
}
setTooltip(parameter->description);
label.setText(parameter->name, juce::dontSendNotification);
label.setInterceptsMouseClicks(false, false);
slider.setRange(parameter->min, parameter->max, parameter->step);
slider.setValue(parameter->getValueUnnormalised(), juce::dontSendNotification);
setSliderValueIfChanged(parameter, slider);
slider.setDoubleClickReturnValue(true, parameter->defaultValue);
lfoEnabled = parameter->lfo != nullptr && parameter->lfoRate != nullptr;
@ -82,6 +101,7 @@ void EffectComponent::setupComponent() {
};
lfoSlider.setRange(parameter->lfoRate->min, parameter->lfoRate->max, parameter->lfoRate->step);
setSliderValueIfChanged(parameter->lfoRate, lfoSlider);
lfoSlider.setValue(parameter->lfoRate->getValueUnnormalised(), juce::dontSendNotification);
lfoSlider.setSkewFactorFromMidPoint(parameter->lfoRate->min + 0.1 * (parameter->lfoRate->max - parameter->lfoRate->min));
lfoSlider.setDoubleClickReturnValue(true, 1.0);

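The new setSliderValueIfChanged compares both values rounded to three decimal places so a UI refresh does not clobber a more precise value the user just typed or dragged into the slider. A small sketch of that comparison, using the same juce::String rounding as the code above:

#include <JuceHeader.h>

// Values that agree to 3 decimal places are treated as equal, so the slider keeps its precision.
bool sliderNeedsUpdate(float parameterValue, double sliderValue) {
    juce::String newSliderValue = juce::String(parameterValue, 3);
    juce::String oldSliderValue = juce::String((float) sliderValue, 3);
    return newSliderValue != oldSliderValue; // e.g. parameter 0.123 vs slider 0.12345 -> both "0.123" -> false
}
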
View file

@ -78,14 +78,17 @@ public:
LabelledTextBox min{"Min"};
LabelledTextBox max{"Max"};
};
std::function<void()> updateToggleState;
private:
const int TEXT_BOX_WIDTH = 70;
const int SMALL_TEXT_BOX_WIDTH = 50;
const int TEXT_WIDTH = 120;
const int SMALL_TEXT_WIDTH = 60;
const int SMALL_TEXT_WIDTH = 90;
void setSliderValueIfChanged(FloatParameter* parameter, juce::Slider& slider);
void setupComponent();
bool lfoEnabled = true;
bool sidechainEnabled = true;

View file

@ -28,6 +28,13 @@ effect(effect), audioProcessor(data.audioProcessor), editor(data.editor) {
}
repaint();
};
effectComponent->updateToggleState = [this, i, weakEffectComponent] {
if (auto effectComponent = weakEffectComponent.lock()) {
selected.setToggleState(effectComponent->effect.enabled == nullptr || effectComponent->effect.enabled->getValue(), juce::dontSendNotification);
list.setEnabled(selected.getToggleState());
}
repaint();
};
auto component = createComponent(parameters[i]);
if (component != nullptr) {

View file

@ -31,8 +31,8 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
"DJ_Level_3, for contributing several features to osci-render\n"
"BUS ERROR Collective, for providing the source code for the Hilligoss encoder\n"
"Jean Perbet (@jeanprbt) for the osci-render macOS icon\n"
"All the community, for suggesting features and reporting issues!"
);
"All the community, for suggesting features and reporting issues!",
std::any_cast<int>(audioProcessor.getProperty("objectServerPort")));
options.content.setOwned(about);
options.content->setSize(500, 270);
options.dialogTitle = "About";
@ -48,6 +48,9 @@ OsciMainMenuBarModel::OsciMainMenuBarModel(OscirenderAudioProcessor& p, Oscirend
juce::DialogWindow* dw = options.launchAsync();
});
addMenuItem(1, "Randomize Blender Port", [this] {
audioProcessor.setObjectServerPort(juce::Random::getSystemRandom().nextInt(juce::Range<int>(51600, 51700)));
});
#if !SOSCI_FEATURES
addMenuItem(1, "Purchase osci-render premium!", [this] {

View file

@ -15,18 +15,15 @@ AudioBackgroundThread::~AudioBackgroundThread() {
}
void AudioBackgroundThread::prepare(double sampleRate, int samplesPerBlock) {
if (isThreadRunning()) {
stop();
}
bool threadShouldBeRunning = shouldBeRunning;
setShouldBeRunning(false);
isPrepared = false;
int requestedDataSize = prepareTask(sampleRate, samplesPerBlock);
consumer = std::make_unique<BufferConsumer>(requestedDataSize);
isPrepared = true;
if (shouldBeRunning) {
start();
}
setShouldBeRunning(threadShouldBeRunning);
}
void AudioBackgroundThread::setShouldBeRunning(bool shouldBeRunning, std::function<void()> stopCallback) {

View file

@ -23,7 +23,7 @@ private:
AudioBackgroundThreadManager& manager;
std::unique_ptr<BufferConsumer> consumer = nullptr;
bool shouldBeRunning = false;
std::atomic<bool> shouldBeRunning = false;
std::atomic<bool> isPrepared = false;
std::atomic<bool> deleting = false;

View file

@ -2,16 +2,248 @@
LineArtParser::LineArtParser(juce::String json) {
parseJsonFrames(json);
frames.clear();
numFrames = 0;
frames = parseJsonFrames(json);
numFrames = frames.size();
}
LineArtParser::LineArtParser(char* data, int dataLength) {
frames.clear();
numFrames = 0;
frames = parseBinaryFrames(data, dataLength);
numFrames = frames.size();
if (numFrames == 0) frames = epicFail();
}
LineArtParser::~LineArtParser() {
frames.clear();
}
void LineArtParser::parseJsonFrames(juce::String jsonStr) {
frames.clear();
numFrames = 0;
double LineArtParser::makeDouble(int64_t data) {
return *(double*)&data;
}
void LineArtParser::makeChars(int64_t data, char* chars) {
for (int i = 0; i < 8; i++) {
chars[i] = (data >> (i * 8)) & 0xFF;
}
}
std::vector<std::vector<Line>> LineArtParser::epicFail() {
return parseJsonFrames(juce::String(BinaryData::fallback_gpla, BinaryData::fallback_gplaSize));
}
std::vector<std::vector<Line>> LineArtParser::parseBinaryFrames(char* bytes, int bytesLength) {
int64_t* data = (int64_t*)bytes;
int dataLength = bytesLength / 8;
std::vector<std::vector<Line>> tFrames;
if (dataLength < 4) return epicFail();
int index = 0;
int64_t rawData = data[index];
index++;
char tag[9] = " ";
makeChars(rawData, tag);
if (strcmp(tag, "GPLA ") != 0) return epicFail();
// Major
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
// Minor
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
// Patch
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
if (strcmp(tag, "FILE ") != 0) return epicFail();
int reportedNumFrames = 0;
int frameRate = 0;
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
while (strcmp(tag, "DONE ") != 0) {
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
if (strcmp(tag, "fCount ") == 0) {
reportedNumFrames = rawData;
} else if (strcmp(tag, "fRate ") == 0) {
frameRate = rawData;
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
while (strcmp(tag, "END GPLA") != 0) {
if (strcmp(tag, "FRAME ") == 0) {
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
double focalLength;
std::vector<std::vector<double>> allMatrices;
std::vector<std::vector<std::vector<OsciPoint>>> allVertices;
while (strcmp(tag, "OBJECTS ") != 0) {
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
if (strcmp(tag, "focalLen") == 0) {
focalLength = makeDouble(rawData);
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
while (strcmp(tag, "DONE ") != 0) {
if (strcmp(tag, "OBJECT ") == 0) {
std::vector<std::vector<OsciPoint>> vertices;
std::vector<double> matrix;
if (index >= dataLength) return epicFail();
int strokeNum = 0;
rawData = data[index];
index++;
makeChars(rawData, tag);
while (strcmp(tag, "DONE ") != 0) {
if (strcmp(tag, "MATRIX ") == 0) {
matrix.clear();
for (int i = 0; i < 16; i++) {
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
matrix.push_back(makeDouble(rawData));
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
} else if (strcmp(tag, "STROKES ") == 0) {
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
while (strcmp(tag, "DONE ") != 0) {
if (strcmp(tag, "STROKE ") == 0) {
vertices.push_back(std::vector<OsciPoint>());
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
int vertexCount = 0;
while (strcmp(tag, "DONE ") != 0) {
if (strcmp(tag, "vertexCt") == 0) {
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
vertexCount = rawData;
}
else if (strcmp(tag, "VERTICES") == 0) {
double x = 0;
double y = 0;
double z = 0;
for (int i = 0; i < vertexCount; i++) {
if (index + 2 >= dataLength) return epicFail();
rawData = data[index];
index++;
x = makeDouble(rawData);
rawData = data[index];
index++;
y = makeDouble(rawData);
rawData = data[index];
index++;
z = makeDouble(rawData);
vertices[strokeNum].push_back(OsciPoint(x, y, z));
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
while (strcmp(tag, "DONE ") != 0) {
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
}
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
}
strokeNum++;
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
}
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
}
allVertices.push_back(reorderVertices(vertices));
allMatrices.push_back(matrix);
vertices.clear();
matrix.clear();
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
}
std::vector<Line> frame = assembleFrame(allVertices, allMatrices, focalLength);
tFrames.push_back(frame);
}
if (index >= dataLength) return epicFail();
rawData = data[index];
index++;
makeChars(rawData, tag);
}
return tFrames;
}
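For readers following the tag comparisons above: the GPLA byte stream is a flat sequence of 8-byte words, where tags are ASCII names space-padded to 8 bytes and values are 8-byte little-endian integers or IEEE-754 doubles, matching what the Blender exporter later in this diff writes. Below is a minimal Python sketch of reading just the header; the names read_gpla_header and make_double are illustrative and not part of the plugin.
import struct
def tag(name: str) -> bytes:
    # every record is 8 bytes; tags are ASCII names space-padded to 8
    return name.encode("ascii").ljust(8)
def read_gpla_header(data: bytes):
    words = [data[i:i + 8] for i in range(0, len(data) // 8 * 8, 8)]
    assert words[0] == tag("GPLA"), "not a binary GPLA stream"
    major, minor, patch = (int.from_bytes(w, "little") for w in words[1:4])
    assert words[4] == tag("FILE")
    info, i = {}, 5
    while words[i] != tag("DONE"):
        name, value = words[i], int.from_bytes(words[i + 1], "little")
        if name == tag("fCount"):
            info["frameCount"] = value
        elif name == tag("fRate"):
            info["frameRate"] = value
        i += 2
    return (major, minor, patch), info
def make_double(word: bytes) -> float:
    # doubles (focalLen, matrix entries, vertices) are the same 8 bytes
    # reinterpreted as IEEE-754, presumably what makeDouble does in the C++
    return struct.unpack("<d", word)[0]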
std::vector<std::vector<Line>> LineArtParser::parseJsonFrames(juce::String jsonStr) {
std::vector<std::vector<Line>> frames;
// format of json is:
// {
@ -44,19 +276,13 @@ void LineArtParser::parseJsonFrames(juce::String jsonStr) {
auto json = juce::JSON::parse(jsonStr);
// If json parse failed, stop and parse default fallback instead
if (json.isVoid()) {
parseJsonFrames(juce::String(BinaryData::fallback_gpla, BinaryData::fallback_gplaSize));
return;
}
if (json.isVoid()) return epicFail();
auto jsonFrames = *json.getProperty("frames", juce::Array<juce::var>()).getArray();
numFrames = jsonFrames.size();
int numFrames = jsonFrames.size();
// If json does not contain any frames, stop and parse no-frames fallback instead
if (numFrames == 0) {
parseJsonFrames(juce::String(BinaryData::noframes_gpla, BinaryData::noframes_gplaSize));
return;
}
if (numFrames == 0) return parseJsonFrames(juce::String(BinaryData::noframes_gpla, BinaryData::noframes_gplaSize));
bool hasValidFrames = false;
@ -76,10 +302,9 @@ void LineArtParser::parseJsonFrames(juce::String jsonStr) {
}
// If no frames were valid, stop and parse invalid fallback instead
if (!hasValidFrames) {
parseJsonFrames(juce::String(BinaryData::invalid_gpla, BinaryData::invalid_gplaSize));
return;
}
if (!hasValidFrames) return parseJsonFrames(juce::String(BinaryData::invalid_gpla, BinaryData::invalid_gplaSize));
return frames;
}
void LineArtParser::setFrame(int fNum) {
@ -97,6 +322,50 @@ std::vector<std::unique_ptr<Shape>> LineArtParser::draw() {
return tempShapes;
}
std::vector<std::vector<OsciPoint>> LineArtParser::reorderVertices(std::vector<std::vector<OsciPoint>> vertices) {
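// Greedy nearest-neighbour ordering: starting from the first stroke, repeatedly pick the
// unvisited stroke whose first point is closest to the previous stroke's last point, so the
// drawn path travels as little as possible between strokes.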
std::vector<std::vector<OsciPoint>> reorderedVertices;
if (vertices.size() > 0) {
std::vector<bool> visited = std::vector<bool>(vertices.size(), false);
std::vector<int> order = std::vector<int>(vertices.size(), 0);
visited[0] = true;
auto endPoint = vertices[0].back();
for (int i = 1; i < vertices.size(); i++) {
int minPath = 0;
double minDistance = 9999999;
for (int j = 0; j < vertices.size(); j++) {
if (!visited[j]) {
auto startPoint = vertices[j][0];
double diffX = endPoint.x - startPoint.x;
double diffY = endPoint.y - startPoint.y;
double diffZ = endPoint.z - startPoint.z;
double distance = std::sqrt(diffX * diffX + diffY * diffY + diffZ * diffZ);
if (distance < minDistance) {
minPath = j;
minDistance = distance;
}
}
}
visited[minPath] = true;
order[i] = minPath;
endPoint = vertices[minPath].back();
}
for (int i = 0; i < vertices.size(); i++) {
std::vector<OsciPoint> reorderedVertex;
int index = order[i];
for (int j = 0; j < vertices[index].size(); j++) {
reorderedVertex.push_back(vertices[index][j]);
}
reorderedVertices.push_back(reorderedVertex);
}
}
return reorderedVertices;
}
std::vector<Line> LineArtParser::generateFrame(juce::Array <juce::var> objects, double focalLength)
{
@ -124,55 +393,16 @@ std::vector<Line> LineArtParser::generateFrame(juce::Array <juce::var> objects,
allMatrices[i].push_back(value);
}
std::vector<std::vector<OsciPoint>> reorderedVertices;
if (vertices.size() > 0 && matrix.size() == 16) {
std::vector<bool> visited = std::vector<bool>(vertices.size(), false);
std::vector<int> order = std::vector<int>(vertices.size(), 0);
visited[0] = true;
auto endPoint = vertices[0].back();
for (int i = 1; i < vertices.size(); i++) {
int minPath = 0;
double minDistance = 9999999;
for (int j = 0; j < vertices.size(); j++) {
if (!visited[j]) {
auto startPoint = vertices[j][0];
double diffX = endPoint.x - startPoint.x;
double diffY = endPoint.y - startPoint.y;
double diffZ = endPoint.z - startPoint.z;
double distance = std::sqrt(diffX * diffX + diffY * diffY + diffZ * diffZ);
if (distance < minDistance) {
minPath = j;
minDistance = distance;
}
}
}
visited[minPath] = true;
order[i] = minPath;
endPoint = vertices[minPath].back();
}
for (int i = 0; i < vertices.size(); i++) {
std::vector<OsciPoint> reorderedVertex;
int index = order[i];
for (int j = 0; j < vertices[index].size(); j++) {
reorderedVertex.push_back(vertices[index][j]);
}
reorderedVertices.push_back(reorderedVertex);
}
}
allVertices.push_back(reorderedVertices);
allVertices.push_back(reorderVertices(vertices));
}
return assembleFrame(allVertices, allMatrices, focalLength);
}
std::vector<Line> LineArtParser::assembleFrame(std::vector<std::vector<std::vector<OsciPoint>>> allVertices, std::vector<std::vector<double>> allMatrices, double focalLength) {
// generate a frame from the vertices and matrix
std::vector<Line> frame;
for (int i = 0; i < objects.size(); i++) {
for (int i = 0; i < allVertices.size(); i++) {
for (int j = 0; j < allVertices[i].size(); j++) {
for (int k = 0; k < allVertices[i][j].size() - 1; k++) {
auto start = allVertices[i][j][k];

View file

@ -8,14 +8,22 @@
class LineArtParser {
public:
LineArtParser(juce::String json);
LineArtParser(char* data, int dataLength);
~LineArtParser();
void setFrame(int fNum);
std::vector<std::unique_ptr<Shape>> draw();
static std::vector<std::vector<Line>> parseJsonFrames(juce::String jsonStr);
static std::vector<std::vector<Line>> parseBinaryFrames(char* data, int dataLength);
static std::vector<Line> generateFrame(juce::Array < juce::var> objects, double focalLength);
private:
void parseJsonFrames(juce::String jsonStr);
static std::vector<std::vector<Line>> epicFail();
static double makeDouble(int64_t data);
static void makeChars(int64_t data, char* chars);
static std::vector<std::vector<OsciPoint>> reorderVertices(std::vector<std::vector<OsciPoint>> vertices);
static std::vector<Line> assembleFrame(std::vector<std::vector<std::vector<OsciPoint>>> allVertices, std::vector<std::vector<double>> allMatrices, double focalLength);
int frameNumber = 0;
std::vector<std::vector<Line>> frames;
int numFrames = 0;

View file

@ -102,6 +102,9 @@ void ImageParser::resetPosition() {
float ImageParser::getPixelValue(int x, int y, bool invert) {
int index = (height - y - 1) * width + x;
if (index < 0 || index >= frames[frameIndex].size()) {
return 0;
}
float pixel = frames[frameIndex][index] / (float) std::numeric_limits<uint8_t>::max();
// never traverse transparent pixels
if (invert && pixel > 0) {

View file

@ -10,8 +10,14 @@ ObjectServer::~ObjectServer() {
stopThread(1000);
}
void ObjectServer::reload() {
stopThread(1000);
startThread();
}
void ObjectServer::run() {
if (socket.createListener(51677, "127.0.0.1")) {
port = std::any_cast<int>(audioProcessor.getProperty("objectServerPort", 51677));
if (socket.createListener(port, "127.0.0.1")) {
// preallocating a large buffer to avoid allocations in the loop
std::unique_ptr<char[]> message{ new char[10 * 1024 * 1024] };
@ -25,6 +31,7 @@ void ObjectServer::run() {
while (!threadShouldExit() && connection->isConnected()) {
if (connection->waitUntilReady(true, 200) == 1) {
int i = 0;
std::vector<Line> frameContainer;
// read until we get a newline
while (!threadShouldExit()) {
@ -52,36 +59,55 @@ void ObjectServer::run() {
break;
}
// format of json is:
// {
// "objects": [
// {
// "name": "Line Art",
// "vertices": [
// [
// {
// "x": double value,
// "y": double value,
// "z": double value
// },
// ...
// ],
// ...
// ],
// "matrix": [
// 16 double values
// ]
// }
// ],
// "focalLength": double value
// }
if (strncmp(message.get(), "R1BMQSAg", 8) == 0) {
juce::MemoryOutputStream binStream;
juce::String messageString = message.get();
if (juce::Base64::convertFromBase64(binStream, messageString)) {
std::vector< std::vector<Line>> receivedFrames;
int bytesRead = binStream.getDataSize();
if (bytesRead < 8) return;
char* gplaData = (char*)binStream.getData();
receivedFrames = LineArtParser::parseBinaryFrames(gplaData, bytesRead);
if (receivedFrames.size() <= 0) continue;
frameContainer = receivedFrames[0];
}
else {
continue;
}
}
else {
auto json = juce::JSON::parse(message.get());
// format of json is:
// {
// "objects": [
// {
// "name": "Line Art",
// "vertices": [
// [
// {
// "x": double value,
// "y": double value,
// "z": double value
// },
// ...
// ],
// ...
// ],
// "matrix": [
// 16 double values
// ]
// }
// ],
// "focalLength": double value
// }
juce::Array<juce::var> objects = *json.getProperty("objects", juce::Array<juce::var>()).getArray();
double focalLength = json.getProperty("focalLength", 1);
auto json = juce::JSON::parse(message.get());
std::vector<Line> frameContainer = LineArtParser::generateFrame(objects, focalLength);
juce::Array<juce::var> objects = *json.getProperty("objects", juce::Array<juce::var>()).getArray();
double focalLength = json.getProperty("focalLength", 1);
frameContainer = LineArtParser::generateFrame(objects, focalLength);
}
std::vector<std::unique_ptr<Shape>> frame;
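The "R1BMQSAg" prefix check above is less magic than it looks: eight Base64 characters encode exactly six payload bytes, and the first six bytes of the GPLA magic ("GPLA" plus two of its padding spaces) always encode to that string, so the check cheaply distinguishes a Base64-wrapped binary GPLA message from the legacy JSON messages. A short Python check, purely illustrative:
import base64
# 8 Base64 characters cover 6 payload bytes, so any stream that starts with
# the 8-byte magic "GPLA    " encodes to something beginning with "R1BMQSAg".
assert base64.b64encode(b"GPLA    " + b"rest of stream")[:8] == b"R1BMQSAg"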

View file

@ -10,9 +10,11 @@ public:
~ObjectServer();
void run() override;
void reload();
private:
OscirenderAudioProcessor& audioProcessor;
int port = 51677;
juce::StreamingSocket socket;
};

View file

@ -26,11 +26,26 @@ void FileParser::parse(juce::String fileId, juce::String extension, std::unique_
} else if (extension == ".svg") {
svg = std::make_shared<SvgParser>(stream->readEntireStreamAsString());
} else if (extension == ".txt") {
text = std::make_shared<TextParser>(stream->readEntireStreamAsString(), font);
text = std::make_shared<TextParser>(audioProcessor, stream->readEntireStreamAsString(), font);
} else if (extension == ".lua") {
lua = std::make_shared<LuaParser>(fileId, stream->readEntireStreamAsString(), errorCallback, fallbackLuaScript);
} else if (extension == ".gpla") {
gpla = std::make_shared<LineArtParser>(stream->readEntireStreamAsString());
juce::MemoryBlock buffer{};
int bytesRead = stream->readIntoMemoryBlock(buffer);
if (bytesRead < 8) return;
char* gplaData = (char*)buffer.getData();
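// Binary .gpla files begin with the 8-byte, space-padded "GPLA" magic written by the
// Blender exporter; anything else is treated as the legacy JSON format and re-read as text.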
const char tag[] = "GPLA    ";
bool isBinary = true;
for (int i = 0; i < 8; i++) {
isBinary = isBinary && tag[i] == gplaData[i];
}
if (isBinary) {
gpla = std::make_shared<LineArtParser>(gplaData, bytesRead);
}
else {
stream->setPosition(0);
gpla = std::make_shared<LineArtParser>(stream->readEntireStreamAsString());
}
} else if (extension == ".gif" || extension == ".png" || extension == ".jpg" || extension == ".jpeg") {
juce::MemoryBlock buffer{};
int bytesRead = stream->readIntoMemoryBlock(buffer);

View file

@ -1,20 +1,32 @@
#include "TextParser.h"
#include "../svg/SvgParser.h"
#include "../PluginProcessor.h"
TextParser::TextParser(juce::String text, juce::Font font) {
juce::Path textPath;
juce::GlyphArrangement glyphs;
glyphs.addFittedText(font, text, -2, -2, 4, 4, juce::Justification::centred, 2);
glyphs.createPath(textPath);
SvgParser::pathToShapes(textPath, shapes);
TextParser::TextParser(OscirenderAudioProcessor &p, juce::String text, juce::Font font) : audioProcessor(p), text(text) {
parse(text, font);
}
TextParser::~TextParser() {
}
void TextParser::parse(juce::String text, juce::Font font) {
lastFont = font;
juce::Path textPath;
juce::GlyphArrangement glyphs;
glyphs.addFittedText(font, text, -2, -2, 4, 4, juce::Justification::centred, 2);
glyphs.createPath(textPath);
shapes = std::vector<std::unique_ptr<Shape>>();
SvgParser::pathToShapes(textPath, shapes);
}
std::vector<std::unique_ptr<Shape>> TextParser::draw() {
// reparse text if font changes
if (audioProcessor.font != lastFont) {
parse(text, audioProcessor.font);
}
// clone with deep copy
std::vector<std::unique_ptr<Shape>> tempShapes;

View file

@ -3,12 +3,18 @@
#include <JuceHeader.h>
#include "../shape/Shape.h"
class OscirenderAudioProcessor;
class TextParser {
public:
TextParser(juce::String text, juce::Font font);
TextParser(OscirenderAudioProcessor &p, juce::String text, juce::Font font);
~TextParser();
std::vector<std::unique_ptr<Shape>> draw();
private:
void parse(juce::String text, juce::Font font);
OscirenderAudioProcessor &audioProcessor;
std::vector<std::unique_ptr<Shape>> shapes;
juce::Font lastFont;
juce::String text;
};

View file

@ -0,0 +1,23 @@
std::string afterglowFragmentShader = R"(
uniform sampler2D uTexture0;
varying vec2 vTexCoord;
uniform float fadeAmount;
uniform float afterglowAmount;
// tanh is not available in GLSL ES 1.0, so we define it here.
float hypTan(float x) {
return (exp(x) - exp(-x)) / (exp(x) + exp(-x));
}
void main() {
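// fade becomes the alpha of a black overlay: bright texels (large line.r) are faded at
// close to the full fadeAmount, while dim trails bottom out at minFade * fadeAmount and
// linger, which is what the Afterglow parameter controls.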
vec4 line = texture2D(uTexture0, vTexCoord);
float x = min(line.r / afterglowAmount, 10.0);
float minFade = 0.1 * (1.0 - clamp(afterglowAmount / 10.0, 0.0, 1.0));
float fade = fadeAmount * ((1.0 - minFade) * hypTan(x) + minFade);
fade = clamp(fade, 0.0, fadeAmount);
gl_FragColor = vec4(0.0, 0.0, 0.0, fade);
}
)";

View file

@ -0,0 +1,11 @@
std::string afterglowVertexShader = R"(
attribute vec2 aPos;
varying vec2 vTexCoord;
void main() {
gl_Position = vec4(aPos, 0.0, 1.0);
vTexCoord = (0.5 * aPos + 0.5);
}
)";

View file

@ -7,6 +7,7 @@ uniform sampler2D uTexture3; //screen
uniform sampler2D uTexture4; //reflection
uniform sampler2D uTexture5; //screen glow
uniform float uExposure;
uniform float uOverexposure;
uniform float uLineSaturation;
uniform float uScreenSaturation;
uniform float uNoise;
@ -15,6 +16,7 @@ uniform float uGlow;
uniform float uAmbient;
uniform float uFishEye;
uniform float uRealScreen;
uniform float uHueShift;
uniform vec2 uOffset;
uniform vec2 uScale;
uniform vec3 uColour;
@ -35,6 +37,16 @@ float noise(vec2 texCoord, float time) {
return fract(sin(seed) * 43758.5453) - 0.5;
}
vec3 hueShift(vec3 color, float shift) {
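// Rotate the colour around the grey (R=G=B) diagonal; shift is a fraction of a full
// turn, hence the 6.2832 (~2*pi) factor.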
vec3 p = vec3(0.55735) * dot(vec3(0.55735), color);
vec3 u = color - p;
vec3 v = cross(vec3(0.55735), u);
color = u * cos(shift * 6.2832) + v * sin(shift * 6.2832) + p;
return color;
}
vec4 max4(vec4 a, vec4 b) {
return vec4(max(a.r, b.r), max(a.g, b.g), max(a.b, b.b), max(a.a, b.a));
}
@ -58,19 +70,19 @@ void main() {
if (uRealScreen > 0.5) {
vec4 reflection = texture2D(uTexture4, vTexCoord);
vec4 screenGlow = texture2D(uTexture5, vTexCoord);
scatter += max4(screenGlow * reflection * max(1.0 - uAmbient, 0.0), vec4(0.0));
scatter += max4(screenGlow * reflection * max(1.0 - 0.5 * uAmbient, 0.0), vec4(0.0));
}
float light = line.r + uGlow * 1.5 * screen.g * screen.g * tightGlow.r;
light += uGlow * 0.3 * scatter.g * (2.0 + 1.0 * screen.g + 0.5 * screen.r);
float tlight = 1.0-pow(2.0, -uExposure*light);
float tlight2 = tlight * tlight * tlight;
gl_FragColor.rgb = mix(uColour, vec3(1.0), 0.3+tlight2*tlight2*0.5) * tlight;
gl_FragColor.rgb = mix(uColour, vec3(1.0), 0.3+tlight2*tlight2*uOverexposure) * tlight;
gl_FragColor.rgb = desaturate(gl_FragColor.rgb, 1.0 - uLineSaturation);
if (uRealScreen > 0.5) {
// this isn't how light works, but it looks cool
float ambient = uExposure * uAmbient;
vec3 screen = ambient * screen.rgb;
vec3 screen = ambient * hueShift(screen.rgb, uHueShift);
gl_FragColor.rgb += desaturate(screen, 1.0 - uScreenSaturation);
}
gl_FragColor.rgb += uNoise * noise(gl_FragCoord.xy * 0.01, uRandom * 100.0);

View file

@ -6,6 +6,8 @@
RecordingSettings::RecordingSettings(RecordingParameters& ps) : parameters(ps) {
#if SOSCI_FEATURES
addAndMakeVisible(quality);
addAndMakeVisible(resolution);
addAndMakeVisible(frameRate);
addAndMakeVisible(losslessVideo);
addAndMakeVisible(recordAudio);
addAndMakeVisible(recordVideo);
@ -16,6 +18,11 @@ RecordingSettings::RecordingSettings(RecordingParameters& ps) : parameters(ps) {
quality.setSliderOnValueChange();
quality.setRangeEnabled(false);
resolution.setSliderOnValueChange();
resolution.setRangeEnabled(false);
frameRate.setSliderOnValueChange();
frameRate.setRangeEnabled(false);
recordAudio.onClick = [this] {
if (!recordAudio.getToggleState() && !recordVideo.getToggleState()) {
recordVideo.setToggleState(true, juce::NotificationType::sendNotification);
@ -66,6 +73,8 @@ void RecordingSettings::resized() {
#if SOSCI_FEATURES
losslessVideo.setBounds(area.removeFromTop(rowHeight));
quality.setBounds(area.removeFromTop(rowHeight).expanded(6, 0));
resolution.setBounds(area.removeFromTop(rowHeight).expanded(6, 0));
frameRate.setBounds(area.removeFromTop(rowHeight).expanded(6, 0));
recordAudio.setBounds(area.removeFromTop(rowHeight));
recordVideo.setBounds(area.removeFromTop(rowHeight));
auto row = area.removeFromTop(rowHeight);

View file

@ -13,6 +13,10 @@ public:
RecordingParameters() {
qualityParameter.disableLfo();
qualityParameter.disableSidechain();
resolution.disableLfo();
resolution.disableSidechain();
frameRate.disableLfo();
frameRate.disableSidechain();
}
private:
@ -36,6 +40,22 @@ public:
BooleanParameter recordAudio = BooleanParameter("Record Audio", "recordAudio", VERSION_HINT, true, "Record audio along with the video.");
BooleanParameter recordVideo = BooleanParameter("Record Video", "recordVideo", VERSION_HINT, sosciFeatures, "Record video output of the visualiser.");
EffectParameter resolution = EffectParameter(
"Resolution",
"The resolution of the recorded video. This only changes when not recording.",
"resolution",
VERSION_HINT, 1024, 128, 2048, 1.0
);
Effect resolutionEffect = Effect(&resolution);
EffectParameter frameRate = EffectParameter(
"Frame Rate",
"The frame rate of the recorded video. This only changes when not recording.",
"frameRate",
VERSION_HINT, 60.0, 10, 240, 0.01
);
Effect frameRateEffect = Effect(&frameRate);
juce::String compressionPreset = "fast";
@ -49,6 +69,12 @@ public:
auto qualityXml = settingsXml->createNewChildElement("quality");
qualityEffect.save(qualityXml);
auto resolutionXml = settingsXml->createNewChildElement("resolution");
resolutionEffect.save(resolutionXml);
auto frameRateXml = settingsXml->createNewChildElement("frameRate");
frameRateEffect.save(frameRateXml);
}
// opt to not change any values if not found
@ -72,6 +98,12 @@ public:
if (auto* qualityXml = settingsXml->getChildByName("quality")) {
qualityEffect.load(qualityXml);
}
if (auto* resolutionXml = settingsXml->getChildByName("resolution")) {
resolutionEffect.load(resolutionXml);
}
if (auto* frameRateXml = settingsXml->getChildByName("frameRate")) {
frameRateEffect.load(frameRateXml);
}
}
}
@ -95,6 +127,14 @@ public:
// not supported by all media players)
return 50 * (1.0 - quality) + 1;
}
int getVideoToolboxQuality() {
if (parameters.losslessVideo.getBoolValue()) {
return 100;
}
double quality = juce::jlimit(0.0, 1.0, parameters.qualityEffect.getValue());
return 100 * quality;
}
bool recordingVideo() {
return parameters.recordVideo.getBoolValue();
@ -114,11 +154,21 @@ public:
}
return parameters.customSharedTextureServerName;
}
int getResolution() {
return parameters.resolution.getValueUnnormalised();
}
double getFrameRate() {
return parameters.frameRate.getValueUnnormalised();
}
RecordingParameters& parameters;
private:
EffectComponent quality{parameters.qualityEffect};
EffectComponent resolution{parameters.resolutionEffect};
EffectComponent frameRate{parameters.frameRateEffect};
jux::SwitchButton losslessVideo{&parameters.losslessVideo};
jux::SwitchButton recordAudio{&parameters.recordAudio};

View file

@ -2,6 +2,8 @@
#include "VisualiserComponent.h"
#include "../CommonPluginProcessor.h"
#include "AfterglowFragmentShader.glsl"
#include "AfterglowVertexShader.glsl"
#include "BlurFragmentShader.glsl"
#include "BlurVertexShader.glsl"
#include "WideBlurFragmentShader.glsl"
@ -189,6 +191,21 @@ void VisualiserComponent::runTask(const std::vector<OsciPoint>& points) {
ySamples.clear();
zSamples.clear();
auto applyEffects = [&](OsciPoint point) {
for (auto& effect : settings.parameters.audioEffects) {
point = effect->apply(0, point);
}
#if SOSCI_FEATURES
if (settings.isFlippedHorizontal()) {
point.x = -point.x;
}
if (settings.isFlippedVertical()) {
point.y = -point.y;
}
#endif
return point;
};
if (settings.isSweepEnabled()) {
double sweepIncrement = getSweepIncrement();
long samplesPerSweep = sampleRate * settings.getSweepSeconds();
@ -196,14 +213,12 @@ void VisualiserComponent::runTask(const std::vector<OsciPoint>& points) {
double triggerValue = settings.getTriggerValue();
bool belowTrigger = false;
for (auto& point : points) {
OsciPoint smoothPoint = settings.parameters.smoothEffect->apply(0, point);
for (const OsciPoint& point : points) {
long samplePosition = sampleCount - lastTriggerPosition;
double startPoint = 1.135;
double sweep = samplePosition * sweepIncrement * 2 * startPoint - startPoint;
double value = smoothPoint.x;
double value = point.x;
if (sweep > startPoint && belowTrigger && value >= triggerValue) {
lastTriggerPosition = sampleCount;
@ -211,25 +226,19 @@ void VisualiserComponent::runTask(const std::vector<OsciPoint>& points) {
belowTrigger = value < triggerValue;
xSamples.push_back(sweep);
ySamples.push_back(value);
OsciPoint sweepPoint = {sweep, value, 1};
sweepPoint = applyEffects(sweepPoint);
xSamples.push_back(sweepPoint.x);
ySamples.push_back(sweepPoint.y);
zSamples.push_back(1);
sampleCount++;
}
} else {
for (OsciPoint point : points) {
for (auto& effect : settings.parameters.audioEffects) {
point = effect->apply(0, point);
}
#if SOSCI_FEATURES
if (settings.isFlippedHorizontal()) {
point.x = -point.x;
}
if (settings.isFlippedVertical()) {
point.y = -point.y;
}
#endif
for (const OsciPoint& rawPoint : points) {
OsciPoint point = applyEffects(rawPoint);
xSamples.push_back(point.x);
ySamples.push_back(point.y);
zSamples.push_back(point.z);
@ -284,7 +293,7 @@ int VisualiserComponent::prepareTask(double sampleRate, int bufferSize) {
audioRecorder.setSampleRate(sampleRate);
int desiredBufferSize = sampleRate / FRAME_RATE;
int desiredBufferSize = sampleRate / recordingSettings.getFrameRate();
return desiredBufferSize;
}
@ -411,7 +420,7 @@ void VisualiserComponent::setRecording(bool recording) {
tempVideoFile = std::make_unique<juce::TemporaryFile>(".mp4");
juce::String resolution = std::to_string(renderTexture.width) + "x" + std::to_string(renderTexture.height);
juce::String cmd = "\"" + ffmpegFile.getFullPathName() + "\"" +
" -r " + juce::String(FRAME_RATE) +
" -r " + juce::String(recordingSettings.getFrameRate()) +
" -f rawvideo" +
" -pix_fmt rgba" +
" -s " + resolution +
@ -421,6 +430,14 @@ void VisualiserComponent::setRecording(bool recording) {
" -y" +
" -pix_fmt yuv420p" +
" -crf " + juce::String(recordingSettings.getCRF()) +
#if JUCE_MAC
#if JUCE_ARM
// use hardware-accelerated VideoToolbox HEVC encoding on Apple Silicon
" -c:v hevc_videotoolbox" +
" -q:v " + juce::String(recordingSettings.getVideoToolboxQuality()) +
" -tag:v hvc1" +
#endif
#endif
" -vf vflip" +
" \"" + tempVideoFile->getFile().getFullPathName() + "\"";
@ -659,6 +676,11 @@ void VisualiserComponent::newOpenGLContextCreated() {
glowShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(glowVertexShader));
glowShader->addFragmentShader(glowFragmentShader);
glowShader->link();
afterglowShader = std::make_unique<juce::OpenGLShaderProgram>(openGLContext);
afterglowShader->addVertexShader(juce::OpenGLHelpers::translateVertexShaderToV3(afterglowVertexShader));
afterglowShader->addFragmentShader(afterglowFragmentShader);
afterglowShader->link();
#endif
glGenBuffers(1, &vertexBuffer);
@ -691,6 +713,7 @@ void VisualiserComponent::openGLContextClosing() {
glDeleteTextures(1, &glowTexture.id);
reflectionOpenGLTexture.release();
glowShader.reset();
afterglowShader.reset();
#endif
simpleShader.reset();
@ -717,6 +740,19 @@ void VisualiserComponent::renderOpenGL() {
// we have a new buffer to render
if (sampleBufferCount != prevSampleBufferCount) {
prevSampleBufferCount = sampleBufferCount;
if (!record.getToggleState()) {
// don't change resolution or framerate if recording
if (recordingSettings.getResolution() != renderTexture.width) {
setResolution(recordingSettings.getResolution());
}
if (recordingSettings.getFrameRate() != currentFrameRate) {
currentFrameRate = recordingSettings.getFrameRate();
prepare(sampleRate, -1);
setupArrays(RESAMPLE_RATIO * sampleRate / recordingSettings.getFrameRate());
}
}
juce::CriticalSection::ScopedLockType lock(samplesLock);
if (settings.parameters.upsamplingEnabled->getBoolValue()) {
@ -746,7 +782,7 @@ void VisualiserComponent::renderOpenGL() {
}
renderingSemaphore.release();
stopwatch.addTime(juce::RelativeTime::seconds(1.0 / FRAME_RATE));
stopwatch.addTime(juce::RelativeTime::seconds(1.0 / recordingSettings.getFrameRate()));
}
// render texture to screen
@ -821,12 +857,12 @@ void VisualiserComponent::setupTextures() {
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
// Create textures
lineTexture = makeTexture(1024, 1024);
lineTexture = makeTexture(recordingSettings.getResolution(), recordingSettings.getResolution());
blur1Texture = makeTexture(512, 512);
blur2Texture = makeTexture(512, 512);
blur3Texture = makeTexture(128, 128);
blur4Texture = makeTexture(128, 128);
renderTexture = makeTexture(1024, 1024);
renderTexture = makeTexture(recordingSettings.getResolution(), recordingSettings.getResolution());
screenOpenGLTexture.loadImage(emptyScreenImage);
screenTexture = { screenOpenGLTexture.getTextureID(), screenTextureImage.getWidth(), screenTextureImage.getHeight() };
@ -839,11 +875,13 @@ void VisualiserComponent::setupTextures() {
glBindFramebuffer(GL_FRAMEBUFFER, 0); // Unbind
}
Texture VisualiserComponent::makeTexture(int width, int height) {
Texture VisualiserComponent::makeTexture(int width, int height, GLuint textureID) {
using namespace juce::gl;
GLuint textureID;
glGenTextures(1, &textureID);
// replace existing texture if it exists, otherwise create new texture
if (textureID == 0) {
glGenTextures(1, &textureID);
}
glBindTexture(GL_TEXTURE_2D, textureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, width, height, 0, GL_RGBA, GL_FLOAT, nullptr);
@ -860,10 +898,20 @@ Texture VisualiserComponent::makeTexture(int width, int height) {
return { textureID, width, height };
}
void VisualiserComponent::setResolution(int width) {
using namespace juce::gl;
lineTexture = makeTexture(width, width, lineTexture.id);
renderTexture = makeTexture(width, width, renderTexture.id);
}
void VisualiserComponent::drawLineTexture(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints) {
using namespace juce::gl;
fadeAmount = juce::jmin(1.0, std::pow(0.5, settings.getPersistence()) * 0.4);
double persistence = std::pow(0.5, settings.getPersistence()) * 0.4;
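// scale the per-frame fade by 60 / fps so decay speed is independent of the recording
// frame rate (60 fps was the previously fixed rate)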
persistence *= 60.0 / recordingSettings.getFrameRate();
fadeAmount = juce::jmin(1.0, persistence);
activateTargetTexture(lineTexture);
fade();
drawLine(xPoints, yPoints, zPoints);
@ -1053,6 +1101,13 @@ void VisualiserComponent::fade() {
setNormalBlending();
#if SOSCI_FEATURES
setShader(afterglowShader.get());
afterglowShader->setUniform("fadeAmount", fadeAmount);
afterglowShader->setUniform("afterglowAmount", (float) settings.getAfterglow());
afterglowShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution());
drawTexture({lineTexture});
#else
simpleShader->use();
glEnableVertexAttribArray(glGetAttribLocation(simpleShader->getProgramID(), "vertexPosition"));
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
@ -1063,6 +1118,7 @@ void VisualiserComponent::fade() {
simpleShader->setUniform("colour", 0.0f, 0.0f, 0.0f, fadeAmount);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableVertexAttribArray(glGetAttribLocation(simpleShader->getProgramID(), "vertexPosition"));
#endif
}
void VisualiserComponent::drawCRT() {
@ -1072,7 +1128,7 @@ void VisualiserComponent::drawCRT() {
activateTargetTexture(blur1Texture);
setShader(texturedShader.get());
texturedShader->setUniform("uResizeForCanvas", lineTexture.width / 1024.0f);
texturedShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution());
drawTexture({lineTexture});
//horizontal blur 512x512
@ -1119,8 +1175,12 @@ void VisualiserComponent::drawCRT() {
outputShader->setUniform("uLineSaturation", (float) settings.getLineSaturation());
#if SOSCI_FEATURES
outputShader->setUniform("uScreenSaturation", (float) settings.getScreenSaturation());
outputShader->setUniform("uHueShift", (float) settings.getScreenHue() / 360.0f);
outputShader->setUniform("uOverexposure", (float) settings.getOverexposure());
#else
outputShader->setUniform("uScreenSaturation", 1.0f);
outputShader->setUniform("uHueShift", 0.0f);
outputShader->setUniform("uOverexposure", 0.5f);
#endif
outputShader->setUniform("uNoise", (float) settings.getNoise());
outputShader->setUniform("uRandom", juce::Random::getSystemRandom().nextFloat());
@ -1131,7 +1191,7 @@ void VisualiserComponent::drawCRT() {
outputShader->setUniform("uFishEye", screenOverlay == ScreenOverlay::VectorDisplay ? VECTOR_DISPLAY_FISH_EYE : 0.0f);
outputShader->setUniform("uRealScreen", settings.parameters.screenOverlay->isRealisticDisplay() ? 1.0f : 0.0f);
#endif
outputShader->setUniform("uResizeForCanvas", lineTexture.width / 1024.0f);
outputShader->setUniform("uResizeForCanvas", lineTexture.width / (float) recordingSettings.getResolution());
juce::Colour colour = juce::Colour::fromHSV(settings.getHue() / 360.0f, 1.0, 1.0, 1.0);
outputShader->setUniform("uColour", colour.getFloatRed(), colour.getFloatGreen(), colour.getFloatBlue());
drawTexture({
@ -1250,7 +1310,7 @@ Texture VisualiserComponent::createScreenTexture() {
glVertexAttribPointer(glGetAttribLocation(simpleShader->getProgramID(), "vertexPosition"), 2, GL_FLOAT, GL_FALSE, 0, nullptr);
glBindBuffer(GL_ARRAY_BUFFER, 0);
simpleShader->setUniform("colour", 0.01f, 0.05f, 0.01f, 1.0f);
glLineWidth(2.0f);
glLineWidth(4.0f);
glDrawArrays(GL_LINES, 0, data.size() / 2);
glBindTexture(GL_TEXTURE_2D, targetTexture.value().id);
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
@ -1306,7 +1366,7 @@ void VisualiserComponent::renderScope(const std::vector<float>& xPoints, const s
if (sampleRate != oldSampleRate || scratchVertices.empty()) {
oldSampleRate = sampleRate;
setupArrays(RESAMPLE_RATIO * sampleRate / FRAME_RATE);
setupArrays(RESAMPLE_RATIO * sampleRate / recordingSettings.getFrameRate());
}
intensity = settings.getIntensity() * (41000.0f / sampleRate);

View file

@ -82,7 +82,6 @@ private:
CommonAudioProcessor& audioProcessor;
float intensity;
const double FRAME_RATE = 60.0;
bool visualiserOnly;
AudioPlayerComponent audioPlayer{audioProcessor};
@ -186,7 +185,7 @@ private:
std::vector<float> smoothedXSamples;
std::vector<float> smoothedYSamples;
std::vector<float> smoothedZSamples;
int sampleBufferCount = 0;
std::atomic<int> sampleBufferCount = 0;
int prevSampleBufferCount = 0;
long lastTriggerPosition = 0;
@ -194,6 +193,8 @@ private:
std::vector<float> fullScreenQuad;
GLuint frameBuffer = 0;
double currentFrameRate = 60.0;
Texture lineTexture;
Texture blur1Texture;
Texture blur2Texture;
@ -209,12 +210,12 @@ private:
juce::Image emptyScreenImage = juce::ImageFileFormat::loadFrom(BinaryData::empty_jpg, BinaryData::empty_jpgSize);
#if SOSCI_FEATURES
juce::Image oscilloscopeImage = juce::ImageFileFormat::loadFrom(BinaryData::real_jpg, BinaryData::real_jpgSize);
juce::Image vectorDisplayImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_jpg, BinaryData::vector_display_jpgSize);
juce::Image oscilloscopeImage = juce::ImageFileFormat::loadFrom(BinaryData::real_png, BinaryData::real_pngSize);
juce::Image vectorDisplayImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_png, BinaryData::vector_display_pngSize);
juce::Image emptyReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::no_reflection_jpg, BinaryData::no_reflection_jpgSize);
juce::Image oscilloscopeReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::real_reflection_jpg, BinaryData::real_reflection_jpgSize);
juce::Image vectorDisplayReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_reflection_jpg, BinaryData::vector_display_reflection_jpgSize);
juce::Image oscilloscopeReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::real_reflection_png, BinaryData::real_reflection_pngSize);
juce::Image vectorDisplayReflectionImage = juce::ImageFileFormat::loadFrom(BinaryData::vector_display_reflection_png, BinaryData::vector_display_reflection_pngSize);
OsciPoint REAL_SCREEN_OFFSET = { 0.02, -0.15 };
OsciPoint REAL_SCREEN_SCALE = { 0.6 };
@ -227,6 +228,7 @@ private:
Texture reflectionTexture;
std::unique_ptr<juce::OpenGLShaderProgram> glowShader;
std::unique_ptr<juce::OpenGLShaderProgram> afterglowShader;
#endif
std::unique_ptr<juce::OpenGLShaderProgram> simpleShader;
@ -252,7 +254,8 @@ private:
void initialiseSharedTexture();
void closeSharedTexture();
#endif
Texture makeTexture(int width, int height);
Texture makeTexture(int width, int height, GLuint textureID = 0);
void setResolution(int width);
void setupArrays(int num_points);
void setupTextures();
void drawLineTexture(const std::vector<float>& xPoints, const std::vector<float>& yPoints, const std::vector<float>& zPoints);

View file

@ -4,15 +4,10 @@
VisualiserSettings::VisualiserSettings(VisualiserParameters& p, int numChannels) : parameters(p), numChannels(numChannels) {
addAndMakeVisible(intensity);
addAndMakeVisible(persistence);
addAndMakeVisible(hue);
addAndMakeVisible(lineSaturation);
addAndMakeVisible(focus);
addAndMakeVisible(noise);
addAndMakeVisible(glow);
addAndMakeVisible(ambient);
addAndMakeVisible(smooth);
addAndMakeVisible(lineColour);
addAndMakeVisible(lightEffects);
addAndMakeVisible(videoEffects);
addAndMakeVisible(lineEffects);
addAndMakeVisible(sweepMs);
addAndMakeVisible(triggerValue);
addAndMakeVisible(upsamplingToggle);
@ -20,12 +15,8 @@ VisualiserSettings::VisualiserSettings(VisualiserParameters& p, int numChannels)
addAndMakeVisible(screenOverlayLabel);
addAndMakeVisible(screenOverlay);
#if SOSCI_FEATURES
addAndMakeVisible(screenSaturation);
addAndMakeVisible(stereo);
addAndMakeVisible(xOffset);
addAndMakeVisible(yOffset);
addAndMakeVisible(xScale);
addAndMakeVisible(yScale);
addAndMakeVisible(positionSize);
addAndMakeVisible(screenColour);
addAndMakeVisible(flipVerticalToggle);
addAndMakeVisible(flipHorizontalToggle);
#endif
@ -38,15 +29,6 @@ VisualiserSettings::VisualiserSettings(VisualiserParameters& p, int numChannels)
parameters.screenOverlay->setUnnormalisedValueNotifyingHost(screenOverlay.getSelectedId());
};
intensity.setSliderOnValueChange();
persistence.setSliderOnValueChange();
hue.setSliderOnValueChange();
lineSaturation.setSliderOnValueChange();
focus.setSliderOnValueChange();
noise.setSliderOnValueChange();
glow.setSliderOnValueChange();
ambient.setSliderOnValueChange();
smooth.setSliderOnValueChange();
sweepMs.setSliderOnValueChange();
triggerValue.setSliderOnValueChange();
@ -61,15 +43,6 @@ VisualiserSettings::VisualiserSettings(VisualiserParameters& p, int numChannels)
resized();
};
#if SOSCI_FEATURES
screenSaturation.setSliderOnValueChange();
stereo.setSliderOnValueChange();
xOffset.setSliderOnValueChange();
yOffset.setSliderOnValueChange();
xScale.setSliderOnValueChange();
yScale.setSliderOnValueChange();
#endif
parameters.screenOverlay->addListener(this);
}
@ -90,29 +63,29 @@ void VisualiserSettings::resized() {
screenOverlayLabel.setBounds(screenOverlayArea.removeFromLeft(120));
screenOverlay.setBounds(screenOverlayArea.removeFromRight(180));
intensity.setBounds(area.removeFromTop(rowHeight));
persistence.setBounds(area.removeFromTop(rowHeight));
hue.setBounds(area.removeFromTop(rowHeight));
lineSaturation.setBounds(area.removeFromTop(rowHeight));
lineColour.setBounds(area.removeFromTop(lineColour.getHeight()));
#if SOSCI_FEATURES
screenSaturation.setBounds(area.removeFromTop(rowHeight));
area.removeFromTop(10);
screenColour.setBounds(area.removeFromTop(screenColour.getHeight()));
#endif
focus.setBounds(area.removeFromTop(rowHeight));
noise.setBounds(area.removeFromTop(rowHeight));
glow.setBounds(area.removeFromTop(rowHeight));
ambient.setBounds(area.removeFromTop(rowHeight));
smooth.setBounds(area.removeFromTop(rowHeight));
area.removeFromTop(10);
lightEffects.setBounds(area.removeFromTop(lightEffects.getHeight()));
area.removeFromTop(10);
videoEffects.setBounds(area.removeFromTop(videoEffects.getHeight()));
area.removeFromTop(10);
lineEffects.setBounds(area.removeFromTop(lineEffects.getHeight()));
#if SOSCI_FEATURES
stereo.setBounds(area.removeFromTop(rowHeight));
xScale.setBounds(area.removeFromTop(rowHeight));
yScale.setBounds(area.removeFromTop(rowHeight));
xOffset.setBounds(area.removeFromTop(rowHeight));
yOffset.setBounds(area.removeFromTop(rowHeight));
area.removeFromTop(10);
positionSize.setBounds(area.removeFromTop(positionSize.getHeight()));
area.removeFromTop(10);
flipVerticalToggle.setBounds(area.removeFromTop(rowHeight));
flipHorizontalToggle.setBounds(area.removeFromTop(rowHeight));
#endif
#if !SOSCI_FEATURES
area.removeFromTop(10);
#endif
upsamplingToggle.setBounds(area.removeFromTop(rowHeight));
sweepToggle.setBounds(area.removeFromTop(rowHeight));
sweepMs.setBounds(area.removeFromTop(rowHeight));

View file

@ -106,6 +106,30 @@ public:
VERSION_HINT, 1.0, 0.0, 5.0
)
);
std::shared_ptr<Effect> screenHueEffect = std::make_shared<Effect>(
new EffectParameter(
"Screen Hue",
"Controls the hue shift of the oscilloscope screen.",
"screenHue",
VERSION_HINT, 0, 0, 359, 1
)
);
std::shared_ptr<Effect> afterglowEffect = std::make_shared<Effect>(
new EffectParameter(
"Afterglow",
"Controls how quickly the image disappears after glowing brightly. Closely related to persistence.",
"afterglow",
VERSION_HINT, 1.0, 0.0, 10.0
)
);
std::shared_ptr<Effect> overexposureEffect = std::make_shared<Effect>(
new EffectParameter(
"Overexposure",
"Controls at which point the line becomes overexposed and clips, turning white.",
"overexposure",
VERSION_HINT, 0.5, 0.0, 1.0
)
);
std::shared_ptr<StereoEffect> stereoEffectApplication = std::make_shared<StereoEffect>();
std::shared_ptr<Effect> stereoEffect = std::make_shared<Effect>(
stereoEffectApplication,
@ -164,15 +188,15 @@ public:
);
std::shared_ptr<Effect> hueEffect = std::make_shared<Effect>(
new EffectParameter(
"Hue",
"Controls the hue/colour of the oscilloscope display.",
"Line Hue",
"Controls the hue of the beam of the oscilloscope.",
"hue",
VERSION_HINT, 125, 0, 359, 1
)
);
std::shared_ptr<Effect> intensityEffect = std::make_shared<Effect>(
new EffectParameter(
"Intensity",
"Line Intensity",
"Controls how bright the electron beam of the oscilloscope is.",
"intensity",
VERSION_HINT, 5.0, 0.0, 10.0
@ -255,11 +279,16 @@ public:
ambientEffect,
sweepMsEffect,
triggerValueEffect,
#if SOSCI_FEATURES
afterglowEffect,
screenSaturationEffect,
screenHueEffect,
overexposureEffect,
#endif
};
std::vector<std::shared_ptr<Effect>> audioEffects = {
smoothEffect,
#if SOSCI_FEATURES
screenSaturationEffect,
stereoEffect,
scaleEffect,
offsetEffect,
@ -279,6 +308,37 @@ public:
};
};
class GroupedSettings : public juce::GroupComponent {
public:
GroupedSettings(std::vector<std::shared_ptr<EffectComponent>> effects, juce::String label) : juce::GroupComponent(label, label), effects(effects) {
for (auto effect : effects) {
addAndMakeVisible(effect.get());
effect->setSliderOnValueChange();
}
setColour(groupComponentBackgroundColourId, Colours::veryDark.withMultipliedBrightness(3.0));
}
void resized() override {
auto area = getLocalBounds();
area.removeFromTop(35);
double rowHeight = 30;
for (auto effect : effects) {
effect->setBounds(area.removeFromTop(rowHeight));
}
}
int getHeight() {
return 40 + effects.size() * 30;
}
private:
std::vector<std::shared_ptr<EffectComponent>> effects;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR(GroupedSettings)
};
class VisualiserSettings : public juce::Component, public juce::AudioProcessorParameter::Listener {
public:
VisualiserSettings(VisualiserParameters&, int numChannels = 2);
@ -309,6 +369,18 @@ public:
double getScreenSaturation() {
return parameters.screenSaturationEffect->getActualValue();
}
double getScreenHue() {
return parameters.screenHueEffect->getActualValue();
}
double getAfterglow() {
return parameters.afterglowEffect->getActualValue();
}
double getOverexposure() {
return parameters.overexposureEffect->getActualValue();
}
bool isFlippedVertical() {
return parameters.flipVertical->getBoolValue();
@ -359,15 +431,58 @@ public:
int numChannels;
private:
EffectComponent intensity{*parameters.intensityEffect};
EffectComponent persistence{*parameters.persistenceEffect};
EffectComponent hue{*parameters.hueEffect};
EffectComponent lineSaturation{*parameters.lineSaturationEffect};
EffectComponent focus{*parameters.focusEffect};
EffectComponent noise{*parameters.noiseEffect};
EffectComponent glow{*parameters.glowEffect};
EffectComponent ambient{*parameters.ambientEffect};
EffectComponent smooth{*parameters.smoothEffect};
GroupedSettings lineColour{
std::vector<std::shared_ptr<EffectComponent>>{
std::make_shared<EffectComponent>(*parameters.hueEffect),
std::make_shared<EffectComponent>(*parameters.lineSaturationEffect),
std::make_shared<EffectComponent>(*parameters.intensityEffect),
},
"Line Colour"
};
#if SOSCI_FEATURES
GroupedSettings screenColour{
std::vector<std::shared_ptr<EffectComponent>>{
std::make_shared<EffectComponent>(*parameters.screenHueEffect),
std::make_shared<EffectComponent>(*parameters.screenSaturationEffect),
std::make_shared<EffectComponent>(*parameters.ambientEffect),
},
"Screen Colour"
};
#endif
GroupedSettings lightEffects{
std::vector<std::shared_ptr<EffectComponent>>{
std::make_shared<EffectComponent>(*parameters.persistenceEffect),
std::make_shared<EffectComponent>(*parameters.focusEffect),
std::make_shared<EffectComponent>(*parameters.glowEffect),
#if SOSCI_FEATURES
std::make_shared<EffectComponent>(*parameters.afterglowEffect),
std::make_shared<EffectComponent>(*parameters.overexposureEffect),
#else
std::make_shared<EffectComponent>(*parameters.ambientEffect),
#endif
},
"Light Effects"
};
GroupedSettings videoEffects{
std::vector<std::shared_ptr<EffectComponent>>{
std::make_shared<EffectComponent>(*parameters.noiseEffect),
},
"Video Effects"
};
GroupedSettings lineEffects{
std::vector<std::shared_ptr<EffectComponent>>{
std::make_shared<EffectComponent>(*parameters.smoothEffect),
#if SOSCI_FEATURES
std::make_shared<EffectComponent>(*parameters.stereoEffect),
#endif
},
"Line Effects"
};
EffectComponent sweepMs{*parameters.sweepMsEffect};
EffectComponent triggerValue{*parameters.triggerValueEffect};
@ -378,12 +493,15 @@ private:
jux::SwitchButton sweepToggle{parameters.sweepEnabled};
#if SOSCI_FEATURES
EffectComponent screenSaturation{*parameters.screenSaturationEffect};
EffectComponent stereo{*parameters.stereoEffect};
EffectComponent xScale{*parameters.scaleEffect, 0};
EffectComponent yScale{*parameters.scaleEffect, 1};
EffectComponent xOffset{*parameters.offsetEffect, 0};
EffectComponent yOffset{*parameters.offsetEffect, 1};
GroupedSettings positionSize{
std::vector<std::shared_ptr<EffectComponent>>{
std::make_shared<EffectComponent>(*parameters.scaleEffect, 0),
std::make_shared<EffectComponent>(*parameters.scaleEffect, 1),
std::make_shared<EffectComponent>(*parameters.offsetEffect, 0),
std::make_shared<EffectComponent>(*parameters.offsetEffect, 1),
},
"Line Position & Scale"
};
jux::SwitchButton flipVerticalToggle{parameters.flipVertical};
jux::SwitchButton flipHorizontalToggle{parameters.flipHorizontal};
@ -417,7 +535,9 @@ class SettingsWindow : public juce::DialogWindow {
public:
SettingsWindow(juce::String name, juce::Component& component) : juce::DialogWindow(name, Colours::darker, true, true), component(component) {
setContentComponent(&viewport);
setResizable(false, false);
centreWithSize(550, 500);
setResizeLimits(getWidth(), 300, getWidth(), 1080);
setResizable(true, false);
viewport.setColour(juce::ScrollBar::trackColourId, juce::Colours::white);
viewport.setViewedComponent(&component, false);
viewport.setScrollBarsShown(true, false, true, false);

View file

@ -1,7 +1,7 @@
bl_info = {
"name": "osci-render",
"author": "James Ball",
"version": (1, 0, 3),
"version": (1, 1, 0),
"blender": (3, 1, 2),
"location": "View3D",
"description": "Addon to send gpencil frames over to osci-render",
@ -16,6 +16,8 @@ import bmesh
import socket
import json
import atexit
import struct
import base64
from bpy.props import StringProperty
from bpy.app.handlers import persistent
from bpy_extras.io_utils import ImportHelper
@ -26,6 +28,11 @@ PORT = 51677
sock = None
GPLA_MAJOR = 2
GPLA_MINOR = 0
GPLA_PATCH = 0
class OBJECT_PT_osci_render_settings(bpy.types.Panel):
bl_idname = "OBJECT_PT_osci_render_settings"
bl_label = "osci-render settings"
@ -37,6 +44,7 @@ class OBJECT_PT_osci_render_settings(bpy.types.Panel):
layout = self.layout
def draw(self, context):
self.layout.prop(context.scene, "oscirenderPort")
global sock
if sock is None:
self.layout.operator("render.osci_render_connect", text="Connect to osci-render instance")
@ -55,7 +63,7 @@ class osci_render_connect(bpy.types.Operator):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(1)
sock.connect((HOST, PORT))
sock.connect((HOST, context.scene.oscirenderPort))
send_scene_to_osci_render(bpy.context.scene)
except socket.error as exp:
sock = None
@ -119,77 +127,181 @@ def close_osci_render():
except socket.error as exp:
sock = None
def append_matrix(object_info, obj):
camera_space = bpy.context.scene.camera.matrix_world.inverted() @ obj.matrix_world
object_info["matrix"] = [camera_space[i][j] for i in range(4) for j in range(4)]
return object_info
def get_frame_info():
frame_info = {"objects": []}
if (bpy.app.version[0] > 4) or (bpy.app.version[0] == 4 and bpy.app.version[1] >= 3):
for obj in bpy.data.objects:
if obj.visible_get() and obj.type == 'GREASEPENCIL':
object_info = {"name": obj.name}
strokes = obj.data.layers.active.frames.data.current_frame().drawing.strokes
object_info["vertices"] = []
for stroke in strokes:
object_info["vertices"].append([{
"x": vert.position.x,
"y": vert.position.y,
"z": vert.position.z,
} for vert in stroke.points])
frame_info["objects"].append(append_matrix(object_info, obj))
else:
for obj in bpy.data.objects:
if obj.visible_get() and obj.type == 'GPENCIL':
object_info = {"name": obj.name}
strokes = obj.data.layers.active.frames.data.active_frame.strokes
object_info["vertices"] = []
for stroke in strokes:
object_info["vertices"].append([{
"x": vert.co[0],
"y": vert.co[1],
"z": vert.co[2],
} for vert in stroke.points])
frame_info["objects"].append(append_matrix(object_info, obj))
def get_gpla_file_allframes(scene):
bin = bytearray()
frame_info["focalLength"] = -0.05 * bpy.data.cameras[0].lens
return frame_info
# header
bin.extend(("GPLA ").encode("utf8"))
bin.extend(GPLA_MAJOR.to_bytes(8, "little"))
bin.extend(GPLA_MINOR.to_bytes(8, "little"))
bin.extend(GPLA_PATCH.to_bytes(8, "little"))
# file info
bin.extend(("FILE ").encode("utf8"))
bin.extend(("fCount ").encode("utf8"))
bin.extend((scene.frame_end - scene.frame_start + 1).to_bytes(8, "little"))
bin.extend(("fRate ").encode("utf8"))
bin.extend(scene.render.fps.to_bytes(8, "little"))
bin.extend(("DONE ").encode("utf8"))
for frame in range(0, scene.frame_end - scene.frame_start + 1):
scene.frame_set(frame + scene.frame_start)
bin.extend(get_frame_info_binary())
bin.extend(("END GPLA").encode("utf8"))
return bin
def get_gpla_file(scene):
bin = bytearray()
# header
bin.extend(("GPLA ").encode("utf8"))
bin.extend(GPLA_MAJOR.to_bytes(8, "little"))
bin.extend(GPLA_MINOR.to_bytes(8, "little"))
bin.extend(GPLA_PATCH.to_bytes(8, "little"))
# file info
bin.extend(("FILE ").encode("utf8"))
bin.extend(("fCount ").encode("utf8"))
bin.extend((scene.frame_end - scene.frame_start + 1).to_bytes(8, "little"))
bin.extend(("fRate ").encode("utf8"))
bin.extend(scene.render.fps.to_bytes(8, "little"))
bin.extend(("DONE ").encode("utf8"))
bin.extend(get_frame_info_binary())
bin.extend(("END GPLA").encode("utf8"))
return bin
@persistent
def save_scene_to_file(scene, file_path):
return_frame = scene.frame_current
scene_info = {"frames": []}
for frame in range(0, scene.frame_end - scene.frame_start):
scene.frame_set(frame + scene.frame_start)
scene_info["frames"].append(get_frame_info())
json_str = json.dumps(scene_info, separators=(',', ':'))
bin = get_gpla_file_allframes(scene)
if file_path is not None:
f = open(file_path, "w")
f.write(json_str)
f.close()
with open(file_path, "wb") as f:
f.write(bytes(bin))
else:
return 1
scene.frame_set(return_frame)
return 0
def get_frame_info_binary():
frame_info = bytearray()
frame_info.extend(("FRAME ").encode("utf8"))
frame_info.extend(("focalLen").encode("utf8"))
frame_info.extend(struct.pack("d", -0.05 * bpy.data.cameras[0].lens))
frame_info.extend(("OBJECTS ").encode("utf8"))
if (bpy.app.version[0] > 4) or (bpy.app.version[0] == 4 and bpy.app.version[1] >= 3):
for object in bpy.data.objects:
if object.visible_get() and object.type == 'GREASEPENCIL':
dg = bpy.context.evaluated_depsgraph_get()
obj = object.evaluated_get(dg)
frame_info.extend(("OBJECT ").encode("utf8"))
# matrix
frame_info.extend(("MATRIX ").encode("utf8"))
camera_space = bpy.context.scene.camera.matrix_world.inverted() @ obj.matrix_world
for i in range(4):
for j in range(4):
frame_info.extend(struct.pack("d", camera_space[i][j]))
frame_info.extend(("DONE ").encode("utf8"))
# strokes
frame_info.extend(("STROKES ").encode("utf8"))
layers = obj.data.layers
for layer in layers:
strokes = layer.frames.data.current_frame().drawing.strokes
for stroke in strokes:
frame_info.extend(("STROKE ").encode("utf8"))
frame_info.extend(("vertexCt").encode("utf8"))
frame_info.extend(len(stroke.points).to_bytes(8, "little"))
frame_info.extend(("VERTICES").encode("utf8"))
for vert in stroke.points:
frame_info.extend(struct.pack("d", vert.position.x))
frame_info.extend(struct.pack("d", vert.position.y))
frame_info.extend(struct.pack("d", vert.position.z))
# VERTICES
frame_info.extend(("DONE ").encode("utf8"))
# STROKE
frame_info.extend(("DONE ").encode("utf8"))
# STROKES
frame_info.extend(("DONE ").encode("utf8"))
# OBJECT
frame_info.extend(("DONE ").encode("utf8"))
else:
for object in bpy.data.objects:
if object.visible_get() and object.type == 'GPENCIL':
dg = bpy.context.evaluated_depsgraph_get()
obj = object.evaluated_get(dg)
frame_info.extend(("OBJECT ").encode("utf8"))
# matrix
frame_info.extend(("MATRIX ").encode("utf8"))
camera_space = bpy.context.scene.camera.matrix_world.inverted() @ obj.matrix_world
for i in range(4):
for j in range(4):
frame_info.extend(struct.pack("d", camera_space[i][j]))
# MATRIX
frame_info.extend(("DONE ").encode("utf8"))
# strokes
frame_info.extend(("STROKES ").encode("utf8"))
layers = obj.data.layers
for layer in layers:
strokes = layer.frames.data.active_frame.strokes
for stroke in strokes:
frame_info.extend(("STROKE ").encode("utf8"))
frame_info.extend(("vertexCt").encode("utf8"))
frame_info.extend(len(stroke.points).to_bytes(8, "little"))
frame_info.extend(("VERTICES").encode("utf8"))
for vert in stroke.points:
frame_info.extend(struct.pack("d", vert.co[0]))
frame_info.extend(struct.pack("d", vert.co[1]))
frame_info.extend(struct.pack("d", vert.co[2]))
# VERTICES
frame_info.extend(("DONE ").encode("utf8"))
# STROKE
frame_info.extend(("DONE ").encode("utf8"))
# STROKES
frame_info.extend(("DONE ").encode("utf8"))
# OBJECT
frame_info.extend(("DONE ").encode("utf8"))
# OBJECTS
frame_info.extend(("DONE ").encode("utf8"))
# FRAME
frame_info.extend(("DONE ").encode("utf8"))
return frame_info
@persistent
def send_scene_to_osci_render(scene):
global sock
if sock is not None:
frame_info = get_frame_info()
json_str = json.dumps(frame_info, separators=(',', ':')) + '\n'
bin = get_gpla_file(scene)
try:
sock.sendall(json_str.encode('utf-8'))
sock.sendall(base64.b64encode(bytes(bin)) + "\n".encode("utf8"))
except socket.error as exp:
sock = None
@ -198,6 +310,7 @@ operations = [OBJECT_PT_osci_render_settings, osci_render_connect, osci_render_c
def register():
bpy.types.Scene.oscirenderPort = bpy.props.IntProperty(name="osci-render port",description="The port through which osci-render will connect",min=51600,max=51699,default=51677)
bpy.app.handlers.frame_change_pre.append(send_scene_to_osci_render)
bpy.app.handlers.depsgraph_update_post.append(send_scene_to_osci_render)
atexit.register(close_osci_render)
@ -206,6 +319,7 @@ def register():
def unregister():
del bpy.types.Scene.oscirenderPort
bpy.app.handlers.frame_change_pre.remove(send_scene_to_osci_render)
bpy.app.handlers.depsgraph_update_post.remove(send_scene_to_osci_render)
atexit.unregister(close_osci_render)

View file

@ -4,7 +4,7 @@
addUsingNamespaceToJuceHeader="0" jucerFormatVersion="1" pluginCharacteristicsValue="pluginWantsMidiIn"
pluginManufacturer="jameshball" aaxIdentifier="sh.ball.oscirender"
cppLanguageStandard="20" projectLineFeed="&#10;" headerPath="./include"
version="2.4.3.3" companyName="James H Ball" companyWebsite="https://osci-render.com"
version="2.4.8.0" companyName="James H Ball" companyWebsite="https://osci-render.com"
companyEmail="james@ball.sh" defines="NOMINMAX=1&#10;INTERNET_FLAG_NO_AUTO_REDIRECT=0&#10;SOSCI_FEATURES=1"
pluginAUMainType="'aumf'">
<MAINGROUP id="j5Ge2T" name="osci-render">
@ -37,13 +37,13 @@
<FILE id="RgmiPU" name="no_reflection.jpg" compile="0" resource="1"
file="Resources/oscilloscope/no_reflection.jpg"/>
<FILE id="dNtZYs" name="noise.jpg" compile="0" resource="1" file="Resources/oscilloscope/noise.jpg"/>
<FILE id="FyEDbA" name="real.jpg" compile="0" resource="1" file="Resources/oscilloscope/real.jpg"/>
<FILE id="v8E6y9" name="real_reflection.jpg" compile="0" resource="1"
file="Resources/oscilloscope/real_reflection.jpg"/>
<FILE id="QrCP4w" name="vector_display.jpg" compile="0" resource="1"
file="Resources/oscilloscope/vector_display.jpg"/>
<FILE id="s3VNsJ" name="vector_display_reflection.jpg" compile="0"
resource="1" file="Resources/oscilloscope/vector_display_reflection.jpg"/>
<FILE id="ZeLdkV" name="real.png" compile="0" resource="1" file="Resources/oscilloscope/real.png"/>
<FILE id="LRgixi" name="real_reflection.png" compile="0" resource="1"
file="Resources/oscilloscope/real_reflection.png"/>
<FILE id="arGLS1" name="vector_display.png" compile="0" resource="1"
file="Resources/oscilloscope/vector_display.png"/>
<FILE id="a6URcP" name="vector_display_reflection.png" compile="0"
resource="1" file="Resources/oscilloscope/vector_display_reflection.png"/>
</GROUP>
<GROUP id="{82BCD6F1-A8BF-F30B-5587-81EE70168883}" name="svg">
<FILE id="rl17ZK" name="cog.svg" compile="0" resource="1" file="Resources/svg/cog.svg"/>
@ -110,9 +110,6 @@
file="Source/audio/PerspectiveEffect.cpp"/>
<FILE id="h0dMim" name="PerspectiveEffect.h" compile="0" resource="0"
file="Source/audio/PerspectiveEffect.h"/>
<FILE id="t2bsR8" name="PitchDetector.cpp" compile="1" resource="0"
file="Source/audio/PitchDetector.cpp"/>
<FILE id="rQC2gX" name="PitchDetector.h" compile="0" resource="0" file="Source/audio/PitchDetector.h"/>
<FILE id="t5g8pf" name="PublicSynthesiser.h" compile="0" resource="0"
file="Source/audio/PublicSynthesiser.h"/>
<FILE id="Q5kjpU" name="SampleRateManager.h" compile="0" resource="0"
@ -619,6 +616,10 @@
file="Source/UGen/ugen_JuceUtility.h"/>
</GROUP>
<GROUP id="{16A8DC64-BA02-898D-4DBA-AA3DDF6F9297}" name="visualiser">
<FILE id="DkDKBX" name="AfterglowFragmentShader.glsl" compile="0" resource="0"
file="Source/visualiser/AfterglowFragmentShader.glsl"/>
<FILE id="eM1kV3" name="AfterglowVertexShader.glsl" compile="0" resource="0"
file="Source/visualiser/AfterglowVertexShader.glsl"/>
<FILE id="kfMvdQ" name="BlurFragmentShader.glsl" compile="0" resource="0"
file="Source/visualiser/BlurFragmentShader.glsl" xcodeResource="0"/>
<FILE id="c59gvD" name="BlurVertexShader.glsl" compile="0" resource="0"
@ -751,11 +752,12 @@
</MODULEPATHS>
</LINUX_MAKE>
<VS2022 targetFolder="Builds/osci-render/VisualStudio2022" smallIcon="pSc1mq"
bigIcon="pSc1mq">
bigIcon="pSc1mq" extraCompilerFlags="/wd4005 /wd4244 /wd4305 /wd4584"
extraLinkerFlags="/IGNORE:4006">
<CONFIGURATIONS>
<CONFIGURATION isDebug="1" name="Debug" targetName="osci-render"/>
<CONFIGURATION isDebug="1" name="Debug" targetName="osci-render" winWarningLevel="2"/>
<CONFIGURATION isDebug="0" name="Release" targetName="osci-render" alwaysGenerateDebugSymbols="1"
debugInformationFormat="ProgramDatabase"/>
debugInformationFormat="ProgramDatabase" winWarningLevel="2"/>
</CONFIGURATIONS>
<MODULEPATHS>
<MODULEPATH id="juce_audio_basics" path="../../../JUCE/modules"/>

View file

@ -3,7 +3,7 @@
<JUCERPROJECT id="HH2E72" name="sosci" projectType="audioplug" useAppConfig="0"
addUsingNamespaceToJuceHeader="0" jucerFormatVersion="1" pluginManufacturer="jameshball"
aaxIdentifier="sh.ball.sosci" cppLanguageStandard="20" projectLineFeed="&#10;"
headerPath="./include" version="1.0.3.3" companyName="James H Ball"
headerPath="./include" version="1.1.2.0" companyName="James H Ball"
companyWebsite="https://osci-render.com" companyEmail="james@ball.sh"
defines="NOMINMAX=1&#10;INTERNET_FLAG_NO_AUTO_REDIRECT=0&#10;SOSCI_FEATURES=1"
pluginManufacturerCode="Jhba" pluginCode="Sosc" pluginAUMainType="'aufx'">
@ -32,13 +32,13 @@
<FILE id="ZgcesA" name="no_reflection.jpg" compile="0" resource="1"
file="Resources/oscilloscope/no_reflection.jpg"/>
<FILE id="dNtZYs" name="noise.jpg" compile="0" resource="1" file="Resources/oscilloscope/noise.jpg"/>
<FILE id="xxXx4Q" name="real.jpg" compile="0" resource="1" file="Resources/oscilloscope/real.jpg"/>
<FILE id="nuEXXn" name="real_reflection.jpg" compile="0" resource="1"
file="Resources/oscilloscope/real_reflection.jpg"/>
<FILE id="bdQp0Q" name="vector_display.jpg" compile="0" resource="1"
file="Resources/oscilloscope/vector_display.jpg"/>
<FILE id="XojumY" name="vector_display_reflection.jpg" compile="0"
resource="1" file="Resources/oscilloscope/vector_display_reflection.jpg"/>
<FILE id="LAmlcm" name="real.png" compile="0" resource="1" file="Resources/oscilloscope/real.png"/>
<FILE id="FKUOID" name="real_reflection.png" compile="0" resource="1"
file="Resources/oscilloscope/real_reflection.png"/>
<FILE id="P9kYdq" name="vector_display.png" compile="0" resource="1"
file="Resources/oscilloscope/vector_display.png"/>
<FILE id="IiqBke" name="vector_display_reflection.png" compile="0"
resource="1" file="Resources/oscilloscope/vector_display_reflection.png"/>
</GROUP>
<GROUP id="{08DE8F94-4A00-7C71-4AF3-4C34C821A5DF}" name="sosci">
<FILE id="jtOYDo" name="clean.sosci" compile="0" resource="1" file="Resources/sosci/clean.sosci"/>
@ -167,6 +167,10 @@
<FILE id="NmptSY" name="Shape.h" compile="0" resource="0" file="Source/shape/Shape.h"/>
</GROUP>
<GROUP id="{04ACA01B-2ADE-8356-BF1E-32942E0F3CFA}" name="visualiser">
<FILE id="O444rE" name="AfterglowFragmentShader.glsl" compile="0" resource="0"
file="Source/visualiser/AfterglowFragmentShader.glsl"/>
<FILE id="kG6Zku" name="AfterglowVertexShader.glsl" compile="0" resource="0"
file="Source/visualiser/AfterglowVertexShader.glsl"/>
<FILE id="Y4j91J" name="BlurFragmentShader.glsl" compile="0" resource="0"
file="Source/visualiser/BlurFragmentShader.glsl"/>
<FILE id="Fimn0E" name="BlurVertexShader.glsl" compile="0" resource="0"