kopia lustrzana https://github.com/jameshball/osci-render
Remove unused webview based oscilloscope code
rodzic
08fbbae925
commit
ac382aa07a
|
@ -1,146 +0,0 @@
|
|||
/*
|
||||
==============================================================================
|
||||
|
||||
This file is part of the JUCE framework.
|
||||
Copyright (c) Raw Material Software Limited
|
||||
|
||||
JUCE is an open source framework subject to commercial or open source
|
||||
licensing.
|
||||
|
||||
By downloading, installing, or using the JUCE framework, or combining the
|
||||
JUCE framework with any other source code, object code, content or any other
|
||||
copyrightable work, you agree to the terms of the JUCE End User Licence
|
||||
Agreement, and all incorporated terms including the JUCE Privacy Policy and
|
||||
the JUCE Website Terms of Service, as applicable, which will bind you. If you
|
||||
do not agree to the terms of these agreements, we will not license the JUCE
|
||||
framework to you, and you must discontinue the installation or download
|
||||
process and cease use of the JUCE framework.
|
||||
|
||||
JUCE End User Licence Agreement: https://juce.com/legal/juce-8-licence/
|
||||
JUCE Privacy Policy: https://juce.com/juce-privacy-policy
|
||||
JUCE Website Terms of Service: https://juce.com/juce-website-terms-of-service/
|
||||
|
||||
Or:
|
||||
|
||||
You may also use this code under the terms of the AGPLv3:
|
||||
https://www.gnu.org/licenses/agpl-3.0.en.html
|
||||
|
||||
THE JUCE FRAMEWORK IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL
|
||||
WARRANTIES, WHETHER EXPRESSED OR IMPLIED, INCLUDING WARRANTY OF
|
||||
MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, ARE DISCLAIMED.
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
if (
|
||||
typeof window.__JUCE__ !== "undefined" &&
|
||||
typeof window.__JUCE__.getAndroidUserScripts !== "undefined" &&
|
||||
typeof window.inAndroidUserScriptEval === "undefined"
|
||||
) {
|
||||
window.inAndroidUserScriptEval = true;
|
||||
eval(window.__JUCE__.getAndroidUserScripts());
|
||||
delete window.inAndroidUserScriptEval;
|
||||
}
|
||||
|
||||
{
|
||||
if (typeof window.__JUCE__ === "undefined") {
|
||||
console.warn(
|
||||
"The 'window.__JUCE__' object is undefined." +
|
||||
" Native integration features will not work." +
|
||||
" Defining a placeholder 'window.__JUCE__' object."
|
||||
);
|
||||
|
||||
window.__JUCE__ = {
|
||||
postMessage: function () {},
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof window.__JUCE__.initialisationData === "undefined") {
|
||||
window.__JUCE__.initialisationData = {
|
||||
__juce__platform: [],
|
||||
__juce__functions: [],
|
||||
__juce__registeredGlobalEventIds: [],
|
||||
__juce__sliders: [],
|
||||
__juce__toggles: [],
|
||||
__juce__comboBoxes: [],
|
||||
};
|
||||
}
|
||||
|
||||
class ListenerList {
|
||||
constructor() {
|
||||
this.listeners = new Map();
|
||||
this.listenerId = 0;
|
||||
}
|
||||
|
||||
addListener(fn) {
|
||||
const newListenerId = this.listenerId++;
|
||||
this.listeners.set(newListenerId, fn);
|
||||
return newListenerId;
|
||||
}
|
||||
|
||||
removeListener(id) {
|
||||
if (this.listeners.has(id)) {
|
||||
this.listeners.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
callListeners(payload) {
|
||||
for (const [, value] of this.listeners) {
|
||||
value(payload);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class EventListenerList {
|
||||
constructor() {
|
||||
this.eventListeners = new Map();
|
||||
}
|
||||
|
||||
addEventListener(eventId, fn) {
|
||||
if (!this.eventListeners.has(eventId))
|
||||
this.eventListeners.set(eventId, new ListenerList());
|
||||
|
||||
const id = this.eventListeners.get(eventId).addListener(fn);
|
||||
|
||||
return [eventId, id];
|
||||
}
|
||||
|
||||
removeEventListener([eventId, id]) {
|
||||
if (this.eventListeners.has(eventId)) {
|
||||
this.eventListeners.get(eventId).removeListener(id);
|
||||
}
|
||||
}
|
||||
|
||||
emitEvent(eventId, object) {
|
||||
if (this.eventListeners.has(eventId))
|
||||
this.eventListeners.get(eventId).callListeners(object);
|
||||
}
|
||||
}
|
||||
|
||||
class Backend {
|
||||
constructor() {
|
||||
this.listeners = new EventListenerList();
|
||||
}
|
||||
|
||||
addEventListener(eventId, fn) {
|
||||
return this.listeners.addEventListener(eventId, fn);
|
||||
}
|
||||
|
||||
removeEventListener([eventId, id]) {
|
||||
this.listeners.removeEventListener(eventId, id);
|
||||
}
|
||||
|
||||
emitEvent(eventId, object) {
|
||||
window.__JUCE__.postMessage(
|
||||
JSON.stringify({ eventId: eventId, payload: object })
|
||||
);
|
||||
}
|
||||
|
||||
emitByBackend(eventId, object) {
|
||||
this.listeners.emitEvent(eventId, JSON.parse(object));
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof window.__JUCE__.backend === "undefined")
|
||||
window.__JUCE__.backend = new Backend();
|
||||
}
|
|
@ -1,492 +0,0 @@
|
|||
/*
|
||||
==============================================================================
|
||||
|
||||
This file is part of the JUCE framework.
|
||||
Copyright (c) Raw Material Software Limited
|
||||
|
||||
JUCE is an open source framework subject to commercial or open source
|
||||
licensing.
|
||||
|
||||
By downloading, installing, or using the JUCE framework, or combining the
|
||||
JUCE framework with any other source code, object code, content or any other
|
||||
copyrightable work, you agree to the terms of the JUCE End User Licence
|
||||
Agreement, and all incorporated terms including the JUCE Privacy Policy and
|
||||
the JUCE Website Terms of Service, as applicable, which will bind you. If you
|
||||
do not agree to the terms of these agreements, we will not license the JUCE
|
||||
framework to you, and you must discontinue the installation or download
|
||||
process and cease use of the JUCE framework.
|
||||
|
||||
JUCE End User Licence Agreement: https://juce.com/legal/juce-8-licence/
|
||||
JUCE Privacy Policy: https://juce.com/juce-privacy-policy
|
||||
JUCE Website Terms of Service: https://juce.com/juce-website-terms-of-service/
|
||||
|
||||
Or:
|
||||
|
||||
You may also use this code under the terms of the AGPLv3:
|
||||
https://www.gnu.org/licenses/agpl-3.0.en.html
|
||||
|
||||
THE JUCE FRAMEWORK IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL
|
||||
WARRANTIES, WHETHER EXPRESSED OR IMPLIED, INCLUDING WARRANTY OF
|
||||
MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, ARE DISCLAIMED.
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
import "./check_native_interop.js";
|
||||
|
||||
class PromiseHandler {
|
||||
constructor() {
|
||||
this.lastPromiseId = 0;
|
||||
this.promises = new Map();
|
||||
|
||||
window.__JUCE__.backend.addEventListener(
|
||||
"__juce__complete",
|
||||
({ promiseId, result }) => {
|
||||
if (this.promises.has(promiseId)) {
|
||||
this.promises.get(promiseId).resolve(result);
|
||||
this.promises.delete(promiseId);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
createPromise() {
|
||||
const promiseId = this.lastPromiseId++;
|
||||
const result = new Promise((resolve, reject) => {
|
||||
this.promises.set(promiseId, { resolve: resolve, reject: reject });
|
||||
});
|
||||
return [promiseId, result];
|
||||
}
|
||||
}
|
||||
|
||||
const promiseHandler = new PromiseHandler();
|
||||
|
||||
/**
|
||||
* Returns a function object that calls a function registered on the JUCE backend and forwards all
|
||||
* parameters to it.
|
||||
*
|
||||
* The provided name should be the same as the name argument passed to
|
||||
* WebBrowserComponent::Options.withNativeFunction() on the backend.
|
||||
*
|
||||
* @param {String} name
|
||||
*/
|
||||
function getNativeFunction(name) {
|
||||
if (!window.__JUCE__.initialisationData.__juce__functions.includes(name))
|
||||
console.warn(
|
||||
`Creating native function binding for '${name}', which is unknown to the backend`
|
||||
);
|
||||
|
||||
const f = function () {
|
||||
const [promiseId, result] = promiseHandler.createPromise();
|
||||
|
||||
window.__JUCE__.backend.emitEvent("__juce__invoke", {
|
||||
name: name,
|
||||
params: Array.prototype.slice.call(arguments),
|
||||
resultId: promiseId,
|
||||
});
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
return f;
|
||||
}
|
||||
|
||||
//==============================================================================
|
||||
|
||||
class ListenerList {
|
||||
constructor() {
|
||||
this.listeners = new Map();
|
||||
this.listenerId = 0;
|
||||
}
|
||||
|
||||
addListener(fn) {
|
||||
const newListenerId = this.listenerId++;
|
||||
this.listeners.set(newListenerId, fn);
|
||||
return newListenerId;
|
||||
}
|
||||
|
||||
removeListener(id) {
|
||||
if (this.listeners.has(id)) {
|
||||
this.listeners.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
callListeners(payload) {
|
||||
for (const [, value] of this.listeners) {
|
||||
value(payload);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const BasicControl_valueChangedEventId = "valueChanged";
|
||||
const BasicControl_propertiesChangedId = "propertiesChanged";
|
||||
|
||||
class SliderState {
|
||||
constructor(name) {
|
||||
if (!window.__JUCE__.initialisationData.__juce__sliders.includes(name))
|
||||
console.warn(
|
||||
"Creating SliderState for '" +
|
||||
name +
|
||||
"', which is unknown to the backend"
|
||||
);
|
||||
|
||||
this.name = name;
|
||||
this.identifier = "__juce__slider" + this.name;
|
||||
this.scaledValue = 0;
|
||||
this.properties = {
|
||||
start: 0,
|
||||
end: 1,
|
||||
skew: 1,
|
||||
name: "",
|
||||
label: "",
|
||||
numSteps: 100,
|
||||
interval: 0,
|
||||
parameterIndex: -1,
|
||||
};
|
||||
this.valueChangedEvent = new ListenerList();
|
||||
this.propertiesChangedEvent = new ListenerList();
|
||||
|
||||
window.__JUCE__.backend.addEventListener(this.identifier, (event) =>
|
||||
this.handleEvent(event)
|
||||
);
|
||||
|
||||
window.__JUCE__.backend.emitEvent(this.identifier, {
|
||||
eventType: "requestInitialUpdate",
|
||||
});
|
||||
}
|
||||
|
||||
setNormalisedValue(newValue) {
|
||||
this.scaledValue = this.snapToLegalValue(
|
||||
this.normalisedToScaledValue(newValue)
|
||||
);
|
||||
|
||||
window.__JUCE__.backend.emitEvent(this.identifier, {
|
||||
eventType: BasicControl_valueChangedEventId,
|
||||
value: this.scaledValue,
|
||||
});
|
||||
}
|
||||
|
||||
sliderDragStarted() {}
|
||||
|
||||
sliderDragEnded() {}
|
||||
|
||||
handleEvent(event) {
|
||||
if (event.eventType == BasicControl_valueChangedEventId) {
|
||||
this.scaledValue = event.value;
|
||||
this.valueChangedEvent.callListeners();
|
||||
}
|
||||
if (event.eventType == BasicControl_propertiesChangedId) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
let { eventType: _, ...rest } = event;
|
||||
this.properties = rest;
|
||||
this.propertiesChangedEvent.callListeners();
|
||||
}
|
||||
}
|
||||
|
||||
getScaledValue() {
|
||||
return this.scaledValue;
|
||||
}
|
||||
|
||||
getNormalisedValue() {
|
||||
return Math.pow(
|
||||
(this.scaledValue - this.properties.start) /
|
||||
(this.properties.end - this.properties.start),
|
||||
this.properties.skew
|
||||
);
|
||||
}
|
||||
|
||||
normalisedToScaledValue(normalisedValue) {
|
||||
return (
|
||||
Math.pow(normalisedValue, 1 / this.properties.skew) *
|
||||
(this.properties.end - this.properties.start) +
|
||||
this.properties.start
|
||||
);
|
||||
}
|
||||
|
||||
snapToLegalValue(value) {
|
||||
const interval = this.properties.interval;
|
||||
|
||||
if (interval == 0) return value;
|
||||
|
||||
const start = this.properties.start;
|
||||
const clamp = (val, min = 0, max = 1) => Math.max(min, Math.min(max, val));
|
||||
|
||||
return clamp(
|
||||
start + interval * Math.floor((value - start) / interval + 0.5),
|
||||
this.properties.start,
|
||||
this.properties.end
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const sliderStates = new Map();
|
||||
|
||||
for (const sliderName of window.__JUCE__.initialisationData.__juce__sliders)
|
||||
sliderStates.set(sliderName, new SliderState(sliderName));
|
||||
|
||||
/**
|
||||
* Returns a SliderState object that is connected to the backend WebSliderRelay object that was
|
||||
* created with the same name argument.
|
||||
*
|
||||
* To register a WebSliderRelay object create one with the right name and add it to the
|
||||
* WebBrowserComponent::Options struct using withOptionsFrom.
|
||||
*
|
||||
* @param {String} name
|
||||
*/
|
||||
function getSliderState(name) {
|
||||
if (!sliderStates.has(name)) sliderStates.set(name, new SliderState(name));
|
||||
|
||||
return sliderStates.get(name);
|
||||
}
|
||||
|
||||
class ToggleState {
|
||||
constructor(name) {
|
||||
if (!window.__JUCE__.initialisationData.__juce__toggles.includes(name))
|
||||
console.warn(
|
||||
"Creating ToggleState for '" +
|
||||
name +
|
||||
"', which is unknown to the backend"
|
||||
);
|
||||
|
||||
this.name = name;
|
||||
this.identifier = "__juce__toggle" + this.name;
|
||||
this.value = false;
|
||||
this.properties = {
|
||||
name: "",
|
||||
parameterIndex: -1,
|
||||
};
|
||||
this.valueChangedEvent = new ListenerList();
|
||||
this.propertiesChangedEvent = new ListenerList();
|
||||
|
||||
window.__JUCE__.backend.addEventListener(this.identifier, (event) =>
|
||||
this.handleEvent(event)
|
||||
);
|
||||
|
||||
window.__JUCE__.backend.emitEvent(this.identifier, {
|
||||
eventType: "requestInitialUpdate",
|
||||
});
|
||||
}
|
||||
|
||||
getValue() {
|
||||
return this.value;
|
||||
}
|
||||
|
||||
setValue(newValue) {
|
||||
this.value = newValue;
|
||||
|
||||
window.__JUCE__.backend.emitEvent(this.identifier, {
|
||||
eventType: BasicControl_valueChangedEventId,
|
||||
value: this.value,
|
||||
});
|
||||
}
|
||||
|
||||
handleEvent(event) {
|
||||
if (event.eventType == BasicControl_valueChangedEventId) {
|
||||
this.value = event.value;
|
||||
this.valueChangedEvent.callListeners();
|
||||
}
|
||||
if (event.eventType == BasicControl_propertiesChangedId) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
let { eventType: _, ...rest } = event;
|
||||
this.properties = rest;
|
||||
this.propertiesChangedEvent.callListeners();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const toggleStates = new Map();
|
||||
|
||||
for (const name of window.__JUCE__.initialisationData.__juce__toggles)
|
||||
toggleStates.set(name, new ToggleState(name));
|
||||
|
||||
/**
|
||||
* Returns a ToggleState object that is connected to the backend WebToggleButtonRelay object that was
|
||||
* created with the same name argument.
|
||||
*
|
||||
* To register a WebToggleButtonRelay object create one with the right name and add it to the
|
||||
* WebBrowserComponent::Options struct using withOptionsFrom.
|
||||
*
|
||||
* @param {String} name
|
||||
*/
|
||||
function getToggleState(name) {
|
||||
if (!toggleStates.has(name)) toggleStates.set(name, new ToggleState(name));
|
||||
|
||||
return toggleStates.get(name);
|
||||
}
|
||||
|
||||
class ComboBoxState {
|
||||
constructor(name) {
|
||||
if (!window.__JUCE__.initialisationData.__juce__comboBoxes.includes(name))
|
||||
console.warn(
|
||||
"Creating ComboBoxState for '" +
|
||||
name +
|
||||
"', which is unknown to the backend"
|
||||
);
|
||||
|
||||
this.name = name;
|
||||
this.identifier = "__juce__comboBox" + this.name;
|
||||
this.value = 0.0;
|
||||
this.properties = {
|
||||
name: "",
|
||||
parameterIndex: -1,
|
||||
choices: [],
|
||||
};
|
||||
this.valueChangedEvent = new ListenerList();
|
||||
this.propertiesChangedEvent = new ListenerList();
|
||||
|
||||
window.__JUCE__.backend.addEventListener(this.identifier, (event) =>
|
||||
this.handleEvent(event)
|
||||
);
|
||||
|
||||
window.__JUCE__.backend.emitEvent(this.identifier, {
|
||||
eventType: "requestInitialUpdate",
|
||||
});
|
||||
}
|
||||
|
||||
getChoiceIndex() {
|
||||
return Math.round(this.value * (this.properties.choices.length - 1));
|
||||
}
|
||||
|
||||
setChoiceIndex(index) {
|
||||
const numItems = this.properties.choices.length;
|
||||
this.value = numItems > 1 ? index / (numItems - 1) : 0.0;
|
||||
|
||||
window.__JUCE__.backend.emitEvent(this.identifier, {
|
||||
eventType: BasicControl_valueChangedEventId,
|
||||
value: this.value,
|
||||
});
|
||||
}
|
||||
|
||||
handleEvent(event) {
|
||||
if (event.eventType == BasicControl_valueChangedEventId) {
|
||||
this.value = event.value;
|
||||
this.valueChangedEvent.callListeners();
|
||||
}
|
||||
if (event.eventType == BasicControl_propertiesChangedId) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
let { eventType: _, ...rest } = event;
|
||||
this.properties = rest;
|
||||
this.propertiesChangedEvent.callListeners();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const comboBoxStates = new Map();
|
||||
|
||||
for (const name of window.__JUCE__.initialisationData.__juce__comboBoxes)
|
||||
comboBoxStates.set(name, new ComboBoxState(name));
|
||||
|
||||
/**
|
||||
* Returns a ComboBoxState object that is connected to the backend WebComboBoxRelay object that was
|
||||
* created with the same name argument.
|
||||
*
|
||||
* To register a WebComboBoxRelay object create one with the right name and add it to the
|
||||
* WebBrowserComponent::Options struct using withOptionsFrom.
|
||||
*
|
||||
* @param {String} name
|
||||
*/
|
||||
function getComboBoxState(name) {
|
||||
if (!comboBoxStates.has(name))
|
||||
comboBoxStates.set(name, new ComboBoxState(name));
|
||||
|
||||
return comboBoxStates.get(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends a platform-specific prefix to the path to ensure that a request sent to this address will
|
||||
* be received by the backend's ResourceProvider.
|
||||
* @param {String} path
|
||||
*/
|
||||
function getBackendResourceAddress(path) {
|
||||
const platform =
|
||||
window.__JUCE__.initialisationData.__juce__platform.length > 0
|
||||
? window.__JUCE__.initialisationData.__juce__platform[0]
|
||||
: "";
|
||||
|
||||
if (platform == "windows" || platform == "android")
|
||||
return "https://juce.backend/" + path;
|
||||
|
||||
if (platform == "macos" || platform == "ios" || platform == "linux")
|
||||
return "juce://juce.backend/" + path;
|
||||
|
||||
console.warn(
|
||||
"getBackendResourceAddress() called, but no JUCE native backend is detected."
|
||||
);
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* This helper class is intended to aid the implementation of
|
||||
* AudioProcessorEditor::getControlParameterIndex() for editors using a WebView interface.
|
||||
*
|
||||
* Create an instance of this class and call its handleMouseMove() method in each mousemove event.
|
||||
*
|
||||
* This class can be used to continuously report the controlParameterIndexAnnotation attribute's
|
||||
* value related to the DOM element that is currently under the mouse pointer.
|
||||
*
|
||||
* This value is defined at all times as follows
|
||||
* * the annotation attribute's value for the DOM element directly under the mouse, if it has it,
|
||||
* * the annotation attribute's value for the first parent element, that has it,
|
||||
* * -1 otherwise.
|
||||
*
|
||||
* Whenever there is a change in this value, an event is emitted to the frontend with the new value.
|
||||
* You can use a ControlParameterIndexReceiver object on the backend to listen to these events.
|
||||
*
|
||||
* @param {String} controlParameterIndexAnnotation
|
||||
*/
|
||||
class ControlParameterIndexUpdater {
|
||||
constructor(controlParameterIndexAnnotation) {
|
||||
this.controlParameterIndexAnnotation = controlParameterIndexAnnotation;
|
||||
this.lastElement = null;
|
||||
this.lastControlParameterIndex = null;
|
||||
}
|
||||
|
||||
handleMouseMove(event) {
|
||||
const currentElement = document.elementFromPoint(
|
||||
event.clientX,
|
||||
event.clientY
|
||||
);
|
||||
|
||||
if (currentElement === this.lastElement) return;
|
||||
this.lastElement = currentElement;
|
||||
|
||||
let controlParameterIndex = -1;
|
||||
|
||||
if (currentElement !== null)
|
||||
controlParameterIndex = this.#getControlParameterIndex(currentElement);
|
||||
|
||||
if (controlParameterIndex === this.lastControlParameterIndex) return;
|
||||
this.lastControlParameterIndex = controlParameterIndex;
|
||||
|
||||
window.__JUCE__.backend.emitEvent(
|
||||
"__juce__controlParameterIndexChanged",
|
||||
controlParameterIndex
|
||||
);
|
||||
}
|
||||
|
||||
//==============================================================================
|
||||
#getControlParameterIndex(element) {
|
||||
const isValidNonRootElement = (e) => {
|
||||
return e !== null && e !== document.documentElement;
|
||||
};
|
||||
|
||||
while (isValidNonRootElement(element)) {
|
||||
if (element.hasAttribute(this.controlParameterIndexAnnotation)) {
|
||||
return element.getAttribute(this.controlParameterIndexAnnotation);
|
||||
}
|
||||
|
||||
element = element.parentElement;
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
export {
|
||||
getNativeFunction,
|
||||
getSliderState,
|
||||
getToggleState,
|
||||
getComboBoxState,
|
||||
getBackendResourceAddress,
|
||||
ControlParameterIndexUpdater,
|
||||
};
|
|
@ -1,4 +0,0 @@
|
|||
{
|
||||
"name": "juce-framework-frontend",
|
||||
"version": "7.0.7"
|
||||
}
|
|
@ -1,517 +0,0 @@
|
|||
<!-- THIS CODE HAS BEEN HEAVILY ADAPTED FROM https://dood.al/oscilloscope/ AS PERMITTED BY THE AUTHOR -->
|
||||
|
||||
<!DOCTYPE html>
|
||||
<head>
|
||||
<style>
|
||||
body {
|
||||
font-family: Sans-Serif;
|
||||
font-size: 14px;
|
||||
-webkit-user-select: none; /* Safari */
|
||||
-ms-user-select: none; /* IE 10 and IE 11 */
|
||||
user-select: none; /* Standard syntax */
|
||||
}
|
||||
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
canvas {
|
||||
width: min(100vw, 100vh);
|
||||
height: min(100vw, 100vh);
|
||||
position: absolute;
|
||||
top: calc(calc(100vh - min(100vw, 100vh)) / 2);
|
||||
left: calc(calc(100vw - min(100vw, 100vh)) / 2);
|
||||
display: block;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
#overlay {
|
||||
background-color: rgba(0,0,0,0.5);
|
||||
height: 100vh;
|
||||
width: 100vw;
|
||||
position: absolute;
|
||||
display: none;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
cursor: default;
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
table {
|
||||
border-spacing:0;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
#buttonRow {
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
display: none;
|
||||
z-index: 99;
|
||||
}
|
||||
|
||||
#buttonRow button {
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
background-color: transparent;
|
||||
background-repeat: no-repeat;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
overflow: hidden;
|
||||
outline: none;
|
||||
background-size: cover;
|
||||
}
|
||||
|
||||
#buttonRow button:hover {
|
||||
filter: brightness(70%);
|
||||
}
|
||||
|
||||
#buttonRow button:active {
|
||||
filter: brightness(50%);
|
||||
}
|
||||
|
||||
#download {
|
||||
background: url(download.svg) no-repeat;
|
||||
}
|
||||
|
||||
#fullscreen {
|
||||
background: url(fullscreen.svg) no-repeat;
|
||||
}
|
||||
|
||||
#popout {
|
||||
background: url(open_in_new.svg) no-repeat;
|
||||
}
|
||||
|
||||
#settings {
|
||||
background: url(cog.svg) no-repeat;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body bgcolor="black" text="white" autocomplete="off" style="margin: 0px;">
|
||||
|
||||
<div id="buttonRow">
|
||||
<button onClick="toggleRecording()" id="download"/>
|
||||
<button id="fullscreen"/>
|
||||
<button id="popout"/>
|
||||
<button id="settings"/>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
var controls=
|
||||
{
|
||||
swapXY : false,
|
||||
sweepOn : false,
|
||||
sweepMsDiv : 1,
|
||||
sweepTriggerValue : 0,
|
||||
mainGain : 0.0,
|
||||
brightness : 0.0,
|
||||
intensity: 0.02,
|
||||
saturation: 1.0,
|
||||
focus: 0.01,
|
||||
hue : 125,
|
||||
invertXY : false,
|
||||
grid : true,
|
||||
noise : true,
|
||||
persistence : 0,
|
||||
disableFilter : false,
|
||||
}
|
||||
|
||||
let timeout;
|
||||
document.addEventListener("mousemove", function() {
|
||||
const buttons = document.getElementById('buttonRow');
|
||||
buttons.style.display = "block";
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
timeout = setTimeout(function() {
|
||||
buttons.style.display = "none";
|
||||
}, 1000)
|
||||
});
|
||||
|
||||
let isDebug = true;
|
||||
let paused = false;
|
||||
let openInAnotherWindow = false;
|
||||
let externalSampleRate = 96000;
|
||||
let externalBufferSize = 1920;
|
||||
let recording = false;
|
||||
let mediaRecorder = undefined;
|
||||
let sendVideoDataCallback = undefined;
|
||||
let finishRecordingCallback = undefined;
|
||||
|
||||
const toggleRecording = () => {
|
||||
recording = !recording;
|
||||
if (recording) {
|
||||
const canvas = document.getElementById("crtCanvas");
|
||||
const data = [];
|
||||
const stream = canvas.captureStream(60);
|
||||
mediaRecorder = new MediaRecorder(stream);
|
||||
mediaRecorder.ondataavailable = (e) => {
|
||||
var reader = new FileReader();
|
||||
reader.readAsDataURL(e.data);
|
||||
reader.onloadend = function() {
|
||||
var dataUrl = reader.result;
|
||||
var base64 = dataUrl.split(',')[1];
|
||||
sendVideoDataCallback(base64);
|
||||
}
|
||||
};
|
||||
mediaRecorder.onstop = (e) => finishRecordingCallback();
|
||||
mediaRecorder.start();
|
||||
} else {
|
||||
mediaRecorder.stop();
|
||||
}
|
||||
};
|
||||
</script>
|
||||
|
||||
<script type="module">
|
||||
import * as Juce from "./index.js";
|
||||
|
||||
const fullscreen = document.getElementById('fullscreen');
|
||||
const toggleFullscreen = Juce.getNativeFunction("toggleFullscreen");
|
||||
fullscreen.onclick = toggleFullscreen;
|
||||
|
||||
const popout = document.getElementById('popout');
|
||||
const popoutFn = Juce.getNativeFunction("popout");
|
||||
popout.onclick = popoutFn;
|
||||
|
||||
const settings = document.getElementById('settings');
|
||||
const settingsFn = Juce.getNativeFunction("settings");
|
||||
settings.onclick = settingsFn;
|
||||
|
||||
const mainScreen = document.getElementById('mainScreen');
|
||||
const overlay = document.getElementById('overlay');
|
||||
const pauseFn = Juce.getNativeFunction("pause");
|
||||
mainScreen.onclick = function() {
|
||||
if (!openInAnotherWindow) {
|
||||
pauseFn();
|
||||
paused = !paused;
|
||||
if (paused) {
|
||||
overlay.style.display = "flex";
|
||||
} else {
|
||||
overlay.style.display = "none";
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const isDebugFn = Juce.getNativeFunction("isDebug");
|
||||
|
||||
isDebugFn().then(debug => {
|
||||
isDebug = debug;
|
||||
if (!debug) {
|
||||
document.addEventListener('contextmenu', event => event.preventDefault());
|
||||
}
|
||||
});
|
||||
|
||||
const isOverlayFn = Juce.getNativeFunction("isOverlay");
|
||||
isOverlayFn().then(overlay => {
|
||||
if (overlay) {
|
||||
popout.remove();
|
||||
fullscreen.remove();
|
||||
}
|
||||
});
|
||||
|
||||
Juce.getNativeFunction("isVisualiserOnly")().then(visualiserOnly => {
|
||||
if (visualiserOnly) {
|
||||
popout.remove();
|
||||
fullscreen.remove();
|
||||
settings.remove();
|
||||
}
|
||||
});
|
||||
|
||||
window.__JUCE__.backend.addEventListener("childPresent", hasChild => {
|
||||
openInAnotherWindow = hasChild;
|
||||
if (hasChild) {
|
||||
overlay.style.display = "flex";
|
||||
overlay.innerText = "Open in separate window";
|
||||
} else {
|
||||
overlay.style.display = "none";
|
||||
overlay.innerText = "Paused";
|
||||
}
|
||||
});
|
||||
|
||||
window.__JUCE__.backend.addEventListener("toggleRecording", hasChild => {
|
||||
toggleRecording();
|
||||
});
|
||||
|
||||
document.addEventListener("dblclick", function() {
|
||||
toggleFullscreen();
|
||||
});
|
||||
|
||||
finishRecordingCallback = () => {
|
||||
Juce.getNativeFunction("finishRecording")();
|
||||
};
|
||||
|
||||
sendVideoDataCallback = (base64) => {
|
||||
Juce.getNativeFunction("sendVideoData")(base64);
|
||||
};
|
||||
</script>
|
||||
|
||||
<div id="mainScreen">
|
||||
<div id="overlay">Paused</div>
|
||||
<canvas id="crtCanvas" width="800" height="800"></canvas>
|
||||
</div>
|
||||
|
||||
<script id="vertex" type="x-shader">
|
||||
attribute vec2 vertexPosition;
|
||||
void main()
|
||||
{
|
||||
gl_Position = vec4(vertexPosition, 0.0, 1.0);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="fragment" type="x-shader">
|
||||
precision highp float;
|
||||
uniform vec4 colour;
|
||||
void main()
|
||||
{
|
||||
gl_FragColor = colour;
|
||||
}
|
||||
</script>
|
||||
|
||||
<!-- The Gaussian line-drawing code, the next two shaders, is adapted
|
||||
from woscope by e1ml : https://github.com/m1el/woscope -->
|
||||
|
||||
<script id="gaussianVertex" type="x-shader">
|
||||
#define EPS 1E-6
|
||||
uniform float uInvert;
|
||||
uniform float uSize;
|
||||
uniform float uNEdges;
|
||||
uniform float uFadeAmount;
|
||||
uniform float uIntensity;
|
||||
uniform float uGain;
|
||||
attribute vec3 aStart, aEnd;
|
||||
attribute float aIdx;
|
||||
varying vec4 uvl;
|
||||
varying vec2 vTexCoord;
|
||||
varying float vLen;
|
||||
varying float vSize;
|
||||
void main () {
|
||||
float tang;
|
||||
vec2 current;
|
||||
// All points in quad contain the same data:
|
||||
// segment start point and segment end point.
|
||||
// We determine point position using its index.
|
||||
float idx = mod(aIdx,4.0);
|
||||
|
||||
vec2 aStartPos = aStart.xy;
|
||||
vec2 aEndPos = aEnd.xy;
|
||||
float aStartBrightness = aStart.z;
|
||||
float aEndBrightness = aEnd.z;
|
||||
|
||||
// `dir` vector is storing the normalized difference
|
||||
// between end and start
|
||||
vec2 dir = (aEndPos-aStartPos)*uGain;
|
||||
uvl.z = length(dir);
|
||||
|
||||
if (uvl.z > EPS) {
|
||||
dir = dir / uvl.z;
|
||||
} else {
|
||||
// If the segment is too short, just draw a square
|
||||
dir = vec2(1.0, 0.0);
|
||||
}
|
||||
|
||||
vSize = uSize;
|
||||
float intensity = 0.015 * uIntensity / uSize;
|
||||
vec2 norm = vec2(-dir.y, dir.x);
|
||||
|
||||
if (idx >= 2.0) {
|
||||
current = aEndPos*uGain;
|
||||
tang = 1.0;
|
||||
uvl.x = -vSize;
|
||||
uvl.w = aEndBrightness;
|
||||
} else {
|
||||
current = aStartPos*uGain;
|
||||
tang = -1.0;
|
||||
uvl.x = uvl.z + vSize;
|
||||
uvl.w = aStartBrightness;
|
||||
}
|
||||
// `side` corresponds to shift to the "right" or "left"
|
||||
float side = (mod(idx, 2.0)-0.5)*2.0;
|
||||
uvl.y = side * vSize;
|
||||
|
||||
uvl.w *= intensity * mix(1.0-uFadeAmount, 1.0, floor(aIdx / 4.0 + 0.5)/uNEdges);
|
||||
|
||||
vec4 pos = vec4((current+(tang*dir+norm*side)*vSize)*uInvert,0.0,1.0);
|
||||
gl_Position = pos;
|
||||
vTexCoord = 0.5*pos.xy+0.5;
|
||||
//float seed = floor(aIdx/4.0);
|
||||
//seed = mod(sin(seed*seed), 7.0);
|
||||
//if (mod(seed/2.0, 1.0)<0.5) gl_Position = vec4(10.0);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="gaussianFragment" type="x-shader">
|
||||
#define EPS 1E-6
|
||||
#define TAU 6.283185307179586
|
||||
#define TAUR 2.5066282746310002
|
||||
#define SQRT2 1.4142135623730951
|
||||
precision highp float;
|
||||
uniform float uSize;
|
||||
uniform float uIntensity;
|
||||
uniform sampler2D uScreen;
|
||||
varying float vSize;
|
||||
varying vec4 uvl;
|
||||
varying vec2 vTexCoord;
|
||||
|
||||
// A standard gaussian function, used for weighting samples
|
||||
float gaussian(float x, float sigma)
|
||||
{
|
||||
return exp(-(x * x) / (2.0 * sigma * sigma)) / (TAUR * sigma);
|
||||
}
|
||||
|
||||
// This approximates the error function, needed for the gaussian integral
|
||||
float erf(float x)
|
||||
{
|
||||
float s = sign(x), a = abs(x);
|
||||
x = 1.0 + (0.278393 + (0.230389 + 0.078108 * (a * a)) * a) * a;
|
||||
x *= x;
|
||||
return s - s / (x * x);
|
||||
}
|
||||
|
||||
void main (void)
|
||||
{
|
||||
float len = uvl.z;
|
||||
vec2 xy = uvl.xy;
|
||||
float brightness;
|
||||
|
||||
float sigma = vSize/5.0;
|
||||
if (len < EPS)
|
||||
{
|
||||
// If the beam segment is too short, just calculate intensity at the position.
|
||||
brightness = gaussian(length(xy), sigma);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Otherwise, use analytical integral for accumulated intensity.
|
||||
brightness = erf(xy.x/SQRT2/sigma) - erf((xy.x-len)/SQRT2/sigma);
|
||||
brightness *= exp(-xy.y*xy.y/(2.0*sigma*sigma))/2.0/len;
|
||||
}
|
||||
|
||||
brightness *= uvl.w;
|
||||
gl_FragColor = 2.0 * texture2D(uScreen, vTexCoord) * brightness;
|
||||
gl_FragColor.a = 1.0;
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="texturedVertex" type="x-shader">
|
||||
precision highp float;
|
||||
attribute vec2 aPos;
|
||||
varying vec2 vTexCoord;
|
||||
void main (void)
|
||||
{
|
||||
gl_Position = vec4(aPos, 0.0, 1.0);
|
||||
vTexCoord = (0.5*aPos+0.5);
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="texturedVertexWithResize" type="x-shader">
|
||||
precision highp float;
|
||||
attribute vec2 aPos;
|
||||
varying vec2 vTexCoord;
|
||||
uniform float uResizeForCanvas;
|
||||
void main (void)
|
||||
{
|
||||
gl_Position = vec4(aPos, 0.0, 1.0);
|
||||
vTexCoord = (0.5*aPos+0.5)*uResizeForCanvas;
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="texturedFragment" type="x-shader">
|
||||
precision highp float;
|
||||
uniform sampler2D uTexture0;
|
||||
varying vec2 vTexCoord;
|
||||
void main (void)
|
||||
{
|
||||
gl_FragColor = texture2D(uTexture0, vTexCoord);
|
||||
gl_FragColor.a= 1.0;
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="blurFragment" type="x-shader">
|
||||
precision highp float;
|
||||
uniform sampler2D uTexture0;
|
||||
uniform vec2 uOffset;
|
||||
varying vec2 vTexCoord;
|
||||
void main (void)
|
||||
{
|
||||
vec4 sum = vec4(0.0);
|
||||
sum += texture2D(uTexture0, vTexCoord - uOffset*8.0) * 0.000078;
|
||||
sum += texture2D(uTexture0, vTexCoord - uOffset*7.0) * 0.000489;
|
||||
sum += texture2D(uTexture0, vTexCoord - uOffset*6.0) * 0.002403;
|
||||
sum += texture2D(uTexture0, vTexCoord - uOffset*5.0) * 0.009245;
|
||||
sum += texture2D(uTexture0, vTexCoord - uOffset*4.0) * 0.027835;
|
||||
sum += texture2D(uTexture0, vTexCoord - uOffset*3.0) * 0.065592;
|
||||
sum += texture2D(uTexture0, vTexCoord - uOffset*2.0) * 0.12098;
|
||||
sum += texture2D(uTexture0, vTexCoord - uOffset*1.0) * 0.17467;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*0.0) * 0.19742;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*1.0) * 0.17467;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*2.0) * 0.12098;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*3.0) * 0.065592;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*4.0) * 0.027835;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*5.0) * 0.009245;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*6.0) * 0.002403;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*7.0) * 0.000489;
|
||||
sum += texture2D(uTexture0, vTexCoord + uOffset*8.0) * 0.000078;
|
||||
gl_FragColor = sum;
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="outputVertex" type="x-shader">
|
||||
precision highp float;
|
||||
attribute vec2 aPos;
|
||||
varying vec2 vTexCoord;
|
||||
varying vec2 vTexCoordCanvas;
|
||||
uniform float uResizeForCanvas;
|
||||
void main (void)
|
||||
{
|
||||
gl_Position = vec4(aPos, 0.0, 1.0);
|
||||
vTexCoord = (0.5*aPos+0.5);
|
||||
vTexCoordCanvas = vTexCoord*uResizeForCanvas;
|
||||
}
|
||||
</script>
|
||||
|
||||
<script id="outputFragment" type="x-shader">
|
||||
precision highp float;
|
||||
uniform sampler2D uTexture0; //line
|
||||
uniform sampler2D uTexture1; //tight glow
|
||||
uniform sampler2D uTexture2; //big glow
|
||||
uniform sampler2D uTexture3; //screen
|
||||
uniform float uExposure;
|
||||
uniform float uSaturation;
|
||||
uniform vec3 uColour;
|
||||
varying vec2 vTexCoord;
|
||||
varying vec2 vTexCoordCanvas;
|
||||
|
||||
vec3 desaturate(vec3 color, float factor) {
|
||||
vec3 lum = vec3(0.299, 0.587, 0.114);
|
||||
vec3 gray = vec3(dot(lum, color));
|
||||
return vec3(mix(color, gray, factor));
|
||||
}
|
||||
|
||||
/* Gradient noise from Jorge Jimenez's presentation: */
|
||||
/* http://www.iryoku.com/next-generation-post-processing-in-call-of-duty-advanced-warfare */
|
||||
float gradientNoise(in vec2 uv) {
|
||||
return fract(52.9829189 * fract(dot(uv, vec2(0.06711056, 0.00583715))));
|
||||
}
|
||||
|
||||
void main (void) {
|
||||
vec4 line = texture2D(uTexture0, vTexCoordCanvas);
|
||||
// r components have grid; g components do not.
|
||||
vec4 screen = texture2D(uTexture3, vTexCoord);
|
||||
vec4 tightGlow = texture2D(uTexture1, vTexCoord);
|
||||
vec4 scatter = texture2D(uTexture2, vTexCoord)+0.35;
|
||||
float light = line.r + 1.5*screen.g*screen.g*tightGlow.r;
|
||||
light += 0.4*scatter.g * (2.0+1.0*screen.g + 0.5*screen.r);
|
||||
float tlight = 1.0-pow(2.0, -uExposure*light);
|
||||
float tlight2 = tlight*tlight*tlight;
|
||||
gl_FragColor.rgb = mix(uColour, vec3(1.0), 0.3+tlight2*tlight2*0.5)*tlight;
|
||||
gl_FragColor.rgb = desaturate(gl_FragColor.rgb, 1.0 - uSaturation);
|
||||
gl_FragColor.rgb += (1.0 / 255.0) * gradientNoise(gl_FragCoord.xy) - (0.5 / 255.0);
|
||||
gl_FragColor.a = 1.0;
|
||||
}
|
||||
</script>
|
||||
|
||||
<script src="oscilloscope.js" type="module"></script>
|
|
@ -1,770 +0,0 @@
|
|||
// THIS CODE HAS BEEN HEAVILY ADAPTED FROM https://dood.al/oscilloscope/ AS PERMITTED BY THE AUTHOR
|
||||
|
||||
import * as Juce from "./index.js";
|
||||
|
||||
var AudioSystem =
|
||||
{
|
||||
microphoneActive : false,
|
||||
|
||||
init : function (bufferSize)
|
||||
{
|
||||
this.bufferSize = bufferSize;
|
||||
this.timePerSample = 1/externalSampleRate;
|
||||
this.oldXSamples = new Float32Array(this.bufferSize);
|
||||
this.oldYSamples = new Float32Array(this.bufferSize);
|
||||
this.oldZSamples = new Float32Array(this.bufferSize);
|
||||
this.smoothedXSamples = new Float32Array(Filter.nSmoothedSamples);
|
||||
this.smoothedYSamples = new Float32Array(Filter.nSmoothedSamples);
|
||||
this.smoothedZSamples = new Float32Array(Filter.nSmoothedSamples);
|
||||
},
|
||||
|
||||
startSound : function()
|
||||
{
|
||||
window.__JUCE__.backend.addEventListener("audioUpdated", doScriptProcessor);
|
||||
}
|
||||
}
|
||||
|
||||
var Filter =
|
||||
{
|
||||
lanczosTweak : 1.5,
|
||||
|
||||
init : function(bufferSize, a, steps)
|
||||
{
|
||||
this.bufferSize = bufferSize;
|
||||
this.a = a;
|
||||
this.steps = steps;
|
||||
this.radius = a * steps;
|
||||
this.nSmoothedSamples = this.bufferSize*this.steps + 1;
|
||||
this.allSamples = new Float32Array(2*this.bufferSize);
|
||||
|
||||
this.createLanczosKernel();
|
||||
},
|
||||
|
||||
|
||||
generateSmoothedSamples : function (oldSamples, samples, smoothedSamples)
|
||||
{
|
||||
var bufferSize = this.bufferSize;
|
||||
var allSamples = this.allSamples;
|
||||
var nSmoothedSamples = this.nSmoothedSamples;
|
||||
var a = this.a;
|
||||
var steps = this.steps;
|
||||
var K = this.K;
|
||||
|
||||
for (var i=0; i<bufferSize; i++)
|
||||
{
|
||||
allSamples[i] = oldSamples[i];
|
||||
allSamples[bufferSize+i] = samples[i];
|
||||
}
|
||||
|
||||
var pStart = bufferSize - 2*a;
|
||||
var pEnd = pStart + bufferSize;
|
||||
var i = 0;
|
||||
for (var position=pStart; position<pEnd; position++)
|
||||
{
|
||||
smoothedSamples[i] = allSamples[position];
|
||||
i += 1;
|
||||
for (var r=1; r<steps; r++)
|
||||
{
|
||||
var smoothedSample = 0;
|
||||
for (var s= -a+1; s<a; s++)
|
||||
{
|
||||
var sample = allSamples[position+s];
|
||||
var kernelPosition = -r+s*steps;
|
||||
if (kernelPosition<0) smoothedSample += sample * K[-kernelPosition];
|
||||
else smoothedSample += sample * K[kernelPosition];
|
||||
}
|
||||
smoothedSamples[i] = smoothedSample;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
smoothedSamples[nSmoothedSamples-1] = allSamples[2*bufferSize-2*a];
|
||||
},
|
||||
|
||||
createLanczosKernel : function ()
|
||||
{
|
||||
this.K = new Float32Array(this.radius);
|
||||
this.K[0] = 1;
|
||||
for (var i =1; i<this.radius; i++)
|
||||
{
|
||||
var piX = (Math.PI * i) / this.steps;
|
||||
var sinc = Math.sin(piX)/piX;
|
||||
var window = this.a * Math.sin(piX/this.a) / piX;
|
||||
this.K[i] = sinc*Math.pow(window, this.lanczosTweak);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var Render =
|
||||
{
|
||||
init : function()
|
||||
{
|
||||
this.canvas = document.getElementById("crtCanvas");
|
||||
this.onResize();
|
||||
window.onresize = this.onResize;
|
||||
window.gl = this.canvas.getContext("webgl", {preserveDrawingBuffer: true}, { alpha: false } );
|
||||
gl.viewport(0, 0, this.canvas.width, this.canvas.height);
|
||||
gl.enable(gl.BLEND);
|
||||
gl.blendEquation( gl.FUNC_ADD );
|
||||
gl.clearColor(0.0, 0.0, 0.0, 1.0);
|
||||
gl.clear(gl.COLOR_BUFFER_BIT);
|
||||
gl.colorMask(true, true, true, true);
|
||||
var ext1 = gl.getExtension('OES_texture_float');
|
||||
var ext2 = gl.getExtension('OES_texture_float_linear');
|
||||
//this.ext = gl.getExtension('OES_texture_half_float');
|
||||
//this.ext2 = gl.getExtension('OES_texture_half_float_linear');
|
||||
this.fadeAmount = 0.2*AudioSystem.bufferSize/512;
|
||||
|
||||
|
||||
this.fullScreenQuad = new Float32Array([
|
||||
-1, 1, 1, 1, 1,-1, // Triangle 1
|
||||
-1, 1, 1,-1, -1,-1 // Triangle 2
|
||||
]);
|
||||
|
||||
this.simpleShader = this.createShader("vertex","fragment");
|
||||
this.simpleShader.vertexPosition = gl.getAttribLocation(this.simpleShader, "vertexPosition");
|
||||
this.simpleShader.colour = gl.getUniformLocation(this.simpleShader, "colour");
|
||||
|
||||
this.lineShader = this.createShader("gaussianVertex","gaussianFragment");
|
||||
this.lineShader.aStart = gl.getAttribLocation(this.lineShader, "aStart");
|
||||
this.lineShader.aEnd = gl.getAttribLocation(this.lineShader, "aEnd");
|
||||
this.lineShader.aIdx = gl.getAttribLocation(this.lineShader, "aIdx");
|
||||
this.lineShader.uGain = gl.getUniformLocation(this.lineShader, "uGain");
|
||||
this.lineShader.uSize = gl.getUniformLocation(this.lineShader, "uSize");
|
||||
this.lineShader.uInvert = gl.getUniformLocation(this.lineShader, "uInvert");
|
||||
this.lineShader.uIntensity = gl.getUniformLocation(this.lineShader, "uIntensity");
|
||||
this.lineShader.uNEdges = gl.getUniformLocation(this.lineShader, "uNEdges");
|
||||
this.lineShader.uFadeAmount = gl.getUniformLocation(this.lineShader, "uFadeAmount");
|
||||
this.lineShader.uScreen = gl.getUniformLocation(this.lineShader, "uScreen");
|
||||
|
||||
this.outputShader = this.createShader("outputVertex","outputFragment");
|
||||
this.outputShader.aPos = gl.getAttribLocation(this.outputShader, "aPos");
|
||||
this.outputShader.uTexture0 = gl.getUniformLocation(this.outputShader, "uTexture0");
|
||||
this.outputShader.uTexture1 = gl.getUniformLocation(this.outputShader, "uTexture1");
|
||||
this.outputShader.uTexture2 = gl.getUniformLocation(this.outputShader, "uTexture2");
|
||||
this.outputShader.uTexture3 = gl.getUniformLocation(this.outputShader, "uTexture3");
|
||||
this.outputShader.uExposure = gl.getUniformLocation(this.outputShader, "uExposure");
|
||||
this.outputShader.uSaturation = gl.getUniformLocation(this.outputShader, "uSaturation");
|
||||
this.outputShader.uColour = gl.getUniformLocation(this.outputShader, "uColour");
|
||||
this.outputShader.uResizeForCanvas = gl.getUniformLocation(this.outputShader, "uResizeForCanvas");
|
||||
|
||||
this.texturedShader = this.createShader("texturedVertexWithResize","texturedFragment");
|
||||
this.texturedShader.aPos = gl.getAttribLocation(this.texturedShader, "aPos");
|
||||
this.texturedShader.uTexture0 = gl.getUniformLocation(this.texturedShader, "uTexture0");
|
||||
this.texturedShader.uResizeForCanvas = gl.getUniformLocation(this.texturedShader, "uResizeForCanvas");
|
||||
|
||||
this.blurShader = this.createShader("texturedVertex","blurFragment");
|
||||
this.blurShader.aPos = gl.getAttribLocation(this.blurShader, "aPos");
|
||||
this.blurShader.uTexture0 = gl.getUniformLocation(this.blurShader, "uTexture0");
|
||||
this.blurShader.uOffset = gl.getUniformLocation(this.blurShader, "uOffset");
|
||||
|
||||
this.vertexBuffer = gl.createBuffer();
|
||||
this.setupTextures();
|
||||
},
|
||||
|
||||
setupArrays : function(nPoints)
|
||||
{
|
||||
this.nPoints = nPoints;
|
||||
this.nEdges = this.nPoints-1;
|
||||
|
||||
this.quadIndexBuffer = gl.createBuffer();
|
||||
var indices = new Float32Array(4*this.nEdges);
|
||||
for (var i=0; i<indices.length; i++)
|
||||
{
|
||||
indices[i] = i;
|
||||
}
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, this.quadIndexBuffer);
|
||||
gl.bufferData(gl.ARRAY_BUFFER, indices, gl.STATIC_DRAW);
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, null);
|
||||
|
||||
this.vertexIndexBuffer = gl.createBuffer();
|
||||
var len = this.nEdges * 2 * 3,
|
||||
indices = new Uint16Array(len);
|
||||
for (var i = 0, pos = 0; i < len;)
|
||||
{
|
||||
indices[i++] = pos;
|
||||
indices[i++] = pos + 2;
|
||||
indices[i++] = pos + 1;
|
||||
indices[i++] = pos + 1;
|
||||
indices[i++] = pos + 2;
|
||||
indices[i++] = pos + 3;
|
||||
pos += 4;
|
||||
}
|
||||
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.vertexIndexBuffer);
|
||||
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
|
||||
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
|
||||
|
||||
|
||||
this.scratchVertices = new Float32Array(12*nPoints);
|
||||
},
|
||||
|
||||
setupTextures : function()
|
||||
{
|
||||
this.frameBuffer = gl.createFramebuffer();
|
||||
this.lineTexture = this.makeTexture(1024, 1024);
|
||||
this.onResize();
|
||||
this.blur1Texture = this.makeTexture(256, 256);
|
||||
this.blur2Texture = this.makeTexture(256, 256);
|
||||
this.blur3Texture = this.makeTexture(32, 32);
|
||||
this.blur4Texture = this.makeTexture(32, 32);
|
||||
this.screenTexture = Render.loadTexture('noise.jpg');
|
||||
},
|
||||
|
||||
onResize : function()
|
||||
{
|
||||
var windowWidth = Math.max(document.documentElement.clientWidth, window.innerWidth || 0)
|
||||
var windowHeight = Math.max(document.documentElement.clientHeight, window.innerHeight || 0)
|
||||
var canvasSize = Math.min(windowHeight, windowWidth) * window.devicePixelRatio;
|
||||
Render.canvas.width = canvasSize;
|
||||
Render.canvas.height = canvasSize;
|
||||
if (Render.lineTexture)
|
||||
{
|
||||
var renderSize = Math.min(Math.max(canvasSize, 128), 1024);
|
||||
Render.lineTexture.width = renderSize;
|
||||
Render.lineTexture.height = renderSize;
|
||||
//testOutputElement.value = windowHeight;
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
drawLineTexture: function (xPoints, yPoints, zPoints)
|
||||
{
|
||||
this.fadeAmount = Math.min(1, Math.pow(0.5, controls.persistence) * 0.4);
|
||||
this.activateTargetTexture(this.lineTexture);
|
||||
this.fade();
|
||||
//gl.clear(gl.COLOR_BUFFER_BIT);
|
||||
this.drawLine(xPoints, yPoints, zPoints);
|
||||
gl.bindTexture(gl.TEXTURE_2D, this.targetTexture);
|
||||
gl.generateMipmap(gl.TEXTURE_2D);
|
||||
},
|
||||
|
||||
drawCRT : function()
|
||||
{
|
||||
this.setNormalBlending();
|
||||
|
||||
this.activateTargetTexture(this.blur1Texture);
|
||||
this.setShader(this.texturedShader);
|
||||
gl.uniform1f(this.texturedShader.uResizeForCanvas, this.lineTexture.width/1024);
|
||||
this.drawTexture(this.lineTexture);
|
||||
|
||||
//horizontal blur 256x256
|
||||
this.activateTargetTexture(this.blur2Texture);
|
||||
this.setShader(this.blurShader);
|
||||
gl.uniform2fv(this.blurShader.uOffset, [1.0/256.0, 0.0]);
|
||||
this.drawTexture(this.blur1Texture);
|
||||
|
||||
//vertical blur 256x256
|
||||
this.activateTargetTexture(this.blur1Texture);
|
||||
//this.setShader(this.blurShader);
|
||||
gl.uniform2fv(this.blurShader.uOffset, [0.0, 1.0/256.0]);
|
||||
this.drawTexture(this.blur2Texture);
|
||||
|
||||
//preserve blur1 for later
|
||||
this.activateTargetTexture(this.blur3Texture);
|
||||
this.setShader(this.texturedShader);
|
||||
gl.uniform1f(this.texturedShader.uResizeForCanvas, 1.0);
|
||||
this.drawTexture(this.blur1Texture);
|
||||
|
||||
//horizontal blur 64x64
|
||||
this.activateTargetTexture(this.blur4Texture);
|
||||
this.setShader(this.blurShader);
|
||||
gl.uniform2fv(this.blurShader.uOffset, [1.0/32.0, 1.0/60.0]);
|
||||
this.drawTexture(this.blur3Texture);
|
||||
|
||||
//vertical blur 64x64
|
||||
this.activateTargetTexture(this.blur3Texture);
|
||||
//this.setShader(this.blurShader);
|
||||
gl.uniform2fv(this.blurShader.uOffset, [-1.0/60.0, 1.0/32.0]);
|
||||
this.drawTexture(this.blur4Texture);
|
||||
|
||||
this.activateTargetTexture(null);
|
||||
this.setShader(this.outputShader);
|
||||
var brightness = Math.pow(2, controls.brightness-2.0);
|
||||
//if (controls.disableFilter) brightness *= Filter.steps;
|
||||
gl.uniform1f(this.outputShader.uExposure, brightness);
|
||||
gl.uniform1f(this.outputShader.uSaturation, controls.saturation);
|
||||
gl.uniform1f(this.outputShader.uResizeForCanvas, this.lineTexture.width/1024);
|
||||
var colour = this.getColourFromHue(controls.hue);
|
||||
gl.uniform3fv(this.outputShader.uColour, colour);
|
||||
this.drawTexture(this.lineTexture, this.blur1Texture, this.blur3Texture, this.screenTexture);
|
||||
},
|
||||
|
||||
getColourFromHue : function(hue)
|
||||
{
|
||||
var alpha = (hue/120.0) % 1.0;
|
||||
var start = Math.sqrt(1.0-alpha);
|
||||
var end = Math.sqrt(alpha);
|
||||
var colour;
|
||||
if (hue<120) colour = [start, end, 0.0];
|
||||
else if (hue<240) colour = [0.0, start, end];
|
||||
else colour = [end, 0.0, start];
|
||||
return colour;
|
||||
},
|
||||
|
||||
activateTargetTexture : function(texture)
|
||||
{
|
||||
if (texture)
|
||||
{
|
||||
gl.bindFramebuffer(gl.FRAMEBUFFER, this.frameBuffer);
|
||||
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
|
||||
gl.viewport(0, 0, texture.width, texture.height);
|
||||
}
|
||||
else
|
||||
{
|
||||
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
|
||||
gl.viewport(0, 0, this.canvas.width, this.canvas.height);
|
||||
}
|
||||
this.targetTexture = texture;
|
||||
},
|
||||
|
||||
setShader : function(program)
|
||||
{
|
||||
this.program = program;
|
||||
gl.useProgram(program);
|
||||
},
|
||||
|
||||
drawTexture : function(texture0, texture1, texture2, texture3)
|
||||
{
|
||||
//gl.useProgram(this.program);
|
||||
gl.enableVertexAttribArray(this.program.aPos);
|
||||
|
||||
gl.activeTexture(gl.TEXTURE0);
|
||||
gl.bindTexture(gl.TEXTURE_2D, texture0);
|
||||
gl.uniform1i(this.program.uTexture0, 0);
|
||||
|
||||
if (texture1)
|
||||
{
|
||||
gl.activeTexture(gl.TEXTURE1);
|
||||
gl.bindTexture(gl.TEXTURE_2D, texture1);
|
||||
gl.uniform1i(this.program.uTexture1, 1);
|
||||
}
|
||||
|
||||
if (texture2)
|
||||
{
|
||||
gl.activeTexture(gl.TEXTURE2);
|
||||
gl.bindTexture(gl.TEXTURE_2D, texture2);
|
||||
gl.uniform1i(this.program.uTexture2, 2);
|
||||
}
|
||||
|
||||
if (texture3)
|
||||
{
|
||||
gl.activeTexture(gl.TEXTURE3);
|
||||
gl.bindTexture(gl.TEXTURE_2D, texture3);
|
||||
gl.uniform1i(this.program.uTexture3, 3);
|
||||
}
|
||||
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
|
||||
gl.bufferData(gl.ARRAY_BUFFER, this.fullScreenQuad, gl.STATIC_DRAW);
|
||||
gl.vertexAttribPointer(this.program.aPos, 2, gl.FLOAT, false, 0, 0);
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, null);
|
||||
|
||||
gl.drawArrays(gl.TRIANGLES, 0, 6);
|
||||
gl.disableVertexAttribArray(this.program.aPos);
|
||||
|
||||
if (this.targetTexture)
|
||||
{
|
||||
gl.bindTexture(gl.TEXTURE_2D, this.targetTexture);
|
||||
gl.generateMipmap(gl.TEXTURE_2D);
|
||||
}
|
||||
},
|
||||
|
||||
drawLine : function(xPoints, yPoints, zPoints)
|
||||
{
|
||||
this.setAdditiveBlending();
|
||||
|
||||
var scratchVertices = this.scratchVertices;
|
||||
//this.totalLength = 0;
|
||||
var nPoints = xPoints.length;
|
||||
for (var i=0; i<nPoints; i++)
|
||||
{
|
||||
var p = i * 12;
|
||||
scratchVertices[p] = scratchVertices[p + 3] = scratchVertices[p + 6] = scratchVertices[p + 9] = xPoints[i];
|
||||
scratchVertices[p + 1] = scratchVertices[p + 4] = scratchVertices[p + 7] = scratchVertices[p + 10] = yPoints[i];
|
||||
scratchVertices[p + 2] = scratchVertices[p + 5] = scratchVertices[p + 8] = scratchVertices[p + 11] = zPoints[i];
|
||||
}
|
||||
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
|
||||
gl.bufferData(gl.ARRAY_BUFFER, scratchVertices, gl.STATIC_DRAW);
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, null);
|
||||
|
||||
var program = this.lineShader;
|
||||
gl.useProgram(program);
|
||||
gl.enableVertexAttribArray(program.aStart);
|
||||
gl.enableVertexAttribArray(program.aEnd);
|
||||
gl.enableVertexAttribArray(program.aIdx);
|
||||
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
|
||||
gl.vertexAttribPointer(program.aStart, 3, gl.FLOAT, false, 0, 0);
|
||||
gl.vertexAttribPointer(program.aEnd, 3, gl.FLOAT, false, 0, 12*4);
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, this.quadIndexBuffer);
|
||||
gl.vertexAttribPointer(program.aIdx, 1, gl.FLOAT, false, 0, 0);
|
||||
|
||||
gl.activeTexture(gl.TEXTURE0);
|
||||
gl.bindTexture(gl.TEXTURE_2D, this.screenTexture);
|
||||
gl.uniform1i(program.uScreen, 0);
|
||||
|
||||
gl.uniform1f(program.uSize, controls.focus);
|
||||
gl.uniform1f(program.uGain, Math.pow(2.0,controls.mainGain)*450/512);
|
||||
if (controls.invertXY) gl.uniform1f(program.uInvert, -1.0);
|
||||
else gl.uniform1f(program.uInvert, 1.0);
|
||||
|
||||
var intensity = controls.intensity * (41000 / externalSampleRate);
|
||||
|
||||
if (controls.disableFilter) gl.uniform1f(program.uIntensity, intensity *(Filter.steps+1.5));
|
||||
// +1.5 needed above for some reason for the brightness to match
|
||||
else gl.uniform1f(program.uIntensity, intensity);
|
||||
gl.uniform1f(program.uFadeAmount, this.fadeAmount);
|
||||
gl.uniform1f(program.uNEdges, this.nEdges);
|
||||
|
||||
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.vertexIndexBuffer);
|
||||
var nEdgesThisTime = (xPoints.length-1);
|
||||
|
||||
/*if (this.totalLength > 300)
|
||||
{
|
||||
nEdgesThisTime *= 300/this.totalLength;
|
||||
nEdgesThisTime = Math.floor(nEdgesThisTime);
|
||||
}*/
|
||||
|
||||
gl.drawElements(gl.TRIANGLES, nEdgesThisTime * 6, gl.UNSIGNED_SHORT, 0);
|
||||
|
||||
gl.disableVertexAttribArray(program.aStart);
|
||||
gl.disableVertexAttribArray(program.aEnd);
|
||||
gl.disableVertexAttribArray(program.aIdx);
|
||||
},
|
||||
|
||||
fade : function(alpha)
|
||||
{
|
||||
this.setNormalBlending();
|
||||
|
||||
var program = this.simpleShader;
|
||||
gl.useProgram(program);
|
||||
gl.enableVertexAttribArray(program.vertexPosition);
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
|
||||
gl.bufferData(gl.ARRAY_BUFFER, this.fullScreenQuad, gl.STATIC_DRAW);
|
||||
gl.vertexAttribPointer(program.vertexPosition, 2, gl.FLOAT, false, 0, 0);
|
||||
gl.bindBuffer(gl.ARRAY_BUFFER, null);
|
||||
gl.uniform4fv(program.colour, [0.0, 0.0, 0.0, this.fadeAmount]);
|
||||
gl.drawArrays(gl.TRIANGLES, 0, 6);
|
||||
gl.disableVertexAttribArray(program.vertexPosition);
|
||||
},
|
||||
|
||||
loadTexture : function(src)
|
||||
{
|
||||
var texture = gl.createTexture();
|
||||
gl.bindTexture(gl.TEXTURE_2D, texture);
|
||||
// Fill with grey pixel, as placeholder until loaded
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
|
||||
new Uint8Array([128, 128, 128, 255]));
|
||||
// Asynchronously load an image
|
||||
var image = new Image();
|
||||
image.crossOrigin = "anonymous";
|
||||
image.src = src;
|
||||
image.addEventListener('load', function()
|
||||
{
|
||||
// Now that the image has loaded make copy it to the texture.
|
||||
gl.bindTexture(gl.TEXTURE_2D, texture);
|
||||
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
|
||||
//gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
|
||||
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
|
||||
gl.generateMipmap(gl.TEXTURE_2D);
|
||||
//hardcoded:
|
||||
texture.width = texture.height = 512;
|
||||
if (controls.grid) Render.drawGrid(texture);
|
||||
});
|
||||
return texture;
|
||||
},
|
||||
|
||||
    drawGrid : function(texture)
    {
        // activateTargetTexture expects (ctx, texture), so pass the Render object as ctx
        this.activateTargetTexture(this, texture);
        this.setNormalBlending();
        this.setShader(this.simpleShader);
        gl.colorMask(true, false, false, true);

        var data = [];
        var step = 45;

        for (var i=0; i<11; i++)
        {
            var s = i*step;
            data.splice(0,0, 0, s, 10*step, s);
            data.splice(0,0, s, 0, s, 10*step);
            if (i!=0 && i!=10)
            {
                for (var j=0; j<51; j++)
                {
                    var t = j*step/5;
                    if (i!=5)
                    {
                        data.splice(0,0, t, s-2, t, s+1);
                        data.splice(0,0, s-2, t, s+1, t);
                    }
                    else
                    {
                        data.splice(0,0, t, s-5, t, s+4);
                        data.splice(0,0, s-5, t, s+4, t);
                    }
                }
            }
        }

        for (var j=0; j<51; j++)
        {
            var t = j*step/5;
            if (t%5 == 0) continue;
            data.splice(0,0, t-2, 2.5*step, t+2, 2.5*step);
            data.splice(0,0, t-2, 7.5*step, t+2, 7.5*step);
        }

        var vertices = new Float32Array(data);
        for (var i=0; i<data.length; i++)
        {
            vertices[i] = (vertices[i]+31)/256 - 1;
        }

        gl.enableVertexAttribArray(this.program.vertexPosition);
        gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
        gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
        gl.vertexAttribPointer(this.program.vertexPosition, 2, gl.FLOAT, false, 0, 0);
        gl.bindBuffer(gl.ARRAY_BUFFER, null);
        gl.uniform4fv(this.program.colour, [0.01, 0.1, 0.01, 1.0]);

        gl.lineWidth(1.0);
        gl.drawArrays(gl.LINES, 0, vertices.length/2);

        gl.bindTexture(gl.TEXTURE_2D, this.targetTexture);
        gl.generateMipmap(gl.TEXTURE_2D);
        gl.colorMask(true, true, true, true);
    },

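    // Illustrative note on the coordinate mapping used in drawGrid above: the graticule is
    // built in 0..450 "grid pixel" coordinates (10 divisions of 45 px each) and converted to
    // clip space with (v + 31) / 256 - 1, which centres the 450 px grid in the 512 px screen
    // texture with a 31 px margin on each side. For example, grid pixel 0 maps to -0.879 and
    // grid pixel 450 maps to +0.879 in clip coordinates.
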
    makeTexture : function(width, height)
    {
        var texture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, null);
        //gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, Render.ext.HALF_FLOAT_OES, null);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        //gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gl.generateMipmap(gl.TEXTURE_2D);
        gl.bindTexture(gl.TEXTURE_2D, null);
        texture.width = width;
        texture.height = height;
        return texture;
    },

    activateTargetTexture : function(ctx, texture)
    {
        gl.bindRenderbuffer(gl.RENDERBUFFER, ctx.renderBuffer);
        gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, ctx.frameBuffer.width, ctx.frameBuffer.height);

        gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
        gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, ctx.renderBuffer);

        gl.bindTexture(gl.TEXTURE_2D, null);
        gl.bindRenderbuffer(gl.RENDERBUFFER, null);
    },

    setAdditiveBlending : function()
    {
        //gl.blendEquation( gl.FUNC_ADD );
        gl.blendFunc(gl.ONE, gl.ONE);
    },

    setNormalBlending : function()
    {
        //gl.blendEquation( gl.FUNC_ADD );
        gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
    },

    createShader : function(vsTag, fsTag)
    {
        if (!this.supportsWebGl())
        {
            throw new Error('createShader: no WebGL context');
        }

        var vsSource = document.getElementById(vsTag).firstChild.nodeValue;
        var fsSource = document.getElementById(fsTag).firstChild.nodeValue;

        var vs = gl.createShader(gl.VERTEX_SHADER);
        gl.shaderSource(vs, vsSource);
        gl.compileShader(vs);
        if (!gl.getShaderParameter(vs, gl.COMPILE_STATUS))
        {
            var infoLog = gl.getShaderInfoLog(vs);
            gl.deleteShader(vs);
            throw new Error('createShader, vertex shader compilation:\n' + infoLog);
        }

        var fs = gl.createShader(gl.FRAGMENT_SHADER);
        gl.shaderSource(fs, fsSource);
        gl.compileShader(fs);
        if (!gl.getShaderParameter(fs, gl.COMPILE_STATUS))
        {
            var infoLog = gl.getShaderInfoLog(fs);
            gl.deleteShader(vs);
            gl.deleteShader(fs);
            throw new Error('createShader, fragment shader compilation:\n' + infoLog);
        }

        var program = gl.createProgram();

        gl.attachShader(program, vs);
        gl.deleteShader(vs);

        gl.attachShader(program, fs);
        gl.deleteShader(fs);

        gl.linkProgram(program);

        if (!gl.getProgramParameter(program, gl.LINK_STATUS))
        {
            var infoLog = gl.getProgramInfoLog(program);
            gl.deleteProgram(program);
            throw new Error('createShader, linking:\n' + infoLog);
        }

        return program;
    },

    supportsWebGl : function()
    {
        // from https://github.com/Modernizr/Modernizr/blob/master/feature-detects/webgl.js
        var canvas = document.createElement('canvas'),
            supports = 'probablySupportsContext' in canvas ? 'probablySupportsContext' : 'supportsContext';
        if (supports in canvas)
        {
            return canvas[supports]('webgl') || canvas[supports]('experimental-webgl');
        }
        return 'WebGLRenderingContext' in window;
    }
}

var sweepPosition = -1;
var belowTrigger = false;

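// Illustrative sketch of the sweep logic used in doScriptProcessor below (a standalone
// helper; the name and parameters are hypothetical, not part of the osci-render API): when
// sweep mode is on, the X samples are replaced by a ramp driven by sweepPosition, and
// belowTrigger implements a rising-edge trigger on the Y channel that re-arms the ramp once
// it has run off screen.
function nextSweepX(ySample, gain, secondsPerSweep, timePerSample, triggerValue)
{
    var x = sweepPosition / gain;
    // Advance the ramp so it covers the full 2-unit clip range in secondsPerSweep seconds
    sweepPosition += 2 * timePerSample / secondsPerSweep;
    // Restart only after the ramp has passed the right-hand edge AND the Y signal crosses
    // the trigger level from below (a rising edge)
    if (sweepPosition > 1.1 && belowTrigger && ySample >= triggerValue)
        sweepPosition = -1.3;
    belowTrigger = ySample < triggerValue;
    return x;
}
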
function doScriptProcessor(bufferBase64) {
    var req = new XMLHttpRequest();
    req.open('GET', "data:application/octet-stream;base64," + bufferBase64);
    req.responseType = 'arraybuffer';
    req.onload = function fileLoaded(e) {
        Juce.getNativeFunction("getSettings")().then(settings => {
            Render.canvas.toBlob(blob => {
                var reader = new FileReader();
                reader.readAsDataURL(blob);
                reader.onloadend = function() {
                    var dataUrl = reader.result;
                    var base64 = dataUrl.split(',')[1];
                    sendVideoDataCallback(base64);
                };

                controls.brightness = settings.brightness;
                controls.intensity = settings.intensity;
                controls.persistence = settings.persistence;
                controls.saturation = settings.saturation;
                controls.focus = settings.focus;
                controls.hue = settings.hue;
                controls.disableFilter = !settings.upsampling;
                let numChannels = settings.numChannels;

                if (controls.grid !== settings.graticule) {
                    controls.grid = settings.graticule;
                    const image = controls.noise ? 'noise.jpg' : 'empty.jpg';
                    Render.screenTexture = Render.loadTexture(image);
                }

                if (controls.noise !== settings.smudges) {
                    controls.noise = settings.smudges;
                    const image = controls.noise ? 'noise.jpg' : 'empty.jpg';
                    Render.screenTexture = Render.loadTexture(image);
                }

                var dataView = new DataView(e.target.response);

                // Samples arrive interleaved: x, y and (optionally) z as little-endian 32-bit floats
                const stride = 4 * numChannels;
                for (var i = 0; i < xSamples.length; i++) {
                    xSamples[i] = dataView.getFloat32(i * stride, true);
                    ySamples[i] = dataView.getFloat32(i * stride + 4, true);
                    if (numChannels === 3) {
                        zSamples[i] = dataView.getFloat32(i * stride + 8, true);
                    } else {
                        zSamples[i] = 1;
                    }
                }

                if (controls.sweepOn) {
                    var gain = Math.pow(2.0, controls.mainGain);
                    var sweepMinTime = controls.sweepMsDiv * 10 / 1000;
                    var triggerValue = controls.sweepTriggerValue;
                    for (var i = 0; i < xSamples.length; i++) {
                        xSamples[i] = sweepPosition / gain;
                        sweepPosition += 2 * AudioSystem.timePerSample / sweepMinTime;
                        if (sweepPosition > 1.1 && belowTrigger && ySamples[i] >= triggerValue)
                            sweepPosition = -1.3;
                        belowTrigger = ySamples[i] < triggerValue;
                    }
                }

                if (!controls.freezeImage) {
                    if (!controls.disableFilter) {
                        Filter.generateSmoothedSamples(AudioSystem.oldXSamples, xSamples, AudioSystem.smoothedXSamples);
                        Filter.generateSmoothedSamples(AudioSystem.oldYSamples, ySamples, AudioSystem.smoothedYSamples);
                        if (numChannels === 3) {
                            Filter.generateSmoothedSamples(AudioSystem.oldZSamples, zSamples, AudioSystem.smoothedZSamples);
                        } else {
                            AudioSystem.smoothedZSamples.fill(1);
                        }

                        if (!controls.swapXY) Render.drawLineTexture(AudioSystem.smoothedXSamples, AudioSystem.smoothedYSamples, AudioSystem.smoothedZSamples);
                        else Render.drawLineTexture(AudioSystem.smoothedYSamples, AudioSystem.smoothedXSamples, AudioSystem.smoothedZSamples);
                    }
                    else {
                        if (!controls.swapXY) Render.drawLineTexture(xSamples, ySamples, zSamples);
                        else Render.drawLineTexture(ySamples, xSamples, zSamples);
                    }
                }

                for (var i = 0; i < xSamples.length; i++) {
                    AudioSystem.oldXSamples[i] = xSamples[i];
                    AudioSystem.oldYSamples[i] = ySamples[i];
                    AudioSystem.oldZSamples[i] = zSamples[i];
                }

                requestAnimationFrame(drawCRTFrame);
            });
        });
    };
    req.send();
}

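// Illustrative sketch (hypothetical helper, not part of the original file): the base64
// payload handled in doScriptProcessor above decodes to interleaved little-endian float32
// frames -- [x, y] when numChannels is 2 and [x, y, z] when it is 3. The same unpacking as
// a standalone function:
function decodeInterleavedFrames(arrayBuffer, numChannels)
{
    var view = new DataView(arrayBuffer);
    var frameCount = Math.floor(arrayBuffer.byteLength / (4 * numChannels));
    var x = new Float32Array(frameCount);
    var y = new Float32Array(frameCount);
    var z = new Float32Array(frameCount);
    for (var i = 0; i < frameCount; i++)
    {
        var offset = i * 4 * numChannels;
        x[i] = view.getFloat32(offset, true);          // true = little-endian
        y[i] = view.getFloat32(offset + 4, true);
        // With only two channels, brightness (z) defaults to full
        z[i] = numChannels === 3 ? view.getFloat32(offset + 8, true) : 1;
    }
    return { x: x, y: y, z: z };
}
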
function drawCRTFrame(timeStamp) {
    Render.drawCRT();
}

var xSamples = new Float32Array(externalBufferSize);
var ySamples = new Float32Array(externalBufferSize);
var zSamples = new Float32Array(externalBufferSize);

Juce.getNativeFunction("bufferSize")().then(bufferSize => {
|
||||
externalBufferSize = bufferSize;
|
||||
Juce.getNativeFunction("sampleRate")().then(sampleRate => {
|
||||
externalSampleRate = sampleRate;
|
||||
xSamples = new Float32Array(externalBufferSize);
|
||||
ySamples = new Float32Array(externalBufferSize);
|
||||
zSamples = new Float32Array(externalBufferSize);
|
||||
Render.init();
|
||||
Filter.init(externalBufferSize, 8, 6);
|
||||
AudioSystem.init(externalBufferSize);
|
||||
Render.setupArrays(Filter.nSmoothedSamples);
|
||||
AudioSystem.startSound();
|
||||
requestAnimationFrame(drawCRTFrame);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@@ -26,17 +26,8 @@
      <FILE id="LbviBq" name="cube.obj" compile="0" resource="1" file="Resources/models/cube.obj"/>
    </GROUP>
    <GROUP id="{F3C16D02-63B4-E3DA-7498-901173C37D6C}" name="oscilloscope">
      <GROUP id="{DD5ACF8F-4E4F-E277-176A-5FD4A9717037}" name="juce">
        <FILE id="FCfppP" name="check_native_interop.js" compile="0" resource="1"
              file="Resources/oscilloscope/juce/check_native_interop.js"/>
        <FILE id="ZEUE5w" name="index.js" compile="0" resource="1" file="Resources/oscilloscope/juce/index.js"/>
        <FILE id="hWk293" name="package.json" compile="0" resource="1" file="Resources/oscilloscope/juce/package.json"/>
      </GROUP>
      <FILE id="qpPhpN" name="empty.jpg" compile="0" resource="1" file="Resources/oscilloscope/empty.jpg"/>
      <FILE id="dNtZYs" name="noise.jpg" compile="0" resource="1" file="Resources/oscilloscope/noise.jpg"/>
      <FILE id="YPMnjq" name="oscilloscope.html" compile="0" resource="1"
            file="Resources/oscilloscope/oscilloscope.html"/>
      <FILE id="BeXHj7" name="oscilloscope.js" compile="0" resource="1" file="Resources/oscilloscope/oscilloscope.js"/>
    </GROUP>
    <GROUP id="{82BCD6F1-A8BF-F30B-5587-81EE70168883}" name="svg">
      <FILE id="rl17ZK" name="cog.svg" compile="0" resource="1" file="Resources/svg/cog.svg"/>

@@ -16,17 +16,8 @@
      <FILE id="jI9VSZ" name="logo.png" compile="0" resource="1" file="Resources/images/logo.png"/>
    </GROUP>
    <GROUP id="{F3C16D02-63B4-E3DA-7498-901173C37D6C}" name="oscilloscope">
      <GROUP id="{DD5ACF8F-4E4F-E277-176A-5FD4A9717037}" name="juce">
        <FILE id="FCfppP" name="check_native_interop.js" compile="0" resource="1"
              file="Resources/oscilloscope/juce/check_native_interop.js"/>
        <FILE id="ZEUE5w" name="index.js" compile="0" resource="1" file="Resources/oscilloscope/juce/index.js"/>
        <FILE id="hWk293" name="package.json" compile="0" resource="1" file="Resources/oscilloscope/juce/package.json"/>
      </GROUP>
      <FILE id="qpPhpN" name="empty.jpg" compile="0" resource="1" file="Resources/oscilloscope/empty.jpg"/>
      <FILE id="dNtZYs" name="noise.jpg" compile="0" resource="1" file="Resources/oscilloscope/noise.jpg"/>
      <FILE id="YPMnjq" name="oscilloscope.html" compile="0" resource="1"
            file="Resources/oscilloscope/oscilloscope.html"/>
      <FILE id="BeXHj7" name="oscilloscope.js" compile="0" resource="1" file="Resources/oscilloscope/oscilloscope.js"/>
    </GROUP>
    <GROUP id="{82BCD6F1-A8BF-F30B-5587-81EE70168883}" name="svg">
      <FILE id="rl17ZK" name="cog.svg" compile="0" resource="1" file="Resources/svg/cog.svg"/>