diff --git a/HISTORY.md b/HISTORY.md
index 71a2d5eb..60f08015 100755
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -9,7 +9,7 @@
* parse JSON using the SPLIT reporter
* new "aspect AT location" reporter in Sensing category for sniffing colors and sprites
* new blocks for setting and changing the stage's background color
- * new "microphone" reporter in Sensing for getting volume, signals and frequencies
+ * new "microphone" reporter in Sensing for getting volume, note, pitch, signals and frequencies
* new "object" reporter in the Sensing category for getting a sprite by its name
* blocks for changing and querying the "flat line ends" setting
* selectors for changing and querying "draggable" and "rotation style" settings
@@ -47,6 +47,9 @@
* Catalan, thanks, Joan!
* German
+### 2019-03-10
+* Objects, Blocks, Threads: added microphone note and pitch detection
+
### 2019-03-07
* AudioComp lib: added block to set the microphone's buffer and fft sizes
* German translation update (microphone features)
diff --git a/snap.html b/snap.html
index 3b8de062..4663a8e3 100755
--- a/snap.html
+++ b/snap.html
@@ -6,9 +6,9 @@
-
-
-
+
+
+
diff --git a/src/blocks.js b/src/blocks.js
index a696841a..9f122ff6 100644
--- a/src/blocks.js
+++ b/src/blocks.js
@@ -148,7 +148,7 @@ CustomCommandBlockMorph, SymbolMorph, ToggleButtonMorph, DialMorph*/
// Global stuff ////////////////////////////////////////////////////////
-modules.blocks = '2019-March-06';
+modules.blocks = '2019-March-10';
var SyntaxElementMorph;
var BlockMorph;
@@ -989,6 +989,8 @@ SyntaxElementMorph.prototype.labelPart = function (spec) {
false, // numeric?
{
'volume' : ['volume'],
+ 'note' : ['note'],
+ 'pitch' : ['pitch'],
'signals' : ['signals'],
'frequencies' : ['frequencies']
},
diff --git a/src/objects.js b/src/objects.js
index d7d553c7..d7cf2c87 100644
--- a/src/objects.js
+++ b/src/objects.js
@@ -84,7 +84,7 @@ BlockEditorMorph, BlockDialogMorph, PrototypeHatBlockMorph, localize,
TableMorph, TableFrameMorph, normalizeCanvas, BooleanSlotMorph, HandleMorph,
AlignmentMorph, Process, XML_Element, VectorPaintEditorMorph*/
-modules.objects = '2019-March-07';
+modules.objects = '2019-March-10';
var SpriteMorph;
var StageMorph;
@@ -8857,13 +8857,12 @@ Note.prototype.stop = function () {
// Microphone /////////////////////////////////////////////////////////
-// I am a microphone and know about volume, signals and frequencies
+// I am a microphone and know about volume, note, pitch, as well as
+// signals and frequencies.
// mostly meant to be a singleton of the stage
// I stop when I'm not queried something for 5 seconds
// to free up system resources
-// Microphone instance creation
-
function Microphone() {
// web audio components:
this.audioContext = null;
@@ -8872,13 +8871,20 @@ function Microphone() {
this.analyser = null;
// parameters:
- this.signalBufferSize = 512;
- this.fftSize = 1024;
+ this.signalBufferSize = 512; // should probably be 1024 by default
+ this.fftSize = 1024; // should probably be 2048 by default
+ this.MIN_SAMPLES = 0; // will be initialized when AudioContext is created.
+ this.GOOD_ENOUGH_CORRELATION = 0.9;
+
+ // buffers:
+ this.freqBuffer = null; // will be initialized to Uint8Array
+ this.pitchBuffer = null; // will be initialized to Float32Array
// metered values:
this.volume = 0;
this.signals = [];
this.frequencies = [];
+ this.pitch = -1;
// asynch control:
this.isStarted = false;
@@ -8930,6 +8936,21 @@ Microphone.prototype.start = function () {
}).catch(nop);
};
+Microphone.prototype.stop = function () {
+ this.processor.onaudioprocess = null;
+ this.sourceStream.getTracks().forEach(function (track) {
+ track.stop();}
+ );
+ this.processor.disconnect();
+ this.analyser.disconnect();
+ this.audioContext.close();
+ this.processor = null;
+ this.analyser = null;
+ this.audioContext = null;
+ this.isReady = false;
+ this.isStarted = false;
+};
+
Microphone.prototype.setupNodes = function (stream) {
this.sourceStream = stream;
this.createProcessor();
@@ -8941,8 +8962,12 @@ Microphone.prototype.setupNodes = function (stream) {
};
Microphone.prototype.createAnalyser = function () {
+ var freqBufLength;
this.analyser = this.audioContext.createAnalyser();
this.analyser.fftSize = this.fftSize;
+ freqBufLength = this.analyser.frequencyBinCount;
+ this.freqBuffer = new Uint8Array(freqBufLength);
+ this.pitchBuffer = new Float32Array(freqBufLength);
};
Microphone.prototype.createProcessor = function () {
@@ -8966,18 +8991,21 @@ Microphone.prototype.stepAudio = function (event) {
var buf = event.inputBuffer.getChannelData(0),
bufLength = buf.length,
sum = 0,
- x, i, rms,
- freqBufLength = this.analyser.frequencyBinCount,
- dataArray = new Uint8Array(freqBufLength);
+ x, i, rms;
if (this.isAutoStop && ((Date.now() - this.lastTime) > 5000)) {
this.stop();
return;
}
+ // signals:
this.signals = buf;
- this.analyser.getByteFrequencyData(dataArray);
- this.frequencies = dataArray;
+
+ // frequency bins:
+ this.analyser.getByteFrequencyData(this.freqBuffer);
+ this.frequencies = this.freqBuffer;
+
+ // volume:
for (i = 0; i < bufLength; i += 1) {
x = buf[i];
if (Math.abs(x) >= this.processor.clipLevel) {
@@ -8989,23 +9017,83 @@ Microphone.prototype.stepAudio = function (event) {
rms = Math.sqrt(sum / bufLength);
this.volume = Math.max(rms, this.volume * this.processor.averaging);
+ // pitch:
+ this.analyser.getFloatTimeDomainData(this.pitchBuffer);
+ this.pitch = this.detectPitch(
+ this.pitchBuffer,
+ this.audioContext.sampleRate
+ );
+
+ // note:
+ if (this.pitch > 0) {
+ this.note = Math.round(
+ 12 * (Math.log(this.pitch / 440) / Math.log(2))
+ ) + 69;
+ } else {
+ this.note = -1;
+ }
+
this.isReady = true;
this.isStarted = false;
};
-Microphone.prototype.stop = function () {
- this.processor.onaudioprocess = null;
- this.sourceStream.getTracks().forEach(function (track) {
- track.stop();}
- );
- this.processor.disconnect();
- this.analyser.disconnect();
- this.audioContext.close();
- this.processor = null;
- this.analyser = null;
- this.audioContext = null;
- this.isReady = false;
- this.isStarted = false;
+Microphone.prototype.detectPitch = function (buf, sampleRate) {
+ // https://en.wikipedia.org/wiki/Autocorrelation
+ // thanks to Chris Wilson:
+ // https://plus.google.com/+ChrisWilson/posts/9zHsF9PCDAL
+ // https://github.com/cwilso/PitchDetect/
+
+ var SIZE = buf.length,
+ MAX_SAMPLES = Math.floor(SIZE/2),
+ best_offset = -1,
+ best_correlation = 0,
+ rms = 0,
+ foundGoodCorrelation = false,
+ correlations = new Array(MAX_SAMPLES),
+ correlation,
+ lastCorrelation,
+ offset,
+ shift,
+ i,
+ val;
+
+ for (i = 0; i < SIZE; i += 1) {
+ val = buf[i];
+ rms += val * val;
+ }
+ rms = Math.sqrt(rms/SIZE);
+ if (rms < 0.01)
+ return -1;
+
+ lastCorrelation = 1;
+ for (offset = this.MIN_SAMPLES; offset < MAX_SAMPLES; offset += 1) {
+ correlation = 0;
+
+ for (i = 0; i < MAX_SAMPLES; i += 1) {
+ correlation += Math.abs((buf[i]) - (buf[i + offset]));
+ }
+ correlation = 1 - (correlation/MAX_SAMPLES);
+ correlations[offset] = correlation;
+ if ((correlation > this.GOOD_ENOUGH_CORRELATION)
+ && (correlation > lastCorrelation)
+ ) {
+ foundGoodCorrelation = true;
+ if (correlation > best_correlation) {
+ best_correlation = correlation;
+ best_offset = offset;
+ }
+ } else if (foundGoodCorrelation) {
+ shift = (correlations[best_offset + 1] -
+ correlations[best_offset - 1]) /
+ correlations[best_offset];
+ return sampleRate / (best_offset + (8 * shift));
+ }
+ lastCorrelation = correlation;
+ }
+ if (best_correlation > 0.01) {
+ return sampleRate / best_offset;
+ }
+ return -1;
};
// CellMorph //////////////////////////////////////////////////////////
diff --git a/src/threads.js b/src/threads.js
index cf042ed4..f381c5b1 100644
--- a/src/threads.js
+++ b/src/threads.js
@@ -62,7 +62,7 @@ StageMorph, SpriteMorph, StagePrompterMorph, Note, modules, isString, copy,
isNil, WatcherMorph, List, ListWatcherMorph, alert, console, TableMorph, Color,
TableFrameMorph, ColorSlotMorph, isSnapObject, Map*/
-modules.threads = '2019-March-06';
+modules.threads = '2019-March-10';
var ThreadManager;
var Process;
@@ -2251,12 +2251,16 @@ Process.prototype.doStopAllSounds = function () {
// Process audio input (interpolated)
-Process.prototype.reportAudio = function (choice) { // +++
+Process.prototype.reportAudio = function (choice) {
var stage = this.blockReceiver().parentThatIsA(StageMorph);
if (stage.microphone.isOn()) {
switch (this.inputOption(choice)) {
case 'volume':
return stage.microphone.volume;
+ case 'pitch':
+ return stage.microphone.pitch;
+ case 'note':
+ return stage.microphone.note;
case 'signals':
return new List(stage.microphone.signals);
case 'frequencies':