Merge pull request #162 from nyanpasu64/pitch-invariant-trigger

Add pitch-tracking trigger, set trigger_diameter=None (improves bass)
nyanpasu64 2019-03-03 00:00:13 -08:00 committed by GitHub
commit bf4a13b329
7 changed files with 441 additions and 56 deletions

View file

@ -20,6 +20,7 @@ from corrscope.triggers import (
CorrelationTriggerConfig,
PerFrameCache,
CorrelationTrigger,
SpectrumConfig,
)
from corrscope.util import pushd, coalesce
from corrscope.wave import Wave, Flatten
@ -85,7 +86,7 @@ class Config(
trigger_stereo: Flatten = Flatten.SumAvg
render_stereo: Flatten = Flatten.SumAvg
trigger: ITriggerConfig # Can be overriden per Wave
trigger: CorrelationTriggerConfig # Can be overriden per Wave
# Multiplies by trigger_width, render_width. Can override trigger.
channels: List[ChannelConfig]
@ -118,6 +119,7 @@ def default_config(**kwargs) -> Config:
responsiveness=0.5,
buffer_falloff=0.5,
use_edge_trigger=False,
pitch_tracking=SpectrumConfig()
# Removed due to speed hit.
# post=LocalPostTriggerConfig(strength=0.1),
),
@ -263,13 +265,9 @@ class CorrScope:
self.renderer.render_frame(datas)
self.output.write_frame(self.renderer.get_frame())
extra_outputs.window = None
if "window" in internals:
extra_outputs.window = RenderOutput()
extra_outputs.buffer = None
if "buffer" in internals:
extra_outputs.buffer = RenderOutput()
extra_outputs.window = RenderOutput() if "window" in internals else None
extra_outputs.buffer = RenderOutput() if "buffer" in internals else None
extra_outputs.spectrum = RenderOutput() if "spectrum" in internals else None
# endregion
if PRINT_TIMESTAMP:
@ -331,17 +329,22 @@ class CorrScope:
continue
# region Display buffers, for debugging purposes.
triggers = cast(List[CorrelationTrigger], self.triggers)
if extra_outputs.window:
triggers = cast(List[CorrelationTrigger], self.triggers)
extra_outputs.window.render_frame(
[trigger._prev_window for trigger in triggers]
)
if extra_outputs.buffer:
triggers = cast(List[CorrelationTrigger], self.triggers)
extra_outputs.buffer.render_frame(
[trigger._buffer for trigger in triggers]
)
if extra_outputs.spectrum:
extra_outputs.spectrum.render_frame(
[trigger._spectrum - 0.99 for trigger in triggers]
)
# endregion
if not_benchmarking or benchmark_mode >= BenchmarkMode.RENDER:

View file

@ -38,7 +38,7 @@ from corrscope.gui.util import color2hex, Locked, find_ranges, TracebackDialog
from corrscope.layout import Orientation, StereoOrientation
from corrscope.outputs import IOutputConfig, FFplayOutputConfig, FFmpegOutputConfig
from corrscope.settings import paths
from corrscope.triggers import CorrelationTriggerConfig, ITriggerConfig
from corrscope.triggers import CorrelationTriggerConfig, ITriggerConfig, SpectrumConfig
from corrscope.util import obj_name
from corrscope.wave import Flatten
@ -676,6 +676,17 @@ class ConfigModel(PresentationModel):
render__line_width = default_property("render__line_width", 1.5)
@property
def trigger__pitch_tracking(self) -> bool:
scfg = self.cfg.trigger.pitch_tracking
gui = scfg is not None
return gui
@trigger__pitch_tracking.setter
def trigger__pitch_tracking(self, gui: bool):
scfg = SpectrumConfig() if gui else None
self.cfg.trigger.pitch_tracking = scfg
# End ConfigModel
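The property above maps the GUI checkbox onto the Optional[SpectrumConfig] field: checked means a default SpectrumConfig, unchecked means None. A minimal, GUI-free sketch of that mapping (DummyTriggerCfg is a hypothetical stand-in for the real trigger config, not corrscope code):

# Sketch of the checkbox <-> Optional[SpectrumConfig] mapping used by
# trigger__pitch_tracking. Only SpectrumConfig comes from corrscope.
from typing import Optional
from corrscope.triggers import SpectrumConfig

class DummyTriggerCfg:
    pitch_tracking: Optional[SpectrumConfig] = None

cfg = DummyTriggerCfg()
assert cfg.pitch_tracking is None          # checkbox reads as unchecked

cfg.pitch_tracking = SpectrumConfig()      # user ticks "Pitch Tracking"
assert cfg.pitch_tracking is not None      # checkbox reads as checked

# Note: because the setter always assigns a fresh SpectrumConfig(), unticking
# and re-ticking the box discards any hand-edited SpectrumConfig values.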

View file

@ -550,6 +550,13 @@
</property>
</widget>
</item>
<item row="0" column="3" rowspan="2">
<widget class="BoundCheckBox" name="trigger__pitch_tracking">
<property name="text">
<string>Pitch Tracking</string>
</property>
</widget>
</item>
</layout>
</widget>
</item>

View file

@ -1,6 +1,18 @@
import warnings
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Type, Tuple, Optional, ClassVar, Callable, Union
from typing import (
TYPE_CHECKING,
Type,
Tuple,
Optional,
ClassVar,
Callable,
Union,
NewType,
Sequence,
List,
Any,
)
import attr
import numpy as np
@ -105,10 +117,183 @@ class PerFrameCache:
# CorrelationTrigger
class CorrelationTriggerConfig(ITriggerConfig):
class SpectrumConfig(KeywordAttrs):
"""
# Rationale:
If no basal frequency note-bands are to be truncated,
the spectrum must have freq resolution
`min_hz * (2 ** 1/notes_per_octave - 1)`.
At 20hz, 10 octaves, 12 notes/octave, this is 1.19Hz fft freqs.
Our highest band must be
`min_hz * 2**octaves`,
leading to nearly 20K freqs, which produces a somewhat slow FFT.
So increase min_hz and decrease octaves and notes_per_octave.
--------
Using a Constant-Q transform may eliminate performance concerns?
"""
# Spectrum X density
min_hz: float = 20
octaves: int = 8
notes_per_octave: int = 6
# Spectrum Y power
exponent: float = 1
divide_by_freq: bool = True
# Spectral alignment and resampling
pitch_estimate_boost: float = 1.2
max_octaves_to_resample: float = 1.0
@property
def max_notes_to_resample(self) -> int:
return round(self.notes_per_octave * self.max_octaves_to_resample)
# Time-domain history parameters
min_frames_between_recompute: int = 6
frames_to_lookbehind: int = 2
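The math in the rationale docstring can be made concrete: the sketch below just evaluates its formulas, first for the worst case the docstring mentions (20 Hz, 10 octaves, 12 notes/octave) and then for the defaults chosen here.

# Arithmetic behind the SpectrumConfig rationale docstring (plain Python, no corrscope code).
min_hz, octaves, notes_per_octave = 20.0, 10, 12
freq_resolution = min_hz * (2 ** (1 / notes_per_octave) - 1)  # ~1.19 Hz per FFT bin
max_hz = min_hz * 2 ** octaves                                # 20480 Hz
print(max_hz / freq_resolution)                               # ~17000 FFT bins -> slow FFT

# The defaults above (min_hz=20, octaves=8, notes_per_octave=6) are far cheaper:
print(20 * 2 ** 8 / (20 * (2 ** (1 / 6) - 1)))                # ~2100 bins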
class DummySpectrum:
# noinspection PyMethodMayBeStatic,PyUnusedLocal
def calc_spectrum(self, data: np.ndarray) -> np.ndarray:
return np.array([])
# Indices are linearly spaced in FFT. Notes are exponentially spaced.
# FFT is grouped into notes.
FFTIndex = NewType("FFTIndex", int)
# Very hacky and weird. Maybe it's not worth getting mypy to pass.
if TYPE_CHECKING:
FFTIndexArray = Any # mypy
else:
FFTIndexArray = "np.ndarray[FFTIndex]" # pycharm
class LogFreqSpectrum(DummySpectrum):
"""
Invariants:
- len(note_fenceposts) == n_fencepost
- rfft()[ : note_fenceposts[0]] is NOT used.
- rfft()[note_fenceposts[-1] : ] is NOT used.
- rfft()[note_fenceposts[0] : note_fenceposts[1]] becomes a note.
"""
n_fftindex: FFTIndex # Determines frequency resolution, not range.
note_fenceposts: FFTIndexArray
n_fencepost: int
def __init__(self, scfg: SpectrumConfig, subsmp_s: float, dummy_data: np.ndarray):
self.scfg = scfg
n_fftindex: FFTIndex = signal.next_fast_len(len(dummy_data))
# Increase n_fftindex until every note has nonzero width.
while True:
# Compute parameters
self.min_hz = scfg.min_hz
self.max_hz = self.min_hz * 2 ** scfg.octaves
n_fencepost = scfg.notes_per_octave * scfg.octaves + 1
note_fenceposts_hz = np.geomspace(
self.min_hz, self.max_hz, n_fencepost, dtype=FLOAT
)
# Convert fenceposts to FFTIndex
fft_from_hertz = n_fftindex / subsmp_s
note_fenceposts: FFTIndexArray = (
fft_from_hertz * note_fenceposts_hz
).astype(np.int32)
note_widths = np.diff(note_fenceposts)
if np.any(note_widths == 0):
n_fftindex = signal.next_fast_len(n_fftindex + n_fftindex // 5 + 1)
continue
else:
break
self.n_fftindex = n_fftindex # Passed to rfft() to automatically zero-pad data.
self.note_fenceposts = note_fenceposts
self.n_fencepost = len(note_fenceposts)
def calc_spectrum(self, data: np.ndarray) -> np.ndarray:
""" Unfortunately converting to FLOAT (single) adds too much overhead.
Input: Time-domain signal to be analyzed.
Output: Frequency-domain spectrum with exponentially-spaced notes.
- ret[note] = nonnegative float.
"""
scfg = self.scfg
# Compute FFT spectrum[freq]
spectrum = np.fft.rfft(data, self.n_fftindex)
spectrum = abs(spectrum)
if scfg.exponent != 1:
spectrum **= scfg.exponent
# Compute energy of each note
# spectrum_per_note[note] = np.ndarray[float]
spectrum_per_note: List[np.ndarray] = split(spectrum, self.note_fenceposts)
# energy_per_note[note] = float
energy_per_note: np.ndarray
# np.add.reduce is much faster than np.sum/mean.
if scfg.divide_by_freq:
energy_per_note = np.array(
[np.add.reduce(region) / len(region) for region in spectrum_per_note]
)
else:
energy_per_note = np.array(
[np.add.reduce(region) for region in spectrum_per_note]
)
assert len(energy_per_note) == self.n_fencepost - 1
return energy_per_note
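For readers who want to see the binning in isolation, here is a self-contained numpy sketch of the same idea: magnitude rfft, geometrically spaced fenceposts, mean energy per note (matching divide_by_freq=True). The sample rate, FFT size and 440 Hz test tone are arbitrary illustration values, not corrscope defaults; min_hz/octaves/notes_per_octave match the defaults above.

# Standalone illustration of calc_spectrum()'s note binning.
import numpy as np

subsmp_s = 12000                     # assumed effective sample rate of the trigger buffer
data = np.sin(2 * np.pi * 440 * np.arange(4096) / subsmp_s)

n_fft = 16384
spectrum = np.abs(np.fft.rfft(data, n_fft))          # exponent=1, so no power applied

min_hz, max_hz, n_fencepost = 20, 20 * 2 ** 8, 6 * 8 + 1
fenceposts_hz = np.geomspace(min_hz, max_hz, n_fencepost)
fenceposts = (fenceposts_hz * n_fft / subsmp_s).astype(int)

energy_per_note = np.array(
    [spectrum[a:b].mean() for a, b in zip(fenceposts[:-1], fenceposts[1:])]
)
# 440 Hz lies log2(440/20) ~= 4.46 octaves above min_hz, so the loudest of the
# 48 note bands should be around index 26.
print(np.argmax(energy_per_note))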
def split(data: np.ndarray, fenceposts: Sequence[FFTIndex]) -> List[np.ndarray]:
""" Based off np.split(), but faster.
Unlike np.split, does not include data before fenceposts[0] or after fenceposts[-1].
"""
sub_arys = []
ndata = len(data)
for i in range(len(fenceposts) - 1):
st = fenceposts[i]
end = fenceposts[i + 1]
if not st < ndata:
break
region = data[st:end]
sub_arys.append(region)
return sub_arys
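A quick check of the semantics described in split()'s docstring, using a small arbitrary array:

# split() drops data outside the outer fenceposts; np.split() would keep it.
import numpy as np
data = np.arange(10)
print(split(data, [2, 5, 8]))     # [array([2, 3, 4]), array([5, 6, 7])]
print(np.split(data, [2, 5, 8]))  # 4 pieces, including data[:2] and data[8:]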
class CircularArray:
def __init__(self, size: int, *dims: int):
self.size = size
self.buf = np.zeros((size, *dims))
self.index = 0
def push(self, arr: np.ndarray) -> None:
if self.size == 0:
return
self.buf[self.index] = arr
self.index = (self.index + 1) % self.size
def peek(self) -> np.ndarray:
"""Return is borrowed from self.buf.
Do NOT push to self while borrow is alive."""
return self.buf[self.index]
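A small usage sketch for CircularArray (sizes chosen arbitrarily): once the buffer has wrapped, peek() returns the oldest stored frame, i.e. the slot the next push() will overwrite.

import numpy as np
hist = CircularArray(2, 4)               # keep 2 frames of 4 samples each
hist.push(np.full(4, 1.0))
hist.push(np.full(4, 2.0))
print(hist.peek()[0])                     # 1.0 -- oldest frame
hist.push(np.full(4, 3.0))
print(hist.peek()[0])                     # 2.0
# With size=0 (pitch tracking disabled), push() is a no-op and peek() would raise
# IndexError; in this PR, peek() is only reached when pitch tracking is enabled.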
class CorrelationTriggerConfig(ITriggerConfig, always_dump="pitch_tracking"):
# get_trigger
edge_strength: float
trigger_diameter: float = 0.5
trigger_diameter: Optional[float] = None
trigger_falloff: Tuple[float, float] = (4.0, 1.0)
recalc_semitones: float = 1.0
@ -118,6 +303,9 @@ class CorrelationTriggerConfig(ITriggerConfig):
responsiveness: float
buffer_falloff: float # Gaussian std = wave_period * buffer_falloff
# Pitch tracking = compute spectrum.
pitch_tracking: Optional["SpectrumConfig"] = None
# region Legacy Aliases
trigger_strength = Alias("edge_strength")
falloff_width = Alias("buffer_falloff")
@ -152,6 +340,10 @@ class CorrelationTriggerConfig(ITriggerConfig):
class CorrelationTrigger(Trigger):
cfg: CorrelationTriggerConfig
@property
def scfg(self) -> SpectrumConfig:
return self.cfg.pitch_tracking
def __init__(self, *args, **kwargs):
"""
Correlation-based trigger which looks at a window of `trigger_tsamp` samples.
@ -181,6 +373,24 @@ class CorrelationTrigger(Trigger):
self._prev_period: Optional[int] = None
self._prev_window: Optional[np.ndarray] = None
# (mutable) Log-scaled spectrum
self.frames_since_spectrum = 0
if self.scfg:
self._spectrum_calc = LogFreqSpectrum(
scfg=self.scfg,
subsmp_s=self._wave.smp_s / self._stride,
dummy_data=self._buffer,
)
self._spectrum = self._spectrum_calc.calc_spectrum(self._buffer)
self.history = CircularArray(
self.scfg.frames_to_lookbehind, self._buffer_nsamp
)
else:
self._spectrum_calc = DummySpectrum()
self._spectrum = np.array([0])
self.history = CircularArray(0, self._buffer_nsamp)
def _calc_data_taper(self) -> np.ndarray:
""" Input data window. Zeroes out all data older than 1 frame old.
See https://github.com/nyanpasu64/corrscope/wiki/Correlation-Trigger
@ -242,6 +452,7 @@ class CorrelationTrigger(Trigger):
# begin per-frame
def get_trigger(self, index: int, cache: "PerFrameCache") -> int:
N = self._buffer_nsamp
cfg = self.cfg
# Get data
stride = self._stride
@ -253,50 +464,39 @@ class CorrelationTrigger(Trigger):
period = get_period(data)
cache.period = period * stride
if self._is_window_invalid(period):
diameter, falloff = [round(period * x) for x in self.cfg.trigger_falloff]
semitones = self._is_window_invalid(period)
# If pitch changed...
if semitones:
diameter, falloff = [round(period * x) for x in cfg.trigger_falloff]
falloff_window = cosine_flat(N, diameter, falloff)
window = np.minimum(falloff_window, self._data_taper)
# If pitch tracking enabled, rescale buffer to match data's pitch.
if self.scfg and (data != 0).any():
if isinstance(semitones, float):
peak_semitones = semitones
else:
peak_semitones = None
self.spectrum_rescale_buffer(data, peak_semitones)
self._prev_period = period
self._prev_window = window
else:
window = self._prev_window
self.history.push(data)
data *= window
# prev_buffer
prev_buffer = self._windowed_step + self._buffer
prev_buffer: np.ndarray = self._buffer.copy()
prev_buffer += self._windowed_step
# Calculate correlation
"""
If offset < optimal, we need to `offset += positive`.
- The peak will appear near the right of `data`.
if self.cfg.trigger_diameter is not None:
radius = round(N * self.cfg.trigger_diameter / 2)
else:
radius = None
Either we must slide prev_buffer to the right:
- correlate(data, prev_buffer)
- trigger = offset + peak_offset
Or we must slide data to the left (by sliding offset to the right):
- correlate(prev_buffer, data)
- trigger = offset - peak_offset
"""
corr = signal.correlate(data, prev_buffer) # returns double, not single/FLOAT
assert len(corr) == 2 * N - 1
# Find optimal offset (within trigger_diameter, default=±N/4)
mid = N - 1
radius = round(N * self.cfg.trigger_diameter / 2)
left = mid - radius
right = mid + radius + 1
corr = corr[left:right]
mid = mid - left
# argmax(corr) == mid + peak_offset == (data >> peak_offset)
# peak_offset == argmax(corr) - mid
peak_offset = np.argmax(corr) - mid # type: int
peak_offset = self.correlate_offset(data, prev_buffer, radius)
trigger = index + (stride * peak_offset)
# Apply post trigger (before updating correlation buffer)
@ -306,11 +506,107 @@ class CorrelationTrigger(Trigger):
# Update correlation buffer (distinct from visible area)
aligned = self._wave.get_around(trigger, self._buffer_nsamp, stride)
self._update_buffer(aligned, cache)
self.frames_since_spectrum += 1
return trigger
def _is_window_invalid(self, period: int) -> bool:
""" Returns True if pitch has changed more than `recalc_semitones`. """
def spectrum_rescale_buffer(
self, data: np.ndarray, peak_semitones: Optional[float]
) -> None:
"""Rewrites self._spectrum, and possibly rescales self._buffer."""
scfg = self.scfg
N = self._buffer_nsamp
if self.frames_since_spectrum < self.scfg.min_frames_between_recompute:
return
self.frames_since_spectrum = 0
spectrum = self._spectrum_calc.calc_spectrum(data)
normalize_buffer(spectrum)
# Don't normalize self._spectrum. It was already normalized when being assigned.
prev_spectrum = self._spectrum_calc.calc_spectrum(self.history.peek())
# rewrite spectrum
self._spectrum = spectrum
assert not np.any(np.isnan(spectrum))
# Find spectral correlation peak,
# but prioritize "changing pitch by ???".
if peak_semitones is not None:
boost_x = int(round(peak_semitones / 12 * scfg.notes_per_octave))
boost_y: float = scfg.pitch_estimate_boost
else:
boost_x = 0
boost_y = 1.0
# If we want to double pitch...
resample_notes = self.correlate_offset(
spectrum,
prev_spectrum,
scfg.max_notes_to_resample,
boost_x=boost_x,
boost_y=boost_y,
)
if resample_notes != 0:
# we must divide sampling rate by 2.
new_len = int(round(N / 2 ** (resample_notes / scfg.notes_per_octave)))
# Copy+resample self._buffer.
self._buffer = np.interp(
np.linspace(0, 1, new_len), np.linspace(0, 1, N), self._buffer
)
# assert len(self._buffer) == new_len
self._buffer = midpad(self._buffer, N)
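The resampling step above can be illustrated standalone: if the spectral correlation says the incoming data's pitch sits resample_notes notes above the buffer's, the buffer is shrunk so its period matches, then centered back to N samples. The sketch below reproduces the length calculation and np.interp() call; rescale_buffer is a hypothetical helper, and corrscope's midpad() is replaced by a simple centered zero-pad/crop for illustration.

import numpy as np

def rescale_buffer(buffer: np.ndarray, resample_notes: int, notes_per_octave: int = 6):
    N = len(buffer)
    new_len = int(round(N / 2 ** (resample_notes / notes_per_octave)))
    resampled = np.interp(np.linspace(0, 1, new_len), np.linspace(0, 1, N), buffer)
    # Center the rescaled signal in an N-sample window (stand-in for midpad()).
    if new_len <= N:
        out = np.zeros(N)
        start = (N - new_len) // 2
        out[start : start + new_len] = resampled
    else:
        start = (new_len - N) // 2
        out = resampled[start : start + N]
    return out

buf = np.sin(np.linspace(0, 20 * np.pi, 1000))          # 10 cycles in 1000 samples
up_one_octave = rescale_buffer(buf, resample_notes=6)   # period halves in the middle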
@staticmethod
def correlate_offset(
data: np.ndarray,
prev_buffer: np.ndarray,
radius: Optional[int],
boost_x: int = 0,
boost_y: float = 1.0,
) -> int:
"""
This is confusing.
If data index < optimal, data will be too far to the right,
and we need to `index += positive`.
- The peak will appear near the right of `data`.
Either we must slide prev_buffer to the right,
or we must slide data to the left (by sliding index to the right):
- correlate(data, prev_buffer)
- trigger = index + peak_offset
"""
N = len(data)
corr = signal.correlate(data, prev_buffer) # returns double, not single/FLOAT
Ncorr = 2 * N - 1
assert len(corr) == Ncorr
# Find optimal offset
mid = N - 1
if radius is not None:
left = max(mid - radius, 0)
right = min(mid + radius + 1, Ncorr)
corr = corr[left:right]
mid = mid - left
# Prioritize part of it.
corr[mid + boost_x : mid + boost_x + 1] *= boost_y
# argmax(corr) == mid + peak_offset == (data >> peak_offset)
# peak_offset == argmax(corr) - mid
peak_offset = np.argmax(corr) - mid # type: int
return peak_offset
def _is_window_invalid(self, period: int) -> Union[bool, float]:
""" Returns number of semitones,
if pitch has changed more than `recalc_semitones`. """
prev = self._prev_period
@ -319,12 +615,12 @@ class CorrelationTrigger(Trigger):
elif prev * period == 0:
return prev != period
else:
semitones = abs(np.log(period / prev) / np.log(2) * 12)
# If period doubles, semitones are -12.
semitones = np.log(period / prev) / np.log(2) * -12
# If semitones == recalc_semitones == 0, do NOT recalc.
if semitones <= self.cfg.recalc_semitones:
if abs(semitones) <= self.cfg.recalc_semitones:
return False
return True
return semitones
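A two-line check of the sign convention (values arbitrary): if the period doubles (pitch drops an octave), the result is -12; if it halves, +12.

import numpy as np
print(np.log(200 / 100) / np.log(2) * -12)   # -12.0
print(np.log(50 / 100) / np.log(2) * -12)    # +12.0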
def _update_buffer(self, data: np.ndarray, cache: PerFrameCache) -> None:
"""

poetry.lock (generated)
View file

@ -94,6 +94,14 @@ version = "0.10.0"
[package.dependencies]
six = "*"
[[package]]
category = "dev"
description = "Better living through Python with decorators"
name = "decorator"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*"
version = "4.3.2"
[[package]]
category = "dev"
description = "Delayed/soft assertions for python"
@ -268,6 +276,17 @@ py = ">=1.5.0"
setuptools = "*"
six = ">=1.10.0"
[[package]]
category = "dev"
description = "Separate test code from test cases in pytest."
name = "pytest-cases"
optional = false
python-versions = "*"
version = "1.2.2"
[package.dependencies]
decorator = "*"
[[package]]
category = "dev"
description = "Pytest plugin for measuring coverage."
@ -350,7 +369,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4"
version = "1.24.1"
[metadata]
content-hash = "5f805ebc68759139df36aae10a903fd9d347b2f23248eb32182c5b48b48e2a1c"
content-hash = "ba2ed55facdab2c93972f4a3cbf732de4ed2b709eb376a204ab563aa0d264272"
python-versions = "^3.6"
[metadata.hashes]
@ -365,6 +384,7 @@ codecov = ["8ed8b7c6791010d359baed66f84f061bba5bd41174bf324c31311e8737602788", "
colorama = ["05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", "f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"]
coverage = ["06123b58a1410873e22134ca2d88bd36680479fe354955b3579fb8ff150e4d27", "09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f", "0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe", "0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d", "0d34245f824cc3140150ab7848d08b7e2ba67ada959d77619c986f2062e1f0e8", "10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0", "1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607", "1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d", "258b21c5cafb0c3768861a6df3ab0cfb4d8b495eee5ec660e16f928bf7385390", "2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b", "3ad59c84c502cd134b0088ca9038d100e8fb5081bbd5ccca4863f3804d81f61d", "447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3", "46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e", "4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815", "510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36", "5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1", "5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14", "5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c", "6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794", "6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b", "71afc1f5cd72ab97330126b566bbf4e8661aab7449f08895d21a5d08c6b051ff", "7349c27128334f787ae63ab49d90bf6d47c7288c63a0a5dfaa319d4b4541dd2c", "77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840", "828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd", "859714036274a75e6e57c7bab0c47a4602d2a8cfaaa33bbdb68c8359b2ed4f5c", "85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82", "869ef4a19f6e4c6987e18b315721b8b971f7048e6eaea29c066854242b4e98d9", "8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952", "977e2d9a646773cc7428cdd9a34b069d6ee254fadfb4d09b3f430e95472f3cf3", "99bd767c49c775b79fdcd2eabff405f1063d9d959039c0bdd720527a7738748a", "a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389", "aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f", "ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4", "b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da", "bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647", "c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d", "d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42", "d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478", "da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b", "ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb", "ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9"]
cycler = ["1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", "cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"]
decorator = ["33cd704aea07b4c28b3eb2c97d288a06918275dac0ecebdaf1bc8a48d98adb9e", "cabb249f4710888a2fc0e13e9a16c343d932033718ff62e1e9bc93a9d3a9122b"]
delayed-assert = ["02eae58d56b9b8e3a72f890b85c1eb530184e25a65689e43889d4e9995cd42e4", "ecfffa0eba4980606739475480c4722330eb44a828e3b9cfa9a4deed4104b7d1"]
future = ["67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8"]
hypothesis = ["25023a6610a97e48bae511e71227a42293853432906e5a13403015e51b69e27c", "2b1ec609a1e357fd89e98dc59864f78f112dd58893c976e6b744caae83e6c494", "b0f0f3cca1954847a96b54d73345b3e5590739e235baa5470033d84f99dcd62d"]
@ -382,6 +402,7 @@ pyparsing = ["40856e74d4987de5d01761a22d1621ae1c7f8774585acae358aa5c5936c6c90b",
pyqt5 = ["517e4339135c4874b799af0d484bc2e8c27b54850113a68eec40a0b56534f450", "ac1eb5a114b6e7788e8be378be41c5e54b17d5158994504e85e43b5fca006a39", "d2309296a5a79d0a1c0e6c387c30f0398b65523a6dcc8a19cc172e46b949e00d", "e85936bae1581bcb908847d2038e5b34237a5e6acc03130099a78930770e7ead"]
pyqt5-sip = ["125f77c087572c9272219cda030a63c2f996b8507592b2a54d7ef9b75f9f054d", "14c37b06e3fb7c2234cb208fa461ec4e62b4ba6d8b32ca3753c0b2cfd61b00e3", "1cb2cf52979f9085fc0eab7e0b2438eb4430d4aea8edec89762527e17317175b", "4babef08bccbf223ec34464e1ed0a23caeaeea390ca9a3529227d9a57f0d6ee4", "53cb9c1208511cda0b9ed11cffee992a5a2f5d96eb88722569b2ce65ecf6b960", "549449d9461d6c665cbe8af4a3808805c5e6e037cd2ce4fd93308d44a049bfac", "5f5b3089b200ff33de3f636b398e7199b57a6b5c1bb724bdb884580a072a14b5", "a4d9bf6e1fa2dd6e73f1873f1a47cee11a6ba0cf9ba8cf7002b28c76823600d0", "a4ee6026216f1fbe25c8847f9e0fbce907df5b908f84816e21af16ec7666e6fe", "a91a308a5e0cc99de1e97afd8f09f46dd7ca20cfaa5890ef254113eebaa1adff", "b0342540da479d2713edc68fb21f307473f68da896ad5c04215dae97630e0069", "f997e21b4e26a3397cb7b255b8d1db5b9772c8e0c94b6d870a5a0ab5c27eacaa"]
pytest = ["3e65a22eb0d4f1bdbc1eacccf4a3198bf8d4049dea5112d70a0c61b00e748d02", "5924060b374f62608a078494b909d341720a050b5224ff87e17e12377486a71d"]
pytest-cases = ["7f466da8058b1fcb371010ccad8e17efbd62c934c4a4ece5fee0defad270ce5a", "8baef91b47482d0febc3fd39a034d6add4ac5cfa2314a1dfb6ac847736412d1d"]
pytest-cov = ["0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", "230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f"]
pytest-mock = ["53801e621223d34724926a5c98bd90e8e417ce35264365d39d6c896388dcc928", "d89a8209d722b8307b5e351496830d5cc5e192336003a485443ae9adeb7dd4c0"]
python-dateutil = ["063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", "88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"]

View file

@ -31,6 +31,7 @@ pywin32-ctypes = {version = "^0.2.0",platform = "win32"}
coverage = "^4.5"
pytest-cov = "^2.6"
codecov = "^2.0"
pytest_cases = "^1.2"
[tool.poetry.scripts]
corr = 'corrscope.cli:main'

View file

@ -1,8 +1,10 @@
import attr
import matplotlib.pyplot as plt
import numpy as np
import pytest
from matplotlib.axes import Axes
from matplotlib.figure import Figure
from pytest_cases import pytest_fixture_plus
from corrscope import triggers
from corrscope.triggers import (
@ -11,6 +13,7 @@ from corrscope.triggers import (
PerFrameCache,
ZeroCrossingTriggerConfig,
LocalPostTriggerConfig,
SpectrumConfig,
)
from corrscope.wave import Wave
@ -25,10 +28,16 @@ def cfg_template(**kwargs) -> CorrelationTriggerConfig:
return attr.evolve(cfg, **kwargs)
@pytest.fixture(scope="session", params=[False, True])
def cfg(request):
use_edge_trigger = request.param
return cfg_template(use_edge_trigger=use_edge_trigger)
@pytest_fixture_plus
@pytest.mark.parametrize("use_edge_trigger", [False, True])
@pytest.mark.parametrize("trigger_diameter", [None, 0.5])
@pytest.mark.parametrize("pitch_tracking", [None, SpectrumConfig()])
def cfg(use_edge_trigger, trigger_diameter, pitch_tracking):
return cfg_template(
use_edge_trigger=use_edge_trigger,
trigger_diameter=trigger_diameter,
pitch_tracking=pitch_tracking,
)
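pytest_fixture_plus combines the stacked parametrize decorators into a single fixture covering all 2 × 2 × 2 = 8 configurations. A rough plain-pytest equivalent (hypothetical name cfg_plain, without pytest-cases' nicer per-parameter test IDs) would look like:

import itertools
import pytest

@pytest.fixture(
    params=list(itertools.product([False, True], [None, 0.5], [None, SpectrumConfig()]))
)
def cfg_plain(request):
    use_edge_trigger, trigger_diameter, pitch_tracking = request.param
    return cfg_template(
        use_edge_trigger=use_edge_trigger,
        trigger_diameter=trigger_diameter,
        pitch_tracking=pitch_tracking,
    )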
@pytest.fixture(
@ -177,6 +186,43 @@ def test_trigger_should_recalc_window():
assert trigger._is_window_invalid(x), x
# Test pitch-invariant triggering using spectrum
def test_correlate_offset():
"""
Catches bug where writing N instead of Ncorr
prevented function from returning positive numbers.
"""
np.random.seed(31337)
correlate_offset = CorrelationTrigger.correlate_offset
# Ensure autocorrelation on random data returns peak at 0.
N = 100
spectrum = np.random.random(N)
assert correlate_offset(spectrum, spectrum, 12) == 0
# Ensure cross-correlation of time-shifted impulses works.
# Assume wave where y=[i==99].
wave = np.eye(N)[::-1]
# Taking a slice beginning at index i will produce an impulse at 99-i.
left = wave[30]
right = wave[40]
# We need to slide `left` to the right by 10 samples, and vice versa.
for radius in [None, 12]:
assert correlate_offset(data=left, prev_buffer=right, radius=radius) == 10
assert correlate_offset(data=right, prev_buffer=left, radius=radius) == -10
# The correlation peak at zero-offset is small enough for boost_x to be returned.
boost_y = 1.5
ones = np.ones(N)
for boost_x in [6, -6]:
assert (
correlate_offset(ones, ones, radius=9, boost_x=boost_x, boost_y=boost_y)
== boost_x
)
# Test the ability to load legacy TriggerConfig