improve waveform #260

and type
pull/1939/head
Mikael Finstad 2024-03-20 19:55:42 +08:00
parent 2e7d746007
commit 8a7c1f8a17
No known key found for this signature in database
GPG key ID: 25AB36E3E81CBC26
15 changed files with 233 additions and 114 deletions

View File

@ -201,10 +201,6 @@ async function renderWaveformPng({ filePath, start, duration, color, streamIndex
return {
buffer: stdout,
from: start,
to: start + duration,
duration,
createdAt: new Date(),
};
} catch (err) {
if (ps1) ps1.kill();

View File

@ -430,7 +430,7 @@ function App() {
return formatDuration({ seconds, shorten, fileNameFriendly });
}, [detectedFps, timecodeFormat, getFrameCount]);
const formatTimeAndFrames = useCallback((seconds) => {
const formatTimeAndFrames = useCallback((seconds: number) => {
const frameCount = getFrameCount(seconds);
const timeStr = timecodeFormat === 'timecodeWithFramesFraction'
@ -628,6 +628,7 @@ function App() {
if (!subtitleStream || workingRef.current) return;
try {
setWorking({ text: i18n.t('Loading subtitle') });
invariant(filePath != null);
const url = await extractSubtitleTrack(filePath, index);
setSubtitlesByStreamId((old) => ({ ...old, [index]: { url, lang: subtitleStream.tags && subtitleStream.tags.language } }));
setActiveSubtitleStreamIndex(index);
@ -713,6 +714,8 @@ function App() {
try {
setThumbnails([]);
invariant(filePath != null);
invariant(zoomedDuration != null);
const promise = ffmpegRenderThumbnails({ filePath, from: zoomWindowStartTime, duration: zoomedDuration, onThumbnail: addThumbnail });
thumnailsRenderingPromiseRef.current = promise;
await promise;
@ -1205,6 +1208,8 @@ function App() {
), [customOutDir, filePath, mergedOutFileName]);
const onExportConfirm = useCallback(async () => {
invariant(filePath != null);
if (numStreamsToCopy === 0) {
errorToast(i18n.t('No tracks selected for export'));
return;
@ -1453,7 +1458,7 @@ function App() {
const storeProjectInSourceDir = !storeProjectInWorkingDir;
async function tryFindAndLoadProjectFile({ chapters, cod }) {
async function tryFindAndLoadProjectFile({ chapters, cod }: { chapters, cod: string | undefined }) {
try {
// First try to open from working dir
if (await tryOpenProjectPath(getEdlFilePath(fp, cod), 'llc')) return;
@ -2620,7 +2625,6 @@ function App() {
<div className="no-user-select" style={bottomStyle}>
<Timeline
// @ts-expect-error todo
shouldShowKeyframes={shouldShowKeyframes}
waveforms={waveforms}
shouldShowWaveform={shouldShowWaveform}
@ -2631,7 +2635,6 @@ function App() {
playerTime={playerTime}
commandedTime={commandedTime}
relevantTime={relevantTime}
getRelevantTime={getRelevantTime}
commandedTimeRef={commandedTimeRef}
startTimeOffset={startTimeOffset}
zoom={zoom}
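
The repeated invariant(filePath != null) calls added above rely on tiny-invariant's asserts-condition signature, so TypeScript narrows filePath from string | undefined to string for the rest of the function. A minimal sketch of the pattern, assuming tiny-invariant; the exportFile function here is hypothetical and only stands in for the real handlers:

import invariant from 'tiny-invariant';

// filePath is string | undefined until a file has been opened
async function exportFile(filePath: string | undefined) {
  invariant(filePath != null); // throws if filePath is nullish
  // from here on, TypeScript treats filePath as string
  return filePath.toUpperCase();
}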

View File

@ -5,7 +5,7 @@ import { FaTrashAlt, FaSave } from 'react-icons/fa';
import { mySpring } from './animations';
import { saveColor } from './colors';
const BetweenSegments = memo(({ start, end, duration, invertCutSegments }) => {
const BetweenSegments = memo(({ start, end, duration, invertCutSegments }: { start: number, end: number, duration: number, invertCutSegments: boolean }) => {
const left = `${(start / duration) * 100}%`;
return (

View File

@ -1,8 +1,9 @@
import { memo, useRef, useMemo, useCallback, useEffect, useState } from 'react';
import { memo, useRef, useMemo, useCallback, useEffect, useState, MutableRefObject, CSSProperties, WheelEventHandler } from 'react';
import { motion, useMotionValue, useSpring } from 'framer-motion';
import debounce from 'lodash/debounce';
import { useTranslation } from 'react-i18next';
import { FaCaretDown, FaCaretUp } from 'react-icons/fa';
import invariant from 'tiny-invariant';
import TimelineSeg from './TimelineSeg';
import BetweenSegments from './BetweenSegments';
@ -11,11 +12,18 @@ import useUserSettings from './hooks/useUserSettings';
import { timelineBackground, darkModeTransition } from './colors';
import { Frame } from './ffmpeg';
import { ApparentCutSegment, FormatTimecode, InverseCutSegment, RenderableWaveform, Thumbnail } from './types';
type CalculateTimelinePercent = (time: number) => string | undefined;
const currentTimeWidth = 1;
const Waveform = memo(({ waveform, calculateTimelinePercent, durationSafe }) => {
const [style, setStyle] = useState({ display: 'none' });
const Waveform = memo(({ waveform, calculateTimelinePercent, durationSafe }: {
waveform: RenderableWaveform, calculateTimelinePercent: CalculateTimelinePercent, durationSafe: number,
}) => {
const [style, setStyle] = useState<CSSProperties>({ display: 'none' });
const leftPos = calculateTimelinePercent(waveform.from);
@ -27,12 +35,17 @@ const Waveform = memo(({ waveform, calculateTimelinePercent, durationSafe }) =>
position: 'absolute', height: '100%', left: leftPos, width: `${((toTruncated - waveform.from) / durationSafe) * 100}%`,
});
}
if (waveform.url == null) return null;
return (
<img src={waveform.url} draggable={false} style={style} alt="" onLoad={onLoad} />
);
});
const Waveforms = memo(({ calculateTimelinePercent, durationSafe, waveforms, zoom, height }) => (
const Waveforms = memo(({ calculateTimelinePercent, durationSafe, waveforms, zoom, height }: {
calculateTimelinePercent: CalculateTimelinePercent, durationSafe: number, waveforms: RenderableWaveform[], zoom: number, height: number,
}) => (
<div style={{ height, width: `${zoom * 100}%`, position: 'relative' }}>
{waveforms.map((waveform) => (
<Waveform key={`${waveform.from}-${waveform.to}`} waveform={waveform} calculateTimelinePercent={calculateTimelinePercent} durationSafe={durationSafe} />
@ -40,9 +53,9 @@ const Waveforms = memo(({ calculateTimelinePercent, durationSafe, waveforms, zoo
</div>
));
const CommandedTime = memo(({ commandedTimePercent }) => {
const CommandedTime = memo(({ commandedTimePercent }: { commandedTimePercent: string }) => {
const color = 'var(--gray12)';
const commonStyle = { left: commandedTimePercent, position: 'absolute', pointerEvents: 'none' };
const commonStyle: CSSProperties = { left: commandedTimePercent, position: 'absolute', pointerEvents: 'none' };
return (
<>
<FaCaretDown style={{ ...commonStyle, top: 0, color, fontSize: 14, marginLeft: -7, marginTop: -6 }} />
@ -54,27 +67,76 @@ const CommandedTime = memo(({ commandedTimePercent }) => {
const timelineHeight = 36;
const timeWrapperStyle = { position: 'absolute', height: timelineHeight, left: 0, right: 0, bottom: 0, display: 'flex', alignItems: 'center', justifyContent: 'center', pointerEvents: 'none' };
const timeStyle = { background: 'rgba(0,0,0,0.4)', borderRadius: 3, padding: '2px 4px', color: 'rgba(255, 255, 255, 0.8)' };
const timeWrapperStyle: CSSProperties = { position: 'absolute', height: timelineHeight, left: 0, right: 0, bottom: 0, display: 'flex', alignItems: 'center', justifyContent: 'center', pointerEvents: 'none' };
const timeStyle: CSSProperties = { background: 'rgba(0,0,0,0.4)', borderRadius: 3, padding: '2px 4px', color: 'rgba(255, 255, 255, 0.8)' };
const Timeline = memo(({
durationSafe, startTimeOffset, playerTime, commandedTime, relevantTime,
zoom, neighbouringKeyFrames, seekAbs, apparentCutSegments,
setCurrentSegIndex, currentSegIndexSafe, inverseCutSegments, formatTimecode, formatTimeAndFrames,
waveforms, shouldShowWaveform, shouldShowKeyframes, thumbnails,
onZoomWindowStartTimeChange, waveformEnabled, showThumbnails,
playing, isFileOpened, onWheel, commandedTimeRef, goToTimecode, isSegmentSelected,
durationSafe,
startTimeOffset,
playerTime,
commandedTime,
relevantTime,
zoom,
neighbouringKeyFrames,
seekAbs,
apparentCutSegments,
setCurrentSegIndex,
currentSegIndexSafe,
inverseCutSegments,
formatTimecode,
formatTimeAndFrames,
waveforms,
shouldShowWaveform,
shouldShowKeyframes,
thumbnails,
onZoomWindowStartTimeChange,
waveformEnabled,
showThumbnails,
playing,
isFileOpened,
onWheel,
commandedTimeRef,
goToTimecode,
isSegmentSelected,
} : {
durationSafe: number,
startTimeOffset: number,
playerTime: number | undefined,
commandedTime: number,
relevantTime: number,
zoom: number,
neighbouringKeyFrames: Frame[],
seekAbs: (a: number) => void,
apparentCutSegments: ApparentCutSegment[],
setCurrentSegIndex: (a: number) => void,
currentSegIndexSafe: number,
inverseCutSegments: InverseCutSegment[],
formatTimecode: FormatTimecode,
formatTimeAndFrames: (a: number) => string,
waveforms: RenderableWaveform[],
shouldShowWaveform: boolean,
shouldShowKeyframes: boolean,
thumbnails: Thumbnail[],
onZoomWindowStartTimeChange: (a: number) => void,
waveformEnabled: boolean,
showThumbnails: boolean,
playing: boolean,
isFileOpened: boolean,
onWheel: WheelEventHandler,
commandedTimeRef: MutableRefObject<number>,
goToTimecode: () => void,
isSegmentSelected: (a: { segId: string }) => boolean,
}) => {
const { t } = useTranslation();
const { invertCutSegments } = useUserSettings();
const timelineScrollerRef = useRef();
const timelineScrollerSkipEventRef = useRef();
const timelineScrollerSkipEventDebounce = useRef();
const timelineWrapperRef = useRef();
const timelineScrollerRef = useRef<HTMLDivElement>(null);
const timelineScrollerSkipEventRef = useRef<boolean>(false);
const timelineScrollerSkipEventDebounce = useRef<() => void>();
const timelineWrapperRef = useRef<HTMLDivElement>(null);
const [hoveringTime, setHoveringTime] = useState();
const [hoveringTime, setHoveringTime] = useState<number>();
const displayTime = (hoveringTime != null && isFileOpened && !playing ? hoveringTime : relevantTime) + startTimeOffset;
const displayTimePercent = useMemo(() => `${Math.round((displayTime / durationSafe) * 100)}%`, [displayTime, durationSafe]);
@ -99,12 +161,12 @@ const Timeline = memo(({
const timeOfInterestPosPixels = useMemo(() => {
// https://github.com/mifi/lossless-cut/issues/676
const pos = calculateTimelinePos(relevantTime);
if (pos != null && timelineScrollerRef.current) return pos * zoom * timelineScrollerRef.current.offsetWidth;
if (pos != null && timelineScrollerRef.current) return pos * zoom * timelineScrollerRef.current!.offsetWidth;
return undefined;
}, [calculateTimelinePos, relevantTime, zoom]);
const calcZoomWindowStartTime = useCallback(() => (timelineScrollerRef.current
? (timelineScrollerRef.current.scrollLeft / (timelineScrollerRef.current.offsetWidth * zoom)) * durationSafe
? (timelineScrollerRef.current.scrollLeft / (timelineScrollerRef.current!.offsetWidth * zoom)) * durationSafe
: 0), [durationSafe, zoom]);
// const zoomWindowStartTime = calcZoomWindowStartTime(duration, zoom);
@ -117,7 +179,7 @@ const Timeline = memo(({
function suppressScrollerEvents() {
timelineScrollerSkipEventRef.current = true;
timelineScrollerSkipEventDebounce.current();
timelineScrollerSkipEventDebounce.current?.();
}
const scrollLeftMotion = useMotionValue(0);
@ -127,7 +189,7 @@ const Timeline = memo(({
useEffect(() => {
spring.on('change', (value) => {
if (timelineScrollerSkipEventRef.current) return; // Don't animate while zooming
timelineScrollerRef.current.scrollLeft = value;
timelineScrollerRef.current!.scrollLeft = value;
});
}, [spring]);
@ -135,8 +197,9 @@ const Timeline = memo(({
useEffect(() => {
if (timeOfInterestPosPixels == null || timelineScrollerSkipEventRef.current) return;
invariant(timelineScrollerRef.current != null);
if (timeOfInterestPosPixels > timelineScrollerRef.current.scrollLeft + timelineScrollerRef.current.offsetWidth) {
const timelineWidth = timelineWrapperRef.current.offsetWidth;
const timelineWidth = timelineWrapperRef.current!.offsetWidth;
const scrollLeft = timeOfInterestPosPixels - (timelineScrollerRef.current.offsetWidth * 0.1);
scrollLeftMotion.set(Math.min(scrollLeft, timelineWidth - timelineScrollerRef.current.offsetWidth));
} else if (timeOfInterestPosPixels < timelineScrollerRef.current.scrollLeft) {
@ -150,6 +213,7 @@ const Timeline = memo(({
suppressScrollerEvents();
if (isZoomed) {
invariant(timelineScrollerRef.current != null);
const zoomedTargetWidth = timelineScrollerRef.current.offsetWidth * zoom;
const scrollLeft = Math.max((commandedTimeRef.current / durationSafe) * zoomedTargetWidth - timelineScrollerRef.current.offsetWidth / 2, 0);
@ -163,6 +227,7 @@ const Timeline = memo(({
const cancelWheel = (event) => event.preventDefault();
const scroller = timelineScrollerRef.current;
invariant(scroller != null);
scroller.addEventListener('wheel', cancelWheel, { passive: false });
return () => {
@ -186,6 +251,7 @@ const Timeline = memo(({
const getMouseTimelinePos = useCallback((e) => {
const target = timelineWrapperRef.current;
invariant(target != null);
const rect = target.getBoundingClientRect();
const relX = e.pageX - (rect.left + document.body.scrollLeft);
return (relX / target.offsetWidth) * durationSafe;
@ -196,7 +262,7 @@ const Timeline = memo(({
const handleScrub = useCallback((e) => seekAbs((getMouseTimelinePos(e))), [seekAbs, getMouseTimelinePos]);
useEffect(() => {
setHoveringTime();
setHoveringTime(undefined);
}, [relevantTime]);
const onMouseDown = useCallback((e) => {
@ -231,7 +297,7 @@ const Timeline = memo(({
e.preventDefault();
}, [getMouseTimelinePos]);
const onMouseOut = useCallback(() => setHoveringTime(), []);
const onMouseOut = useCallback(() => setHoveringTime(undefined), []);
const contextMenuTemplate = useMemo(() => [
{ label: t('Seek to timecode'), click: goToTimecode },
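
The Timeline changes above mostly add types rather than behavior: refs get element types (useRef<HTMLDivElement>(null)), inline style objects become CSSProperties, and invariant guards replace silent assumptions that refs are set. A small sketch of these patterns together; the useScrollToStart hook is illustrative only, not part of the component:

import { useRef, CSSProperties } from 'react';
import invariant from 'tiny-invariant';

// typing the object lets the compiler catch invalid CSS property values
const wrapperStyle: CSSProperties = { position: 'absolute', pointerEvents: 'none' };

function useScrollToStart() {
  // ref starts as null; .current is HTMLDivElement | null
  const scrollerRef = useRef<HTMLDivElement>(null);
  const scrollToStart = () => {
    invariant(scrollerRef.current != null); // narrow before dereferencing
    scrollerRef.current.scrollLeft = 0;
  };
  return { scrollerRef, scrollToStart, wrapperStyle };
}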

View File

@ -1,14 +1,17 @@
import { memo, useMemo } from 'react';
import { motion, AnimatePresence } from 'framer-motion';
import { motion, AnimatePresence, MotionStyle } from 'framer-motion';
import { FaTrashAlt } from 'react-icons/fa';
import { mySpring } from './animations';
import useUserSettings from './hooks/useUserSettings';
import { useSegColors } from './contexts';
import { ApparentCutSegment, FormatTimecode } from './types';
const TimelineSeg = memo(({
seg, duration, isActive, segNum, onSegClick, invertCutSegments, formatTimecode, selected,
} : {
seg: ApparentCutSegment, duration: number, isActive: boolean, segNum: number, onSegClick: (a: number) => void, invertCutSegments: boolean, formatTimecode: FormatTimecode, selected: boolean,
}) => {
const { darkMode } = useUserSettings();
const { getSegColor } = useSegColors();
@ -34,7 +37,7 @@ const TimelineSeg = memo(({
}, [darkMode, invertCutSegments, isActive, segColor, selected]);
const markerBorderRadius = 5;
const wrapperStyle = {
const wrapperStyle: MotionStyle = {
position: 'absolute',
top: 0,
bottom: 0,
@ -59,7 +62,7 @@ const TimelineSeg = memo(({
const onThisSegClick = () => onSegClick(segNum);
const title = [];
const title: string[] = [];
if (cutEnd > cutStart) title.push(`${formatTimecode({ seconds: cutEnd - cutStart, shorten: true })}`);
if (name) title.push(name);

View File

@ -1,4 +1,6 @@
import { memo, useEffect, useState, useCallback, useRef } from 'react';
import { memo, useEffect, useState, useCallback, useRef, CSSProperties } from 'react';
import { Spinner } from 'evergreen-ui';
import { ffmpegExtractWindow } from '../util/constants';
import { RenderableWaveform } from '../types';
@ -90,22 +92,36 @@ const BigWaveform = memo(({ waveforms, relevantTime, playing, durationSafe, zoom
const leftPercent = `${left * 100}%`;
const widthPercent = `${width * 100}%`;
const style: CSSProperties = {
pointerEvents: 'none',
backgroundColor: 'var(--gray3)',
position: 'absolute',
height: '100%',
width: widthPercent,
left: leftPercent,
borderLeft: waveform.from === 0 ? '1px solid var(--gray11)' : undefined,
borderRight: waveform.to >= durationSafe ? '1px solid var(--gray11)' : undefined,
};
if (waveform.url == null) {
return (
<div
key={`${waveform.from}-${waveform.to}`}
draggable={false}
style={{ ...style, display: 'flex', alignItems: 'center', justifyContent: 'center' }}
>
<Spinner />
</div>
);
}
return (
<img
key={`${waveform.from}-${waveform.to}`}
src={waveform.url}
draggable={false}
alt=""
style={{
pointerEvents: 'none',
backgroundColor: 'var(--gray3)',
position: 'absolute',
height: '100%',
width: widthPercent,
left: leftPercent,
borderLeft: waveform.from === 0 ? '1px solid var(--gray11)' : undefined,
borderRight: waveform.to >= durationSafe ? '1px solid var(--gray11)' : undefined,
}}
style={style}
/>
);
})}

View File

@ -5,6 +5,7 @@ import { AiOutlineMergeCells } from 'react-icons/ai';
import { FaQuestionCircle, FaExclamationTriangle } from 'react-icons/fa';
import i18n from 'i18next';
import withReactContent from 'sweetalert2-react-content';
import invariant from 'tiny-invariant';
import Swal from '../swal';
import { readFileMeta, getSmarterOutFormat } from '../ffmpeg';
@ -57,6 +58,7 @@ const ConcatDialog = memo(({ isShown, onHide, paths, onConcat, alwaysConcatMulti
setFileFormat(undefined);
setDetectedFileFormat(undefined);
setOutFileName(undefined);
invariant(firstPath != null);
const fileMetaNew = await readFileMeta(firstPath);
const fileFormatNew = await getSmarterOutFormat({ filePath: firstPath, fileMeta: fileMetaNew });
if (aborted) return;

View File

@ -5,11 +5,11 @@ import Timecode from 'smpte-timecode';
import minBy from 'lodash/minBy';
import invariant from 'tiny-invariant';
import { pcmAudioCodecs, getMapStreamsArgs, isMov } from './util/streams';
import { pcmAudioCodecs, getMapStreamsArgs, isMov, LiteFFprobeStream } from './util/streams';
import { getSuffixedOutPath, isExecaFailure } from './util';
import { isDurationValid } from './segments';
import { Waveform } from '../types';
import { FFprobeProbeResult, FFprobeStream } from '../ffprobe';
import { FFprobeChapter, FFprobeFormat, FFprobeProbeResult, FFprobeStream } from '../ffprobe';
const FileType = window.require('file-type');
const { pathExists } = window.require('fs-extra');
@ -64,11 +64,13 @@ interface Keyframe {
createdAt: Date,
}
interface Frame extends Keyframe {
export interface Frame extends Keyframe {
keyframe: boolean
}
export async function readFrames({ filePath, from, to, streamIndex }) {
export async function readFrames({ filePath, from, to, streamIndex }: {
filePath: string, from?: number | undefined, to?: number | undefined, streamIndex: number,
}) {
const intervalsArgs = from != null && to != null ? ['-read_intervals', `${from}%${to}`] : [];
const { stdout } = await runFfprobe(['-v', 'error', ...intervalsArgs, '-show_packets', '-select_streams', streamIndex, '-show_entries', 'packet=pts_time,flags', '-of', 'json', filePath]);
// todo types
@ -183,7 +185,7 @@ export function getSafeCutTime(frames, cutTime, nextMode) {
return frames[index - 1].time;
}
export function findNearestKeyFrameTime({ frames, time, direction, fps }) {
export function findNearestKeyFrameTime({ frames, time, direction, fps }: { frames: Frame[], time: number, direction: number, fps: number | undefined }) {
const sigma = fps ? (1 / fps) : 0.1;
const keyframes = frames.filter((f) => f.keyframe && (direction > 0 ? f.time > time + sigma : f.time < time - sigma));
if (keyframes.length === 0) return undefined;
@ -192,7 +194,7 @@ export function findNearestKeyFrameTime({ frames, time, direction, fps }) {
return nearestKeyFrame.time;
}
export async function tryMapChaptersToEdl(chapters) {
export async function tryMapChaptersToEdl(chapters: FFprobeChapter[]) {
try {
return chapters.map((chapter) => {
const start = parseFloat(chapter.start_time);
@ -206,7 +208,7 @@ export async function tryMapChaptersToEdl(chapters) {
end,
name,
};
}).filter(Boolean);
}).flatMap((it) => (it ? [it] : []));
} catch (err) {
console.error('Failed to read chapters from file', err);
return [];
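
In tryMapChaptersToEdl above, .filter(Boolean) becomes .flatMap((it) => (it ? [it] : [])). The behavior is the same (drop nullish entries), but flatMap lets TypeScript infer the narrowed element type, whereas filter(Boolean) keeps undefined in the array type unless a type predicate is supplied. A rough sketch with an illustrative Edl type (not from the codebase):

interface Edl { start: number, end: number | undefined, name: string }

function compact(maybeEdls: (Edl | undefined)[]): Edl[] {
  // returning [] for nullish items and [it] otherwise removes them and narrows the type to Edl[]
  return maybeEdls.flatMap((it) => (it ? [it] : []));
}
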
@ -235,7 +237,7 @@ export async function createChaptersFromSegments({ segmentPaths, chapterNames }:
* Therefore we have to map between detected input format and encode format
* See also ffmpeg -formats
*/
function mapDefaultFormat({ streams, requestedFormat }) {
function mapDefaultFormat({ streams, requestedFormat }: { streams: FFprobeStream[], requestedFormat: string | undefined }) {
if (requestedFormat === 'mp4') {
// Only MOV supports these codecs, so default to MOV instead https://github.com/mifi/lossless-cut/issues/948
// eslint-disable-next-line unicorn/no-lonely-if
@ -250,7 +252,7 @@ function mapDefaultFormat({ streams, requestedFormat }) {
return requestedFormat;
}
async function determineOutputFormat(ffprobeFormatsStr, filePath) {
async function determineOutputFormat(ffprobeFormatsStr: string | undefined, filePath: string) {
const ffprobeFormats = (ffprobeFormatsStr || '').split(',').map((str) => str.trim()).filter(Boolean);
if (ffprobeFormats.length === 0) {
console.warn('ffprobe returned unknown formats', ffprobeFormatsStr);
@ -262,7 +264,7 @@ async function determineOutputFormat(ffprobeFormatsStr, filePath) {
// If ffprobe returned a list of formats, try to be a bit smarter about it.
// This should only be the case for matroska and mov. See `ffmpeg -formats`
if (!['matroska', 'mov'].includes(firstFfprobeFormat)) {
if (firstFfprobeFormat == null || !['matroska', 'mov'].includes(firstFfprobeFormat)) {
console.warn('Unknown ffprobe format list', ffprobeFormats);
return firstFfprobeFormat;
}
@ -319,14 +321,14 @@ async function determineOutputFormat(ffprobeFormatsStr, filePath) {
}
}
export async function getSmarterOutFormat({ filePath, fileMeta: { format, streams } }) {
export async function getSmarterOutFormat({ filePath, fileMeta: { format, streams } }: { filePath: string, fileMeta: { format: FFprobeFormat, streams: FFprobeStream[] } }) {
const formatsStr = format.format_name;
const assumedFormat = await determineOutputFormat(formatsStr, filePath);
return mapDefaultFormat({ streams, requestedFormat: assumedFormat });
}
export async function readFileMeta(filePath) {
export async function readFileMeta(filePath: string) {
try {
const { stdout } = await runFfprobe([
'-of', 'json', '-show_chapters', '-show_format', '-show_entries', 'stream', '-i', filePath, '-hide_banner',
@ -352,7 +354,7 @@ export async function readFileMeta(filePath) {
}
}
function getPreferredCodecFormat(stream) {
function getPreferredCodecFormat(stream: LiteFFprobeStream) {
const map = {
mp3: { format: 'mp3', ext: 'mp3' },
opus: { format: 'opus', ext: 'opus' },
@ -386,7 +388,7 @@ function getPreferredCodecFormat(stream) {
}
async function extractNonAttachmentStreams({ customOutDir, filePath, streams, enableOverwriteOutput }: {
customOutDir?: string, filePath: string, streams: FFprobeStream[], enableOverwriteOutput?: boolean,
customOutDir?: string | undefined, filePath: string, streams: FFprobeStream[], enableOverwriteOutput: boolean | undefined,
}) {
if (streams.length === 0) return [];
@ -427,7 +429,7 @@ async function extractNonAttachmentStreams({ customOutDir, filePath, streams, en
}
async function extractAttachmentStreams({ customOutDir, filePath, streams, enableOverwriteOutput }: {
customOutDir?: string, filePath: string, streams: FFprobeStream[], enableOverwriteOutput?: boolean,
customOutDir?: string | undefined, filePath: string, streams: FFprobeStream[], enableOverwriteOutput: boolean | undefined,
}) {
if (streams.length === 0) return [];
@ -468,7 +470,9 @@ async function extractAttachmentStreams({ customOutDir, filePath, streams, enabl
}
// https://stackoverflow.com/questions/32922226/extract-every-audio-and-subtitles-from-a-video-with-ffmpeg
export async function extractStreams({ filePath, customOutDir, streams, enableOverwriteOutput }) {
export async function extractStreams({ filePath, customOutDir, streams, enableOverwriteOutput }: {
filePath: string, customOutDir: string | undefined, streams: FFprobeStream[], enableOverwriteOutput?: boolean | undefined,
}) {
const attachmentStreams = streams.filter((s) => s.codec_type === 'attachment');
const nonAttachmentStreams = streams.filter((s) => s.codec_type !== 'attachment');
@ -481,7 +485,7 @@ export async function extractStreams({ filePath, customOutDir, streams, enableOv
];
}
async function renderThumbnail(filePath, timestamp) {
async function renderThumbnail(filePath: string, timestamp: number) {
const args = [
'-ss', timestamp,
'-i', filePath,
@ -498,7 +502,7 @@ async function renderThumbnail(filePath, timestamp) {
return URL.createObjectURL(blob);
}
export async function extractSubtitleTrack(filePath, streamId) {
export async function extractSubtitleTrack(filePath: string, streamId: number) {
const args = [
'-hide_banner',
'-i', filePath,
@ -513,7 +517,9 @@ export async function extractSubtitleTrack(filePath, streamId) {
return URL.createObjectURL(blob);
}
export async function renderThumbnails({ filePath, from, duration, onThumbnail }) {
export async function renderThumbnails({ filePath, from, duration, onThumbnail }: {
filePath: string, from: number, duration: number, onThumbnail: (a: { time: number, url: string }) => void,
}) {
// Time first render to determine how many to render
const startTime = Date.now() / 1000;
let url = await renderThumbnail(filePath, from);
@ -533,7 +539,7 @@ export async function renderThumbnails({ filePath, from, duration, onThumbnail }
}, { concurrency: 2 });
}
export async function extractWaveform({ filePath, outPath }) {
export async function extractWaveform({ filePath, outPath }: { filePath: string, outPath: string }) {
const numSegs = 10;
const duration = 60 * 60;
const maxLen = 0.1;
@ -560,29 +566,29 @@ export async function extractWaveform({ filePath, outPath }) {
console.timeEnd('ffmpeg');
}
export function isIphoneHevc(format, streams) {
export function isIphoneHevc(format: FFprobeFormat, streams: FFprobeStream[]) {
if (!streams.some((s) => s.codec_name === 'hevc')) return false;
const makeTag = format.tags && format.tags['com.apple.quicktime.make'];
const modelTag = format.tags && format.tags['com.apple.quicktime.model'];
return (makeTag === 'Apple' && modelTag.startsWith('iPhone'));
}
export function isProblematicAvc1(outFormat, streams) {
export function isProblematicAvc1(outFormat: string | undefined, streams: FFprobeStream[]) {
// it seems like this only happens for files that are also 4.2.2 10bit (yuv422p10le)
// https://trac.ffmpeg.org/wiki/Chroma%20Subsampling
return isMov(outFormat) && streams.some((s) => s.codec_name === 'h264' && s.codec_tag === '0x31637661' && s.codec_tag_string === 'avc1' && s.pix_fmt === 'yuv422p10le');
}
function parseFfprobeFps(stream) {
function parseFfprobeFps(stream: FFprobeStream) {
const match = typeof stream.avg_frame_rate === 'string' && stream.avg_frame_rate.match(/^(\d+)\/(\d+)$/);
if (!match) return undefined;
const num = parseInt(match[1], 10);
const den = parseInt(match[2], 10);
const num = parseInt(match[1]!, 10);
const den = parseInt(match[2]!, 10);
if (den > 0) return num / den;
return undefined;
}
export function getStreamFps(stream) {
export function getStreamFps(stream: FFprobeStream) {
if (stream.codec_type === 'video') {
const fps = parseFfprobeFps(stream);
return fps;
@ -609,7 +615,7 @@ export function getStreamFps(stream) {
}
function parseTimecode(str, frameRate) {
function parseTimecode(str: string, frameRate?: number | undefined) {
// console.log(str, frameRate);
const t = Timecode(str, frameRate ? parseFloat(frameRate.toFixed(3)) : undefined);
if (!t) return undefined;
@ -617,15 +623,15 @@ function parseTimecode(str, frameRate) {
return Number.isFinite(seconds) ? seconds : undefined;
}
export function getTimecodeFromStreams(streams) {
export function getTimecodeFromStreams(streams: FFprobeStream[]) {
console.log('Trying to load timecode');
let foundTimecode;
streams.find((stream) => {
try {
if (stream.tags && stream.tags.timecode) {
if (stream.tags && stream.tags['timecode']) {
const fps = getStreamFps(stream);
foundTimecode = parseTimecode(stream.tags.timecode, fps);
console.log('Loaded timecode', stream.tags.timecode, 'from stream', stream.index);
foundTimecode = parseTimecode(stream.tags['timecode'], fps);
console.log('Loaded timecode', stream.tags['timecode'], 'from stream', stream.index);
return true;
}
return undefined;
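
The match[1]! / match[2]! assertions and the stream.tags['timecode'] bracket access above likely reflect stricter compiler options (indexed access treated as possibly undefined, and index-signature properties requiring bracket notation). Under that assumption, the non-null assertions could also be avoided with an explicit guard, as in this hypothetical parseFrameRate sketch:

function parseFrameRate(avgFrameRate: string | undefined): number | undefined {
  const match = typeof avgFrameRate === 'string' && avgFrameRate.match(/^(\d+)\/(\d+)$/);
  if (!match) return undefined;
  // each capture group is string | undefined when indexed access is unchecked
  const [, numStr, denStr] = match;
  if (numStr == null || denStr == null) return undefined;
  const num = parseInt(numStr, 10);
  const den = parseInt(denStr, 10);
  return den > 0 ? num / den : undefined;
}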

View File

@ -1,5 +1,7 @@
// Taken from: https://github.com/facebookarchive/fixed-data-table/blob/master/src/vendor_upstream/dom/normalizeWheel.js
import { WheelEvent } from 'react';
/**
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
@ -117,17 +119,18 @@ const PAGE_HEIGHT = 800;
* Firefox v4/Win7 | undefined | 3
*
*/
export default function normalizeWheel(/* object */ event) /* object */ {
export default function normalizeWheel(/* object */ event: WheelEvent<Element>) /* object */ {
let sX = 0; let sY = 0; // spinX, spinY
let pX = 0; let pY = 0; // pixelX, pixelY
// Legacy
if ('detail' in event) { sY = event.detail; }
if ('wheelDelta' in event) { sY = -event.wheelDelta / 120; }
if ('wheelDeltaY' in event) { sY = -event.wheelDeltaY / 120; }
if ('wheelDeltaX' in event) { sX = -event.wheelDeltaX / 120; }
if ('wheelDelta' in event) { sY = -(event.wheelDelta as number) / 120; }
if ('wheelDeltaY' in event) { sY = -(event.wheelDeltaY as number) / 120; }
if ('wheelDeltaX' in event) { sX = -(event.wheelDeltaX as number) / 120; }
// side scrolling on FF with DOMMouseScroll
// @ts-expect-error todo
if ('axis' in event && event.axis === event.HORIZONTAL_AXIS) {
sX = sY;
sY = 0;
@ -139,7 +142,7 @@ export default function normalizeWheel(/* object */ event) /* object */ {
if ('deltaY' in event) { pY = event.deltaY; }
if ('deltaX' in event) { pX = event.deltaX; }
if ((pX || pY) && event.deltaMode) {
if ((pX || pY) && 'deltaMode' in event && event.deltaMode) {
if (event.deltaMode === 1) { // delta in LINE units
pX *= LINE_HEIGHT;
pY *= LINE_HEIGHT;

View File

@ -2,14 +2,17 @@ import { useState, useCallback, useRef, useEffect, useMemo } from 'react';
import sortBy from 'lodash/sortBy';
import useDebounceOld from 'react-use/lib/useDebounce'; // Want to phase out this
import { readFramesAroundTime, findNearestKeyFrameTime as ffmpegFindNearestKeyFrameTime } from '../ffmpeg';
import { readFramesAroundTime, findNearestKeyFrameTime as ffmpegFindNearestKeyFrameTime, Frame } from '../ffmpeg';
import { FFprobeStream } from '../../ffprobe';
const maxKeyframes = 1000;
// const maxKeyframes = 100;
export default ({ keyframesEnabled, filePath, commandedTime, videoStream, detectedFps, ffmpegExtractWindow }) => {
const readingKeyframesPromise = useRef();
const [neighbouringKeyFramesMap, setNeighbouringKeyFrames] = useState({});
export default ({ keyframesEnabled, filePath, commandedTime, videoStream, detectedFps, ffmpegExtractWindow }: {
keyframesEnabled: boolean, filePath: string | undefined, commandedTime: number, videoStream: FFprobeStream | undefined, detectedFps: number | undefined, ffmpegExtractWindow: number,
}) => {
const readingKeyframesPromise = useRef<Promise<unknown>>();
const [neighbouringKeyFramesMap, setNeighbouringKeyFrames] = useState<Record<string, Frame>>({});
const neighbouringKeyFrames = useMemo(() => Object.values(neighbouringKeyFramesMap), [neighbouringKeyFramesMap]);
const findNearestKeyFrameTime = useCallback(({ time, direction }) => ffmpegFindNearestKeyFrameTime({ frames: neighbouringKeyFrames, time, direction, fps: detectedFps }), [neighbouringKeyFrames, detectedFps]);
@ -22,7 +25,7 @@ export default ({ keyframesEnabled, filePath, commandedTime, videoStream, detect
(async () => {
// See getIntervalAroundTime
// We still want to calculate keyframes even if not shouldShowKeyframes because maybe we want to be able to step to the closest keyframe
const shouldRun = keyframesEnabled && filePath && videoStream && commandedTime != null && !readingKeyframesPromise.current;
const shouldRun = keyframesEnabled && filePath != null && videoStream && commandedTime != null && !readingKeyframesPromise.current;
if (!shouldRun) return;
try {
@ -37,7 +40,7 @@ export default ({ keyframesEnabled, filePath, commandedTime, videoStream, detect
if (existingFrames.length >= maxKeyframes) {
existingFrames = sortBy(existingFrames, 'createdAt').slice(newKeyFrames.length);
}
const toObj = (map) => Object.fromEntries(map.map((frame) => [frame.time, frame]));
const toObj = (map: Frame[]) => Object.fromEntries(map.map((frame) => [frame.time, frame]));
return {
...toObj(existingFrames),
...toObj(newKeyFrames),

View File

@ -2,6 +2,7 @@ import { useCallback, useRef, useMemo, useState, MutableRefObject } from 'react'
import { useStateWithHistory } from 'react-use/lib/useStateWithHistory';
import i18n from 'i18next';
import pMap from 'p-map';
import invariant from 'tiny-invariant';
import sortBy from 'lodash/sortBy';
@ -109,7 +110,7 @@ function useSegments({ filePath, workingRef, setWorking, setCutProgress, videoSt
// These are segments guaranteed to have a start and end time
const apparentCutSegments = useMemo(() => getApparentCutSegments(cutSegments), [cutSegments, getApparentCutSegments]);
const getApparentCutSegmentById = useCallback((id) => apparentCutSegments.find((s) => s.segId === id), [apparentCutSegments]);
const getApparentCutSegmentById = useCallback((id: string) => apparentCutSegments.find((s) => s.segId === id), [apparentCutSegments]);
const haveInvalidSegs = useMemo(() => apparentCutSegments.some((cutSegment) => cutSegment.start >= cutSegment.end), [apparentCutSegments]);
@ -147,6 +148,7 @@ function useSegments({ filePath, workingRef, setWorking, setCutProgress, videoSt
const createSegmentsFromKeyframes = useCallback(async () => {
if (!videoStream) return;
invariant(filePath != null);
const keyframes = (await readFrames({ filePath, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end, streamIndex: videoStream.index })).filter((frame) => frame.keyframe);
const newSegments = mapTimesToSegments(keyframes.map((keyframe) => keyframe.time));
loadCutSegments(newSegments, true);

View File

@ -1,4 +1,4 @@
import { useCallback } from 'react';
import { WheelEventHandler, useCallback } from 'react';
import { t } from 'i18next';
import normalizeWheel from './normalizeWheel';
@ -19,8 +19,10 @@ export const getModifierKeyNames = () => ({
export const getModifier = (key) => getModifierKeyNames()[key];
function useTimelineScroll({ wheelSensitivity, mouseWheelZoomModifierKey, invertTimelineScroll, zoomRel, seekRel }) {
const onWheel = useCallback((e) => {
function useTimelineScroll({ wheelSensitivity, mouseWheelZoomModifierKey, invertTimelineScroll, zoomRel, seekRel }: {
wheelSensitivity: number, mouseWheelZoomModifierKey: string, invertTimelineScroll?: boolean | undefined, zoomRel: (a: number) => void, seekRel: (a: number) => void,
}) {
const onWheel = useCallback<WheelEventHandler<Element>>((e) => {
const { pixelX, pixelY } = normalizeWheel(e);
// console.log({ spinX, spinY, pixelX, pixelY });

View File

@ -24,41 +24,59 @@ export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnable
const waveformColor = darkMode ? waveformColorDark : waveformColorLight;
const timeThrottled = useThrottle(relevantTime, 1000);
useEffect(() => {
waveformsRef.current = [];
setWaveforms([]);
}, [filePath, audioStream, setWaveforms]);
const waveformStartTime = Math.floor(relevantTime / ffmpegExtractWindow) * ffmpegExtractWindow;
const safeExtractDuration = Math.min(waveformStartTime + ffmpegExtractWindow, durationSafe) - waveformStartTime;
const waveformStartTimeThrottled = useThrottle(waveformStartTime, 1000);
useEffect(() => {
let aborted = false;
(async () => {
const waveformStartTime = Math.floor(timeThrottled / ffmpegExtractWindow) * ffmpegExtractWindow;
const alreadyHaveWaveformAtTime = (waveformsRef.current || []).some((waveform) => waveform.from === waveformStartTime);
const shouldRun = filePath && audioStream && timeThrottled != null && waveformEnabled && !alreadyHaveWaveformAtTime && !creatingWaveformPromise.current;
const alreadyHaveWaveformAtTime = (waveformsRef.current ?? []).some((waveform) => waveform.from === waveformStartTimeThrottled);
const shouldRun = !!filePath && audioStream && waveformEnabled && !alreadyHaveWaveformAtTime && !creatingWaveformPromise.current;
if (!shouldRun) return;
try {
const safeExtractDuration = Math.min(waveformStartTime + ffmpegExtractWindow, durationSafe) - waveformStartTime;
const promise = renderWaveformPng({ filePath, start: waveformStartTime, duration: safeExtractDuration, color: waveformColor, streamIndex: audioStream.index });
const promise = renderWaveformPng({ filePath, start: waveformStartTimeThrottled, duration: safeExtractDuration, color: waveformColor, streamIndex: audioStream.index });
creatingWaveformPromise.current = promise;
const { buffer, ...newWaveform } = await promise;
if (aborted) return;
setWaveforms((currentWaveforms) => {
const waveformsByCreatedAt = sortBy(currentWaveforms, 'createdAt');
return [
// cleanup old
...(currentWaveforms.length >= maxWaveforms ? waveformsByCreatedAt.slice(1) : waveformsByCreatedAt),
// add new
{
...newWaveform,
url: URL.createObjectURL(new Blob([buffer], { type: 'image/png' })),
from: waveformStartTimeThrottled,
to: waveformStartTimeThrottled + safeExtractDuration,
duration: safeExtractDuration,
createdAt: new Date(),
},
];
});
const { buffer } = await promise;
if (aborted) {
setWaveforms((currentWaveforms) => currentWaveforms.filter((w) => w.from !== waveformStartTimeThrottled));
return;
}
setWaveforms((currentWaveforms) => currentWaveforms.map((w) => {
if (w.from !== waveformStartTimeThrottled) {
return w;
}
return {
...w,
url: URL.createObjectURL(new Blob([buffer], { type: 'image/png' })),
};
}));
} catch (err) {
console.error('Failed to render waveform', err);
} finally {
@ -69,14 +87,16 @@ export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnable
return () => {
aborted = true;
};
}, [filePath, timeThrottled, waveformEnabled, audioStream, ffmpegExtractWindow, durationSafe, waveformColor, setWaveforms]);
}, [audioStream, filePath, safeExtractDuration, waveformColor, waveformEnabled, waveformStartTimeThrottled]);
const lastWaveformsRef = useRef<RenderableWaveform[]>([]);
useEffect(() => {
const removedWaveforms = lastWaveformsRef.current.filter((wf) => !waveforms.includes(wf));
// Cleanup old
// if (removedWaveforms.length > 0) console.log('cleanup waveforms', removedWaveforms.length);
removedWaveforms.forEach((waveform) => URL.revokeObjectURL(waveform.url));
removedWaveforms.forEach((waveform) => {
if (waveform.url != null) URL.revokeObjectURL(waveform.url);
});
lastWaveformsRef.current = waveforms;
}, [waveforms]);
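
The hook change above is the core of the waveform improvement: rather than adding a waveform entry only after ffmpeg has rendered the PNG, an entry with from/to/duration but no url appears to be inserted for the time window up front (so the timeline can show a spinner), the url is filled in once the buffer arrives, and revoking object URLs now skips entries that never got one. A rough, self-contained sketch of the two state updates; the helper names are illustrative, and the interface mirrors RenderableWaveform from types.ts:

// shape matches RenderableWaveform in types.ts (url stays unset until rendered)
interface RenderableWaveform { createdAt: Date, from: number, to: number, duration: number, url?: string }

// add a placeholder for the extraction window before rendering starts
function addPlaceholder(waveforms: RenderableWaveform[], from: number, duration: number): RenderableWaveform[] {
  return [...waveforms, { from, to: from + duration, duration, createdAt: new Date() }];
}

// once the PNG buffer is ready, attach an object URL to the matching entry
function fillUrl(waveforms: RenderableWaveform[], from: number, buffer: Uint8Array): RenderableWaveform[] {
  return waveforms.map((w) => (w.from === from
    ? { ...w, url: URL.createObjectURL(new Blob([buffer], { type: 'image/png' })) }
    : w));
}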

View File

@ -72,10 +72,11 @@ export type EdlExportType = 'csv' | 'tsv-human' | 'csv-human' | 'csv-frames' | '
export type TunerType = 'wheelSensitivity' | 'keyboardNormalSeekSpeed' | 'keyboardSeekAccFactor';
export interface RenderableWaveform {
createdAt: Date,
from: number,
to: number,
duration: number,
url: string,
url?: string,
}
export type FfmpegCommandLog = { command: string, time: Date }[];

View File

@ -105,8 +105,4 @@ export type StoreResetConfig = <T extends keyof Config>(key: T) => void;
export interface Waveform {
buffer: Buffer,
from: number,
to: number,
duration: number,
createdAt: Date,
}