implement video/audio track switching #256

also with MediaSource for unsupported codecs
pull/1899/head
Mikael Finstad 2024-02-11 22:36:49 +08:00
parent e326adf9be
commit 6fddf72a2d
No key found in the database for this signature
GPG key ID: 25AB36E3E81CBC26
16 changed files with 285 additions and 141 deletions

View file

@ -50,6 +50,7 @@
"@types/sortablejs": "^1.15.0",
"@typescript-eslint/eslint-plugin": "^6.17.0",
"@typescript-eslint/parser": "^6.17.0",
"@uidotdev/usehooks": "^2.4.1",
"@vitejs/plugin-react": "^3.1.0",
"color": "^3.1.0",
"concurrently": "^6.0.0",

View file

@ -26,6 +26,11 @@ require('./i18n');
const { app, ipcMain, shell, BrowserWindow, nativeTheme } = electron;
// https://chromestatus.com/feature/5748496434987008
// https://peter.sh/experiments/chromium-command-line-switches/
// https://chromium.googlesource.com/chromium/src/+/main/third_party/blink/renderer/platform/runtime_enabled_features.json5
app.commandLine.appendSwitch('enable-blink-features', 'AudioVideoTracks');
remote.initialize();
unhandled({

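The enable-blink-features=AudioVideoTracks switch above turns on Chromium's experimental track API, so HTMLMediaElement in the renderer exposes videoTracks and audioTracks lists (see the chromestatus link). A minimal illustrative sketch of what that enables, not part of this commit; the real helpers are added in util/streams further down:

// Illustrative only: enumerate and switch native tracks once the Blink flag is active.
const video = document.querySelector('video.main-player');
const audioTracks = [...(video.audioTracks ?? [])]; // empty array if the flag is off
audioTracks.forEach((track) => {
  // track ids are 1-indexed strings ('1', '2', ...); enable only the second track
  // eslint-disable-next-line no-param-reassign
  track.enabled = track.id === '2';
});
console.log('native audio tracks:', audioTracks.map((t) => `${t.id} ${t.language || ''}`));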
View file

@ -148,7 +148,7 @@ async function runFfprobe(args, { timeout = isDev ? 10000 : 30000 } = {}) {
}
}
async function renderWaveformPng({ filePath, start, duration, color }) {
async function renderWaveformPng({ filePath, start, duration, color, streamIndex }) {
const args1 = [
'-hide_banner',
'-i', filePath,
@ -156,7 +156,7 @@ async function renderWaveformPng({ filePath, start, duration, color }) {
'-t', duration,
'-c', 'copy',
'-vn',
'-map', 'a:0',
'-map', `0:${streamIndex}`,
'-f', 'matroska', // mpegts doesn't support vorbis etc
'-',
];
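For clarity: the change above replaces the hard-coded first-audio-stream mapping with the caller-supplied absolute stream index, which is what lets the waveform follow the active audio track. An illustrative sketch, assuming a hypothetical streamIndex of 2:

// Illustrative only (not part of the diff): what the '-map' pair resolves to.
const streamIndex = 2;
const newMapArgs = ['-map', `0:${streamIndex}`]; // absolute stream #2 of input 0
const oldMapArgs = ['-map', 'a:0'];              // always the first audio stream
console.log(newMapArgs, oldMapArgs);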
@ -397,7 +397,7 @@ async function html5ify({ outPath, filePath: filePathArg, speed, hasAudio, hasVi
else video = 'copy';
}
console.log('Making HTML5 friendly version', { filePathArg, outPath, video, audio });
console.log('Making HTML5 friendly version', { filePathArg, outPath, speed, video, audio });
let videoArgs;
let audioArgs;

View file

@ -43,7 +43,7 @@ import BottomBar from './BottomBar';
import ExportConfirm from './components/ExportConfirm';
import ValueTuners from './components/ValueTuners';
import VolumeControl from './components/VolumeControl';
import SubtitleControl from './components/SubtitleControl';
import PlaybackStreamSelector from './components/PlaybackStreamSelector';
import BatchFilesList from './components/BatchFilesList';
import ConcatDialog from './components/ConcatDialog';
import KeyboardShortcuts from './components/KeyboardShortcuts';
@ -61,7 +61,7 @@ import {
getDuration, getTimecodeFromStreams, createChaptersFromSegments, extractSubtitleTrack,
RefuseOverwriteError, abortFfmpegs,
} from './ffmpeg';
import { shouldCopyStreamByDefault, getAudioStreams, getRealVideoStreams, isAudioDefinitelyNotSupported, willPlayerProperlyHandleVideo, doesPlayerSupportHevcPlayback, isStreamThumbnail } from './util/streams';
import { shouldCopyStreamByDefault, getAudioStreams, getRealVideoStreams, isAudioDefinitelyNotSupported, willPlayerProperlyHandleVideo, doesPlayerSupportHevcPlayback, isStreamThumbnail, getSubtitleStreams, getVideoTrackForStreamIndex, getAudioTrackForStreamIndex, enableVideoTrack, enableAudioTrack } from './util/streams';
import { exportEdlFile, readEdlFile, saveLlcProject, loadLlcProject, askForEdlImport } from './edlStore';
import { formatYouTube, getFrameCountRaw, formatTsv } from './edlFormats';
import {
@ -112,7 +112,7 @@ function App() {
const [working, setWorkingState] = useState();
const [usingDummyVideo, setUsingDummyVideo] = useState(false);
const [playing, setPlaying] = useState(false);
const [canvasPlayerEventId, setCanvasPlayerEventId] = useState(0);
const [compatPlayerEventId, setCompatPlayerEventId] = useState(0);
const playbackModeRef = useRef();
const [playerTime, setPlayerTime] = useState();
const [duration, setDuration] = useState();
@ -125,8 +125,6 @@ function App() {
const [paramsByStreamId, setParamsByStreamId] = useState(new Map());
const [detectedFps, setDetectedFps] = useState();
const [mainFileMeta, setMainFileMeta] = useState({ streams: [], formatData: {} });
const [mainVideoStream, setMainVideoStream] = useState();
const [mainAudioStream, setMainAudioStream] = useState();
const [copyStreamIdsByFile, setCopyStreamIdsByFile] = useState({});
const [streamsSelectorShown, setStreamsSelectorShown] = useState(false);
const [concatDialogVisible, setConcatDialogVisible] = useState(false);
@ -135,8 +133,10 @@ function App() {
const [shortestFlag, setShortestFlag] = useState(false);
const [zoomWindowStartTime, setZoomWindowStartTime] = useState(0);
const [subtitlesByStreamId, setSubtitlesByStreamId] = useState({});
const [activeVideoStreamIndex, setActiveVideoStreamIndex] = useState();
const [activeAudioStreamIndex, setActiveAudioStreamIndex] = useState();
const [activeSubtitleStreamIndex, setActiveSubtitleStreamIndex] = useState();
const [hideCanvasPreview, setHideCanvasPreview] = useState(false);
const [hideMediaSourcePlayer, setHideMediaSourcePlayer] = useState(false);
const [exportConfirmVisible, setExportConfirmVisible] = useState(false);
const [cacheBuster, setCacheBuster] = useState(0);
const [mergedOutFileName, setMergedOutFileName] = useState();
@ -292,7 +292,7 @@ function App() {
video.currentTime = outVal;
setCommandedTime(outVal);
setCanvasPlayerEventId((id) => id + 1); // To make sure that we can seek even to the same commanded time that we are already at (e.g. loop current segment)
setCompatPlayerEventId((id) => id + 1); // To make sure that we can seek even to the same commanded time that we are already at (e.g. loop current segment)
}, []);
const userSeekAbs = useCallback((val) => {
@ -324,20 +324,43 @@ function App() {
userSeekAbs(nextFrame / fps);
}, [detectedFps, userSeekAbs]);
// 360 means we don't modify rotation
const mainStreams = useMemo(() => mainFileMeta.streams, [mainFileMeta.streams]);
const mainFileFormatData = useMemo(() => mainFileMeta.formatData, [mainFileMeta.formatData]);
const mainFileChapters = useMemo(() => mainFileMeta.chapters, [mainFileMeta.chapters]);
const isCopyingStreamId = useCallback((path, streamId) => (
!!(copyStreamIdsByFile[path] || {})[streamId]
), [copyStreamIdsByFile]);
const checkCopyingAnyTrackOfType = useCallback((filter) => mainStreams.some(stream => isCopyingStreamId(filePath, stream.index) && filter(stream)), [filePath, isCopyingStreamId, mainStreams]);
const copyAnyAudioTrack = useMemo(() => checkCopyingAnyTrackOfType((stream) => stream.codec_type === 'audio'), [checkCopyingAnyTrackOfType]);
const subtitleStreams = useMemo(() => getSubtitleStreams(mainStreams), [mainStreams]);
const videoStreams = useMemo(() => getRealVideoStreams(mainStreams), [mainStreams]);
const audioStreams = useMemo(() => getAudioStreams(mainStreams), [mainStreams]);
const mainVideoStream = useMemo(() => videoStreams[0], [videoStreams]);
const mainAudioStream = useMemo(() => audioStreams[0], [audioStreams]);
const activeVideoStream = useMemo(() => (activeVideoStreamIndex != null ? videoStreams.find((stream) => stream.index === activeVideoStreamIndex) : undefined) ?? mainVideoStream, [activeVideoStreamIndex, mainVideoStream, videoStreams]);
const activeAudioStream = useMemo(() => (activeAudioStreamIndex != null ? audioStreams.find((stream) => stream.index === activeAudioStreamIndex) : undefined) ?? mainAudioStream, [activeAudioStreamIndex, audioStreams, mainAudioStream]);
const activeSubtitle = useMemo(() => subtitlesByStreamId[activeSubtitleStreamIndex], [activeSubtitleStreamIndex, subtitlesByStreamId]);
// 360 means we don't modify rotation
const isRotationSet = rotation !== 360;
const effectiveRotation = useMemo(() => (isRotationSet ? rotation : (mainVideoStream && mainVideoStream.tags && mainVideoStream.tags.rotate && parseInt(mainVideoStream.tags.rotate, 10))), [isRotationSet, mainVideoStream, rotation]);
const effectiveRotation = useMemo(() => (isRotationSet ? rotation : (activeVideoStream?.tags?.rotate && parseInt(activeVideoStream.tags.rotate, 10))), [isRotationSet, activeVideoStream, rotation]);
const zoomRel = useCallback((rel) => setZoom((z) => Math.min(Math.max(z + (rel * (1 + (z / 10))), 1), zoomMax)), []);
const canvasPlayerRequired = usingDummyVideo;
// Allow user to disable it
const canvasPlayerWanted = isRotationSet && !hideCanvasPreview;
const canvasPlayerEnabled = canvasPlayerRequired || canvasPlayerWanted;
const compatPlayerRequired = usingDummyVideo;
const compatPlayerWanted = (isRotationSet || activeVideoStreamIndex != null || activeAudioStreamIndex != null) && !hideMediaSourcePlayer;
const compatPlayerEnabled = (compatPlayerRequired || compatPlayerWanted) && (activeVideoStream != null || activeAudioStream != null);
const shouldShowPlaybackStreamSelector = videoStreams.length > 1 || audioStreams.length > 1 || (compatPlayerEnabled && subtitleStreams.length > 0);
useEffect(() => {
// Reset the user preference when the state changes to true
if (canvasPlayerEnabled) setHideCanvasPreview(false);
}, [canvasPlayerEnabled]);
if (compatPlayerEnabled) setHideMediaSourcePlayer(false);
}, [compatPlayerEnabled]);
const comfortZoom = isDurationValid(duration) ? Math.max(duration / 100, 1) : undefined;
const timelineToggleComfortZoom = useCallback(() => {
@ -366,7 +389,7 @@ function App() {
const {
cutSegments, cutSegmentsHistory, createSegmentsFromKeyframes, shuffleSegments, detectBlackScenes, detectSilentScenes, detectSceneChanges, removeCutSegment, invertAllSegments, fillSegmentsGaps, combineOverlappingSegments, combineSelectedSegments, shiftAllSegmentTimes, alignSegmentTimesToKeyframes, updateSegOrder, updateSegOrders, reorderSegsByStartTime, addSegment, setCutStart, setCutEnd, onLabelSegment, splitCurrentSegment, createNumSegments, createFixedDurationSegments, createRandomSegments, apparentCutSegments, haveInvalidSegs, currentSegIndexSafe, currentCutSeg, currentApparentCutSeg, inverseCutSegments, clearSegments, loadCutSegments, isSegmentSelected, setCutTime, setCurrentSegIndex, onLabelSelectedSegments, deselectAllSegments, selectAllSegments, selectOnlyCurrentSegment, toggleCurrentSegmentSelected, invertSelectedSegments, removeSelectedSegments, setDeselectedSegmentIds, onSelectSegmentsByLabel, onSelectSegmentsByTag, toggleSegmentSelected, selectOnlySegment, getApparentCutSegmentById, selectedSegments, selectedSegmentsOrInverse, nonFilteredSegmentsOrInverse, segmentsToExport, duplicateCurrentSegment, duplicateSegment, updateSegAtIndex,
} = useSegments({ filePath, workingRef, setWorking, setCutProgress, mainVideoStream, duration, getRelevantTime, maxLabelLength, checkFileOpened, invertCutSegments, segmentsToChaptersOnly });
} = useSegments({ filePath, workingRef, setWorking, setCutProgress, videoStream: activeVideoStream, duration, getRelevantTime, maxLabelLength, checkFileOpened, invertCutSegments, segmentsToChaptersOnly });
const jumpSegStart = useCallback((index) => userSeekAbs(apparentCutSegments[index].start), [apparentCutSegments, userSeekAbs]);
const jumpSegEnd = useCallback((index) => userSeekAbs(apparentCutSegments[index].end), [apparentCutSegments, userSeekAbs]);
@ -488,7 +511,7 @@ function App() {
const increaseRotation = useCallback(() => {
setRotation((r) => (r + 90) % 450);
setHideCanvasPreview(false);
setHideMediaSourcePlayer(false);
// Matroska is known not to work, so we warn the user. See https://github.com/mifi/lossless-cut/discussions/661
const supportsRotation = !['matroska', 'webm'].includes(fileFormat);
if (!supportsRotation && !hideAllNotifications) toast.fire({ text: i18n.t('Lossless rotation might not work with this file format. You may try changing to MP4') });
@ -572,20 +595,6 @@ function App() {
},
}), [preferStrongColors]);
const isCopyingStreamId = useCallback((path, streamId) => (
!!(copyStreamIdsByFile[path] || {})[streamId]
), [copyStreamIdsByFile]);
const mainStreams = useMemo(() => mainFileMeta.streams, [mainFileMeta.streams]);
const mainFileFormatData = useMemo(() => mainFileMeta.formatData, [mainFileMeta.formatData]);
const mainFileChapters = useMemo(() => mainFileMeta.chapters, [mainFileMeta.chapters]);
const checkCopyingAnyTrackOfType = useCallback((filter) => mainStreams.some(stream => isCopyingStreamId(filePath, stream.index) && filter(stream)), [filePath, isCopyingStreamId, mainStreams]);
const copyAnyAudioTrack = useMemo(() => checkCopyingAnyTrackOfType((stream) => stream.codec_type === 'audio'), [checkCopyingAnyTrackOfType]);
const subtitleStreams = useMemo(() => mainStreams.filter((stream) => stream.codec_type === 'subtitle'), [mainStreams]);
const activeSubtitle = useMemo(() => subtitlesByStreamId[activeSubtitleStreamIndex], [activeSubtitleStreamIndex, subtitlesByStreamId]);
const onActiveSubtitleChange = useCallback(async (index) => {
if (index == null) {
setActiveSubtitleStreamIndex();
@ -609,6 +618,17 @@ function App() {
}
}, [setWorking, subtitleStreams, subtitlesByStreamId, filePath]);
const onActiveVideoStreamChange = useCallback((index) => {
setHideMediaSourcePlayer(index == null || getVideoTrackForStreamIndex(videoRef.current, index) != null);
enableVideoTrack(videoRef.current, index);
setActiveVideoStreamIndex(index);
}, []);
const onActiveAudioStreamChange = useCallback((index) => {
setHideMediaSourcePlayer(index == null || getAudioTrackForStreamIndex(videoRef.current, index) != null);
enableAudioTrack(videoRef.current, index);
setActiveAudioStreamIndex(index);
}, []);
const mainCopiedStreams = useMemo(() => mainStreams.filter((stream) => isCopyingStreamId(filePath, stream.index)), [filePath, isCopyingStreamId, mainStreams]);
const mainCopiedThumbnailStreams = useMemo(() => mainCopiedStreams.filter(isStreamThumbnail), [mainCopiedStreams]);
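The onActiveVideoStreamChange/onActiveAudioStreamChange handlers above decide whether the native HTML5 track API can serve the switch or whether the MediaSource compat player must take over. A condensed, illustrative restatement of that decision (canUseNativeTrackSwitch is a hypothetical name, not a helper in this commit):

// index == null means "reset to the default track", which the native player always handles.
// Otherwise the browser must actually expose a matching native track; if the codec or
// container is not natively playable, no such track exists and the compat player is shown.
function canUseNativeTrackSwitch(videoEl, index, getTrackForStreamIndex) {
  return index == null || getTrackForStreamIndex(videoEl, index) != null;
}
// e.g. setHideMediaSourcePlayer(canUseNativeTrackSwitch(videoRef.current, index, getAudioTrackForStreamIndex));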
@ -658,8 +678,8 @@ function App() {
setThumbnails(v => [...v, thumbnail]);
}
const hasAudio = !!mainAudioStream;
const hasVideo = !!mainVideoStream;
const hasAudio = !!activeAudioStream;
const hasVideo = !!activeVideoStream;
const waveformEnabled = hasAudio && ['waveform', 'big-waveform'].includes(waveformMode);
const bigWaveformEnabled = waveformEnabled && waveformMode === 'big-waveform';
@ -704,8 +724,8 @@ function App() {
const shouldShowKeyframes = keyframesEnabled && hasVideo && calcShouldShowKeyframes(zoomedDuration);
const shouldShowWaveform = calcShouldShowWaveform(zoomedDuration);
const { neighbouringKeyFrames, findNearestKeyFrameTime } = useKeyframes({ keyframesEnabled, filePath, commandedTime, mainVideoStream, detectedFps, ffmpegExtractWindow });
const { waveforms } = useWaveform({ darkMode, filePath, relevantTime, waveformEnabled, mainAudioStream, shouldShowWaveform, ffmpegExtractWindow, durationSafe });
const { neighbouringKeyFrames, findNearestKeyFrameTime } = useKeyframes({ keyframesEnabled, filePath, commandedTime, videoStream: activeVideoStream, detectedFps, ffmpegExtractWindow });
const { waveforms } = useWaveform({ darkMode, filePath, relevantTime, waveformEnabled, audioStream: activeAudioStream, shouldShowWaveform, ffmpegExtractWindow, durationSafe });
const resetMergedOutFileName = useCallback(() => {
const ext = getOutFileExtension({ isCustomFormatSelected, outFormat: fileFormat, filePath });
@ -727,7 +747,7 @@ function App() {
setUsingDummyVideo(false);
setPlaying(false);
playbackModeRef.current = undefined;
setCanvasPlayerEventId(0);
setCompatPlayerEventId(0);
setDuration();
cutSegmentsHistory.go(0);
clearSegments();
@ -742,8 +762,6 @@ function App() {
setParamsByStreamId(new Map());
setDetectedFps();
setMainFileMeta({ streams: [], formatData: [] });
setMainVideoStream();
setMainAudioStream();
setCopyStreamIdsByFile({});
setStreamsSelectorShown(false);
setZoom(1);
@ -752,8 +770,10 @@ function App() {
setZoomWindowStartTime(0);
setDeselectedSegmentIds({});
setSubtitlesByStreamId({});
setActiveAudioStreamIndex();
setActiveVideoStreamIndex();
setActiveSubtitleStreamIndex();
setHideCanvasPreview(false);
setHideMediaSourcePlayer(false);
setExportConfirmVisible(false);
resetMergedOutFileName();
setOutputPlaybackRateState(1);
@ -1359,7 +1379,7 @@ function App() {
const extractSelectedSegmentsFramesAsImages = useCallback(() => extractSegmentFramesAsImages(selectedSegments.map((seg) => seg.segId)), [extractSegmentFramesAsImages, selectedSegments]);
const changePlaybackRate = useCallback((dir, rateMultiplier) => {
if (canvasPlayerEnabled) {
if (compatPlayerEnabled) {
toast.fire({ title: i18n.t('Unable to change playback rate right now'), timer: 1000 });
return;
}
@ -1372,7 +1392,7 @@ function App() {
toast.fire({ title: `${i18n.t('Playback rate:')} ${Math.round(newRate * 100)}%`, timer: 1000 });
video.playbackRate = newRate;
}
}, [playing, canvasPlayerEnabled]);
}, [playing, compatPlayerEnabled]);
const segmentAtCursor = useMemo(() => {
const segmentsAtCursorIndexes = findSegmentsAtCursor(apparentCutSegments, commandedTime);
@ -1455,11 +1475,8 @@ function App() {
const timecode = autoLoadTimecode ? getTimecodeFromStreams(fileMeta.streams) : undefined;
const videoStreams = getRealVideoStreams(fileMeta.streams);
const audioStreams = getAudioStreams(fileMeta.streams);
const videoStream = videoStreams[0];
const audioStream = audioStreams[0];
const [videoStream] = getRealVideoStreams(fileMeta.streams);
const [audioStream] = getAudioStreams(fileMeta.streams);
const haveVideoStream = !!videoStream;
const haveAudioStream = !!audioStream;
@ -1519,8 +1536,6 @@ function App() {
setDetectedFps(getFps());
if (!haveVideoStream) setWaveformMode('big-waveform');
setMainFileMeta({ streams: fileMeta.streams, formatData: fileMeta.format, chapters: fileMeta.chapters });
setMainVideoStream(videoStream);
setMainAudioStream(audioStream);
setCopyStreamIdsForPath(fp, () => copyStreamIdsForPathNew);
setFileFormat(outFormatLocked || fileFormatNew);
setDetectedFileFormat(fileFormatNew);
@ -2409,7 +2424,7 @@ function App() {
)}
</AnimatePresence>
{/* Middle part: */}
{/* Middle part (also shown in fullscreen): */}
<div style={{ position: 'relative', flexGrow: 1, overflow: 'hidden' }} ref={videoContainerRef}>
{!isFileOpened && <NoFileLoaded mifiLink={mifiLink} currentCutSeg={currentCutSeg} onClick={openFilesDialog} darkMode={darkMode} />}
@ -2418,7 +2433,7 @@ function App() {
<video
className="main-player"
tabIndex={-1}
muted={playbackVolume === 0 || canvasPlayerEnabled}
muted={playbackVolume === 0 || compatPlayerEnabled}
ref={videoRef}
style={videoStyle}
src={fileUri}
@ -2435,26 +2450,35 @@ function App() {
{renderSubtitles()}
</video>
{canvasPlayerEnabled && (mainVideoStream != null || mainAudioStream != null) && <MediaSourcePlayer rotate={effectiveRotation} filePath={filePath} videoStream={mainVideoStream} audioStream={mainAudioStream} playerTime={playerTime} commandedTime={commandedTime} playing={playing} eventId={canvasPlayerEventId} masterVideoRef={videoRef} mediaSourceQuality={mediaSourceQuality} />}
{compatPlayerEnabled && <MediaSourcePlayer rotate={effectiveRotation} filePath={filePath} videoStream={activeVideoStream} audioStream={activeAudioStream} playerTime={playerTime} commandedTime={commandedTime} playing={playing} eventId={compatPlayerEventId} masterVideoRef={videoRef} mediaSourceQuality={mediaSourceQuality} playbackVolume={playbackVolume} />}
</div>
{bigWaveformEnabled && <BigWaveform waveforms={waveforms} relevantTime={relevantTime} playing={playing} durationSafe={durationSafe} zoom={zoomUnrounded} seekRel={seekRel} />}
{isRotationSet && !hideCanvasPreview && (
<div style={{ position: 'absolute', top: 0, right: 0, left: 0, marginTop: '1em', marginLeft: '1em', color: 'white', opacity: 0.7, display: 'flex', alignItems: 'center' }}>
<MdRotate90DegreesCcw size={26} style={{ marginRight: 5 }} />
{t('Rotation preview')}
{!canvasPlayerRequired && <FaWindowClose role="button" style={{ cursor: 'pointer', verticalAlign: 'middle', padding: 10 }} onClick={() => setHideCanvasPreview(true)} />}
{compatPlayerEnabled && (
<div style={{ position: 'absolute', top: 0, right: 0, left: 0, marginTop: '1em', marginLeft: '1em', color: 'white', opacity: 0.7, display: 'flex', alignItems: 'center', pointerEvents: 'none' }}>
{isRotationSet ? (
<>
<MdRotate90DegreesCcw size={26} style={{ marginRight: 5 }} />
{t('Rotation preview')}
</>
) : (
<>
{t('FFmpeg-assisted playback')}
</>
)}
{!compatPlayerRequired && <FaWindowClose role="button" style={{ cursor: 'pointer', pointerEvents: 'initial', verticalAlign: 'middle', padding: 10 }} onClick={() => setHideMediaSourcePlayer(true)} />}
</div>
)}
{isFileOpened && (
<div className="no-user-select" style={{ position: 'absolute', right: 0, bottom: 0, marginBottom: 10, display: 'flex', alignItems: 'center' }}>
{!canvasPlayerEnabled && <VolumeControl playbackVolume={playbackVolume} setPlaybackVolume={setPlaybackVolume} />}
<VolumeControl playbackVolume={playbackVolume} setPlaybackVolume={setPlaybackVolume} />
{!canvasPlayerEnabled && subtitleStreams.length > 0 && <SubtitleControl subtitleStreams={subtitleStreams} activeSubtitleStreamIndex={activeSubtitleStreamIndex} onActiveSubtitleChange={onActiveSubtitleChange} />}
{shouldShowPlaybackStreamSelector && <PlaybackStreamSelector subtitleStreams={subtitleStreams} videoStreams={videoStreams} audioStreams={audioStreams} activeSubtitleStreamIndex={activeSubtitleStreamIndex} activeVideoStreamIndex={activeVideoStreamIndex} activeAudioStreamIndex={activeAudioStreamIndex} onActiveSubtitleChange={onActiveSubtitleChange} onActiveVideoStreamChange={onActiveVideoStreamChange} onActiveAudioStreamChange={onActiveAudioStreamChange} />}
{canvasPlayerEnabled && <div style={{ color: 'white', opacity: 0.7, padding: '.5em' }} role="button" onClick={() => incrementMediaSourceQuality()} title={t('Select preview playback quality')}>{mediaSourceQualities[mediaSourceQuality]}</div>}
{compatPlayerEnabled && <div style={{ color: 'white', opacity: 0.7, padding: '.5em' }} role="button" onClick={() => incrementMediaSourceQuality()} title={t('Select playback quality')}>{mediaSourceQualities[mediaSourceQuality]}</div>}
{!showRightBar && (
<FaAngleLeft

View file

@ -5,7 +5,7 @@ import { useDebounce } from 'use-debounce';
import isDev from './isDev';
const remote = window.require('@electron/remote');
const { createMediaSourceStream, readOneJpegFrame } = remote.require('./canvasPlayer');
const { createMediaSourceStream, readOneJpegFrame } = remote.require('./compatPlayer');
async function startPlayback({ path, video, videoStreamIndex, audioStreamIndex, seekTo, signal, playSafe, onCanPlay, getTargetTime, size, fps }: {
@ -251,7 +251,7 @@ async function createPauseImage({ path, seekTo, videoStreamIndex, canvas, signal
drawJpegFrame(canvas, jpegImage);
}
function MediaSourcePlayer({ rotate, filePath, playerTime, videoStream, audioStream, commandedTime, playing, eventId, masterVideoRef, mediaSourceQuality }) {
function MediaSourcePlayer({ rotate, filePath, playerTime, videoStream, audioStream, commandedTime, playing, eventId, masterVideoRef, mediaSourceQuality, playbackVolume }) {
const videoRef = useRef<HTMLVideoElement>(null);
const canvasRef = useRef<HTMLCanvasElement>(null);
const [loading, setLoading] = useState(true);
@ -328,6 +328,10 @@ function MediaSourcePlayer({ rotate, filePath, playerTime, videoStream, audioStr
// Important that we also have eventId in the deps, so that we can restart the preview when the eventId changes
}, [debouncedState.startTime, debouncedState.eventId, filePath, masterVideoRef, playSafe, debouncedState.playing, videoStream, mediaSourceQuality, audioStream?.index]);
useEffect(() => {
if (videoRef.current) videoRef.current.volume = playbackVolume;
}, [playbackVolume]);
const onFocus = useCallback((e) => {
// prevent video element from stealing focus in fullscreen mode https://github.com/mifi/lossless-cut/issues/543#issuecomment-1868167775
e.target.blur();

View file

@ -588,7 +588,7 @@ const KeyboardShortcuts = memo(({
},
},
};
}, [currentCutSeg, t]);
}, [currentCutSeg, mouseWheelZoomModifierKey, t]);
useEffect(() => {
// cleanup invalid bindings, to prevent renamed actions from blocking the user from rebinding

View file

@ -0,0 +1,100 @@
import React, { memo, useState, useCallback, useRef, useEffect } from 'react';
import { MdSubtitles } from 'react-icons/md';
import { useTranslation } from 'react-i18next';
import Select from './Select';
const PlaybackStreamSelector = memo(({
subtitleStreams,
videoStreams,
audioStreams,
activeSubtitleStreamIndex,
activeVideoStreamIndex,
activeAudioStreamIndex,
onActiveSubtitleChange,
onActiveVideoStreamChange,
onActiveAudioStreamChange,
}) => {
const [controlVisible, setControlVisible] = useState(false);
const timeoutRef = useRef();
const { t } = useTranslation();
const resetTimer = useCallback(() => {
clearTimeout(timeoutRef.current);
timeoutRef.current = setTimeout(() => setControlVisible(false), 7000);
}, []);
const onChange = useCallback((e, fn) => {
resetTimer();
const index = e.target.value ? parseInt(e.target.value, 10) : undefined;
fn(index);
e.target.blur();
}, [resetTimer]);
const onActiveSubtitleChange2 = useCallback((e) => onChange(e, onActiveSubtitleChange), [onActiveSubtitleChange, onChange]);
const onActiveVideoStreamChange2 = useCallback((e) => onChange(e, onActiveVideoStreamChange), [onActiveVideoStreamChange, onChange]);
const onActiveAudioStreamChange2 = useCallback((e) => onChange(e, onActiveAudioStreamChange), [onActiveAudioStreamChange, onChange]);
const onIconClick = useCallback(() => {
resetTimer();
setControlVisible((v) => !v);
}, [resetTimer]);
useEffect(() => () => clearTimeout(timeoutRef.current), []);
return (
<>
{controlVisible && (
<>
{subtitleStreams.length > 0 && (
<Select
value={activeSubtitleStreamIndex ?? ''}
onChange={onActiveSubtitleChange2}
onMouseMove={resetTimer}
>
<option value="">{t('Subtitle')}</option>
{subtitleStreams.map((stream, i) => (
<option key={stream.index} value={stream.index}>#{i + 1} (id {stream.index}) {stream.tags?.language}</option>
))}
</Select>
)}
{videoStreams.length > 1 && (
<Select
value={activeVideoStreamIndex ?? ''}
onChange={onActiveVideoStreamChange2}
onMouseMove={resetTimer}
>
<option value="">{t('Video track')}</option>
{videoStreams.map((stream, i) => (
<option key={stream.index} value={stream.index}>#{i + 1} (id {stream.index + 1}) {stream.codec_name}</option>
))}
</Select>
)}
{audioStreams.length > 1 && (
<Select
value={activeAudioStreamIndex ?? ''}
onChange={onActiveAudioStreamChange2}
onMouseMove={resetTimer}
>
<option value="">{t('Audio track')}</option>
{audioStreams.map((stream, i) => (
<option key={stream.index} value={stream.index}>#{i + 1} (id {stream.index + 1}) {stream.codec_name} - {stream.tags?.language}</option>
))}
</Select>
)}
</>
)}
<MdSubtitles
size={30}
role="button"
style={{ margin: '0 7px', color: 'var(--gray12)', opacity: 0.7 }}
onClick={onIconClick}
/>
</>
);
});
export default PlaybackStreamSelector;
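Note on the selects above: the placeholder option has an empty value, which onChange turns into undefined, and the parent treats that as "reset to the default track". A tiny illustrative sketch of that parsing:

// Illustrative only: mirrors the onChange value parsing above.
const parseSelection = (value) => (value ? parseInt(value, 10) : undefined);
console.log(parseSelection(''));  // undefined -> reset to default track
console.log(parseSelection('3')); // 3         -> activate ffmpeg stream index 3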

View file

@ -1,55 +0,0 @@
import React, { memo, useState, useCallback, useRef, useEffect } from 'react';
import { MdSubtitles } from 'react-icons/md';
import { useTranslation } from 'react-i18next';
import Select from './Select';
const SubtitleControl = memo(({ subtitleStreams, activeSubtitleStreamIndex, onActiveSubtitleChange }) => {
const [controlVisible, setControlVisible] = useState(false);
const timeoutRef = useRef();
const { t } = useTranslation();
const resetTimer = useCallback(() => {
clearTimeout(timeoutRef.current);
timeoutRef.current = setTimeout(() => setControlVisible(false), 7000);
}, []);
const onChange = useCallback((e) => {
resetTimer();
const index = e.target.value ? parseInt(e.target.value, 10) : undefined;
onActiveSubtitleChange(index);
e.target.blur();
}, [onActiveSubtitleChange, resetTimer]);
const onIconClick = useCallback(() => {
resetTimer();
setControlVisible((v) => !v);
}, [resetTimer]);
useEffect(() => () => clearTimeout(timeoutRef.current), []);
return (
<>
{controlVisible && (
<Select
value={activeSubtitleStreamIndex}
onChange={onChange}
>
<option value="">{t('Subtitle')}</option>
{subtitleStreams.map((stream) => (
<option key={stream.index} value={stream.index}>{(stream.tags && stream.tags.language) || stream.index}</option>
))}
</Select>
)}
<MdSubtitles
size={30}
role="button"
style={{ margin: '0 7px', color: 'var(--gray12)', opacity: 0.7 }}
onClick={onIconClick}
/>
</>
);
});
export default SubtitleControl;

View file

@ -12,7 +12,7 @@ const ReactSwal = withReactContent(Swal);
// eslint-disable-next-line import/prefer-default-export
export async function askForHtml5ifySpeed({ allowedOptions, showRemember, initialOption }) {
const availOptions = {
fastest: i18n.t('Fastest: Low playback speed'),
fastest: i18n.t('Fastest: FFmpeg-assisted playback'),
fast: i18n.t('Fast: Full quality remux (no audio), likely to fail'),
'fast-audio-remux': i18n.t('Fast: Full quality remux, likely to fail'),
'fast-audio': i18n.t('Fast: Remux video, encode audio (fails if unsupported video codec)'),

View file

@ -463,9 +463,9 @@ function useFfmpegOperations({ filePath, treatInputFileModifiedTimeAsStart, trea
}, [treatOutputFileModifiedTimeAsStart]);
// This is just used to load something into the player with correct length,
// so user can seek and then we render frames using ffmpeg
// so user can seek and then we render frames using ffmpeg & MediaSource
const html5ifyDummy = useCallback(async ({ filePath: filePathArg, outPath, onProgress }) => {
console.log('Making HTML5 friendly dummy', { filePathArg, outPath });
console.log('Making ffmpeg-assisted dummy file', { filePathArg, outPath });
const duration = await getDuration(filePathArg);

View file

@ -7,14 +7,14 @@ import { readFramesAroundTime, findNearestKeyFrameTime as ffmpegFindNearestKeyFr
const maxKeyframes = 1000;
// const maxKeyframes = 100;
export default ({ keyframesEnabled, filePath, commandedTime, mainVideoStream, detectedFps, ffmpegExtractWindow }) => {
export default ({ keyframesEnabled, filePath, commandedTime, videoStream, detectedFps, ffmpegExtractWindow }) => {
const readingKeyframesPromise = useRef();
const [neighbouringKeyFramesMap, setNeighbouringKeyFrames] = useState({});
const neighbouringKeyFrames = useMemo(() => Object.values(neighbouringKeyFramesMap), [neighbouringKeyFramesMap]);
const findNearestKeyFrameTime = useCallback(({ time, direction }) => ffmpegFindNearestKeyFrameTime({ frames: neighbouringKeyFrames, time, direction, fps: detectedFps }), [neighbouringKeyFrames, detectedFps]);
useEffect(() => setNeighbouringKeyFrames({}), [filePath]);
useEffect(() => setNeighbouringKeyFrames({}), [filePath, videoStream]);
useDebounceOld(() => {
let aborted = false;
@ -22,11 +22,11 @@ export default ({ keyframesEnabled, filePath, commandedTime, mainVideoStream, de
(async () => {
// See getIntervalAroundTime
// We still want to calculate keyframes even if not shouldShowKeyframes because maybe we want to be able to step to the closest keyframe
const shouldRun = keyframesEnabled && filePath && mainVideoStream && commandedTime != null && !readingKeyframesPromise.current;
const shouldRun = keyframesEnabled && filePath && videoStream && commandedTime != null && !readingKeyframesPromise.current;
if (!shouldRun) return;
try {
const promise = readFramesAroundTime({ filePath, aroundTime: commandedTime, streamIndex: mainVideoStream.index, window: ffmpegExtractWindow });
const promise = readFramesAroundTime({ filePath, aroundTime: commandedTime, streamIndex: videoStream.index, window: ffmpegExtractWindow });
readingKeyframesPromise.current = promise;
const newFrames = await promise;
if (aborted) return;
@ -53,7 +53,7 @@ export default ({ keyframesEnabled, filePath, commandedTime, mainVideoStream, de
return () => {
aborted = true;
};
}, 500, [keyframesEnabled, filePath, commandedTime, mainVideoStream, ffmpegExtractWindow]);
}, 500, [keyframesEnabled, filePath, commandedTime, videoStream, ffmpegExtractWindow]);
return {
neighbouringKeyFrames, findNearestKeyFrameTime,

View file

@ -20,7 +20,7 @@ const { blackDetect, silenceDetect } = remote.require('./ffmpeg');
export default ({
filePath, workingRef, setWorking, setCutProgress, mainVideoStream,
filePath, workingRef, setWorking, setCutProgress, videoStream,
duration, getRelevantTime, maxLabelLength, checkFileOpened, invertCutSegments, segmentsToChaptersOnly,
}) => {
// Segment related state
@ -138,11 +138,11 @@ export default ({
}, [currentApparentCutSeg.end, currentApparentCutSeg.start, detectSegments, filePath, setCutProgress]);
const createSegmentsFromKeyframes = useCallback(async () => {
if (!mainVideoStream) return;
const keyframes = (await readFrames({ filePath, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end, streamIndex: mainVideoStream.index })).filter((frame) => frame.keyframe);
if (!videoStream) return;
const keyframes = (await readFrames({ filePath, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end, streamIndex: videoStream.index })).filter((frame) => frame.keyframe);
const newSegments = mapTimesToSegments(keyframes.map((keyframe) => keyframe.time));
loadCutSegments(newSegments, true);
}, [currentApparentCutSeg.end, currentApparentCutSeg.start, filePath, loadCutSegments, mainVideoStream]);
}, [currentApparentCutSeg.end, currentApparentCutSeg.start, filePath, loadCutSegments, videoStream]);
const removeSegments = useCallback((removeSegmentIds) => {
setCutSegments((existingSegments) => {
@ -246,7 +246,7 @@ export default ({
}, [modifySelectedSegmentTimes]);
const alignSegmentTimesToKeyframes = useCallback(async () => {
if (!mainVideoStream || workingRef.current) return;
if (!videoStream || workingRef.current) return;
try {
const response = await askForAlignSegments();
if (response == null) return;
@ -257,7 +257,7 @@ export default ({
async function align(key) {
const time = newSegment[key];
const keyframe = await findKeyframeNearTime({ filePath, streamIndex: mainVideoStream.index, time, mode });
const keyframe = await findKeyframeNearTime({ filePath, streamIndex: videoStream.index, time, mode });
if (keyframe == null) throw new Error(`Cannot find any keyframe within 60 seconds of frame ${time}`);
newSegment[key] = keyframe;
}
@ -270,7 +270,7 @@ export default ({
} finally {
setWorking();
}
}, [filePath, mainVideoStream, modifySelectedSegmentTimes, setWorking, workingRef]);
}, [filePath, videoStream, modifySelectedSegmentTimes, setWorking, workingRef]);
const updateSegOrder = useCallback((index, newOrder) => {
if (newOrder > cutSegments.length - 1 || newOrder < 0) return;

View file

@ -1,6 +1,6 @@
import { useState, useRef, useEffect } from 'react';
import sortBy from 'lodash/sortBy';
import useThrottle from 'react-use/lib/useThrottle';
import { useThrottle } from '@uidotdev/usehooks';
import { waveformColorDark, waveformColorLight } from '../colors';
import { renderWaveformPng } from '../ffmpeg';
@ -8,7 +8,7 @@ import { renderWaveformPng } from '../ffmpeg';
const maxWaveforms = 100;
// const maxWaveforms = 3; // testing
export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnabled, mainAudioStream, shouldShowWaveform, ffmpegExtractWindow }) => {
export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnabled, audioStream, shouldShowWaveform, ffmpegExtractWindow }) => {
const creatingWaveformPromise = useRef();
const [waveforms, setWaveforms] = useState([]);
const waveformsRef = useRef();
@ -21,6 +21,11 @@ export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnable
const timeThrottled = useThrottle(relevantTime, 1000);
useEffect(() => {
waveformsRef.current = [];
setWaveforms([]);
}, [filePath, audioStream, setWaveforms]);
useEffect(() => {
let aborted = false;
@ -28,12 +33,12 @@ export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnable
const waveformStartTime = Math.floor(timeThrottled / ffmpegExtractWindow) * ffmpegExtractWindow;
const alreadyHaveWaveformAtTime = (waveformsRef.current || []).some((waveform) => waveform.from === waveformStartTime);
const shouldRun = filePath && mainAudioStream && timeThrottled != null && shouldShowWaveform && waveformEnabled && !alreadyHaveWaveformAtTime && !creatingWaveformPromise.current;
const shouldRun = filePath && audioStream && timeThrottled != null && shouldShowWaveform && waveformEnabled && !alreadyHaveWaveformAtTime && !creatingWaveformPromise.current;
if (!shouldRun) return;
try {
const safeExtractDuration = Math.min(waveformStartTime + ffmpegExtractWindow, durationSafe) - waveformStartTime;
const promise = renderWaveformPng({ filePath, start: waveformStartTime, duration: safeExtractDuration, color: waveformColor });
const promise = renderWaveformPng({ filePath, start: waveformStartTime, duration: safeExtractDuration, color: waveformColor, streamIndex: audioStream.index });
creatingWaveformPromise.current = promise;
const { buffer, ...newWaveform } = await promise;
if (aborted) return;
@ -59,7 +64,7 @@ export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnable
return () => {
aborted = true;
};
}, [filePath, timeThrottled, waveformEnabled, mainAudioStream, shouldShowWaveform, ffmpegExtractWindow, durationSafe, waveformColor, setWaveforms]);
}, [filePath, timeThrottled, waveformEnabled, audioStream, shouldShowWaveform, ffmpegExtractWindow, durationSafe, waveformColor, setWaveforms]);
const lastWaveformsRef = useRef([]);
useEffect(() => {
@ -70,7 +75,6 @@ export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnable
lastWaveformsRef.current = waveforms;
}, [waveforms]);
useEffect(() => setWaveforms([]), [filePath, setWaveforms]);
useEffect(() => () => setWaveforms([]), [setWaveforms]);
return { waveforms };

View file

@ -223,6 +223,56 @@ export const getAudioStreams = (streams) => streams.filter(stream => stream.code
export const getRealVideoStreams = (streams) => streams.filter(stream => stream.codec_type === 'video' && !isStreamThumbnail(stream));
export const getSubtitleStreams = (streams) => streams.filter(stream => stream.codec_type === 'subtitle');
// videoTracks/audioTracks seem to be 1-indexed, while ffmpeg is 0-indexed
const getHtml5TrackId = (ffmpegTrackIndex) => String(ffmpegTrackIndex + 1);
const getHtml5VideoTracks = (video) => [...(video.videoTracks ?? [])];
const getHtml5AudioTracks = (video) => [...(video.audioTracks ?? [])];
export const getVideoTrackForStreamIndex = (video, index) => getHtml5VideoTracks(video).find((videoTrack) => videoTrack.id === getHtml5TrackId(index));
export const getAudioTrackForStreamIndex = (video, index) => getHtml5AudioTracks(video).find((audioTrack) => audioTrack.id === getHtml5TrackId(index));
function resetVideoTrack(video) {
console.log('Resetting video track');
getHtml5VideoTracks(video).forEach((track, index) => {
// eslint-disable-next-line no-param-reassign
track.selected = index === 0;
});
}
function resetAudioTrack(video) {
console.log('Resetting audio track');
getHtml5AudioTracks(video).forEach((track, index) => {
// eslint-disable-next-line no-param-reassign
track.enabled = index === 0;
});
}
// https://github.com/mifi/lossless-cut/issues/256
export function enableVideoTrack(video, index) {
if (index == null) {
resetVideoTrack(video);
return;
}
console.log('Enabling video track', index);
getHtml5VideoTracks(video).forEach((track) => {
// eslint-disable-next-line no-param-reassign
track.selected = track.id === getHtml5TrackId(index);
});
}
export function enableAudioTrack(video, index) {
if (index == null) {
resetAudioTrack(video);
return;
}
console.log('Enabling audio track', index);
getHtml5AudioTracks(video).forEach((track) => {
// eslint-disable-next-line no-param-reassign
track.enabled = track.id === getHtml5TrackId(index);
});
}
export function getStreamIdsToCopy({ streams, includeAllStreams }) {
if (includeAllStreams) {
return {

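A short illustrative usage of the track helpers added above (not part of this commit); it assumes a video element for which the browser exposes native track lists:

import { getAudioTrackForStreamIndex, enableAudioTrack } from './util/streams';

const videoEl = document.querySelector('video.main-player');
// ffmpeg stream index 2 maps to HTML5 track id '3' (tracks are 1-indexed):
const nativeTrack = getAudioTrackForStreamIndex(videoEl, 2);
if (nativeTrack != null) {
  enableAudioTrack(videoEl, 2); // selects the track with id '3', disables the rest
} else {
  console.log('no matching native track (e.g. unsupported codec); use the MediaSource player instead');
}
enableAudioTrack(videoEl, undefined); // no index: reset to the first audio track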
View file

@ -1905,6 +1905,16 @@ __metadata:
languageName: node
linkType: hard
"@uidotdev/usehooks@npm:^2.4.1":
version: 2.4.1
resolution: "@uidotdev/usehooks@npm:2.4.1"
peerDependencies:
react: ">=18.0.0"
react-dom: ">=18.0.0"
checksum: 7f2e1dcfcaf654841150fde36556a257afb2240bca0145586b4e1e9385b85fea108a7dd17c2cbc4c2bd46d136126ffdf84267118933120fe54ad2e028c2dfa68
languageName: node
linkType: hard
"@ungap/structured-clone@npm:^1.2.0":
version: 1.2.0
resolution: "@ungap/structured-clone@npm:1.2.0"
@ -6806,6 +6816,7 @@ __metadata:
"@types/sortablejs": "npm:^1.15.0"
"@typescript-eslint/eslint-plugin": "npm:^6.17.0"
"@typescript-eslint/parser": "npm:^6.17.0"
"@uidotdev/usehooks": "npm:^2.4.1"
"@vitejs/plugin-react": "npm:^3.1.0"
color: "npm:^3.1.0"
concurrently: "npm:^6.0.0"