- upgrade execa
- remove moment
- pull ffmpeg/execa logic out into main process
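A minimal sketch (not part of the commit) of the loading pattern the execa upgrade relies on: execa v6+ is published as ESM only, so the CommonJS main-process code can no longer require('execa'); public/ffmpeg.js in the diff below loads it with a dynamic import() and exports the promise so startup code can await it before first use. Names here are illustrative.

// standalone example of the dynamic-import pattern (hypothetical module)
const execaPromise = import('execa'); // dynamic import() works from CommonJS

let execa;
execaPromise.then((mod) => {
  ({ execa } = mod); // cache the named export once the ESM module has loaded
}).catch((err) => console.error(err));

async function run(cmd, args) {
  await execaPromise; // make sure the import has resolved before first use
  return execa(cmd, args);
}

module.exports = { run, whenImported: execaPromise };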
pull/1556/head
Mikael Finstad 2023-04-08 23:51:09 +09:00
parent 6cd949ac26
commit 3bc407cabd
No key was found in the database for this signature
GPG key ID: 25AB36E3E81CBC26
13 changed files with 696 additions and 596 deletions
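The "remove moment" bullet mostly comes down to two substitutions visible in the diff: luxon's DateTime replaces moment for the release date in the appdata script, and the ffmpeg progress time ("HH:MM:SS.cc"), previously converted with moment.duration(str).asSeconds(), is now parsed by hand in handleProgress. A rough equivalent of that parsing (helper name is illustrative):

// parseFfmpegTime mirrors the new manual parsing in handleProgress (hypothetical helper)
function parseFfmpegTime(timeStr) {
  const match = timeStr.match(/^(\d+):(\d+):(\d+)\.(\d+)$/); // e.g. "01:45:50.68"
  if (!match) return undefined;
  const h = parseInt(match[1], 10);
  const m = parseInt(match[2], 10);
  const s = parseInt(match[3], 10);
  const cs = parseInt(match[4], 10); // centiseconds
  return (((h * 60) + m) * 60 + s) + cs / 100;
}

// parseFfmpegTime('01:45:50.68') === 6350.68 seconds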

View file

@ -67,8 +67,8 @@
"i18next-parser": "^7.6.0",
"icon-gen": "^3.0.0",
"ky": "^0.33.1",
"luxon": "^3.3.0",
"mkdirp": "^1.0.3",
"moment": "^2.29.4",
"mousetrap": "^1.6.5",
"p-map": "^5.5.0",
"patch-package": "^6.2.1",
@ -103,7 +103,7 @@
"electron-is-dev": "^2.0.0",
"electron-store": "5.1.1",
"electron-unhandled": "^4.0.1",
"execa": "^5.0.0",
"execa": "^7.1.1",
"file-type": "16",
"file-url": "^3.0.0",
"fs-extra": "^8.1.0",

View file

@ -0,0 +1,66 @@
const strtok3 = require('strtok3');
const { getOneRawFrame, encodeLiveRawStream } = require('./ffmpeg');
let aborters = [];
async function command({ path, inWidth, inHeight, streamIndex, seekTo: commandedTime, onFrame, playing }) {
let process;
let aborted = false;
function killProcess() {
if (process) {
process.kill();
process = undefined;
}
}
function abort() {
aborted = true;
killProcess();
aborters = aborters.filter(((aborter) => aborter !== abort));
}
aborters.push(abort);
try {
if (playing) {
const { process: processIn, channels, width, height } = encodeLiveRawStream({ path, inWidth, inHeight, streamIndex, seekTo: commandedTime });
process = processIn;
// process.stderr.on('data', data => console.log(data.toString('utf-8')));
const tokenizer = await strtok3.fromStream(process.stdout);
if (aborted) return;
const size = width * height * channels;
const rgbaImage = Buffer.allocUnsafe(size);
while (!aborted) {
// eslint-disable-next-line no-await-in-loop
await tokenizer.readBuffer(rgbaImage, { length: size });
if (aborted) return;
onFrame(rgbaImage, width, height);
}
} else {
const { process: processIn, width, height } = getOneRawFrame({ path, inWidth, inHeight, streamIndex, seekTo: commandedTime, outSize: 1000 });
process = processIn;
const { stdout: rgbaImage } = await process;
if (aborted) return;
onFrame(rgbaImage, width, height);
}
} catch (err) {
if (!err.killed) console.warn(err.message);
} finally {
killProcess();
}
}
function abortAll() {
aborters.forEach((aborter) => aborter());
}
module.exports = {
command,
abortAll,
};

View file

@ -10,6 +10,7 @@ const yargsParser = require('yargs-parser');
const JSON5 = require('json5');
const remote = require('@electron/remote/main');
const { stat } = require('fs/promises');
const ffmpeg = require('./ffmpeg');
const logger = require('./logger');
const menu = require('./menu');
@ -253,6 +254,8 @@ const readyPromise = app.whenReady();
(async () => {
try {
await ffmpeg.whenImported;
logger.info('Initializing config store');
await configStore.init();

public/ffmpeg.js 100644 (520 lines added)
View file

@ -0,0 +1,520 @@
const { join } = require('path');
const isDev = require('electron-is-dev');
const readline = require('readline');
const stringToStream = require('string-to-stream');
const { platform, arch, isWindows, isMac } = require('./util');
const execaPromise = import('execa');
let execa;
execaPromise.then((execaNew) => {
({ execa } = execaNew);
}).catch((err) => console.error(err));
const runningFfmpegs = new Set();
// setInterval(() => console.log(runningFfmpegs.size), 1000);
let customFfPath;
// Note that this does not work on MAS because of sandbox restrictions
function setCustomFfPath(path) {
customFfPath = path;
}
function getFfCommandLine(cmd, args) {
const mapArg = arg => (/[^0-9a-zA-Z-_]/.test(arg) ? `'${arg}'` : arg);
return `${cmd} ${args.map(mapArg).join(' ')}`;
}
function getFfPath(cmd) {
const exeName = isWindows ? `${cmd}.exe` : cmd;
if (customFfPath) return join(customFfPath, exeName);
if (isDev) return join('ffmpeg', `${platform}-${arch}`, exeName);
return join(window.process.resourcesPath, exeName);
}
const getFfprobePath = () => getFfPath('ffprobe');
const getFfmpegPath = () => getFfPath('ffmpeg');
function abortFfmpegs() {
console.log('Aborting', runningFfmpegs.size, 'ffmpeg process(es)');
runningFfmpegs.forEach((process) => {
process.kill('SIGTERM', { forceKillAfterTimeout: 10000 });
});
}
function handleProgress(process, durationIn, onProgress, customMatcher = () => {}) {
if (!onProgress) return;
onProgress(0);
const rl = readline.createInterface({ input: process.stderr });
rl.on('line', (line) => {
// console.log('progress', line);
try {
let match = line.match(/frame=\s*[^\s]+\s+fps=\s*[^\s]+\s+q=\s*[^\s]+\s+(?:size|Lsize)=\s*[^\s]+\s+time=\s*([^\s]+)\s+/);
// Audio only looks like this: "line size= 233422kB time=01:45:50.68 bitrate= 301.1kbits/s speed= 353x "
if (!match) match = line.match(/(?:size|Lsize)=\s*[^\s]+\s+time=\s*([^\s]+)\s+/);
if (!match) {
customMatcher(line);
return;
}
const timeStr = match[1];
console.log(timeStr);
const match2 = timeStr.match(/^(\d+):(\d+):(\d+)\.(\d+)$/);
const h = parseInt(match2[1], 10);
const m = parseInt(match2[2], 10);
const s = parseInt(match2[3], 10);
const cs = parseInt(match2[4], 10);
const time = (((h * 60) + m) * 60 + s) + cs / 100;
console.log(time);
const progressTime = Math.max(0, time);
// console.log(progressTime);
if (durationIn == null) return;
const duration = Math.max(0, durationIn);
if (duration === 0) return;
const progress = duration ? Math.min(progressTime / duration, 1) : 0; // sometimes progressTime will be greater than cutDuration
onProgress(progress);
} catch (err) {
console.log('Failed to parse ffmpeg progress line', err);
}
});
}
// todo collect warnings from ffmpeg output and show them after export? example: https://github.com/mifi/lossless-cut/issues/1469
function runFfmpegProcess(args, execaOptions, { logCli = true } = {}) {
const ffmpegPath = getFfmpegPath();
if (logCli) console.log(getFfCommandLine('ffmpeg', args));
const process = execa(ffmpegPath, args, execaOptions);
(async () => {
runningFfmpegs.add(process);
try {
await process;
} catch (err) {
// ignored here
} finally {
runningFfmpegs.delete(process);
}
})();
return process;
}
async function runFfmpegConcat({ ffmpegArgs, concatTxt, totalDuration, onProgress }) {
const process = runFfmpegProcess(ffmpegArgs);
handleProgress(process, totalDuration, onProgress);
stringToStream(concatTxt).pipe(process.stdin);
return process;
}
async function runFfmpegWithProgress({ ffmpegArgs, duration, onProgress }) {
const process = runFfmpegProcess(ffmpegArgs);
handleProgress(process, duration, onProgress);
return process;
}
async function runFfprobe(args, { timeout = isDev ? 10000 : 30000 } = {}) {
const ffprobePath = getFfprobePath();
console.log(getFfCommandLine('ffprobe', args));
const ps = execa(ffprobePath, args);
const timer = setTimeout(() => {
console.warn('killing timed out ffprobe');
ps.kill();
}, timeout);
try {
return await ps;
} finally {
clearTimeout(timer);
}
}
async function renderWaveformPng({ filePath, start, duration, color }) {
const args1 = [
'-hide_banner',
'-i', filePath,
'-ss', start,
'-t', duration,
'-c', 'copy',
'-vn',
'-map', 'a:0',
'-f', 'matroska', // mpegts doesn't support vorbis etc
'-',
];
const args2 = [
'-hide_banner',
'-i', '-',
'-filter_complex', `showwavespic=s=2000x300:scale=lin:filter=peak:split_channels=1:colors=${color}`,
'-frames:v', '1',
'-vcodec', 'png',
'-f', 'image2',
'-',
];
console.log(getFfCommandLine('ffmpeg1', args1));
console.log('|', getFfCommandLine('ffmpeg2', args2));
let ps1;
let ps2;
try {
ps1 = runFfmpegProcess(args1, { encoding: null, buffer: false }, { logCli: false });
ps2 = runFfmpegProcess(args2, { encoding: null }, { logCli: false });
ps1.stdout.pipe(ps2.stdin);
const timer = setTimeout(() => {
ps1.kill();
ps2.kill();
console.warn('ffmpeg timed out');
}, 10000);
let stdout;
try {
({ stdout } = await ps2);
} finally {
clearTimeout(timer);
}
return {
buffer: stdout,
from: start,
to: start + duration,
duration,
createdAt: new Date(),
};
} catch (err) {
if (ps1) ps1.kill();
if (ps2) ps2.kill();
throw err;
}
}
const getInputSeekArgs = ({ filePath, from, to }) => [
...(from != null ? ['-ss', from.toFixed(5)] : []),
'-i', filePath,
...(to != null ? ['-t', (to - from).toFixed(5)] : []),
];
function mapTimesToSegments(times) {
const segments = [];
for (let i = 0; i < times.length; i += 1) {
const start = times[i];
const end = times[i + 1];
if (start != null) segments.push({ start, end }); // end undefined is allowed (means until end of video)
}
return segments;
}
const getSegmentOffset = (from) => (from != null ? from : 0);
function adjustSegmentsWithOffset({ segments, from }) {
const offset = getSegmentOffset(from);
return segments.map(({ start, end }) => ({ start: start + offset, end: end != null ? end + offset : end }));
}
// https://stackoverflow.com/questions/35675529/using-ffmpeg-how-to-do-a-scene-change-detection-with-timecode
async function detectSceneChanges({ filePath, minChange, onProgress, from, to }) {
const args = [
'-hide_banner',
...getInputSeekArgs({ filePath, from, to }),
'-filter_complex', `select='gt(scene,${minChange})',metadata=print:file=-`,
'-f', 'null', '-',
];
const process = runFfmpegProcess(args, { encoding: null, buffer: false });
const times = [0];
handleProgress(process, to - from, onProgress);
const rl = readline.createInterface({ input: process.stdout });
rl.on('line', (line) => {
const match = line.match(/^frame:\d+\s+pts:\d+\s+pts_time:([\d.]+)/);
if (!match) return;
const time = parseFloat(match[1]);
if (Number.isNaN(time) || time <= times[times.length - 1]) return;
times.push(time);
});
await process;
const segments = mapTimesToSegments(times);
return adjustSegmentsWithOffset({ segments, from });
}
async function detectIntervals({ filePath, customArgs, onProgress, from, to, matchLineTokens }) {
const args = [
'-hide_banner',
...getInputSeekArgs({ filePath, from, to }),
...customArgs,
'-f', 'null', '-',
];
const process = runFfmpegProcess(args, { encoding: null, buffer: false });
const segments = [];
function customMatcher(line) {
const { start: startStr, end: endStr } = matchLineTokens(line);
const start = parseFloat(startStr);
const end = parseFloat(endStr);
if (start == null || end == null || Number.isNaN(start) || Number.isNaN(end)) return;
segments.push({ start, end });
}
handleProgress(process, to - from, onProgress, customMatcher);
await process;
return adjustSegmentsWithOffset({ segments, from });
}
function getFffmpegJpegQuality(quality) {
// Normal range for JPEG is 2-31 with 31 being the worst quality.
const qMin = 2;
const qMax = 31;
return Math.min(Math.max(qMin, quality, Math.round((1 - quality) * (qMax - qMin) + qMin)), qMax);
}
async function captureFrames({ from, to, videoPath, outPathTemplate, quality, filter, framePts, onProgress }) {
const ffmpegQuality = getFffmpegJpegQuality(quality);
const args = [
'-ss', from,
'-i', videoPath,
'-t', Math.max(0, to - from),
'-q:v', ffmpegQuality,
...(filter != null ? ['-vf', filter] : []),
// https://superuser.com/questions/1336285/use-ffmpeg-for-thumbnail-selections
...(framePts ? ['-frame_pts', '1'] : []),
'-vsync', '0', // else we get a ton of duplicates (thumbnail filter)
'-y', outPathTemplate,
];
const process = runFfmpegProcess(args, { encoding: null, buffer: false });
handleProgress(process, to - from, onProgress);
await process;
}
async function captureFrame({ timestamp, videoPath, outPath, quality }) {
const ffmpegQuality = getFffmpegJpegQuality(quality);
await runFfmpegProcess([
'-ss', timestamp,
'-i', videoPath,
'-vframes', '1',
'-q:v', ffmpegQuality,
'-y', outPath,
]);
}
async function readFormatData(filePath) {
console.log('readFormatData', filePath);
const { stdout } = await runFfprobe([
'-of', 'json', '-show_format', '-i', filePath, '-hide_banner',
]);
return JSON.parse(stdout).format;
}
async function getDuration(filePath) {
return parseFloat((await readFormatData(filePath)).duration);
}
async function html5ify({ outPath, filePath: filePathArg, speed, hasAudio, hasVideo, onProgress }) {
let audio;
if (hasAudio) {
if (speed === 'slowest') audio = 'hq';
else if (['slow-audio', 'fast-audio', 'fastest-audio'].includes(speed)) audio = 'lq';
else if (['fast-audio-remux', 'fastest-audio-remux'].includes(speed)) audio = 'copy';
}
let video;
if (hasVideo) {
if (speed === 'slowest') video = 'hq';
else if (['slow-audio', 'slow'].includes(speed)) video = 'lq';
else video = 'copy';
}
console.log('Making HTML5 friendly version', { filePathArg, outPath, video, audio });
let videoArgs;
let audioArgs;
// h264/aac_at: No licensing when using HW encoder (Video/Audio Toolbox on Mac)
// https://github.com/mifi/lossless-cut/issues/372#issuecomment-810766512
const targetHeight = 400;
switch (video) {
case 'hq': {
if (isMac) {
videoArgs = ['-vf', 'format=yuv420p', '-allow_sw', '1', '-vcodec', 'h264', '-b:v', '15M'];
} else {
// AV1 is very slow
// videoArgs = ['-vf', 'format=yuv420p', '-sws_flags', 'neighbor', '-vcodec', 'libaom-av1', '-crf', '30', '-cpu-used', '8'];
// Theora is a bit faster but not that much
// videoArgs = ['-vf', '-c:v', 'libtheora', '-qscale:v', '1'];
// videoArgs = ['-vf', 'format=yuv420p', '-c:v', 'libvpx-vp9', '-crf', '30', '-b:v', '0', '-row-mt', '1'];
// x264 can only be used in GPL projects
videoArgs = ['-vf', 'format=yuv420p', '-c:v', 'libx264', '-profile:v', 'high', '-preset:v', 'slow', '-crf', '17'];
}
break;
}
case 'lq': {
if (isMac) {
videoArgs = ['-vf', `scale=-2:${targetHeight},format=yuv420p`, '-allow_sw', '1', '-sws_flags', 'lanczos', '-vcodec', 'h264', '-b:v', '1500k'];
} else {
// videoArgs = ['-vf', `scale=-2:${targetHeight},format=yuv420p`, '-sws_flags', 'neighbor', '-c:v', 'libtheora', '-qscale:v', '1'];
// x264 can only be used in GPL projects
videoArgs = ['-vf', `scale=-2:${targetHeight},format=yuv420p`, '-sws_flags', 'neighbor', '-c:v', 'libx264', '-profile:v', 'baseline', '-x264opts', 'level=3.0', '-preset:v', 'ultrafast', '-crf', '28'];
}
break;
}
case 'copy': {
videoArgs = ['-vcodec', 'copy'];
break;
}
default: {
videoArgs = ['-vn'];
}
}
switch (audio) {
case 'hq': {
if (isMac) {
audioArgs = ['-acodec', 'aac_at', '-b:a', '192k'];
} else {
audioArgs = ['-acodec', 'flac'];
}
break;
}
case 'lq': {
if (isMac) {
audioArgs = ['-acodec', 'aac_at', '-ar', '44100', '-ac', '2', '-b:a', '96k'];
} else {
audioArgs = ['-acodec', 'flac', '-ar', '11025', '-ac', '2'];
}
break;
}
case 'copy': {
audioArgs = ['-acodec', 'copy'];
break;
}
default: {
audioArgs = ['-an'];
}
}
const ffmpegArgs = [
'-hide_banner',
'-i', filePathArg,
...videoArgs,
...audioArgs,
'-sn',
'-y', outPath,
];
const duration = await getDuration(filePathArg);
const process = runFfmpegProcess(ffmpegArgs);
if (duration) handleProgress(process, duration, onProgress);
const { stdout } = await process;
console.log(stdout);
}
function createRawFfmpeg({ fps = 25, path, inWidth, inHeight, seekTo, oneFrameOnly, execaOpts, streamIndex, outSize = 320 }) {
// const fps = 25; // TODO
const aspectRatio = inWidth / inHeight;
let newWidth;
let newHeight;
if (inWidth > inHeight) {
newWidth = outSize;
newHeight = Math.floor(newWidth / aspectRatio);
} else {
newHeight = outSize;
newWidth = Math.floor(newHeight * aspectRatio);
}
const args = [
'-hide_banner', '-loglevel', 'panic',
'-re',
'-ss', seekTo,
'-noautorotate',
'-i', path,
'-vf', `fps=${fps},scale=${newWidth}:${newHeight}:flags=lanczos`,
'-map', `0:${streamIndex}`,
'-vcodec', 'rawvideo',
'-pix_fmt', 'rgba',
...(oneFrameOnly ? ['-frames:v', '1'] : []),
'-f', 'image2pipe',
'-',
];
// console.log(args);
return {
process: runFfmpegProcess(args, execaOpts, { logCli: false }),
width: newWidth,
height: newHeight,
channels: 4,
};
}
function getOneRawFrame({ path, inWidth, inHeight, seekTo, streamIndex, outSize }) {
const { process, width, height, channels } = createRawFfmpeg({ path, inWidth, inHeight, seekTo, streamIndex, oneFrameOnly: true, execaOpts: { encoding: null }, outSize });
return { process, width, height, channels };
}
function encodeLiveRawStream({ path, inWidth, inHeight, seekTo, streamIndex }) {
const { process, width, height, channels } = createRawFfmpeg({ path, inWidth, inHeight, seekTo, streamIndex, execaOpts: { encoding: null, buffer: false } });
return {
process,
width,
height,
channels,
};
}
// Don't pass complex objects over the bridge
const runFfmpeg = async (...args) => runFfmpegProcess(...args);
module.exports = {
whenImported: execaPromise,
setCustomFfPath,
abortFfmpegs,
getFfmpegPath,
runFfprobe,
runFfmpeg,
runFfmpegConcat,
runFfmpegWithProgress,
renderWaveformPng,
mapTimesToSegments,
detectSceneChanges,
detectIntervals,
captureFrames,
captureFrame,
getFfCommandLine,
html5ify,
getDuration,
getOneRawFrame,
encodeLiveRawStream,
};

View file

@ -1,5 +1,18 @@
const os = require('os');
const frontendBuildDir = 'vite-dist';
// todo dedupe between renderer and main
const platform = os.platform();
const arch = os.arch();
const isWindows = platform === 'win32';
const isMac = platform === 'darwin';
module.exports = {
frontendBuildDir,
isWindows,
isMac,
platform,
arch,
};

View file

@ -1,6 +1,6 @@
import { readFile, writeFile } from 'fs/promises';
import { XMLParser, XMLBuilder } from 'fast-xml-parser';
import moment from 'moment';
import { DateTime } from 'luxon';
const xmlUrl = new URL('../no.mifi.losslesscut.appdata.xml', import.meta.url);
const xmlData = await readFile(xmlUrl);
@ -13,7 +13,7 @@ const xml = parser.parse(xmlData);
const { version } = packageJson;
xml.component.releases.release = [{ '@_version': version, '@_date': moment().format('YYYY-MM-DD') }, ...xml.component.releases.release];
xml.component.releases.release = [{ '@_version': version, '@_date': DateTime.now().toISODate() }, ...xml.component.releases.release];
const builder = new XMLBuilder({ format: true, ignoreAttributes: false, suppressEmptyNode: true });
await writeFile(xmlUrl, builder.build(xml));

View file

@ -1,4 +1,4 @@
import execa from 'execa';
import { execa } from 'execa';
import { readFile } from 'fs/promises';
// we need a wrapper script because altool tends to error out very often

View file

@ -2263,7 +2263,7 @@ const App = memo(() => {
)}
<AnimatePresence>
{working && <Working text={working} cutProgress={cutProgress} onAbortClick={abortFfmpegs} />}
{working && <Working text={working} cutProgress={cutProgress} onAbortClick={() => abortFfmpegs()} />}
</AnimatePresence>
{tunerVisible && <ValueTuners type={tunerVisible} onFinished={() => setTunerVisible()} />}

View file

@ -1,13 +1,9 @@
import { encodeLiveRawStream, getOneRawFrame } from './ffmpeg';
const remote = window.require('@electron/remote');
// TODO keep everything in electron land?
const strtok3 = window.require('strtok3');
const { command, abortAll } = remote.require('./canvasPlayer');
export default ({ path, width: inWidth, height: inHeight, streamIndex, getCanvas }) => {
let terminated;
let aborters = [];
let commandedTime;
let playing;
function drawOnCanvas(rgbaImage, width, height) {
const canvas = getCanvas();
@ -22,77 +18,16 @@ export default ({ path, width: inWidth, height: inHeight, streamIndex, getCanvas
ctx.putImageData(new ImageData(Uint8ClampedArray.from(rgbaImage), width, height), 0, 0);
}
async function command() {
let process;
let aborted = false;
function killProcess() {
if (process) {
process.kill();
process = undefined;
}
}
function abort() {
aborted = true;
killProcess();
aborters = aborters.filter(((aborter) => aborter !== abort));
}
aborters.push(abort);
try {
if (playing) {
const { process: processIn, channels, width, height } = encodeLiveRawStream({ path, inWidth, inHeight, streamIndex, seekTo: commandedTime });
process = processIn;
// process.stderr.on('data', data => console.log(data.toString('utf-8')));
const tokenizer = await strtok3.fromStream(process.stdout);
if (aborted) return;
const size = width * height * channels;
const rgbaImage = Buffer.allocUnsafe(size);
while (!aborted) {
// eslint-disable-next-line no-await-in-loop
await tokenizer.readBuffer(rgbaImage, { length: size });
if (aborted) return;
drawOnCanvas(rgbaImage, width, height);
}
} else {
const { process: processIn, width, height } = getOneRawFrame({ path, inWidth, inHeight, streamIndex, seekTo: commandedTime, outSize: 1000 });
process = processIn;
const { stdout: rgbaImage } = await process;
if (aborted) return;
drawOnCanvas(rgbaImage, width, height);
}
} catch (err) {
if (!err.killed) console.warn(err.message);
} finally {
killProcess();
}
}
function abortAll() {
aborters.forEach((aborter) => aborter());
}
function pause(seekTo) {
if (terminated) return;
playing = false;
commandedTime = seekTo;
abortAll();
command();
command({ path, inWidth, inHeight, streamIndex, seekTo, onFrame: drawOnCanvas, playing: false });
}
function play(playFrom) {
if (terminated) return;
playing = true;
commandedTime = playFrom;
abortAll();
command();
command({ path, inWidth, inHeight, streamIndex, seekTo: playFrom, onFrame: drawOnCanvas, playing: true });
}
function terminate() {

View file

@ -1,85 +1,26 @@
import pMap from 'p-map';
import sortBy from 'lodash/sortBy';
import moment from 'moment';
import i18n from 'i18next';
import Timecode from 'smpte-timecode';
import minBy from 'lodash/minBy';
import { pcmAudioCodecs, getMapStreamsArgs, isMov } from './util/streams';
import { getSuffixedOutPath, isWindows, isMac, platform, arch, isExecaFailure } from './util';
import { getSuffixedOutPath, isExecaFailure } from './util';
import { isDurationValid } from './segments';
import isDev from './isDev';
const execa = window.require('execa');
const { join } = window.require('path');
const FileType = window.require('file-type');
const readline = window.require('readline');
const { pathExists } = window.require('fs-extra');
let customFfPath;
const remote = window.require('@electron/remote');
const runningFfmpegs = new Set();
// setInterval(() => console.log(runningFfmpegs.size), 1000);
const { renderWaveformPng, mapTimesToSegments, detectSceneChanges, detectIntervals, captureFrames, captureFrame, getFfCommandLine, runFfmpegConcat, runFfmpegWithProgress, html5ify, getDuration, abortFfmpegs, runFfmpeg, runFfprobe, getFfmpegPath, setCustomFfPath } = remote.require('./ffmpeg');
export { renderWaveformPng, mapTimesToSegments, detectSceneChanges, captureFrames, captureFrame, getFfCommandLine, runFfmpegConcat, runFfmpegWithProgress, html5ify, getDuration, abortFfmpegs, runFfprobe, getFfmpegPath, setCustomFfPath };
export class RefuseOverwriteError extends Error {}
// Note that this does not work on MAS because of sandbox restrictions
export function setCustomFfPath(path) {
customFfPath = path;
}
export function getFfCommandLine(cmd, args) {
const mapArg = arg => (/[^0-9a-zA-Z-_]/.test(arg) ? `'${arg}'` : arg);
return `${cmd} ${args.map(mapArg).join(' ')}`;
}
function getFfPath(cmd) {
const exeName = isWindows ? `${cmd}.exe` : cmd;
if (customFfPath) return join(customFfPath, exeName);
if (isDev) return join('ffmpeg', `${platform}-${arch}`, exeName);
return join(window.process.resourcesPath, exeName);
}
export const getFfmpegPath = () => getFfPath('ffmpeg');
export const getFfprobePath = () => getFfPath('ffprobe');
export async function runFfprobe(args, { timeout = isDev ? 10000 : 30000 } = {}) {
const ffprobePath = getFfprobePath();
console.log(getFfCommandLine('ffprobe', args));
const ps = execa(ffprobePath, args);
const timer = setTimeout(() => {
console.warn('killing timed out ffprobe');
ps.kill();
}, timeout);
try {
return await ps;
} finally {
clearTimeout(timer);
}
}
// todo collect warnings from ffmpeg output and show them after export? example: https://github.com/mifi/lossless-cut/issues/1469
export function runFfmpeg(args, execaOptions, { logCli = true } = {}) {
const ffmpegPath = getFfmpegPath();
if (logCli) console.log(getFfCommandLine('ffmpeg', args));
const process = execa(ffmpegPath, args, execaOptions);
(async () => {
runningFfmpegs.add(process);
try {
await process;
} catch (err) {
// ignored here
} finally {
runningFfmpegs.delete(process);
}
})();
return process;
}
export function logStdoutStderr({ stdout, stderr }) {
if (stdout.length > 0) {
console.log('%cSTDOUT:', 'color: green; font-weight: bold');
@ -91,45 +32,6 @@ export function logStdoutStderr({ stdout, stderr }) {
}
}
export function abortFfmpegs() {
runningFfmpegs.forEach((process) => {
process.kill('SIGTERM', { forceKillAfterTimeout: 10000 });
});
}
export function handleProgress(process, durationIn, onProgress, customMatcher = () => {}) {
if (!onProgress) return;
onProgress(0);
const rl = readline.createInterface({ input: process.stderr });
rl.on('line', (line) => {
// console.log('progress', line);
try {
let match = line.match(/frame=\s*[^\s]+\s+fps=\s*[^\s]+\s+q=\s*[^\s]+\s+(?:size|Lsize)=\s*[^\s]+\s+time=\s*([^\s]+)\s+/);
// Audio only looks like this: "line size= 233422kB time=01:45:50.68 bitrate= 301.1kbits/s speed= 353x "
if (!match) match = line.match(/(?:size|Lsize)=\s*[^\s]+\s+time=\s*([^\s]+)\s+/);
if (!match) {
customMatcher(line);
return;
}
const str = match[1];
// console.log(str);
const progressTime = Math.max(0, moment.duration(str).asSeconds());
// console.log(progressTime);
if (durationIn == null) return;
const duration = Math.max(0, durationIn);
if (duration === 0) return;
const progress = duration ? Math.min(progressTime / duration, 1) : 0; // sometimes progressTime will be greater than cutDuration
onProgress(progress);
} catch (err) {
console.log('Failed to parse ffmpeg progress line', err);
}
});
}
export function isCuttingStart(cutFrom) {
return cutFrom > 0;
}
@ -278,20 +180,6 @@ export async function tryMapChaptersToEdl(chapters) {
}
}
async function readFormatData(filePath) {
console.log('readFormatData', filePath);
const { stdout } = await runFfprobe([
'-of', 'json', '-show_format', '-i', filePath, '-hide_banner',
]);
return JSON.parse(stdout).format;
}
export async function getDuration(filePath) {
return parseFloat((await readFormatData(filePath)).duration);
}
export async function createChaptersFromSegments({ segmentPaths, chapterNames }) {
if (!chapterNames) return undefined;
try {
@ -596,145 +484,6 @@ export async function renderThumbnails({ filePath, from, duration, onThumbnail }
}
export async function renderWaveformPng({ filePath, start, duration, color }) {
const args1 = [
'-hide_banner',
'-i', filePath,
'-ss', start,
'-t', duration,
'-c', 'copy',
'-vn',
'-map', 'a:0',
'-f', 'matroska', // mpegts doesn't support vorbis etc
'-',
];
const args2 = [
'-hide_banner',
'-i', '-',
'-filter_complex', `showwavespic=s=2000x300:scale=lin:filter=peak:split_channels=1:colors=${color}`,
'-frames:v', '1',
'-vcodec', 'png',
'-f', 'image2',
'-',
];
console.log(getFfCommandLine('ffmpeg1', args1));
console.log('|', getFfCommandLine('ffmpeg2', args2));
let ps1;
let ps2;
try {
ps1 = runFfmpeg(args1, { encoding: null, buffer: false }, { logCli: false });
ps2 = runFfmpeg(args2, { encoding: null }, { logCli: false });
ps1.stdout.pipe(ps2.stdin);
const timer = setTimeout(() => {
ps1.kill();
ps2.kill();
console.warn('ffmpeg timed out');
}, 10000);
let stdout;
try {
({ stdout } = await ps2);
} finally {
clearTimeout(timer);
}
const blob = new Blob([stdout], { type: 'image/png' });
return {
url: URL.createObjectURL(blob),
from: start,
to: start + duration,
duration,
createdAt: new Date(),
};
} catch (err) {
if (ps1) ps1.kill();
if (ps2) ps2.kill();
throw err;
}
}
const getInputSeekArgs = ({ filePath, from, to }) => [
...(from != null ? ['-ss', from.toFixed(5)] : []),
'-i', filePath,
...(to != null ? ['-t', (to - from).toFixed(5)] : []),
];
const getSegmentOffset = (from) => (from != null ? from : 0);
function adjustSegmentsWithOffset({ segments, from }) {
const offset = getSegmentOffset(from);
return segments.map(({ start, end }) => ({ start: start + offset, end: end != null ? end + offset : end }));
}
export function mapTimesToSegments(times) {
const segments = [];
for (let i = 0; i < times.length; i += 1) {
const start = times[i];
const end = times[i + 1];
if (start != null) segments.push({ start, end }); // end undefined is allowed (means until end of video)
}
return segments;
}
// https://stackoverflow.com/questions/35675529/using-ffmpeg-how-to-do-a-scene-change-detection-with-timecode
export async function detectSceneChanges({ filePath, minChange, onProgress, from, to }) {
const args = [
'-hide_banner',
...getInputSeekArgs({ filePath, from, to }),
'-filter_complex', `select='gt(scene,${minChange})',metadata=print:file=-`,
'-f', 'null', '-',
];
const process = runFfmpeg(args, { encoding: null, buffer: false });
const times = [0];
handleProgress(process, to - from, onProgress);
const rl = readline.createInterface({ input: process.stdout });
rl.on('line', (line) => {
const match = line.match(/^frame:\d+\s+pts:\d+\s+pts_time:([\d.]+)/);
if (!match) return;
const time = parseFloat(match[1]);
if (Number.isNaN(time) || time <= times[times.length - 1]) return;
times.push(time);
});
await process;
const segments = mapTimesToSegments(times);
return adjustSegmentsWithOffset({ segments, from });
}
export async function detectIntervals({ filePath, customArgs, onProgress, from, to, matchLineTokens }) {
const args = [
'-hide_banner',
...getInputSeekArgs({ filePath, from, to }),
...customArgs,
'-f', 'null', '-',
];
const process = runFfmpeg(args, { encoding: null, buffer: false });
const segments = [];
function customMatcher(line) {
const { start: startStr, end: endStr } = matchLineTokens(line);
const start = parseFloat(startStr);
const end = parseFloat(endStr);
if (start == null || end == null || Number.isNaN(start) || Number.isNaN(end)) return;
segments.push({ start, end });
}
handleProgress(process, to - from, onProgress, customMatcher);
await process;
return adjustSegmentsWithOffset({ segments, from });
}
const mapFilterOptions = (options) => Object.entries(options).map(([key, value]) => `${key}=${value}`).join(':');
export async function blackDetect({ filePath, filterOptions, onProgress, from, to }) {
@ -795,46 +544,6 @@ export async function extractWaveform({ filePath, outPath }) {
console.timeEnd('ffmpeg');
}
function getFffmpegJpegQuality(quality) {
// Normal range for JPEG is 2-31 with 31 being the worst quality.
const qMin = 2;
const qMax = 31;
return Math.min(Math.max(qMin, quality, Math.round((1 - quality) * (qMax - qMin) + qMin)), qMax);
}
export async function captureFrame({ timestamp, videoPath, outPath, quality }) {
const ffmpegQuality = getFffmpegJpegQuality(quality);
await runFfmpeg([
'-ss', timestamp,
'-i', videoPath,
'-vframes', '1',
'-q:v', ffmpegQuality,
'-y', outPath,
]);
}
export async function captureFrames({ from, to, videoPath, outPathTemplate, quality, filter, framePts, onProgress }) {
const ffmpegQuality = getFffmpegJpegQuality(quality);
const args = [
'-ss', from,
'-i', videoPath,
'-t', Math.max(0, to - from),
'-q:v', ffmpegQuality,
...(filter != null ? ['-vf', filter] : []),
// https://superuser.com/questions/1336285/use-ffmpeg-for-thumbnail-selections
...(framePts ? ['-frame_pts', '1'] : []),
'-vsync', '0', // else we get a ton of duplicates (thumbnail filter)
'-y', outPathTemplate,
];
const process = runFfmpeg(args, { encoding: null, buffer: false });
handleProgress(process, to - from, onProgress);
await process;
}
export function isIphoneHevc(format, streams) {
if (!streams.some((s) => s.codec_name === 'hevc')) return false;
const makeTag = format.tags && format.tags['com.apple.quicktime.make'];
@ -858,68 +567,6 @@ export function getStreamFps(stream) {
return undefined;
}
function createRawFfmpeg({ fps = 25, path, inWidth, inHeight, seekTo, oneFrameOnly, execaOpts, streamIndex, outSize = 320 }) {
// const fps = 25; // TODO
const aspectRatio = inWidth / inHeight;
let newWidth;
let newHeight;
if (inWidth > inHeight) {
newWidth = outSize;
newHeight = Math.floor(newWidth / aspectRatio);
} else {
newHeight = outSize;
newWidth = Math.floor(newHeight * aspectRatio);
}
const args = [
'-hide_banner', '-loglevel', 'panic',
'-re',
'-ss', seekTo,
'-noautorotate',
'-i', path,
'-vf', `fps=${fps},scale=${newWidth}:${newHeight}:flags=lanczos`,
'-map', `0:${streamIndex}`,
'-vcodec', 'rawvideo',
'-pix_fmt', 'rgba',
...(oneFrameOnly ? ['-frames:v', '1'] : []),
'-f', 'image2pipe',
'-',
];
// console.log(args);
return {
process: runFfmpeg(args, execaOpts, { logCli: false }),
width: newWidth,
height: newHeight,
channels: 4,
};
}
export function getOneRawFrame({ path, inWidth, inHeight, seekTo, streamIndex, outSize }) {
const { process, width, height, channels } = createRawFfmpeg({ path, inWidth, inHeight, seekTo, streamIndex, oneFrameOnly: true, execaOpts: { encoding: null }, outSize });
return { process, width, height, channels };
}
export function encodeLiveRawStream({ path, inWidth, inHeight, seekTo, streamIndex }) {
const { process, width, height, channels } = createRawFfmpeg({ path, inWidth, inHeight, seekTo, streamIndex, execaOpts: { encoding: null, buffer: false } });
return {
process,
width,
height,
channels,
};
}
function parseTimecode(str, frameRate) {
// console.log(str, frameRate);
@ -954,109 +601,6 @@ export async function runFfmpegStartupCheck() {
await runFfmpeg(['-hide_banner', '-f', 'lavfi', '-i', 'nullsrc=s=256x256:d=1', '-f', 'null', '-']);
}
export async function html5ify({ outPath, filePath: filePathArg, speed, hasAudio, hasVideo, onProgress }) {
let audio;
if (hasAudio) {
if (speed === 'slowest') audio = 'hq';
else if (['slow-audio', 'fast-audio', 'fastest-audio'].includes(speed)) audio = 'lq';
else if (['fast-audio-remux', 'fastest-audio-remux'].includes(speed)) audio = 'copy';
}
let video;
if (hasVideo) {
if (speed === 'slowest') video = 'hq';
else if (['slow-audio', 'slow'].includes(speed)) video = 'lq';
else video = 'copy';
}
console.log('Making HTML5 friendly version', { filePathArg, outPath, video, audio });
let videoArgs;
let audioArgs;
// h264/aac_at: No licensing when using HW encoder (Video/Audio Toolbox on Mac)
// https://github.com/mifi/lossless-cut/issues/372#issuecomment-810766512
const targetHeight = 400;
switch (video) {
case 'hq': {
if (isMac) {
videoArgs = ['-vf', 'format=yuv420p', '-allow_sw', '1', '-vcodec', 'h264', '-b:v', '15M'];
} else {
// AV1 is very slow
// videoArgs = ['-vf', 'format=yuv420p', '-sws_flags', 'neighbor', '-vcodec', 'libaom-av1', '-crf', '30', '-cpu-used', '8'];
// Theora is a bit faster but not that much
// videoArgs = ['-vf', '-c:v', 'libtheora', '-qscale:v', '1'];
// videoArgs = ['-vf', 'format=yuv420p', '-c:v', 'libvpx-vp9', '-crf', '30', '-b:v', '0', '-row-mt', '1'];
// x264 can only be used in GPL projects
videoArgs = ['-vf', 'format=yuv420p', '-c:v', 'libx264', '-profile:v', 'high', '-preset:v', 'slow', '-crf', '17'];
}
break;
}
case 'lq': {
if (isMac) {
videoArgs = ['-vf', `scale=-2:${targetHeight},format=yuv420p`, '-allow_sw', '1', '-sws_flags', 'lanczos', '-vcodec', 'h264', '-b:v', '1500k'];
} else {
// videoArgs = ['-vf', `scale=-2:${targetHeight},format=yuv420p`, '-sws_flags', 'neighbor', '-c:v', 'libtheora', '-qscale:v', '1'];
// x264 can only be used in GPL projects
videoArgs = ['-vf', `scale=-2:${targetHeight},format=yuv420p`, '-sws_flags', 'neighbor', '-c:v', 'libx264', '-profile:v', 'baseline', '-x264opts', 'level=3.0', '-preset:v', 'ultrafast', '-crf', '28'];
}
break;
}
case 'copy': {
videoArgs = ['-vcodec', 'copy'];
break;
}
default: {
videoArgs = ['-vn'];
}
}
switch (audio) {
case 'hq': {
if (isMac) {
audioArgs = ['-acodec', 'aac_at', '-b:a', '192k'];
} else {
audioArgs = ['-acodec', 'flac'];
}
break;
}
case 'lq': {
if (isMac) {
audioArgs = ['-acodec', 'aac_at', '-ar', '44100', '-ac', '2', '-b:a', '96k'];
} else {
audioArgs = ['-acodec', 'flac', '-ar', '11025', '-ac', '2'];
}
break;
}
case 'copy': {
audioArgs = ['-acodec', 'copy'];
break;
}
default: {
audioArgs = ['-an'];
}
}
const ffmpegArgs = [
'-hide_banner',
'-i', filePathArg,
...videoArgs,
...audioArgs,
'-sn',
'-y', outPath,
];
const duration = await getDuration(filePathArg);
const process = runFfmpeg(ffmpegArgs);
if (duration) handleProgress(process, duration, onProgress);
const { stdout } = await process;
console.log(stdout);
}
// https://superuser.com/questions/543589/information-about-ffmpeg-command-line-options
export const getExperimentalArgs = (ffmpegExperimental) => (ffmpegExperimental ? ['-strict', 'experimental'] : []);

View file

@ -4,14 +4,13 @@ import sum from 'lodash/sum';
import pMap from 'p-map';
import { getSuffixedOutPath, transferTimestamps, getOutFileExtension, getOutDir, deleteDispositionValue, getHtml5ifiedPath } from '../util';
import { isCuttingStart, isCuttingEnd, handleProgress, getFfCommandLine, getDuration, runFfmpeg, createChaptersFromSegments, readFileMeta, cutEncodeSmartPart, getExperimentalArgs, html5ify as ffmpegHtml5ify, getVideoTimescaleArgs, RefuseOverwriteError, logStdoutStderr } from '../ffmpeg';
import { isCuttingStart, isCuttingEnd, runFfmpegWithProgress, getFfCommandLine, getDuration, createChaptersFromSegments, readFileMeta, cutEncodeSmartPart, getExperimentalArgs, html5ify as ffmpegHtml5ify, getVideoTimescaleArgs, RefuseOverwriteError, logStdoutStderr, runFfmpegConcat } from '../ffmpeg';
import { getMapStreamsArgs, getStreamIdsToCopy } from '../util/streams';
import { getSmartCutParams } from '../smartcut';
const { join, resolve, dirname } = window.require('path');
const { pathExists } = window.require('fs-extra');
const { writeFile, unlink, mkdir } = window.require('fs/promises');
const stringToStream = window.require('string-to-stream');
async function writeChaptersFfmetadata(outDir, chapters) {
if (!chapters || chapters.length === 0) return undefined;
@ -155,13 +154,7 @@ function useFfmpegOperations({ filePath, enableTransferTimestamps, needSmartCut
console.log(fullCommandLine);
appendFfmpegCommandLog(fullCommandLine);
const process = runFfmpeg(ffmpegArgs);
handleProgress(process, totalDuration, onProgress);
stringToStream(concatTxt).pipe(process.stdin);
const result = await process;
const result = await runFfmpegConcat({ ffmpegArgs, concatTxt, totalDuration, onProgress });
logStdoutStderr(result);
await optionalTransferTimestamps(metadataFromPath, outPath);
@ -313,9 +306,7 @@ function useFfmpegOperations({ filePath, enableTransferTimestamps, needSmartCut
// console.log(ffmpegCommandLine);
appendFfmpegCommandLog(ffmpegCommandLine);
const process = runFfmpeg(ffmpegArgs);
handleProgress(process, cutDuration, onProgress);
const result = await process;
const result = await runFfmpegWithProgress({ ffmpegArgs, duration: cutDuration, onProgress });
logStdoutStderr(result);
await optionalTransferTimestamps(filePath, outPath, cutFrom);
@ -487,10 +478,7 @@ function useFfmpegOperations({ filePath, enableTransferTimestamps, needSmartCut
'-y', outPath,
];
const process = runFfmpeg(ffmpegArgs);
handleProgress(process, duration, onProgress);
const result = await process;
const result = await runFfmpegWithProgress({ ffmpegArgs, duration, onProgress });
logStdoutStderr(result);
await optionalTransferTimestamps(filePathArg, outPath);
@ -515,10 +503,7 @@ function useFfmpegOperations({ filePath, enableTransferTimestamps, needSmartCut
'-y', outPath,
];
const process = runFfmpeg(ffmpegArgs);
handleProgress(process, duration, onProgress);
const result = await process;
const result = await runFfmpegWithProgress({ ffmpegArgs, duration, onProgress });
logStdoutStderr(result);
await optionalTransferTimestamps(filePath, outPath);

View file

@ -35,14 +35,18 @@ export default ({ darkMode, filePath, relevantTime, durationSafe, waveformEnable
const safeExtractDuration = Math.min(waveformStartTime + ffmpegExtractWindow, durationSafe) - waveformStartTime;
const promise = renderWaveformPng({ filePath, start: waveformStartTime, duration: safeExtractDuration, color: waveformColor });
creatingWaveformPromise.current = promise;
const newWaveform = await promise;
const { buffer, ...newWaveform } = await promise;
if (aborted) return;
setWaveforms((currentWaveforms) => {
const waveformsByCreatedAt = sortBy(currentWaveforms, 'createdAt');
return [
// cleanup old
...(currentWaveforms.length >= maxWaveforms ? waveformsByCreatedAt.slice(1) : waveformsByCreatedAt),
newWaveform,
{
...newWaveform,
url: URL.createObjectURL(new Blob([buffer], { type: 'image/png' })),
},
];
});
} catch (err) {

yarn.lock (104 lines changed)
View file

@ -4096,20 +4096,20 @@ __metadata:
languageName: node
linkType: hard
"execa@npm:^5.0.0":
version: 5.1.1
resolution: "execa@npm:5.1.1"
"execa@npm:^7.1.1":
version: 7.1.1
resolution: "execa@npm:7.1.1"
dependencies:
cross-spawn: ^7.0.3
get-stream: ^6.0.0
human-signals: ^2.1.0
is-stream: ^2.0.0
get-stream: ^6.0.1
human-signals: ^4.3.0
is-stream: ^3.0.0
merge-stream: ^2.0.0
npm-run-path: ^4.0.1
onetime: ^5.1.2
signal-exit: ^3.0.3
strip-final-newline: ^2.0.0
checksum: fba9022c8c8c15ed862847e94c252b3d946036d7547af310e344a527e59021fd8b6bb0723883ea87044dc4f0201f949046993124a42ccb0855cae5bf8c786343
npm-run-path: ^5.1.0
onetime: ^6.0.0
signal-exit: ^3.0.7
strip-final-newline: ^3.0.0
checksum: 21fa46fc69314ace4068cf820142bdde5b643a5d89831c2c9349479c1555bff137a291b8e749e7efca36535e4e0a8c772c11008ca2e84d2cbd6ca141a3c8f937
languageName: node
linkType: hard
@ -4677,7 +4677,7 @@ __metadata:
languageName: node
linkType: hard
"get-stream@npm:^6.0.0":
"get-stream@npm:^6.0.1":
version: 6.0.1
resolution: "get-stream@npm:6.0.1"
checksum: e04ecece32c92eebf5b8c940f51468cd53554dcbb0ea725b2748be583c9523d00128137966afce410b9b051eb2ef16d657cd2b120ca8edafcf5a65e81af63cad
@ -5084,10 +5084,10 @@ __metadata:
languageName: node
linkType: hard
"human-signals@npm:^2.1.0":
version: 2.1.0
resolution: "human-signals@npm:2.1.0"
checksum: b87fd89fce72391625271454e70f67fe405277415b48bcc0117ca73d31fa23a4241787afdc8d67f5a116cf37258c052f59ea82daffa72364d61351423848e3b8
"human-signals@npm:^4.3.0":
version: 4.3.1
resolution: "human-signals@npm:4.3.1"
checksum: 6f12958df3f21b6fdaf02d90896c271df00636a31e2bbea05bddf817a35c66b38a6fdac5863e2df85bd52f34958997f1f50350ff97249e1dff8452865d5235d1
languageName: node
linkType: hard
@ -5630,6 +5630,13 @@ __metadata:
languageName: node
linkType: hard
"is-stream@npm:^3.0.0":
version: 3.0.0
resolution: "is-stream@npm:3.0.0"
checksum: 172093fe99119ffd07611ab6d1bcccfe8bc4aa80d864b15f43e63e54b7abc71e779acd69afdb854c4e2a67fdc16ae710e370eda40088d1cfc956a50ed82d8f16
languageName: node
linkType: hard
"is-string@npm:^1.0.5, is-string@npm:^1.0.7":
version: 1.0.7
resolution: "is-string@npm:1.0.7"
@ -6203,7 +6210,7 @@ __metadata:
eslint-plugin-react: ^7.28.0
eslint-plugin-react-hooks: ^4.3.0
evergreen-ui: ^6.13.1
execa: ^5.0.0
execa: ^7.1.1
fast-xml-parser: ^4.0.3
file-type: 16
file-url: ^3.0.0
@ -6217,9 +6224,9 @@ __metadata:
json5: ^2.2.2
ky: ^0.33.1
lodash: ^4.17.19
luxon: ^3.3.0
mime-types: ^2.1.14
mkdirp: ^1.0.3
moment: ^2.29.4
mousetrap: ^1.6.5
p-map: ^5.5.0
patch-package: ^6.2.1
@ -6311,6 +6318,13 @@ __metadata:
languageName: node
linkType: hard
"luxon@npm:^3.3.0":
version: 3.3.0
resolution: "luxon@npm:3.3.0"
checksum: 50cf17a0dc155c3dcacbeae8c0b7e80db425e0ba97b9cbdf12a7fc142d841ff1ab1560919f033af46240ed44e2f70c49f76e3422524c7fc8bb8d81ca47c66187
languageName: node
linkType: hard
"magic-string@npm:^0.27.0":
version: 0.27.0
resolution: "magic-string@npm:0.27.0"
@ -6435,6 +6449,13 @@ __metadata:
languageName: node
linkType: hard
"mimic-fn@npm:^4.0.0":
version: 4.0.0
resolution: "mimic-fn@npm:4.0.0"
checksum: 995dcece15ee29aa16e188de6633d43a3db4611bcf93620e7e62109ec41c79c0f34277165b8ce5e361205049766e371851264c21ac64ca35499acb5421c2ba56
languageName: node
linkType: hard
"mimic-response@npm:^1.0.0":
version: 1.0.1
resolution: "mimic-response@npm:1.0.1"
@ -6622,13 +6643,6 @@ __metadata:
languageName: node
linkType: hard
"moment@npm:^2.29.4":
version: 2.29.4
resolution: "moment@npm:2.29.4"
checksum: 0ec3f9c2bcba38dc2451b1daed5daded747f17610b92427bebe1d08d48d8b7bdd8d9197500b072d14e326dd0ccf3e326b9e3d07c5895d3d49e39b6803b76e80e
languageName: node
linkType: hard
"mousetrap@npm:^1.6.5":
version: 1.6.5
resolution: "mousetrap@npm:1.6.5"
@ -6834,12 +6848,12 @@ __metadata:
languageName: node
linkType: hard
"npm-run-path@npm:^4.0.1":
version: 4.0.1
resolution: "npm-run-path@npm:4.0.1"
"npm-run-path@npm:^5.1.0":
version: 5.1.0
resolution: "npm-run-path@npm:5.1.0"
dependencies:
path-key: ^3.0.0
checksum: 5374c0cea4b0bbfdfae62da7bbdf1e1558d338335f4cacf2515c282ff358ff27b2ecb91ffa5330a8b14390ac66a1e146e10700440c1ab868208430f56b5f4d23
path-key: ^4.0.0
checksum: dc184eb5ec239d6a2b990b43236845332ef12f4e0beaa9701de724aa797fe40b6bbd0157fb7639d24d3ab13f5d5cf22d223a19c6300846b8126f335f788bee66
languageName: node
linkType: hard
@ -6977,7 +6991,7 @@ __metadata:
languageName: node
linkType: hard
"onetime@npm:^5.1.0, onetime@npm:^5.1.2":
"onetime@npm:^5.1.0":
version: 5.1.2
resolution: "onetime@npm:5.1.2"
dependencies:
@ -6986,6 +7000,15 @@ __metadata:
languageName: node
linkType: hard
"onetime@npm:^6.0.0":
version: 6.0.0
resolution: "onetime@npm:6.0.0"
dependencies:
mimic-fn: ^4.0.0
checksum: 0846ce78e440841335d4e9182ef69d5762e9f38aa7499b19f42ea1c4cd40f0b4446094c455c713f9adac3f4ae86f613bb5e30c99e52652764d06a89f709b3788
languageName: node
linkType: hard
"open@npm:^7.4.2":
version: 7.4.2
resolution: "open@npm:7.4.2"
@ -7238,13 +7261,20 @@ __metadata:
languageName: node
linkType: hard
"path-key@npm:^3.0.0, path-key@npm:^3.1.0":
"path-key@npm:^3.1.0":
version: 3.1.1
resolution: "path-key@npm:3.1.1"
checksum: 55cd7a9dd4b343412a8386a743f9c746ef196e57c823d90ca3ab917f90ab9f13dd0ded27252ba49dbdfcab2b091d998bc446f6220cd3cea65db407502a740020
languageName: node
linkType: hard
"path-key@npm:^4.0.0":
version: 4.0.0
resolution: "path-key@npm:4.0.0"
checksum: 8e6c314ae6d16b83e93032c61020129f6f4484590a777eed709c4a01b50e498822b00f76ceaf94bc64dbd90b327df56ceadce27da3d83393790f1219e07721d7
languageName: node
linkType: hard
"path-parse@npm:^1.0.6, path-parse@npm:^1.0.7":
version: 1.0.7
resolution: "path-parse@npm:1.0.7"
@ -8383,7 +8413,7 @@ __metadata:
languageName: node
linkType: hard
"signal-exit@npm:^3.0.0, signal-exit@npm:^3.0.2, signal-exit@npm:^3.0.3":
"signal-exit@npm:^3.0.0, signal-exit@npm:^3.0.2":
version: 3.0.6
resolution: "signal-exit@npm:3.0.6"
checksum: b819ac81ba757af559dad0804233ae31bf6f054591cd8a671e9cbcf09f21c72ec3076fe87d1e04861f5b33b47d63f0694b568de99c99cd733ee2060515beb6d5
@ -8841,10 +8871,10 @@ __metadata:
languageName: node
linkType: hard
"strip-final-newline@npm:^2.0.0":
version: 2.0.0
resolution: "strip-final-newline@npm:2.0.0"
checksum: 69412b5e25731e1938184b5d489c32e340605bb611d6140344abc3421b7f3c6f9984b21dff296dfcf056681b82caa3bb4cc996a965ce37bcfad663e92eae9c64
"strip-final-newline@npm:^3.0.0":
version: 3.0.0
resolution: "strip-final-newline@npm:3.0.0"
checksum: 23ee263adfa2070cd0f23d1ac14e2ed2f000c9b44229aec9c799f1367ec001478469560abefd00c5c99ee6f0b31c137d53ec6029c53e9f32a93804e18c201050
languageName: node
linkType: hard