Mirror of https://github.com/mifi/lossless-cut.git
Create segments from scene changes and from keyframes (closes #1398). Also limit the maximum number of segments and improve the parameters dialog.
This commit is contained in:
parent 60bf1a5c5f
commit 64966590e2
@@ -52,7 +52,7 @@ The main feature is lossless trimming and cutting of video and audio files, whic
 - MKV/MP4 embedded chapters marks editor
 - View subtitles
 - Customizable keyboard hotkeys
-- Black scene detection and silent audio detection
+- Black scene detection, silent audio detection, and scene change detection
 - Divide timeline into segments with length L or into N segments or even randomized segments!
 - [Basic CLI support](cli.md)
@@ -77,6 +77,8 @@ The main feature is lossless trimming and cutting of video and audio files, whic
 - Loop a video / audio clip X times quickly without re-encoding
 - See [#284](https://github.com/mifi/lossless-cut/issues/284)
 - Convert a video or parts of it into X image files (not lossless)
+- Losslessly split a video into one file per scene (note you probably have to shift segments, see [#330](https://github.com/mifi/lossless-cut/issues/330).)
+- Cut away silent parts of an audio/video

 ### Export cut times as YouTube Chapters
 1. Export with Merge and "Create chapters from merged segments" enabled
@@ -305,6 +305,18 @@ module.exports = (app, mainWindow, newVersion) => {
           mainWindow.webContents.send('detectSilentScenes');
         },
       },
+      {
+        label: i18n.t('Detect scene changes'),
+        click() {
+          mainWindow.webContents.send('detectSceneChanges');
+        },
+      },
+      {
+        label: i18n.t('Create segments from keyframes'),
+        click() {
+          mainWindow.webContents.send('createSegmentsFromKeyframes');
+        },
+      },
       {
         label: i18n.t('Last ffmpeg commands'),
         click() { mainWindow.webContents.send('toggleLastCommands'); },
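
These menu items only send IPC messages; the renderer registers matching listeners for the same channel names (see the src/App.jsx hunk further down). A rough sketch of that renderer side, not part of the diff, assuming the renderer can reach Electron via window.require like elsewhere in this app:

// Sketch only: subscribe to the channels the menu sends above.
const { ipcRenderer } = window.require('electron'); // assumption: nodeIntegration, as the app uses elsewhere

const handlers = {
  detectSceneChanges: () => console.log('menu asked for scene change detection'),
  createSegmentsFromKeyframes: () => console.log('menu asked for keyframe segments'),
};

Object.entries(handlers).forEach(([channel, handler]) => ipcRenderer.on(channel, handler));
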
src/App.jsx (92 changed lines)
@@ -53,9 +53,9 @@ import {
   getStreamFps, isCuttingStart, isCuttingEnd,
   readFileMeta, getSmarterOutFormat, renderThumbnails as ffmpegRenderThumbnails,
   extractStreams, runStartupCheck, setCustomFfPath as ffmpegSetCustomFfPath,
-  isIphoneHevc, tryMapChaptersToEdl, blackDetect, silenceDetect,
+  isIphoneHevc, tryMapChaptersToEdl, blackDetect, silenceDetect, detectSceneChanges as ffmpegDetectSceneChanges,
   getDuration, getTimecodeFromStreams, createChaptersFromSegments, extractSubtitleTrack,
-  getFfmpegPath, RefuseOverwriteError,
+  getFfmpegPath, RefuseOverwriteError, readFrames, mapTimesToSegments,
 } from './ffmpeg';
 import { shouldCopyStreamByDefault, getAudioStreams, getRealVideoStreams, isAudioDefinitelyNotSupported, doesPlayerSupportFile } from './util/streams';
 import { exportEdlFile, readEdlFile, saveLlcProject, loadLlcProject, askForEdlImport } from './edlStore';
@@ -74,6 +74,8 @@ import { openSendReportDialog } from './reporting';
 import { fallbackLng } from './i18n';
 import { createSegment, getCleanCutSegments, getSegApparentStart, findSegmentsAtCursor, sortSegments, invertSegments, getSegmentTags, convertSegmentsToChapters, hasAnySegmentOverlap } from './segments';
 import { getOutSegError as getOutSegErrorRaw } from './util/outputNameTemplate';
+import * as ffmpegParameters from './ffmpeg-parameters';
+import { maxSegmentsAllowed } from './util/constants';

 import isDev from './isDev';
@@ -1491,13 +1493,14 @@ const App = memo(() => {
     if (validEdl.length === 0) throw new Error(i18n.t('No valid segments found'));

-    if (!append) {
-      clearSegCounter();
-    }
+    if (!append) clearSegCounter();
+
+    if (validEdl.length > maxSegmentsAllowed) throw new Error(i18n.t('Tried to create too many segments (max {{maxSegmentsAllowed}}.)', { maxSegmentsAllowed }));

     setCutSegments((existingSegments) => {
       const needToAppend = append && existingSegments.length > 1;
-      const newSegments = validEdl.map((segment, i) => createIndexedSegment({ segment, incrementCount: needToAppend || i > 0 }));
-      if (needToAppend) return [...existingSegments, ...newSegments];
+      let newSegments = validEdl.map((segment, i) => createIndexedSegment({ segment, incrementCount: needToAppend || i > 0 }));
+      if (needToAppend) newSegments = [...existingSegments, ...newSegments];
       return newSegments;
     });
   }, [clearSegCounter, createIndexedSegment, setCutSegments]);
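
For context, a sketch of the new guard in isolation (illustrative values only; maxSegmentsAllowed comes from the new src/util/constants.js added at the bottom of this commit):

// Sketch, not project code: what the added check protects against.
import { maxSegmentsAllowed } from './util/constants'; // 2000

const validEdl = new Array(2500).fill({ start: 0, end: 1 }); // hypothetical oversized imported EDL
if (validEdl.length > maxSegmentsAllowed) {
  // loadCutSegments now throws instead of flooding the timeline with thousands of segments
  throw new Error(`Tried to create too many segments (max ${maxSegmentsAllowed}.)`);
}
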
@@ -1753,7 +1756,7 @@ const App = memo(() => {
     }
   }, [customOutDir, enableOverwriteOutput, filePath, mainStreams, setWorking]);

-  const detectScenes = useCallback(async ({ name, workingText, errorText, fn }) => {
+  const detectSegments = useCallback(async ({ name, workingText, errorText, fn }) => {
     if (!filePath) return;
     if (workingRef.current) return;
     try {
@@ -1764,8 +1767,7 @@ const App = memo(() => {
       console.log(name, newSegments);
       loadCutSegments(newSegments, true);
     } catch (err) {
-      errorToast(errorText);
-      console.error('Failed to detect scenes', name, err);
+      handleError(errorText, err);
     } finally {
       setWorking();
       setCutProgress();
@@ -1773,40 +1775,28 @@ const App = memo(() => {
   }, [filePath, setWorking, loadCutSegments]);

   const detectBlackScenes = useCallback(async () => {
-    const parameters = {
-      black_min_duration: {
-        value: '2.0',
-        hint: i18n.t('Set the minimum detected black duration expressed in seconds. It must be a non-negative floating point number.'),
-      },
-      picture_black_ratio_th: {
-        value: '0.98',
-        hint: i18n.t('Set the threshold for considering a picture "black".'),
-      },
-      pixel_black_th: {
-        value: '0.10',
-        hint: i18n.t('Set the threshold for considering a pixel "black".'),
-      },
-    };
-    const filterOptions = await showParametersDialog({ title: i18n.t('Enter parameters'), parameters, docUrl: 'https://ffmpeg.org/ffmpeg-filters.html#blackdetect' });
+    const filterOptions = await showParametersDialog({ title: i18n.t('Enter parameters'), parameters: ffmpegParameters.blackdetect(), docUrl: 'https://ffmpeg.org/ffmpeg-filters.html#blackdetect' });
     if (filterOptions == null) return;
-    await detectScenes({ name: 'blackScenes', workingText: i18n.t('Detecting black scenes'), errorText: i18n.t('Failed to detect black scenes'), fn: async () => blackDetect({ filePath, duration, filterOptions, onProgress: setCutProgress, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end }) });
-  }, [currentApparentCutSeg.end, currentApparentCutSeg.start, detectScenes, duration, filePath]);
+    await detectSegments({ name: 'blackScenes', workingText: i18n.t('Detecting black scenes'), errorText: i18n.t('Failed to detect black scenes'), fn: async () => blackDetect({ filePath, duration, filterOptions, onProgress: setCutProgress, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end }) });
+  }, [currentApparentCutSeg.end, currentApparentCutSeg.start, detectSegments, duration, filePath]);

   const detectSilentScenes = useCallback(async () => {
-    const parameters = {
-      noise: {
-        value: '-60dB',
-        hint: i18n.t('Set noise tolerance. Can be specified in dB (in case "dB" is appended to the specified value) or amplitude ratio. Default is -60dB, or 0.001.'),
-      },
-      duration: {
-        value: '2.0',
-        hint: i18n.t('Set silence duration until notification (default is 2 seconds).'),
-      },
-    };
-    const filterOptions = await showParametersDialog({ title: i18n.t('Enter parameters'), parameters, docUrl: 'https://ffmpeg.org/ffmpeg-filters.html#silencedetect' });
+    const filterOptions = await showParametersDialog({ title: i18n.t('Enter parameters'), parameters: ffmpegParameters.silencedetect(), docUrl: 'https://ffmpeg.org/ffmpeg-filters.html#silencedetect' });
     if (filterOptions == null) return;
-    await detectScenes({ name: 'silentScenes', workingText: i18n.t('Detecting silent scenes'), errorText: i18n.t('Failed to detect silent scenes'), fn: async () => silenceDetect({ filePath, duration, filterOptions, onProgress: setCutProgress, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end }) });
-  }, [currentApparentCutSeg.end, currentApparentCutSeg.start, detectScenes, duration, filePath]);
+    await detectSegments({ name: 'silentScenes', workingText: i18n.t('Detecting silent scenes'), errorText: i18n.t('Failed to detect silent scenes'), fn: async () => silenceDetect({ filePath, duration, filterOptions, onProgress: setCutProgress, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end }) });
+  }, [currentApparentCutSeg.end, currentApparentCutSeg.start, detectSegments, duration, filePath]);
+
+  const detectSceneChanges = useCallback(async () => {
+    const filterOptions = await showParametersDialog({ title: i18n.t('Enter parameters'), parameters: ffmpegParameters.sceneChange() });
+    if (filterOptions == null) return;
+    await detectSegments({ name: 'sceneChanges', workingText: i18n.t('Detecting scene changes'), errorText: i18n.t('Failed to detect scene changes'), fn: async () => ffmpegDetectSceneChanges({ filePath, duration, minChange: filterOptions.minChange, onProgress: setCutProgress, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end }) });
+  }, [currentApparentCutSeg.end, currentApparentCutSeg.start, detectSegments, duration, filePath]);
+
+  const createSegmentsFromKeyframes = useCallback(async () => {
+    const keyframes = (await readFrames({ filePath, from: currentApparentCutSeg.start, to: currentApparentCutSeg.end, streamIndex: mainVideoStream?.index })).filter((frame) => frame.keyframe);
+    const newSegments = mapTimesToSegments(keyframes.map((keyframe) => keyframe.time));
+    loadCutSegments(newSegments, true);
+  }, [currentApparentCutSeg.end, currentApparentCutSeg.start, filePath, loadCutSegments, mainVideoStream?.index]);

   const userHtml5ifyCurrentFile = useCallback(async () => {
     if (!filePath) return;
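
The new createSegmentsFromKeyframes is essentially a three-step pipeline over functions that this commit exports from src/ffmpeg.js. A standalone sketch of the same data flow, with a hypothetical file path, stream index and time range:

// Sketch, not part of the diff: keyframe packets -> keyframe times -> segments.
import { readFrames, mapTimesToSegments } from './ffmpeg';

async function keyframeSegmentsSketch() {
  const filePath = '/path/to/input.mp4'; // hypothetical
  const from = 0; // hypothetical bounds of the currently selected segment
  const to = 60;
  const streamIndex = 0; // hypothetical main video stream index

  // readFrames returns packets sorted by time, each carrying a `keyframe` flag
  const frames = await readFrames({ filePath, from, to, streamIndex });
  const keyframeTimes = frames.filter((frame) => frame.keyframe).map((frame) => frame.time);

  // consecutive keyframe times become segments; the last segment has an undefined end (until end of video)
  return mapTimesToSegments(keyframeTimes);
}
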
@@ -2276,7 +2266,7 @@ const App = memo(() => {
       }
     }

-    const action = {
+    const actions = {
       openFiles: (event, filePaths) => { userOpenFiles(filePaths.map(resolvePathIfNeeded)); },
       openFilesDialog,
       closeCurrentFile: () => { closeFileWithConfirm(); },
@@ -2305,13 +2295,25 @@ const App = memo(() => {
       concatCurrentBatch,
       detectBlackScenes,
       detectSilentScenes,
+      detectSceneChanges,
+      createSegmentsFromKeyframes,
       shiftAllSegmentTimes,
     };

-    const entries = Object.entries(action);
-    entries.forEach(([key, value]) => electron.ipcRenderer.on(key, value));
-    return () => entries.forEach(([key, value]) => electron.ipcRenderer.removeListener(key, value));
-  }, [apparentCutSegments, askSetStartTimeOffset, checkFileOpened, clearSegments, closeBatch, closeFileWithConfirm, concatCurrentBatch, createFixedDurationSegments, createNumSegments, createRandomSegments, customOutDir, cutSegments, detectBlackScenes, detectSilentScenes, detectedFps, extractAllStreams, fileFormat, filePath, fillSegmentsGaps, getFrameCount, invertAllSegments, loadCutSegments, loadMedia, openFilesDialog, openSendReportDialogWithState, reorderSegsByStartTime, setWorking, shiftAllSegmentTimes, shuffleSegments, toggleKeyboardShortcuts, toggleLastCommands, toggleSettings, tryFixInvalidDuration, userHtml5ifyCurrentFile, userOpenFiles]);
+    const actionsWithCatch = Object.entries(actions).map(([key, action]) => [
+      key,
+      async () => {
+        try {
+          await action();
+        } catch (err) {
+          handleError(err);
+        }
+      },
+    ]);
+
+    actionsWithCatch.forEach(([key, action]) => electron.ipcRenderer.on(key, action));
+    return () => actionsWithCatch.forEach(([key, action]) => electron.ipcRenderer.removeListener(key, action));
+  }, [apparentCutSegments, askSetStartTimeOffset, checkFileOpened, clearSegments, closeBatch, closeFileWithConfirm, concatCurrentBatch, createFixedDurationSegments, createNumSegments, createRandomSegments, createSegmentsFromKeyframes, customOutDir, cutSegments, detectBlackScenes, detectSceneChanges, detectSilentScenes, detectedFps, extractAllStreams, fileFormat, filePath, fillSegmentsGaps, getFrameCount, invertAllSegments, loadCutSegments, loadMedia, openFilesDialog, openSendReportDialogWithState, reorderSegsByStartTime, setWorking, shiftAllSegmentTimes, shuffleSegments, toggleKeyboardShortcuts, toggleLastCommands, toggleSettings, tryFixInvalidDuration, userHtml5ifyCurrentFile, userOpenFiles]);

     const showAddStreamSourceDialog = useCallback(async () => {
       try {
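
The actionsWithCatch change wraps every IPC-triggered action so that a rejected promise is routed through handleError instead of becoming an unhandled rejection. The same pattern in isolation, as a hedged sketch (hypothetical action names and a generic error callback, not project code):

// Sketch of the wrapping pattern introduced above.
const { ipcRenderer } = window.require('electron'); // assumption: renderer has node integration, as in the app

function registerIpcActions(actions, onError) {
  const wrapped = Object.entries(actions).map(([channel, action]) => [
    channel,
    async (...args) => {
      try {
        await action(...args);
      } catch (err) {
        onError(err); // one place to report failures from any menu-triggered action
      }
    },
  ]);

  wrapped.forEach(([channel, handler]) => ipcRenderer.on(channel, handler));
  // return an unsubscribe function, mirroring the useEffect cleanup in the diff
  return () => wrapped.forEach(([channel, handler]) => ipcRenderer.removeListener(channel, handler));
}
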
@@ -1,4 +1,4 @@
-import React, { useState, useCallback } from 'react';
+import React, { useState, useCallback, useRef, useEffect } from 'react';
 import { Button, TextInputField, Checkbox, RadioGroup, Paragraph, LinkIcon } from 'evergreen-ui';
 import Swal from 'sweetalert2';
 import i18n from 'i18next';
@@ -404,42 +404,71 @@ export async function showCleanupFilesDialog(cleanupChoicesIn = {}) {
   return undefined;
 }

-const ParametersInput = ({ description, parameters: parametersIn, onChange: onChangeProp, docUrl }) => {
+const ParametersInput = ({ description, parameters: parametersIn, onChange, onSubmit, docUrl }) => {
+  const firstInputRef = useRef();
   const [parameters, setParameters] = useState(parametersIn);

   const getParameter = (key) => parameters[key]?.value;
-  const onChange = (key, value) => setParameters((existing) => {
+
+  const handleChange = (key, value) => setParameters((existing) => {
     const newParameters = { ...existing, [key]: { ...existing[key], value } };
-    onChangeProp(newParameters);
+    onChange(newParameters);
     return newParameters;
   });

+  const handleSubmit = useCallback((e) => {
+    e.preventDefault();
+    onSubmit();
+  }, [onSubmit]);
+
+  useEffect(() => {
+    firstInputRef.current?.focus?.();
+  }, []);
+
   return (
     <div style={{ textAlign: 'left' }}>
       {description && <p>{description}</p>}

       {docUrl && <p><Button iconBefore={LinkIcon} onClick={() => electron.shell.openExternal(docUrl)}>Read more</Button></p>}

-      {Object.entries(parametersIn).map(([key, parameter]) => (
-        <TextInputField key={key} label={parameter.label || key} value={getParameter(key)} onChange={(e) => onChange(key, e.target.value)} hint={parameter.hint} />
-      ))}
+      <form onSubmit={handleSubmit}>
+        {Object.entries(parametersIn).map(([key, parameter], i) => (
+          <TextInputField ref={i === 0 ? firstInputRef : undefined} key={key} label={parameter.label || key} value={getParameter(key)} onChange={(e) => handleChange(key, e.target.value)} hint={parameter.hint} />
+        ))}
+
+        <input type="submit" value="submit" style={{ display: 'none' }} />
+      </form>
     </div>
   );
 };

 export async function showParametersDialog({ title, description, parameters: parametersIn, docUrl }) {
   let parameters = parametersIn;
+  let resolve1;

-  const { value } = await ReactSwal.fire({
-    title,
-    html: <ParametersInput description={description} parameters={parameters} onChange={(newParameters) => { parameters = newParameters; }} docUrl={docUrl} />,
-    confirmButtonText: i18n.t('Confirm'),
-    showCancelButton: true,
-    cancelButtonText: i18n.t('Cancel'),
+  const promise1 = new Promise((resolve) => {
+    resolve1 = resolve;
   });
+  const handleSubmit = () => {
+    Swal.close();
+    resolve1(true);
+  };

-  if (value) return Object.fromEntries(Object.entries(parameters).map(([key, parameter]) => [key, parameter.value]));
-  return undefined;
+  const promise2 = (async () => {
+    const { isConfirmed } = await ReactSwal.fire({
+      title,
+      html: <ParametersInput description={description} parameters={parameters} onChange={(newParameters) => { parameters = newParameters; }} onSubmit={handleSubmit} docUrl={docUrl} />,
+      confirmButtonText: i18n.t('Confirm'),
+      showCancelButton: true,
+      cancelButtonText: i18n.t('Cancel'),
+    });
+    return isConfirmed;
+  })();
+
+  const isConfirmed = await Promise.race([promise1, promise2]);
+  if (!isConfirmed) return undefined;
+
+  return Object.fromEntries(Object.entries(parameters).map(([key, parameter]) => [key, parameter.value]));
 }
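
The rewritten showParametersDialog races two promises: one resolved when the user presses Enter inside the form (which also closes the SweetAlert2 dialog programmatically), and one resolved by the dialog's own Confirm/Cancel buttons. A stripped-down sketch of that race, independent of the dialog markup (names here are illustrative, not project code):

// Minimal sketch of the "submit via Enter OR via the Confirm button" race.
function raceSubmitAndConfirm(showDialog /* () => Promise<boolean>, e.g. a Swal.fire wrapper */) {
  let resolveSubmit;
  const submittedViaEnter = new Promise((resolve) => { resolveSubmit = resolve; });

  // the form's onSubmit handler calls this, then closes the dialog
  const handleSubmit = () => resolveSubmit(true);

  const confirmedViaButton = showDialog(); // resolves true/false when the dialog closes normally

  return { handleSubmit, confirmed: Promise.race([submittedViaEnter, confirmedViaButton]) };
}
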
src/ffmpeg-parameters.js (new file, 34 lines)
@@ -0,0 +1,34 @@
+import i18n from 'i18next';
+
+export const blackdetect = () => ({
+  black_min_duration: {
+    value: '2.0',
+    hint: i18n.t('Set the minimum detected black duration expressed in seconds. It must be a non-negative floating point number.'),
+  },
+  picture_black_ratio_th: {
+    value: '0.98',
+    hint: i18n.t('Set the threshold for considering a picture "black".'),
+  },
+  pixel_black_th: {
+    value: '0.10',
+    hint: i18n.t('Set the threshold for considering a pixel "black".'),
+  },
+});
+
+export const silencedetect = () => ({
+  noise: {
+    value: '-60dB',
+    hint: i18n.t('Set noise tolerance. Can be specified in dB (in case "dB" is appended to the specified value) or amplitude ratio. Default is -60dB, or 0.001.'),
+  },
+  duration: {
+    value: '2.0',
+    hint: i18n.t('Set silence duration until notification (default is 2 seconds).'),
+  },
+});
+
+export const sceneChange = () => ({
+  minChange: {
+    value: '0.3',
+    hint: i18n.t('Minimum change between two frames to be considered a new scene. A value between 0.3 and 0.5 is generally a sane choice.'),
+  },
+});
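
Each export here is a factory returning the parameter descriptors that showParametersDialog renders; the dialog then reduces them to a plain key/value object of strings. A hedged usage sketch (the './dialogs' import path is an assumption for illustration):

// Sketch, not part of the diff: how the descriptors flow into the dialog.
import * as ffmpegParameters from './ffmpeg-parameters';
import { showParametersDialog } from './dialogs'; // assumption: the dialog module shown in the hunk above

async function askForSceneChangeOptions() {
  const filterOptions = await showParametersDialog({
    title: 'Enter parameters',
    parameters: ffmpegParameters.sceneChange(),
  });
  // undefined if cancelled, otherwise e.g. { minChange: '0.3' } (values stay strings)
  return filterOptions;
}
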
@@ -108,12 +108,8 @@ function getIntervalAroundTime(time, window) {
   };
 }

-export async function readFrames({ filePath, aroundTime, window, streamIndex }) {
-  let intervalsArgs = [];
-  if (aroundTime != null) {
-    const { from, to } = getIntervalAroundTime(aroundTime, window);
-    intervalsArgs = ['-read_intervals', `${from}%${to}`];
-  }
+export async function readFrames({ filePath, from, to, streamIndex }) {
+  const intervalsArgs = from != null && to != null ? ['-read_intervals', `${from}%${to}`] : [];
   const { stdout } = await runFfprobe(['-v', 'error', ...intervalsArgs, '-show_packets', '-select_streams', streamIndex, '-show_entries', 'packet=pts_time,flags', '-of', 'json', filePath]);
   const packetsFiltered = JSON.parse(stdout).packets
     .map(p => ({
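
With this refactor, readFrames takes an explicit from/to window and forwards it to ffprobe as -read_intervals. For reference, the argument list the code above assembles, written out with placeholder values (path, times and stream index are hypothetical):

// Equivalent ffprobe argument list built by readFrames (placeholders only):
const ffprobeArgs = [
  '-v', 'error',
  '-read_intervals', '60%120',      // only present when both `from` and `to` are given
  '-show_packets',
  '-select_streams', '0',           // streamIndex
  '-show_entries', 'packet=pts_time,flags',
  '-of', 'json',
  '/path/to/input.mp4',
];
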
@@ -126,6 +122,12 @@ export async function readFrames({ filePath, aroundTime, window, streamIndex })
   return sortBy(packetsFiltered, 'time');
 }

+export async function readFramesAroundTime({ filePath, streamIndex, aroundTime, window }) {
+  if (aroundTime == null) throw new Error('aroundTime was nullish');
+  const { from, to } = getIntervalAroundTime(aroundTime, window);
+  return readFrames({ filePath, from, to, streamIndex });
+}
+
 // https://stackoverflow.com/questions/14005110/how-to-split-a-video-using-ffmpeg-so-that-each-chunk-starts-with-a-key-frame
 // http://kicherer.org/joomla/index.php/de/blog/42-avcut-frame-accurate-video-cutting-with-only-small-quality-loss
 export function getSafeCutTime(frames, cutTime, nextMode) {
@@ -548,12 +550,63 @@ export async function renderWaveformPng({ filePath, aroundTime, window, color })
   }
 }

+const getInputSeekArgs = ({ filePath, from, to }) => [
+  ...(from != null ? ['-ss', from.toFixed(5)] : []),
+  '-i', filePath,
+  ...(to != null ? ['-t', (to - from).toFixed(5)] : []),
+];
+
+const getSegmentOffset = (from) => (from != null ? from : 0);
+
+function adjustSegmentsWithOffset({ segments, from }) {
+  const offset = getSegmentOffset(from);
+  return segments.map(({ start, end }) => ({ start: start + offset, end: end != null ? end + offset : end }));
+}
+
+export function mapTimesToSegments(times) {
+  const segments = [];
+  for (let i = 0; i < times.length; i += 1) {
+    const start = times[i];
+    const end = times[i + 1];
+    if (start != null) segments.push({ start, end }); // end undefined is allowed (means until end of video)
+  }
+  return segments;
+}
+
+// https://stackoverflow.com/questions/35675529/using-ffmpeg-how-to-do-a-scene-change-detection-with-timecode
+export async function detectSceneChanges({ filePath, duration, minChange, onProgress, from, to }) {
+  const args = [
+    '-hide_banner',
+    ...getInputSeekArgs({ filePath, from, to }),
+    '-filter_complex', `select='gt(scene,${minChange})',metadata=print:file=-`,
+    '-f', 'null', '-',
+  ];
+  const process = execa(getFfmpegPath(), args, { encoding: null, buffer: false });
+
+  const times = [0];
+
+  handleProgress(process, duration, onProgress);
+  const rl = readline.createInterface({ input: process.stdout });
+  rl.on('line', (line) => {
+    const match = line.match(/^frame:\d+\s+pts:\d+\s+pts_time:([\d.]+)/);
+    if (!match) return;
+    const time = parseFloat(match[1]);
+    if (Number.isNaN(time) || time <= times[times.length - 1]) return;
+    times.push(time);
+  });
+
+  await process;
+
+  const segments = mapTimesToSegments(times);
+
+  return adjustSegmentsWithOffset({ segments, from });
+}
+
+
 export async function detectIntervals({ filePath, duration, customArgs, onProgress, from, to, matchLineTokens }) {
   const args = [
     '-hide_banner',
-    ...(from != null ? ['-ss', from.toFixed(5)] : []),
-    '-i', filePath,
-    ...(to != null ? ['-t', (to - from).toFixed(5)] : []),
+    ...getInputSeekArgs({ filePath, from, to }),
     ...customArgs,
     '-f', 'null', '-',
   ];
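
mapTimesToSegments turns a sorted list of timestamps into segments, and detectSceneChanges seeds it with 0 so the first segment starts at the beginning of the analysed range. A small worked example (the times are made up):

// Sketch: e.g. scene changes detected at 4.2s and 9.0s within the analysed range.
import { mapTimesToSegments } from './ffmpeg';

const times = [0, 4.2, 9.0];
const segments = mapTimesToSegments(times);
// -> [
//      { start: 0,   end: 4.2 },
//      { start: 4.2, end: 9.0 },
//      { start: 9.0, end: undefined }, // undefined end means "until end of video"
//    ]
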
@@ -571,8 +624,7 @@ export async function detectIntervals({ filePath, duration, customArgs, onProgre
   handleProgress(process, duration, onProgress, customMatcher);

   await process;
-  const offset = from != null ? from : 0;
-  return segments.map(({ start, end }) => ({ start: start + offset, end: end + offset }));
+  return adjustSegmentsWithOffset({ segments, from });
 }

 const mapFilterOptions = (options) => Object.entries(options).map(([key, value]) => `${key}=${value}`).join(':');
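
adjustSegmentsWithOffset, now shared by detectIntervals and detectSceneChanges, shifts detector output (which is relative to the seeked-to start) back into absolute file time. A tiny worked sketch of the same mapping, with made-up numbers:

// Sketch: what the shared offset adjustment does for a detection run seeked to from = 60.
const from = 60;
const detected = [{ start: 1.5, end: 3 }]; // times relative to the seek point
const absolute = detected.map(({ start, end }) => ({ start: start + from, end: end != null ? end + from : end }));
// -> [{ start: 61.5, end: 63 }]
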
@@ -2,7 +2,7 @@ import { useState, useCallback, useRef, useEffect, useMemo } from 'react';
 import sortBy from 'lodash/sortBy';
 import useDebounceOld from 'react-use/lib/useDebounce'; // Want to phase out this

-import { readFrames, findNearestKeyFrameTime as ffmpegFindNearestKeyFrameTime } from '../ffmpeg';
+import { readFramesAroundTime, findNearestKeyFrameTime as ffmpegFindNearestKeyFrameTime } from '../ffmpeg';

 const maxKeyframes = 1000;
 // const maxKeyframes = 100;
@@ -26,7 +26,7 @@ export default ({ keyframesEnabled, filePath, commandedTime, mainVideoStream, de
     if (!shouldRun) return;

     try {
-      const promise = readFrames({ filePath, aroundTime: commandedTime, streamIndex: mainVideoStream.index, window: ffmpegExtractWindow });
+      const promise = readFramesAroundTime({ filePath, aroundTime: commandedTime, streamIndex: mainVideoStream.index, window: ffmpegExtractWindow });
       readingKeyframesPromise.current = promise;
       const newFrames = await promise;
       if (aborted) return;
@@ -1,6 +1,6 @@
 import { getRealVideoStreams, getVideoTimebase } from './util/streams';

-import { readFrames } from './ffmpeg';
+import { readFramesAroundTime } from './ffmpeg';

 const { stat } = window.require('fs-extra');

@@ -18,7 +18,7 @@ export async function getSmartCutParams({ path, videoDuration, desiredCutFrom, s
   const videoStream = videoStreams[0];

   async function readKeyframes(window) {
-    const frames = await readFrames({ filePath: path, aroundTime: desiredCutFrom, streamIndex: videoStream.index, window });
+    const frames = await readFramesAroundTime({ filePath: path, aroundTime: desiredCutFrom, streamIndex: videoStream.index, window });
     return frames.filter((frame) => frame.keyframe);
   }

src/util/constants.js (new file, 3 lines)
@@ -0,0 +1,3 @@
+// anything more than this will probably cause the UI to become unusably slow
+// eslint-disable-next-line import/prefer-default-export
+export const maxSegmentsAllowed = 2000;