mirror of https://github.com/mifi/lossless-cut.git synced 2024-11-25 11:43:17 +01:00

Implement thumbnails #6

This commit is contained in:
Mikael Finstad 2020-02-27 22:59:37 +08:00
parent b5f2f5d552
commit 9e7a4e317f
5 changed files with 143 additions and 22 deletions

View File

@@ -31,6 +31,7 @@ I made a tool for cross platform sharing of files between computer/phone over th
- Undo/redo
- Give labels to cut segments
- View segment details, export/import cut segments as CSV
- Thumbnails and audio waveform
## Example lossless use cases

View File

@@ -38,7 +38,8 @@ const Timeline = memo(({
durationSafe, getCurrentTime, startTimeOffset, playerTime, commandedTime,
zoom, neighbouringFrames, seekAbs, seekRel, duration, apparentCutSegments, zoomRel,
setCurrentSegIndex, currentSegIndexSafe, invertCutSegments, inverseCutSegments, mainVideoStream, formatTimecode,
waveform, shouldShowWaveform, shouldShowKeyframes, timelineHeight, timelineExpanded,
waveform, shouldShowWaveform, shouldShowKeyframes, timelineHeight, thumbnails,
onZoomWindowStartTimeChange, waveformEnabled, thumbnailsEnabled,
}) => {
const timelineScrollerRef = useRef();
const timelineScrollerSkipEventRef = useRef();
@@ -68,14 +69,22 @@ const Timeline = memo(({
}, [zoom, durationSafe, getCurrentTime]);
const onTimelineScroll = useCallback((e) => {
if (!zoomed) return;
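// Time at the left edge of the visible zoom window: the scroll offset as a fraction of the fully zoomed timeline width, scaled to the media duration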
const zoomWindowStartTime = timelineScrollerRef.current
? (timelineScrollerRef.current.scrollLeft / (timelineScrollerRef.current.offsetWidth * zoom)) * duration
: 0;
onZoomWindowStartTimeChange(zoomWindowStartTime);
if (timelineScrollerSkipEventRef.current) {
timelineScrollerSkipEventRef.current = false;
return;
}
if (!zoomed) return;
seekAbs((((e.target.scrollLeft + (timelineScrollerRef.current.offsetWidth * 0.5))
/ (timelineScrollerRef.current.offsetWidth * zoom)) * duration));
}, [duration, seekAbs, zoomed, zoom]);
}, [duration, seekAbs, zoomed, zoom, onZoomWindowStartTimeChange]);
const handleTap = useCallback((e) => {
const target = timelineWrapperRef.current;
@@ -105,7 +114,7 @@ const Timeline = memo(({
onScroll={onTimelineScroll}
ref={timelineScrollerRef}
>
{timelineExpanded && shouldShowWaveform && waveform && (
{waveformEnabled && shouldShowWaveform && waveform && (
<Waveform
calculateTimelinePos={calculateTimelinePos}
durationSafe={durationSafe}
@@ -115,6 +124,14 @@ const Timeline = memo(({
/>
)}
{thumbnailsEnabled && (
<div style={{ height: timelineHeight, width: `${zoom * 100}%`, position: 'relative' }}>
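{/* Each thumbnail is absolutely positioned at its timestamp's fraction of the total duration */}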
{thumbnails.map((thumbnail) => (
<img key={thumbnail.url} src={thumbnail.url} alt="" style={{ position: 'absolute', left: `${(thumbnail.time / durationSafe) * 100}%`, height: timelineHeight * 1.5, zIndex: 1, maxWidth: '13%', objectFit: 'cover', border: '1px solid rgba(255, 255, 255, 0.5)', borderBottomRightRadius: 15, borderTopLeftRadius: 15, borderTopRightRadius: 15 }} />
))}
</div>
)}
<div
style={{ height: timelineHeight, width: `${zoom * 100}%`, position: 'relative', backgroundColor: timelineBackground }}
ref={timelineWrapperRef}
@@ -163,11 +180,13 @@ const Timeline = memo(({
</div>
</div>
{timelineExpanded && !shouldShowWaveform && (
<div style={{ position: 'absolute', display: 'flex', alignItems: 'center', justifyContent: 'center', height: timelineHeight, bottom: timelineHeight, left: 0, right: 0, color: 'rgba(255,255,255,0.6)' }}>Zoom in more to view waveform</div>
{(waveformEnabled && !thumbnailsEnabled && !shouldShowWaveform) && (
<div style={{ position: 'absolute', display: 'flex', alignItems: 'center', justifyContent: 'center', height: timelineHeight, bottom: timelineHeight, left: 0, right: 0, color: 'rgba(255,255,255,0.6)' }}>
Zoom in more to view waveform
</div>
)}
<div style={{ position: 'absolute', height: timelineHeight, left: 0, right: 0, bottom: 0, display: 'flex', alignItems: 'center', justifyContent: 'center', pointerEvents: 'none' }}>
<div style={{ position: 'absolute', height: timelineHeight, left: 0, right: 0, bottom: 0, display: 'flex', alignItems: 'center', justifyContent: 'center', pointerEvents: 'none', zIndex: 2 }}>
<div style={{ background: 'rgba(0,0,0,0.4)', borderRadius: 3, padding: '2px 4px', color: 'rgba(255, 255, 255, 0.8)' }}>
{formatTimecode(offsetCurrentTime)}
</div>

View File

@@ -1,16 +1,17 @@
import React, { memo } from 'react';
import { FaHandPointLeft, FaHandPointRight, FaStepBackward, FaStepForward, FaCaretLeft, FaCaretRight, FaPause, FaPlay } from 'react-icons/fa';
import { FaHandPointLeft, FaHandPointRight, FaStepBackward, FaStepForward, FaCaretLeft, FaCaretRight, FaPause, FaPlay, FaImages } from 'react-icons/fa';
import { GiSoundWaves } from 'react-icons/gi';
// import useTraceUpdate from 'use-trace-update';
import { getSegColors, parseDuration, formatDuration } from './util';
import { primaryColor } from './colors';
const TimelineControls = memo(({
seekAbs, currentSegIndexSafe, cutSegments, currentCutSeg, setCutStart, setCutEnd,
setCurrentSegIndex, cutStartTimeManual, setCutStartTimeManual, cutEndTimeManual, setCutEndTimeManual,
duration, jumpCutEnd, jumpCutStart, startTimeOffset, setCutTime, currentApparentCutSeg,
playing, shortStep, playCommand, setTimelineExpanded, hasAudio,
playing, shortStep, playCommand, setTimelineMode, hasAudio, hasVideo, timelineMode,
}) => {
const {
segActiveBgColor: currentSegActiveBgColor,
@@ -117,10 +118,19 @@ const TimelineControls = memo(({
{hasAudio && (
<GiSoundWaves
size={24}
style={{ padding: '0 5px' }}
style={{ padding: '0 5px', color: timelineMode === 'waveform' ? primaryColor : undefined }}
role="button"
title="Expand timeline"
onClick={() => setTimelineExpanded(v => !v)}
title="Show waveform"
onClick={() => setTimelineMode('waveform')}
/>
)}
{hasVideo && (
<FaImages
size={20}
style={{ padding: '0 5px', color: timelineMode === 'thumbnails' ? primaryColor : undefined }}
role="button"
title="Show thumbnails"
onClick={() => setTimelineMode('thumbnails')}
/>
)}
</div>

View File

@@ -486,6 +486,34 @@ async function extractStreams({ filePath, customOutDir, streams }) {
console.log(stdout);
}
async function renderThumbnail(filePath, timestamp) {
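// Seek to `timestamp`, decode a single frame, scale it to 200px height (width kept divisible by 2)
// and write it to stdout as a JPEG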
const args = [
'-ss', timestamp,
'-i', filePath,
'-vf', 'scale=-2:200',
'-f', 'image2',
'-vframes', '1',
'-q:v', '10',
'-',
];
const ffmpegPath = await getFfmpegPath();
const { stdout } = await execa(ffmpegPath, args, { encoding: null });
const blob = new Blob([stdout], { type: 'image/jpeg' });
return URL.createObjectURL(blob);
}
async function renderThumbnails({ filePath, numThumbs, from, duration, onThumbnail }) {
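// Spread numThumbs timestamps evenly across [from, from + duration) and render at most two thumbnails concurrently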
const thumbTimes = Array(numThumbs).fill().map((unused, i) => (from + ((duration * i) / numThumbs)));
await pMap(thumbTimes, async (time) => {
const url = await renderThumbnail(filePath, time);
onThumbnail({ time, url });
}, { concurrency: 2 });
}
async function renderWaveformPng({ filePath, aroundTime, window, color }) {
const { from, to } = getIntervalAroundTime(aroundTime, window);
@@ -602,4 +630,5 @@ module.exports = {
readFrames,
getNextPrevKeyframe,
renderWaveformPng,
renderThumbnails,
};
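
For orientation, a minimal sketch (not part of the diff) of how these two helpers can be driven from calling code; the file path, thumbnail count and time window are illustrative, and revoking the returned object URLs is left to the caller:

// Illustrative usage only; the filePath and the 60-second window are made up
const urls = [];
await renderThumbnails({
  filePath: '/path/to/video.mp4',
  numThumbs: 5,
  from: 0,
  duration: 60,
  onThumbnail: (thumb) => urls.push(thumb.url), // thumb.url is an object URL backed by the JPEG blob
});
// When the thumbnails are discarded:
urls.forEach(url => URL.revokeObjectURL(url));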

View File

@@ -143,10 +143,13 @@ const App = memo(() => {
const [commandedTime, setCommandedTime] = useState(0);
const [ffmpegCommandLog, setFfmpegCommandLog] = useState([]);
const [neighbouringFrames, setNeighbouringFrames] = useState([]);
const [thumbnails, setThumbnails] = useState([]);
const [shortestFlag, setShortestFlag] = useState(false);
const [debouncedWaveformData, setDebouncedWaveformData] = useState();
const [debouncedReadKeyframesData, setDebouncedReadKeyframesData] = useState();
const [timelineExpanded, setTimelineExpanded] = useState(false);
const [waveformEnabled, setWaveformEnabled] = useState(false);
const [thumbnailsEnabled, setThumbnailsEnabled] = useState(false);
const [zoomWindowStartTime, setZoomWindowStartTime] = useState(0);
const [showSideBar, setShowSideBar] = useState(true);
@@ -169,8 +172,8 @@ const App = memo(() => {
const durationSafe = duration || 1;
const [, cancelWaveformDataDebounce] = useDebounce(() => {
setDebouncedWaveformData({ filePath, commandedTime, duration, zoom, timelineExpanded, mainAudioStream });
}, 500, [filePath, commandedTime, duration, zoom, timelineExpanded, mainAudioStream]);
setDebouncedWaveformData({ filePath, commandedTime, duration, zoom, waveformEnabled, mainAudioStream });
}, 500, [filePath, commandedTime, duration, zoom, waveformEnabled, mainAudioStream]);
const [, cancelReadKeyframeDataDebounce] = useDebounce(() => {
setDebouncedReadKeyframesData({ filePath, commandedTime, duration, zoom, mainVideoStream });
@@ -244,6 +247,7 @@ const App = memo(() => {
setStreamsSelectorShown(false);
setZoom(1);
setShortestFlag(false);
setZoomWindowStartTime(0);
setWaveform();
cancelWaveformDataDebounce();
@@ -252,8 +256,20 @@ const App = memo(() => {
setNeighbouringFrames([]);
cancelReadKeyframeDataDebounce();
setDebouncedReadKeyframesData();
setThumbnails([]);
}, [cutSegmentsHistory, cancelCutSegmentsDebounce, setCutSegments, cancelWaveformDataDebounce, cancelReadKeyframeDataDebounce]);
function setTimelineMode(newMode) {
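// Waveform and thumbnails are mutually exclusive: picking the already-active mode toggles it off, picking the other one switches to it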
if (newMode === 'waveform') {
setWaveformEnabled(v => !v);
setThumbnailsEnabled(false);
} else {
setThumbnailsEnabled(v => !v);
setWaveformEnabled(false);
}
}
function appendFfmpegCommandLog(command) {
setFfmpegCommandLog(old => [...old, { command, time: new Date() }]);
}
@@ -667,6 +683,42 @@ const App = memo(() => {
const shouldShowKeyframes = calcShouldShowKeyframes(duration, zoom);
const thumnailsRef = useRef([]);
const thumnailsRenderingPromiseRef = useRef();
function addThumbnail(thumbnail) {
// console.log('Rendered thumbnail', thumbnail.url);
setThumbnails(v => [...v, thumbnail]);
}
useEffect(() => {
async function renderThumbnails() {
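// Bail out if thumbnails are disabled or a render is already in flight; only the visible zoom window (duration / zoom, starting at zoomWindowStartTime) is rendered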
if (!thumbnailsEnabled || thumnailsRenderingPromiseRef.current) return;
try {
const numThumbs = 5;
setThumbnails([]);
const promise = ffmpeg.renderThumbnails({ filePath, numThumbs, from: zoomWindowStartTime, duration: duration / zoom, onThumbnail: addThumbnail });
thumnailsRenderingPromiseRef.current = promise;
await promise;
} catch (err) {
console.error('Failed to render thumbnail', err);
} finally {
thumnailsRenderingPromiseRef.current = undefined;
}
}
if (duration) renderThumbnails();
}, [duration, filePath, zoom, zoomWindowStartTime, thumbnailsEnabled]);
// Cleanup removed thumbnails
useEffect(() => {
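// Revoke object URLs of thumbnails that are no longer in state so their blobs can be garbage collected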
thumnailsRef.current.forEach((thumbnail) => {
if (!thumbnails.some(t => t.url === thumbnail.url)) URL.revokeObjectURL(thumbnail.url);
});
thumnailsRef.current = thumbnails;
}, [thumbnails]);
useEffect(() => {
async function run() {
const d = debouncedReadKeyframesData;
@@ -690,7 +742,7 @@ const App = memo(() => {
useEffect(() => {
async function run() {
const d = debouncedWaveformData;
if (!d || !d.filePath || !d.mainAudioStream || d.commandedTime == null || !calcShouldShowWaveform(d.duration, d.zoom) || !d.timelineExpanded || creatingWaveformPromise.current) return;
if (!d || !d.filePath || !d.mainAudioStream || d.commandedTime == null || !calcShouldShowWaveform(d.duration, d.zoom) || !d.waveformEnabled || creatingWaveformPromise.current) return;
try {
const promise = ffmpeg.renderWaveformPng({ filePath: d.filePath, aroundTime: d.commandedTime, window: ffmpegExtractWindow, color: waveformColor });
creatingWaveformPromise.current = promise;
@@ -943,7 +995,7 @@ const App = memo(() => {
stream.index, defaultProcessedCodecTypes.includes(stream.codec_type),
])));
const videoStream = streams.find(stream => stream.codec_type === 'video');
const videoStream = streams.find(stream => stream.codec_type === 'video' && !['png'].includes(stream.codec_name));
const audioStream = streams.find(stream => stream.codec_type === 'audio');
setMainVideoStream(videoStream);
setMainAudioStream(audioStream);
@@ -1347,8 +1399,13 @@ const App = memo(() => {
const sideBarWidth = showSideBar ? 200 : 0;
const hasAudio = !!mainAudioStream;
const hasVideo = !!mainVideoStream;
const shouldShowWaveform = calcShouldShowWaveform(duration, zoom);
const bottomBarHeight = 96 + (hasAudio && timelineExpanded ? timelineHeight : 0);
const bottomBarHeight = 96 + ((hasAudio && waveformEnabled) || (hasVideo && thumbnailsEnabled) ? timelineHeight : 0);
let timelineMode;
if (thumbnailsEnabled) timelineMode = 'thumbnails';
if (waveformEnabled) timelineMode = 'waveform';
return (
<div>
@@ -1541,15 +1598,17 @@ const App = memo(() => {
>
<Timeline
shouldShowKeyframes={shouldShowKeyframes}
shouldShowWaveform={shouldShowWaveform}
timelineExpanded={timelineExpanded}
waveform={waveform}
shouldShowWaveform={shouldShowWaveform}
waveformEnabled={waveformEnabled}
thumbnailsEnabled={thumbnailsEnabled}
neighbouringFrames={neighbouringFrames}
thumbnails={thumbnails}
getCurrentTime={getCurrentTime}
startTimeOffset={startTimeOffset}
playerTime={playerTime}
commandedTime={commandedTime}
zoom={zoom}
neighbouringFrames={neighbouringFrames}
seekAbs={seekAbs}
seekRel={seekRel}
zoomRel={zoomRel}
@@ -1563,6 +1622,7 @@ const App = memo(() => {
mainVideoStream={mainVideoStream}
formatTimecode={formatTimecode}
timelineHeight={timelineHeight}
onZoomWindowStartTimeChange={setZoomWindowStartTime}
/>
<TimelineControls
@@ -1586,8 +1646,10 @@ const App = memo(() => {
playing={playing}
shortStep={shortStep}
playCommand={playCommand}
setTimelineExpanded={setTimelineExpanded}
setTimelineMode={setTimelineMode}
timelineMode={timelineMode}
hasAudio={hasAudio}
hasVideo={hasVideo}
/>
<div style={{ display: 'flex', justifyContent: 'space-between' }}>