1
0
mirror of https://github.com/mifi/lossless-cut.git synced 2024-11-25 03:33:14 +01:00

Use MediaSource for unsupported formats

This commit is contained in:
Mikael Finstad 2024-01-04 23:33:33 +08:00
parent 83c910a0fd
commit 7f32cdca8a
No known key found for this signature in database
GPG Key ID: 25AB36E3E81CBC26
12 changed files with 527 additions and 262 deletions

View File

@ -104,9 +104,9 @@ If the output file name has special characters that get replaced by underscore (
# Known limitations
## Low quality / blurry playback and no audio
## Low quality / blurry playback
Some formats or codecs are not natively supported, so they will preview with low quality playback and no audio. You may convert these files to a supported codec from the File menu, see [#88](https://github.com/mifi/lossless-cut/issues/88).
Some formats or codecs are not natively supported, so they will play back with a lower quality. You may convert these files to a supported codec from the File menu, see [#88](https://github.com/mifi/lossless-cut/issues/88).
## MPEG TS / MTS

View File

@ -126,7 +126,6 @@
"morgan": "^1.10.0",
"semver": "^7.5.2",
"string-to-stream": "^1.1.1",
"strtok3": "^6.0.0",
"winston": "^3.8.1",
"yargs-parser": "^21.0.0"
},

View File

@ -1,67 +1,102 @@
const strtok3 = require('strtok3');
const { getOneRawFrame, encodeLiveRawStream } = require('./ffmpeg');
const logger = require('./logger');
const { createMediaSourceProcess, readOneJpegFrame } = require('./ffmpeg');
let aborters = [];
function createMediaSourceStream({ path, videoStreamIndex, audioStreamIndex, seekTo, size, fps }) {
const abortController = new AbortController();
logger.info('Starting preview process', { videoStreamIndex, audioStreamIndex, seekTo });
const process = createMediaSourceProcess({ path, videoStreamIndex, audioStreamIndex, seekTo, size, fps });
async function command({ path, inWidth, inHeight, streamIndex, seekTo: commandedTime, onRawFrame, onJpegFrame, playing }) {
let process;
let aborted = false;
abortController.signal.onabort = () => {
logger.info('Aborting preview process', { videoStreamIndex, audioStreamIndex, seekTo });
process.kill('SIGKILL');
};
function killProcess() {
if (process) {
process.kill();
process = undefined;
}
process.stdout.pause();
async function readChunk() {
return new Promise((resolve, reject) => {
let cleanup;
const onClose = () => {
cleanup();
resolve(null);
};
const onData = (chunk) => {
process.stdout.pause();
cleanup();
resolve(chunk);
};
const onError = (err) => {
cleanup();
reject(err);
};
cleanup = () => {
process.stdout.off('data', onData);
process.stdout.off('error', onError);
process.stdout.off('close', onClose);
};
process.stdout.once('data', onData);
process.stdout.once('error', onError);
process.stdout.once('close', onClose);
process.stdout.resume();
});
}
function abort() {
aborted = true;
killProcess();
aborters = aborters.filter(((aborter) => aborter !== abort));
abortController.abort();
}
aborters.push(abort);
try {
if (playing) {
const { process: processIn, channels, width, height } = encodeLiveRawStream({ path, inWidth, inHeight, streamIndex, seekTo: commandedTime });
process = processIn;
let stderr = Buffer.alloc(0);
process.stderr?.on('data', (chunk) => {
stderr = Buffer.concat([stderr, chunk]);
});
// process.stderr.on('data', data => console.log(data.toString('utf-8')));
const tokenizer = await strtok3.fromStream(process.stdout);
if (aborted) return;
const size = width * height * channels;
const rgbaImage = Buffer.allocUnsafe(size);
while (!aborted) {
// eslint-disable-next-line no-await-in-loop
await tokenizer.readBuffer(rgbaImage, { length: size });
if (aborted) return;
// eslint-disable-next-line no-await-in-loop
await onRawFrame(rgbaImage, width, height);
(async () => {
try {
await process;
} catch (err) {
if (err instanceof Error && err.name === 'AbortError') {
return;
}
if (!err.killed) {
console.warn(err.message);
console.warn(stderr.toString('utf-8'));
}
} else {
const { process: processIn, width, height } = getOneRawFrame({ path, inWidth, inHeight, streamIndex, seekTo: commandedTime, outSize: 1000 });
process = processIn;
const { stdout: jpegImage } = await process;
if (aborted) return;
onJpegFrame(jpegImage, width, height);
}
} catch (err) {
if (!err.killed) console.warn(err.message);
} finally {
killProcess();
}
})();
return { abort, readChunk };
}
function abortAll() {
aborters.forEach((aborter) => aborter());
// Start a one-shot ffmpeg process that extracts a single JPEG frame from the
// given video stream at `seekTo`.
//
// Returns { promise, abort }:
// - promise: resolves with the JPEG image as a Buffer (ffmpeg stdout), or
//   rejects with a generic Error when the ffmpeg process fails.
// - abort: kills the underlying ffmpeg process (SIGKILL) via AbortController.
function readOneJpegFrameWrapper({ path, seekTo, videoStreamIndex }) {
  const abortController = new AbortController();
  const process = readOneJpegFrame({ path, seekTo, videoStreamIndex });
  abortController.signal.onabort = () => process.kill('SIGKILL');

  function abort() {
    abortController.abort();
  }

  const promise = (async () => {
    try {
      const { stdout } = await process;
      return stdout;
    } catch (err) {
      // Fix: log under the actual function name (was mislabeled 'renderOneJpegFrame')
      logger.error('readOneJpegFrame', err.shortMessage);
      // Fix: preserve the underlying failure for debugging instead of discarding it
      throw new Error('Failed to render JPEG frame', { cause: err });
    }
  })();

  return { promise, abort };
}
module.exports = {
command,
abortAll,
createMediaSourceStream,
readOneJpegFrame: readOneJpegFrameWrapper,
};

View File

@ -386,8 +386,8 @@ async function html5ify({ outPath, filePath: filePathArg, speed, hasAudio, hasVi
let audio;
if (hasAudio) {
if (speed === 'slowest') audio = 'hq';
else if (['slow-audio', 'fast-audio', 'fastest-audio'].includes(speed)) audio = 'lq';
else if (['fast-audio-remux', 'fastest-audio-remux'].includes(speed)) audio = 'copy';
else if (['slow-audio', 'fast-audio'].includes(speed)) audio = 'lq';
else if (['fast-audio-remux'].includes(speed)) audio = 'copy';
}
let video;
@ -485,24 +485,7 @@ async function html5ify({ outPath, filePath: filePathArg, speed, hasAudio, hasVi
console.log(stdout);
}
// Compute output dimensions that fit within `outSize`, preserving the input
// aspect ratio: the longest edge is pinned to `outSize` and the other edge is
// derived (floored to an integer). Square inputs yield outSize x outSize.
function calcSize({ inWidth, inHeight, outSize }) {
  const ratio = inWidth / inHeight;
  const isLandscape = inWidth > inHeight;
  const newWidth = isLandscape ? outSize : Math.floor(outSize * ratio);
  const newHeight = isLandscape ? Math.floor(outSize / ratio) : outSize;
  return { newWidth, newHeight };
}
function getOneRawFrame({ path, inWidth, inHeight, seekTo, streamIndex, outSize }) {
const { newWidth, newHeight } = calcSize({ inWidth, inHeight, outSize });
function readOneJpegFrame({ path, seekTo, videoStreamIndex }) {
const args = [
'-hide_banner', '-loglevel', 'error',
@ -512,8 +495,7 @@ function getOneRawFrame({ path, inWidth, inHeight, seekTo, streamIndex, outSize
'-i', path,
'-vf', `scale=${newWidth}:${newHeight}:flags=lanczos`,
'-map', `0:${streamIndex}`,
'-map', `0:${videoStreamIndex}`,
'-vcodec', 'mjpeg',
'-frames:v', '1',
@ -524,20 +506,36 @@ function getOneRawFrame({ path, inWidth, inHeight, seekTo, streamIndex, outSize
// console.log(args);
return {
process: runFfmpegProcess(args, { encoding: 'buffer' }, { logCli: true }),
width: newWidth,
height: newHeight,
};
return runFfmpegProcess(args, { encoding: 'buffer' }, { logCli: true });
}
function encodeLiveRawStream({ path, inWidth, inHeight, seekTo, streamIndex, fps = 25 }) {
const { newWidth, newHeight } = calcSize({ inWidth, inHeight, outSize: 320 });
const enableLog = false;
const encode = true;
function createMediaSourceProcess({ path, videoStreamIndex, audioStreamIndex, seekTo, size, fps }) {
function getVideoFilters() {
if (videoStreamIndex == null) return [];
const filters = [];
if (fps != null) filters.push(`fps=${fps}`);
if (size != null) filters.push(`scale=${size}:${size}:flags=lanczos:force_original_aspect_ratio=decrease`);
if (filters.length === 0) return [];
return ['-vf', filters.join(',')];
}
// https://stackoverflow.com/questions/16658873/how-to-minimize-the-delay-in-a-live-streaming-with-ffmpeg
// https://unix.stackexchange.com/questions/25372/turn-off-buffering-in-pipe
const args = [
'-hide_banner', '-loglevel', 'panic',
'-hide_banner',
...(enableLog ? [] : ['-loglevel', 'error']),
'-re',
// https://stackoverflow.com/questions/30868854/flush-latency-issue-with-fragmented-mp4-creation-in-ffmpeg
'-fflags', '+nobuffer+flush_packets+discardcorrupt',
'-avioflags', 'direct',
// '-flags', 'low_delay', // this seems to ironically give a *higher* delay
'-flush_packets', '1',
'-vsync', 'passthrough',
'-ss', seekTo,
@ -545,23 +543,33 @@ function encodeLiveRawStream({ path, inWidth, inHeight, seekTo, streamIndex, fps
'-i', path,
'-vf', `fps=${fps},scale=${newWidth}:${newHeight}:flags=lanczos`,
'-map', `0:${streamIndex}`,
'-vcodec', 'rawvideo',
'-pix_fmt', 'rgba',
...(videoStreamIndex != null ? ['-map', `0:${videoStreamIndex}`] : ['-vn']),
'-f', 'image2pipe',
'-',
...(audioStreamIndex != null ? ['-map', `0:${audioStreamIndex}`] : ['-an']),
...(encode ? [
...(videoStreamIndex != null ? [
...getVideoFilters(),
'-pix_fmt', 'yuv420p', '-c:v', 'libx264', '-preset', 'ultrafast', '-tune', 'zerolatency', '-crf', '10',
'-g', '1', // reduces latency and buffering
] : []),
...(audioStreamIndex != null ? [
'-ac', '2', '-c:a', 'aac', '-b:a', '128k',
] : []),
// May alternatively use webm/vp8 https://stackoverflow.com/questions/24152810/encoding-ffmpeg-to-mpeg-dash-or-webm-with-keyframe-clusters-for-mediasource
] : [
'-c', 'copy',
]),
'-f', 'mp4', '-movflags', '+frag_keyframe+empty_moov+default_base_moof', '-',
];
// console.log(args);
if (enableLog) console.log(getFfCommandLine('ffmpeg', args));
return {
process: runFfmpegProcess(args, { encoding: null, buffer: false }, { logCli: true }),
width: newWidth,
height: newHeight,
channels: 4,
};
return execa(getFfmpegPath(), args, { encoding: null, buffer: false, stderr: enableLog ? 'inherit' : 'pipe' });
}
// Don't pass complex objects over the bridge
@ -583,8 +591,8 @@ module.exports = {
getFfCommandLine,
html5ify,
getDuration,
getOneRawFrame,
encodeLiveRawStream,
readOneJpegFrame,
blackDetect,
silenceDetect,
createMediaSourceProcess,
};

View File

@ -31,7 +31,7 @@ import useDirectoryAccess, { DirectoryAccessDeclinedError } from './hooks/useDir
import { UserSettingsContext, SegColorsContext } from './contexts';
import NoFileLoaded from './NoFileLoaded';
import Canvas from './Canvas';
import MediaSourcePlayer from './MediaSourcePlayer';
import TopMenu from './TopMenu';
import Sheet from './components/Sheet';
import LastCommandsSheet from './LastCommandsSheet';
@ -69,7 +69,7 @@ import {
isStoreBuild, dragPreventer,
havePermissionToReadFile, resolvePathIfNeeded, getPathReadAccessError, html5ifiedPrefix, html5dummySuffix, findExistingHtml5FriendlyFile,
deleteFiles, isOutOfSpaceError, isExecaFailure, readFileSize, readFileSizes, checkFileSizes, setDocumentTitle, getOutFileExtension, getSuffixedFileName, mustDisallowVob, readVideoTs, getImportProjectType,
calcShouldShowWaveform, calcShouldShowKeyframes,
calcShouldShowWaveform, calcShouldShowKeyframes, mediaSourceQualities,
} from './util';
import { toast, errorToast } from './swal';
import { formatDuration } from './util/duration';
@ -158,6 +158,9 @@ function App() {
const [alwaysConcatMultipleFiles, setAlwaysConcatMultipleFiles] = useState(false);
const [editingSegmentTagsSegmentIndex, setEditingSegmentTagsSegmentIndex] = useState();
const [editingSegmentTags, setEditingSegmentTags] = useState();
const [mediaSourceQuality, setMediaSourceQuality] = useState(0);
const incrementMediaSourceQuality = useCallback(() => setMediaSourceQuality((v) => (v + 1) % mediaSourceQualities.length), []);
// Batch state / concat files
const [batchFiles, setBatchFiles] = useState([]);
@ -325,10 +328,10 @@ function App() {
const effectiveRotation = useMemo(() => (isRotationSet ? rotation : (mainVideoStream && mainVideoStream.tags && mainVideoStream.tags.rotate && parseInt(mainVideoStream.tags.rotate, 10))), [isRotationSet, mainVideoStream, rotation]);
const zoomRel = useCallback((rel) => setZoom((z) => Math.min(Math.max(z + (rel * (1 + (z / 10))), 1), zoomMax)), []);
const canvasPlayerRequired = !!(mainVideoStream && usingDummyVideo);
const canvasPlayerWanted = !!(mainVideoStream && isRotationSet && !hideCanvasPreview);
const canvasPlayerRequired = usingDummyVideo;
// Allow user to disable it
const canvasPlayerEnabled = (canvasPlayerRequired || canvasPlayerWanted);
const canvasPlayerWanted = isRotationSet && !hideCanvasPreview;
const canvasPlayerEnabled = canvasPlayerRequired || canvasPlayerWanted;
useEffect(() => {
// Reset the user preference when the state changes to true
@ -759,7 +762,7 @@ function App() {
const showUnsupportedFileMessage = useCallback(() => {
if (!hideAllNotifications) toast.fire({ timer: 13000, text: i18n.t('File not natively supported. Preview may have no audio or low quality. The final export will however be lossless with audio. You may convert it from the menu for a better preview with audio.') });
if (!hideAllNotifications) toast.fire({ timer: 13000, text: i18n.t('File is not natively supported. Preview playback may be slow and of low quality, but the final export will be lossless. You may convert the file from the menu for a better preview.') });
}, [hideAllNotifications]);
const showPreviewFileLoadedMessage = useCallback((fileName) => {
@ -774,7 +777,7 @@ function App() {
} = useFfmpegOperations({ filePath, treatInputFileModifiedTimeAsStart, treatOutputFileModifiedTimeAsStart, needSmartCut, enableOverwriteOutput, outputPlaybackRate });
const html5ifyAndLoad = useCallback(async (cod, fp, speed, hv, ha) => {
const usesDummyVideo = ['fastest-audio', 'fastest-audio-remux', 'fastest'].includes(speed);
const usesDummyVideo = speed === 'fastest';
console.log('html5ifyAndLoad', { speed, hasVideo: hv, hasAudio: ha, usesDummyVideo });
async function doHtml5ify() {
@ -813,7 +816,7 @@ function App() {
let i = 0;
const setTotalProgress = (fileProgress = 0) => setCutProgress((i + fileProgress) / filePaths.length);
const { selectedOption: speed } = await askForHtml5ifySpeed({ allowedOptions: ['fastest-audio', 'fastest-audio-remux', 'fast-audio-remux', 'fast-audio', 'fast', 'slow', 'slow-audio', 'slowest'] });
const { selectedOption: speed } = await askForHtml5ifySpeed({ allowedOptions: ['fast-audio-remux', 'fast-audio', 'fast', 'slow', 'slow-audio', 'slowest'] });
if (!speed) return;
if (workingRef.current) return;
@ -1678,13 +1681,12 @@ function App() {
let selectedOption = rememberConvertToSupportedFormat;
if (selectedOption == null || ignoreRememberedValue) {
const allHtml5ifyOptions = ['fastest', 'fastest-audio', 'fastest-audio-remux', 'fast-audio-remux', 'fast-audio', 'fast', 'slow', 'slow-audio', 'slowest'];
let relevantOptions = [];
if (hasAudio && hasVideo) relevantOptions = [...allHtml5ifyOptions];
else if (hasAudio) relevantOptions = [...relevantOptions, 'fast-audio-remux', 'slow-audio', 'slowest'];
else if (hasVideo) relevantOptions = [...relevantOptions, 'fastest', 'fast', 'slow', 'slowest'];
let allowedOptions = [];
if (hasAudio && hasVideo) allowedOptions = ['fastest', 'fast-audio-remux', 'fast-audio', 'fast', 'slow', 'slow-audio', 'slowest'];
else if (hasAudio) allowedOptions = ['fast-audio-remux', 'slow-audio', 'slowest'];
else if (hasVideo) allowedOptions = ['fastest', 'fast', 'slow', 'slowest'];
const userResponse = await askForHtml5ifySpeed({ allowedOptions: allHtml5ifyOptions.filter((option) => relevantOptions.includes(option)), showRemember: true, initialOption: selectedOption });
const userResponse = await askForHtml5ifySpeed({ allowedOptions, showRemember: true, initialOption: selectedOption });
console.log('Choice', userResponse);
({ selectedOption } = userResponse);
if (!selectedOption) return;
@ -2194,14 +2196,9 @@ function App() {
if (!isDurationValid(await getDuration(filePath))) throw new Error('Invalid duration');
if (hasVideo) {
// "fastest" is the most likely type not to fail for video (but it is muted).
if (hasVideo || hasAudio) {
await html5ifyAndLoadWithPreferences(customOutDir, filePath, 'fastest', hasVideo, hasAudio);
showUnsupportedFileMessage();
} else if (hasAudio) {
// For audio do a fast re-encode
await html5ifyAndLoadWithPreferences(customOutDir, filePath, 'fastest-audio', hasVideo, hasAudio);
showUnsupportedFileMessage();
}
} catch (err) {
console.error(err);
@ -2398,7 +2395,7 @@ function App() {
<video
className="main-player"
tabIndex={-1}
muted={playbackVolume === 0}
muted={playbackVolume === 0 || canvasPlayerEnabled}
ref={videoRef}
style={videoStyle}
src={fileUri}
@ -2415,7 +2412,7 @@ function App() {
{renderSubtitles()}
</video>
{canvasPlayerEnabled && <Canvas rotate={effectiveRotation} filePath={filePath} width={mainVideoStream.width} height={mainVideoStream.height} streamIndex={mainVideoStream.index} playerTime={playerTime} commandedTime={commandedTime} playing={playing} eventId={canvasPlayerEventId} />}
{canvasPlayerEnabled && (mainVideoStream != null || mainAudioStream != null) && <MediaSourcePlayer rotate={effectiveRotation} filePath={filePath} videoStream={mainVideoStream} audioStream={mainAudioStream} playerTime={playerTime} commandedTime={commandedTime} playing={playing} eventId={canvasPlayerEventId} masterVideoRef={videoRef} mediaSourceQuality={mediaSourceQuality} />}
</div>
{bigWaveformEnabled && <BigWaveform waveforms={waveforms} relevantTime={relevantTime} playing={playing} durationSafe={durationSafe} zoom={zoomUnrounded} seekRel={seekRel} />}
@ -2430,9 +2427,11 @@ function App() {
{isFileOpened && (
<div className="no-user-select" style={{ position: 'absolute', right: 0, bottom: 0, marginBottom: 10, display: 'flex', alignItems: 'center' }}>
<VolumeControl playbackVolume={playbackVolume} setPlaybackVolume={setPlaybackVolume} usingDummyVideo={usingDummyVideo} />
{!canvasPlayerEnabled && <VolumeControl playbackVolume={playbackVolume} setPlaybackVolume={setPlaybackVolume} />}
{subtitleStreams.length > 0 && <SubtitleControl subtitleStreams={subtitleStreams} activeSubtitleStreamIndex={activeSubtitleStreamIndex} onActiveSubtitleChange={onActiveSubtitleChange} />}
{!canvasPlayerEnabled && subtitleStreams.length > 0 && <SubtitleControl subtitleStreams={subtitleStreams} activeSubtitleStreamIndex={activeSubtitleStreamIndex} onActiveSubtitleChange={onActiveSubtitleChange} />}
{canvasPlayerEnabled && <div style={{ color: 'white', opacity: 0.7, padding: '.5em' }} role="button" onClick={() => incrementMediaSourceQuality()} title={t('Select preview playback quality')}>{mediaSourceQualities[mediaSourceQuality]}</div>}
{!showRightBar && (
<FaAngleLeft

View File

@ -1,55 +0,0 @@
import React, { memo, useEffect, useRef, useMemo } from 'react';
import { useDebounce } from 'use-debounce';
import CanvasPlayer from './CanvasPlayer';
// Canvas-based preview player for files the native <video> element cannot play.
// Delegates frame decoding to a CanvasPlayer instance and draws frames onto a
// <canvas>. Seek/play changes are debounced so rapid scrubbing doesn't spawn a
// decode process per intermediate position.
const Canvas = memo(({ rotate, filePath, width, height, playerTime, streamIndex, commandedTime, playing, eventId }) => {
const canvasRef = useRef();
// Recreate the player whenever the file or stream parameters change.
const canvasPlayer = useMemo(() => CanvasPlayer({ path: filePath, width, height, streamIndex, getCanvas: () => canvasRef.current }), [filePath, width, height, streamIndex]);
// Terminate the underlying player when it is replaced or on unmount.
useEffect(() => () => {
canvasPlayer.terminate();
}, [canvasPlayer]);
// While playing, follow commandedTime (the seek target); while paused, follow
// playerTime. eventId is included so identical times can still force a refresh
// (presumably bumped by the parent on seek events — confirm against caller).
const state = useMemo(() => {
if (playing) {
return { startTime: commandedTime, playing, eventId };
}
return { startTime: playerTime, playing, eventId };
}, [commandedTime, eventId, playerTime, playing]);
// Debounce 200ms; only startTime/playing/eventId changes count as "different".
const [debouncedState, { cancel }] = useDebounce(state, 200, {
equalityFn: (a, b) => a.startTime === b.startTime && a.playing === b.playing && a.eventId === b.eventId,
});
/* useEffect(() => {
console.log('state', state);
}, [state]); */
// Cancel any pending debounced update on unmount.
useEffect(() => () => {
cancel();
}, [cancel]);
// Drive the player from the debounced state: play() streams frames from
// startTime; pause() renders a single still frame at startTime.
useEffect(() => {
// console.log('debouncedState', debouncedState);
if (debouncedState.startTime == null) return;
if (debouncedState.playing) {
canvasPlayer.play(debouncedState.startTime);
} else {
canvasPlayer.pause(debouncedState.startTime);
}
}, [debouncedState, canvasPlayer]);
// Letterbox the canvas inside its container and apply the user's rotation.
const canvasStyle = useMemo(() => ({ display: 'block', width: '100%', height: '100%', objectFit: 'contain', transform: rotate ? `rotate(${rotate}deg)` : undefined }), [rotate]);
return (
<div style={{ width: '100%', height: '100%', left: 0, right: 0, top: 0, bottom: 0, position: 'absolute', overflow: 'hidden', background: 'black' }}>
<canvas ref={canvasRef} style={canvasStyle} />
</div>
);
});
export default Canvas;

View File

@ -1,59 +0,0 @@
const remote = window.require('@electron/remote');
const { command, abortAll } = remote.require('./canvasPlayer');
// Factory for a canvas-based preview player. Wraps the main-process `command`
// bridge (ffmpeg) and paints the frames it produces onto the canvas returned
// by `getCanvas`. Returns { play, pause, terminate }.
export default ({ path, width: inWidth, height: inHeight, streamIndex, getCanvas }) => {
// Once terminated, all further calls become no-ops.
let terminated;
// Paint one raw RGBA frame (Buffer/array of bytes) at its native frame size.
async function drawRawFrame(rgbaImage, width, height) {
const canvas = getCanvas();
if (!canvas || rgbaImage.length === 0) return;
// Resizing the canvas also clears it before the new frame is drawn.
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext('2d');
// https://developer.mozilla.org/en-US/docs/Web/API/ImageData/ImageData
// https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/putImageData
ctx.putImageData(new ImageData(Uint8ClampedArray.from(rgbaImage), width, height), 0, 0);
}
// Paint one JPEG-encoded frame (Buffer) by decoding it through an <img> element.
function drawJpegFrame(jpegImage, width, height) {
const canvas = getCanvas();
if (!canvas) return;
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext('2d');
const img = new Image();
// Drawing happens asynchronously once the data URL has been decoded.
img.onload = () => ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
img.onerror = (error) => console.error('Canvas JPEG image error', error);
img.src = `data:image/jpeg;base64,${jpegImage.toString('base64')}`;
}
// Show a single still frame at seekTo (stops any in-flight preview first).
function pause(seekTo) {
if (terminated) return;
abortAll();
command({ path, inWidth, inHeight, streamIndex, seekTo, onJpegFrame: drawJpegFrame, onRawFrame: drawRawFrame, playing: false });
}
// Start streaming frames from playFrom (stops any in-flight preview first).
function play(playFrom) {
if (terminated) return;
abortAll();
command({ path, inWidth, inHeight, streamIndex, seekTo: playFrom, onJpegFrame: drawJpegFrame, onRawFrame: drawRawFrame, playing: true });
}
// Permanently stop this player and abort any running ffmpeg processes.
function terminate() {
if (terminated) return;
terminated = true;
abortAll();
}
return {
play,
pause,
terminate,
};
};

357
src/MediaSourcePlayer.tsx Normal file
View File

@ -0,0 +1,357 @@
import { useEffect, useRef, useState, useCallback, useMemo, memo, CSSProperties } from 'react';
import { Spinner } from 'evergreen-ui';
import { useDebounce } from 'use-debounce';
import isDev from './isDev';
const remote = window.require('@electron/remote');
const { createMediaSourceStream, readOneJpegFrame } = remote.require('./canvasPlayer');
// Stream a live ffmpeg-transcoded fMP4 of the given file into `video` via
// Media Source Extensions, starting at `seekTo` (file time). Keeps playback in
// sync with the master player (via getTargetTime, which returns time relative
// to seekTo), throttles buffering, and trims old buffered data. Cleans up the
// ffmpeg process and object URL when `signal` aborts.
// Throws if the required MP4/H.264/AAC MIME type is unsupported.
async function startPlayback({ path, video, videoStreamIndex, audioStreamIndex, seekTo, signal, playSafe, onCanPlay, getTargetTime, size, fps }: {
path: string,
video: HTMLVideoElement,
videoStreamIndex?: number,
audioStreamIndex?: number,
seekTo: number,
signal: AbortSignal,
playSafe: () => void,
onCanPlay: () => void,
getTargetTime: () => number,
size?: number,
fps?: number,
}) {
// Mutable session state shared by the closures below.
let canPlay = false;
let bufferEndTime: number | undefined;
let bufferStartTime = 0;
let stream;
let done = false;
let interval: NodeJS.Timeout | undefined;
let objectUrl: string | undefined;
let processChunkTimeout: NodeJS.Timeout;
// Tear down everything: stop playback, timers, the ffmpeg stream and the
// MediaSource object URL. Safe to call once; triggered by `signal` abort.
function cleanup() {
console.log('Cleanup');
done = true;
video.pause();
if (interval != null) clearInterval(interval);
if (processChunkTimeout != null) clearInterval(processChunkTimeout);
stream?.abort();
if (objectUrl != null) URL.revokeObjectURL(objectUrl);
video.removeAttribute('src');
}
signal.addEventListener('abort', cleanup);
// See chrome://media-internals
const mediaSource = new MediaSource();
let streamTimestamp;
let lastRemoveTimestamp = 0;
function setStandardPlaybackRate() {
// set it a bit faster, so that we don't easily fall behind (better too fast than too slow)
// eslint-disable-next-line no-param-reassign
video.playbackRate = 1.05;
}
setStandardPlaybackRate();
// Build the MIME string matching the codecs ffmpeg produces (H.264 baseline
// video, AAC-LC audio), including only the streams actually requested.
const codecs: string[] = [];
if (videoStreamIndex != null) codecs.push('avc1.42C01F');
if (audioStreamIndex != null) codecs.push('mp4a.40.2');
const codecTag = codecs.join(', ');
const mimeCodec = `video/mp4; codecs="${codecTag}"`;
// mp4info sample-file.mp4 | grep Codec
// https://developer.mozilla.org/en-US/docs/Web/API/Media_Source_Extensions_API/Transcoding_assets_for_MSE
// https://stackoverflow.com/questions/16363167/html5-video-tag-codecs-attribute
// https://cconcolato.github.io/media-mime-support/
// https://github.com/cconcolato/media-mime-support
// const mimeCodec = 'video/mp4; codecs="avc1.42C01E"'; // Video only
// const mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"'; // Video+audio
if (!MediaSource.isTypeSupported(mimeCodec)) {
throw new Error(`Unsupported MIME type or codec: ${mimeCodec}`);
}
// console.log(mediaSource.readyState); // closed
objectUrl = URL.createObjectURL(mediaSource);
// eslint-disable-next-line no-param-reassign
video.src = objectUrl;
// Wait for the MediaSource to attach to the video element before adding buffers.
await new Promise((resolve) => mediaSource.addEventListener('sourceopen', resolve, { once: true }));
// console.log(mediaSource.readyState); // open
const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
// End of the (single) buffered range, in media time; undefined when the
// MediaSource is not open or nothing is buffered yet.
const getBufferEndTime = () => {
if (mediaSource.readyState !== 'open') {
console.log('mediaSource.readyState was not open, but:', mediaSource.readyState);
// else we will get: Uncaught DOMException: Failed to execute 'end' on 'TimeRanges': The index provided (0) is greater than or equal to the maximum bound (0).
return undefined;
}
if (sourceBuffer.buffered.length < 1) {
return undefined;
}
// https://developer.mozilla.org/en-US/docs/Web/API/TimeRanges/start
return sourceBuffer.buffered.end(0);
};
// Kick off playback as soon as the first chunk has been appended.
sourceBuffer.addEventListener('updateend', () => {
playSafe();
}, { once: true });
let firstChunkReceived = false;
// Pull one chunk from the ffmpeg stream and append it to the SourceBuffer.
// On failure, retry after 1s. The next call is scheduled from 'updateend'.
const processChunk = async () => {
try {
const chunk = await stream.readChunk();
if (chunk == null) {
console.log('End of stream');
return;
}
if (done) return;
if (!firstChunkReceived) {
firstChunkReceived = true;
console.log('First chunk received');
}
sourceBuffer.appendBuffer(chunk);
} catch (err) {
console.error('processChunk failed', err);
processChunkTimeout = setTimeout(processChunk, 1000);
}
};
sourceBuffer.addEventListener('error', (err) => console.error('sourceBuffer error, check DevTools ▶ More Tools ▶ Media', err));
// video.addEventListener('loadeddata', () => console.log('loadeddata'));
// video.addEventListener('play', () => console.log('play'));
// Signal readiness to the caller exactly once.
video.addEventListener('canplay', () => {
console.log('canplay');
if (!canPlay) {
canPlay = true;
onCanPlay();
}
}, { once: true });
// After every append/remove: trim the buffer when it grows past bufferMaxSec,
// throttle reading when we are far enough ahead, otherwise read the next chunk.
sourceBuffer.addEventListener('updateend', ({ timeStamp }) => {
if (done) return;
streamTimestamp = timeStamp; // apparently this timestamp cannot be trusted much
const bufferThrottleSec = isDev ? 5 : 10; // how many seconds ahead of playback we want to buffer
const bufferMaxSec = bufferThrottleSec + (isDev ? 5 : 60); // how many seconds we want to buffer in total (ahead of playback and behind)
bufferEndTime = getBufferEndTime();
// console.log('updateend', { bufferEndTime })
if (bufferEndTime != null) {
const targetTime = getTargetTime();
const bufferedTime = bufferEndTime - lastRemoveTimestamp;
if (bufferedTime > bufferMaxSec && !sourceBuffer.updating) {
try {
lastRemoveTimestamp = bufferEndTime;
const removeTo = bufferEndTime - bufferMaxSec;
bufferStartTime = removeTo;
console.log('sourceBuffer remove', 0, removeTo);
sourceBuffer.remove(0, removeTo); // updateend will be emitted again when this is done
return;
} catch (err) {
console.error('sourceBuffer remove failed', err);
}
}
const bufferAheadSec = bufferEndTime - targetTime;
if (bufferAheadSec > bufferThrottleSec) {
console.debug(`buffer ahead by ${bufferAheadSec}, throttling stream read`);
processChunkTimeout = setTimeout(processChunk, 1000);
return;
}
}
// make sure we always process the next chunk
processChunk();
});
// Spawn the ffmpeg live-transcode process in the main process.
stream = createMediaSourceStream({ path, videoStreamIndex, audioStreamIndex, seekTo, size, fps });
// Watchdog (5x/sec): log buffering stats, resume unexpected pauses, and keep
// the playhead synced to the master player's target time.
interval = setInterval(() => {
if (mediaSource.readyState !== 'open') {
console.warn('mediaSource.readyState was not open, but:', mediaSource.readyState);
// else we will get: Uncaught DOMException: Failed to execute 'end' on 'TimeRanges': The index provided (0) is greater than or equal to the maximum bound (0).
return;
}
const targetTime = getTargetTime();
const playbackDiff = targetTime != null ? targetTime - video.currentTime : undefined;
const streamTimestampDiff = streamTimestamp != null && bufferEndTime != null ? (streamTimestamp / 1000) - bufferEndTime : undefined; // not really needed, but log for curiosity
console.debug('bufferStartTime', bufferStartTime, 'bufferEndTime', bufferEndTime, 'targetTime', targetTime, 'playback:', video.currentTime, 'playbackDiff:', playbackDiff, 'streamTimestamp diff:', streamTimestampDiff);
if (!canPlay || targetTime == null) return;
if (sourceBuffer.buffered.length !== 1) {
// not sure why this would happen or how to handle this
console.warn('sourceBuffer.buffered.length was', sourceBuffer.buffered.length);
}
if ((video.paused || video.ended) && !done) {
console.warn('Resuming unexpectedly paused video');
playSafe();
}
// make sure the playback keeps up
// https://stackoverflow.com/questions/23301496/how-to-keep-a-live-mediasource-video-stream-in-sync
if (playbackDiff != null && playbackDiff > 1) {
console.warn(`playback severely behind by ${playbackDiff}s, seeking to desired time`);
// eslint-disable-next-line no-param-reassign
video.currentTime = targetTime;
setStandardPlaybackRate();
} else if (playbackDiff != null && playbackDiff > 0.3) {
console.warn(`playback behind by ${playbackDiff}s, speeding up playback`);
// eslint-disable-next-line no-param-reassign
video.playbackRate = 1.5;
} else {
setStandardPlaybackRate();
}
}, 200);
// OK, everything initialized and ready to stream!
processChunk();
}
// Decode a JPEG frame (raw Buffer) via a data URL and paint it onto the given
// canvas, scaled to the canvas dimensions. No-op when the canvas is missing;
// logs (without throwing) when the 2D context or image decode fails.
function drawJpegFrame(canvas: HTMLCanvasElement, jpegImage: Buffer) {
  if (!canvas) return;
  const context = canvas.getContext('2d');
  if (context == null) {
    console.error('Canvas context is null');
    return;
  }
  const image = new Image();
  // Drawing happens asynchronously once the browser has decoded the data URL.
  image.onload = () => context.drawImage(image, 0, 0, canvas.width, canvas.height);
  image.onerror = (err) => console.error('Canvas JPEG image error', err);
  image.src = `data:image/jpeg;base64,${jpegImage.toString('base64')}`;
}
// Render a single still frame (for the paused state): extract one JPEG from
// the video stream at `seekTo` and paint it onto `canvas`. Aborting `signal`
// kills the underlying ffmpeg process.
async function createPauseImage({ path, seekTo, videoStreamIndex, canvas, signal }) {
  const frame = readOneJpegFrame({ path, seekTo, videoStreamIndex });
  // Propagate caller cancellation to the ffmpeg child process.
  signal.addEventListener('abort', () => frame.abort());
  const jpegImage = await frame.promise;
  drawJpegFrame(canvas, jpegImage);
}
// Secondary preview player for files the browser cannot play natively.
// While playing, it streams frames into a hidden <video> element (via startPlayback);
// while paused, it renders a single JPEG frame onto a <canvas> (via createPauseImage).
function MediaSourcePlayer({ rotate, filePath, playerTime, videoStream, audioStream, commandedTime, playing, eventId, masterVideoRef, mediaSourceQuality }) {
  const videoRef = useRef<HTMLVideoElement>(null);
  const canvasRef = useRef<HTMLCanvasElement>(null);
  // true until the current preview (stream or pause image) is ready to display
  const [loading, setLoading] = useState(true);

  const onVideoError = useCallback((error) => {
    console.error('video error', error);
  }, []);

  // Start point for the preview: the commanded (seek target) time when playing,
  // the current player time when paused. eventId lets callers force a restart.
  const state = useMemo(() => (playing
    ? { startTime: commandedTime, playing, eventId }
    : { startTime: playerTime, playing, eventId }
  ), [commandedTime, eventId, playerTime, playing]);

  // Debounce so that rapid seeking doesn't spawn a new preview process per change
  const [debouncedState] = useDebounce(state, 200, {
    equalityFn: (a, b) => a.startTime === b.startTime && a.playing === b.playing && a.eventId === b.eventId,
  });

  useEffect(() => {
    // console.log('debouncedState', debouncedState);
  }, [debouncedState]);

  // play() can reject (e.g. interrupted by a new load); log instead of throwing
  const playSafe = useCallback(async () => {
    try {
      await videoRef.current?.play();
    } catch (err) {
      console.error('play failed', err);
    }
  }, []);

  useEffect(() => {
    setLoading(true);

    if (debouncedState.startTime == null) {
      return () => undefined;
    }

    const onCanPlay = () => setLoading(false);
    // How far the master player has advanced past the point this preview started at
    const getTargetTime = () => masterVideoRef.current.currentTime - debouncedState.startTime;

    const abortController = new AbortController();
    const video = videoRef.current;

    (async () => {
      try {
        // When playing, we use a secondary video element, but when paused we use a canvas
        if (debouncedState.playing) {
          if (video == null) throw new Error('No video ref');
          // mediaSourceQuality index selects preview resolution/frame rate
          // (0 = highest, 1 = reduced; otherwise original — see mediaSourceQualities)
          let size: number | undefined;
          if (videoStream != null) {
            if (mediaSourceQuality === 0) size = 800;
            else if (mediaSourceQuality === 1) size = 420;
          }
          let fps: number | undefined;
          if (mediaSourceQuality === 0) fps = 30;
          else if (mediaSourceQuality === 1) fps = 15;
          await startPlayback({ path: filePath, video, videoStreamIndex: videoStream?.index, audioStreamIndex: audioStream?.index, seekTo: debouncedState.startTime, signal: abortController.signal, playSafe, onCanPlay, getTargetTime, size, fps });
        } else { // paused
          if (videoStream != null) {
            await createPauseImage({ path: filePath, seekTo: debouncedState.startTime, videoStreamIndex: videoStream.index, canvas: canvasRef.current, signal: abortController.signal });
          }
          setLoading(false);
        }
      } catch (err) {
        console.error('Preview failed', err);
      }
    })();

    // Cleanup aborts the in-flight ffmpeg process when deps change or on unmount
    return () => abortController.abort();
    // Important that we also have eventId in the deps, so that we can restart the preview when the eventId changes
  }, [debouncedState.startTime, debouncedState.eventId, filePath, masterVideoRef, playSafe, debouncedState.playing, videoStream, mediaSourceQuality, audioStream?.index]);

  const onFocus = useCallback((e) => {
    // prevent video element from stealing focus in fullscreen mode https://github.com/mifi/lossless-cut/issues/543#issuecomment-1868167775
    e.target.blur();
  }, []);

  const { videoStyle, canvasStyle } = useMemo(() => {
    const sharedStyle: CSSProperties = { position: 'absolute', left: 0, right: 0, top: 0, bottom: 0, display: 'block', width: '100%', height: '100%', objectFit: 'contain', transform: rotate ? `rotate(${rotate}deg)` : undefined };
    return {
      // Exactly one of video/canvas is visible at a time, depending on play state
      videoStyle: { ...sharedStyle, visibility: loading || !debouncedState.playing ? 'hidden' : undefined },
      canvasStyle: { ...sharedStyle, visibility: loading || debouncedState.playing ? 'hidden' : undefined },
    } as { videoStyle: CSSProperties, canvasStyle: CSSProperties };
  }, [loading, debouncedState.playing, rotate]);

  return (
    <div style={{ width: '100%', height: '100%', left: 0, right: 0, top: 0, bottom: 0, position: 'absolute', overflow: 'hidden', background: 'black', pointerEvents: 'none' }}>
      {/* eslint-disable-next-line jsx-a11y/media-has-caption */}
      <video style={videoStyle} ref={videoRef} playsInline onError={onVideoError} tabIndex={-1} onFocusCapture={onFocus} />
      {videoStream != null && <canvas width={videoStream.width} height={videoStream.height} ref={canvasRef} style={canvasStyle} tabIndex={-1} onFocusCapture={onFocus} />}
      {loading && (
        <div style={{ position: 'absolute', top: 0, bottom: 0, left: 0, right: 0, display: 'flex', justifyContent: 'center', alignItems: 'center' }}><Spinner /></div>
      )}
    </div>
  );
}

export default memo(MediaSourcePlayer);

View File

@ -3,7 +3,7 @@ import { FaVolumeMute, FaVolumeUp } from 'react-icons/fa';
import { useTranslation } from 'react-i18next';
const VolumeControl = memo(({ playbackVolume, setPlaybackVolume, usingDummyVideo }) => {
const VolumeControl = memo(({ playbackVolume, setPlaybackVolume }) => {
const [volumeControlVisible, setVolumeControlVisible] = useState(false);
const timeoutRef = useRef();
const { t } = useTranslation();
@ -29,8 +29,7 @@ const VolumeControl = memo(({ playbackVolume, setPlaybackVolume, usingDummyVideo
}
}, [volumeControlVisible, setPlaybackVolume, playbackVolume]);
// TODO fastest-audio/fastest-audio-remux currently shows as muted
const VolumeIcon = playbackVolume === 0 || usingDummyVideo ? FaVolumeMute : FaVolumeUp;
const VolumeIcon = playbackVolume === 0 ? FaVolumeMute : FaVolumeUp;
return (
<>

View File

@ -12,9 +12,7 @@ const ReactSwal = withReactContent(Swal);
// eslint-disable-next-line import/prefer-default-export
export async function askForHtml5ifySpeed({ allowedOptions, showRemember, initialOption }) {
const availOptions = {
fastest: i18n.t('Fastest: Low playback speed (no audio)'),
'fastest-audio': i18n.t('Fastest: Low playback speed'),
'fastest-audio-remux': i18n.t('Fastest: Low playback speed (audio remux), likely to fail'),
fastest: i18n.t('Fastest: Low playback speed'),
fast: i18n.t('Fast: Full quality remux (no audio), likely to fail'),
'fast-audio-remux': i18n.t('Fast: Full quality remux, likely to fail'),
'fast-audio': i18n.t('Fast: Remux video, encode audio (fails if unsupported video codec)'),

View File

@ -222,7 +222,7 @@ export const html5dummySuffix = 'dummy';
export async function findExistingHtml5FriendlyFile(fp, cod) {
// The order is the priority we will search:
const suffixes = ['slowest', 'slow-audio', 'slow', 'fast-audio-remux', 'fast-audio', 'fast', 'fastest-audio', 'fastest-audio-remux', html5dummySuffix];
const suffixes = ['slowest', 'slow-audio', 'slow', 'fast-audio-remux', 'fast-audio', 'fast', html5dummySuffix];
const prefix = getSuffixedFileName(fp, html5ifiedPrefix);
const outDir = getOutDir(cod, fp);
@ -248,7 +248,7 @@ export async function findExistingHtml5FriendlyFile(fp, cod) {
return {
path: join(outDir, entry),
usingDummyVideo: ['fastest-audio', 'fastest-audio-remux', html5dummySuffix].includes(suffix),
usingDummyVideo: suffix === html5dummySuffix,
};
}
@ -412,3 +412,5 @@ export function getImportProjectType(filePath) {
export const calcShouldShowWaveform = (zoomedDuration) => (zoomedDuration != null && zoomedDuration < ffmpegExtractWindow * 8);
export const calcShouldShowKeyframes = (zoomedDuration) => (zoomedDuration != null && zoomedDuration < ffmpegExtractWindow * 8);
export const mediaSourceQualities = ['HD', 'SD', 'OG']; // OG is original

View File

@ -6879,7 +6879,6 @@ __metadata:
smpte-timecode: "npm:^1.2.3"
sortablejs: "npm:^1.13.0"
string-to-stream: "npm:^1.1.1"
strtok3: "npm:^6.0.0"
sweetalert2: "npm:^11.0.0"
sweetalert2-react-content: "npm:^5.0.7"
typescript: "npm:^5.3.3"
@ -7977,13 +7976,6 @@ __metadata:
languageName: node
linkType: hard
"peek-readable@npm:^4.0.1":
version: 4.0.2
resolution: "peek-readable@npm:4.0.2"
checksum: 54d085b796781f32898bafc2ef0cd55b63022c2187d96e3101d9e8d585627cecafdd5beb8807b04aee9c22b0261c4b72ed77945bb5454c361b433bcab20c2069
languageName: node
linkType: hard
"peek-readable@npm:^4.1.0":
version: 4.1.0
resolution: "peek-readable@npm:4.1.0"
@ -9649,16 +9641,6 @@ __metadata:
languageName: node
linkType: hard
"strtok3@npm:^6.0.0":
version: 6.2.4
resolution: "strtok3@npm:6.2.4"
dependencies:
"@tokenizer/token": "npm:^0.3.0"
peek-readable: "npm:^4.0.1"
checksum: b92f2510830e96df4dcf055f2761a2b4930bc440c16de7f53aecf1006a5362f5fa9af1e68ba9572971012f34634c96e9675a1a5bbb6ed0ea6ff0a728be64644f
languageName: node
linkType: hard
"strtok3@npm:^6.2.4":
version: 6.3.0
resolution: "strtok3@npm:6.3.0"