diff --git a/package.json b/package.json index 6d892cb8..edd6ba0a 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,7 @@ "@types/ndarray": "^1.0.14", "canvas": "^2.11.2", "compare-versions": "^6.1.0", - "execa": "^6.1.0", + "execa": "^8.0.1", "fabric": "^6.5.4", "file-type": "^20.0.0", "file-url": "^4.0.0", diff --git a/src/audio.ts b/src/audio.ts index b98a4516..abd58ae6 100644 --- a/src/audio.ts +++ b/src/audio.ts @@ -1,17 +1,13 @@ import pMap from 'p-map'; import { join, basename, resolve } from 'path'; -import { execa } from 'execa'; import { flatMap } from 'lodash-es'; -import { getFfmpegCommonArgs, getCutFromArgs } from './ffmpeg.js'; -import { readFileStreams } from './util.js'; +import { getCutFromArgs, ffmpeg } from './ffmpeg.js'; +import { readFileStreams } from './ffmpeg.js'; import type { AudioLayer, AudioNormalizationOptions, AudioTrack, Clip, Config, Transition, VideoLayer } from './types.js' export type AudioOptions = { - ffmpegPath: string; - ffprobePath: string; - enableFfmpegLog: boolean; verbose: boolean; tmpDir: string; } @@ -22,7 +18,7 @@ export type EditAudioOptions = Pick { +export default ({ verbose, tmpDir }: AudioOptions) => { async function createMixedAudioClips({ clips, keepSourceAudio }: { clips: Clip[], keepSourceAudio?: boolean }) { return pMap(clips, async (clip, i) => { const { duration, layers, transition } = clip; @@ -33,6 +29,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A async function createSilence() { if (verbose) console.log('create silence', duration); const args = [ + '-nostdin', '-f', 'lavfi', '-i', 'anullsrc=channel_layout=stereo:sample_rate=44100', '-sample_fmt', 's32', '-ar', '48000', @@ -41,7 +38,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A '-y', clipAudioPath, ]; - await execa(ffmpegPath, args); + await ffmpeg(args); return { silent: true, clipAudioPath }; } @@ -60,7 +57,7 @@ export default ({ ffmpegPath, ffprobePath, 
enableFfmpegLog, verbose, tmpDir }: A const processedAudioLayersRaw = await pMap(audioLayers, async (audioLayer, j) => { const { path, cutFrom, cutTo, speedFactor } = audioLayer; - const streams = await readFileStreams(ffprobePath, path); + const streams = await readFileStreams(path); if (!streams.some((s) => s.codec_type === 'audio')) return undefined; const layerAudioPath = join(tmpDir, `clip${i}-layer${j}-audio.flac`); @@ -80,7 +77,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A const cutToArg = (cutTo! - cutFrom!) * speedFactor; const args = [ - ...getFfmpegCommonArgs({ enableFfmpegLog }), + '-nostdin', ...getCutFromArgs({ cutFrom }), '-i', path, '-t', cutToArg!.toString(), @@ -92,8 +89,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A layerAudioPath, ]; - // console.log(args); - await execa(ffmpegPath, args); + await ffmpeg(args); return [ layerAudioPath, @@ -115,7 +111,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A // Merge/mix all layers' audio const weights = processedAudioLayers.map(([, { mixVolume }]) => mixVolume ?? 
1); const args = [ - ...getFfmpegCommonArgs({ enableFfmpegLog }), + '-nostdin', ...flatMap(processedAudioLayers, ([layerAudioPath]) => ['-i', layerAudioPath]), '-filter_complex', `amix=inputs=${processedAudioLayers.length}:duration=longest:weights=${weights.join(' ')}`, '-c:a', 'flac', @@ -123,7 +119,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A clipAudioPath, ]; - await execa(ffmpegPath, args); + await ffmpeg(args); return { clipAudioPath, silent: false }; } @@ -160,7 +156,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A }).join(','); const args = [ - ...getFfmpegCommonArgs({ enableFfmpegLog }), + '-nostdin', ...(flatMap(clipAudio, ({ path }) => ['-i', path])), '-filter_complex', filterGraph, @@ -168,7 +164,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A '-y', outPath, ]; - await execa(ffmpegPath, args); + await ffmpeg(args); return outPath; } @@ -198,7 +194,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A const mixedAudioPath = join(tmpDir, 'audio-mixed.flac'); const args = [ - ...getFfmpegCommonArgs({ enableFfmpegLog }), + '-nostdin', ...(flatMap(streams, ({ path, loop }) => ([ '-stream_loop', (loop || 0).toString(), '-i', path, @@ -210,9 +206,7 @@ export default ({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }: A mixedAudioPath, ]; - if (verbose) console.log(args.join(' ')); - - await execa(ffmpegPath, args); + await ffmpeg(args); return mixedAudioPath; } diff --git a/src/ffmpeg.ts b/src/ffmpeg.ts index 323a218f..2e488606 100644 --- a/src/ffmpeg.ts +++ b/src/ffmpeg.ts @@ -1,10 +1,39 @@ import fsExtra from 'fs-extra'; -import { execa } from 'execa'; +import { execa, type Options } from 'execa'; import assert from 'assert'; import { compareVersions } from 'compare-versions'; -export function getFfmpegCommonArgs({ enableFfmpegLog }: { enableFfmpegLog?: boolean }) { - return enableFfmpegLog ? 
[] : ['-hide_banner', '-loglevel', 'error']; +export type Stream = { + codec_type: string; + codec_name: string; + r_frame_rate: string; + width?: number; + height?: number; + tags?: { + rotate: string; + }; + side_data_list?: { + rotation: string; + }[]; +}; + +export type FfmpegConfig = { + ffmpegPath: string; + ffprobePath: string; + enableFfmpegLog?: boolean; +} + +const config: FfmpegConfig = { + ffmpegPath: 'ffmpeg', + ffprobePath: 'ffprobe', + enableFfmpegLog: false, +} + +export function getFfmpegCommonArgs() { + return [ + '-hide_banner', + ...(config.enableFfmpegLog ? [] : ['-loglevel', 'error']), + ]; } export function getCutFromArgs({ cutFrom }: { cutFrom?: number }) { @@ -35,3 +64,68 @@ export async function testFf(exePath: string, name: string) { console.error(`WARNING: ${name}:`, err); } } + +export async function configureFf(params: Partial) { + Object.assign(config, params); + await testFf(config.ffmpegPath, 'ffmpeg'); + await testFf(config.ffprobePath, 'ffprobe'); +} + +export function ffmpeg(args: string[], options?: Options) { + if (config.enableFfmpegLog) console.log(`$ ${config.ffmpegPath} ${args.join(' ')}`); + return execa(config.ffmpegPath, [...getFfmpegCommonArgs(), ...args], options); +} + +export function ffprobe(args: string[]) { + return execa(config.ffprobePath, args); +} + +export function parseFps(fps?: string) { + const match = typeof fps === 'string' && fps.match(/^([0-9]+)\/([0-9]+)$/); + if (match) { + const num = parseInt(match[1], 10); + const den = parseInt(match[2], 10); + if (den > 0) return num / den; + } + return undefined; +} + +export async function readDuration(p: string) { + const { stdout } = await ffprobe(['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', p]); + const parsed = parseFloat(stdout); + assert(!Number.isNaN(parsed)); + return parsed; +} + +export async function readFileStreams(p: string) { + const { stdout } = await ffprobe(['-show_entries', 'stream', '-of', 
'json', p]); + return JSON.parse(stdout).streams as Stream[]; +} + + +export async function readVideoFileInfo(p: string) { + const streams = await readFileStreams(p); + const stream = streams.find((s) => s.codec_type === 'video'); // TODO + + if (!stream) { + throw new Error(`Could not find a video stream in ${p}`); + } + + const duration = await readDuration(p); + + let rotation = parseInt(stream.tags?.rotate ?? '', 10); + + // If we can't find rotation, try side_data_list + if (Number.isNaN(rotation) && stream.side_data_list?.[0]?.rotation) { + rotation = parseInt(stream.side_data_list[0].rotation, 10); + } + + return { + // numFrames: parseInt(stream.nb_frames, 10), + duration, + width: stream.width, // TODO coded_width? + height: stream.height, + framerateStr: stream.r_frame_rate, + rotation: !Number.isNaN(rotation) ? rotation : undefined, + }; +} diff --git a/src/index.ts b/src/index.ts index 71fdd151..fdcfe4fa 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,12 +1,12 @@ -import { execa, ExecaChildProcess } from 'execa'; +import { ExecaChildProcess } from 'execa'; import assert from 'assert'; import { join, dirname } from 'path'; import JSON5 from 'json5'; import fsExtra from 'fs-extra'; import { nanoid } from 'nanoid'; -import { testFf } from './ffmpeg.js'; -import { parseFps, multipleOf2, assertFileValid, checkTransition } from './util.js'; +import { configureFf, ffmpeg, parseFps } from './ffmpeg.js'; +import { multipleOf2, assertFileValid, checkTransition } from './util.js'; import { createFabricCanvas, rgbaToFabricImage } from './sources/fabric.js'; import { createFrameSource } from './sources/frameSource.js'; import parseConfig, { ProcessedClip } from './parseConfig.js'; @@ -26,7 +26,6 @@ export type * from './types.js'; async function Editly(config: Config): Promise { const { // Testing options: - enableFfmpegLog = false, verbose = false, logTimes = false, keepTmp = false, @@ -49,12 +48,12 @@ async function Editly(config: Config): Promise { 
outputVolume, customOutputArgs, + enableFfmpegLog = verbose, ffmpegPath = 'ffmpeg', ffprobePath = 'ffprobe', } = config; - await testFf(ffmpegPath, 'ffmpeg'); - await testFf(ffprobePath, 'ffprobe'); + await configureFf({ ffmpegPath, ffprobePath, enableFfmpegLog }); const isGif = outPath.toLowerCase().endsWith('.gif'); @@ -67,7 +66,7 @@ async function Editly(config: Config): Promise { assert(outPath, 'Please provide an output path'); assert(clipsIn.length > 0, 'Please provide at least 1 clip'); - const { clips, arbitraryAudio } = await parseConfig({ defaults, clips: clipsIn, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests, ffprobePath }); + const { clips, arbitraryAudio } = await parseConfig({ defaults, clips: clipsIn, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests }); if (verbose) console.log('Calculated', JSON5.stringify({ clips, arbitraryAudio }, null, 2)); const outDir = dirname(outPath); @@ -75,7 +74,7 @@ async function Editly(config: Config): Promise { if (verbose) console.log({ tmpDir }); await fsExtra.mkdirp(tmpDir); - const { editAudio } = Audio({ ffmpegPath, ffprobePath, enableFfmpegLog, verbose, tmpDir }); + const { editAudio } = Audio({ verbose, tmpDir }); const audioFilePath = !isGif ? await editAudio({ keepSourceAudio, arbitraryAudio, clipsAudioVolume, clips, audioNorm, outputVolume }) : undefined; @@ -212,8 +211,6 @@ async function Editly(config: Config): Promise { function startFfmpegWriterProcess() { const args = [ - ...(enableFfmpegLog ? 
[] : ['-hide_banner', '-loglevel', 'error']), - '-f', 'rawvideo', '-vcodec', 'rawvideo', '-pix_fmt', 'rgba', @@ -230,8 +227,7 @@ async function Editly(config: Config): Promise { '-y', outPath, ]; - if (verbose) console.log('ffmpeg', args.join(' ')); - return execa(ffmpegPath, args, { encoding: null, buffer: false, stdin: 'pipe', stdout: process.stdout, stderr: process.stderr }); + return ffmpeg(args, { encoding: null, buffer: false, stdin: 'pipe', stdout: process.stdout, stderr: process.stderr }); } let outProcess: ExecaChildProcess> | undefined = undefined; @@ -252,7 +248,7 @@ async function Editly(config: Config): Promise { const getTransitionFromClip = () => clips[transitionFromClipId]; const getTransitionToClip = () => clips[getTransitionToClipId()]; - const getSource = async (clip: ProcessedClip, clipIndex: number) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }); + const getSource = async (clip: ProcessedClip, clipIndex: number) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, framerateStr }); const getTransitionFromSource = async () => getSource(getTransitionFromClip(), transitionFromClipId); const getTransitionToSource = async () => (getTransitionToClip() && getSource(getTransitionToClip(), getTransitionToClipId())); @@ -423,14 +419,16 @@ export async function renderSingleFrame(config: RenderSingleFrameConfig): Promis verbose, logTimes, - enableFfmpegLog, allowRemoteRequests, - ffprobePath = 'ffprobe', ffmpegPath = 'ffmpeg', + ffprobePath = 'ffprobe', + enableFfmpegLog, outPath = `${Math.floor(Math.random() * 1e12)}.png`, } = config; - const { clips } = await parseConfig({ defaults, clips: clipsIn, arbitraryAudio: [], allowRemoteRequests, ffprobePath }); + await configureFf({ ffmpegPath, ffprobePath, enableFfmpegLog }); + + const { clips } = await parseConfig({ defaults, clips: clipsIn, arbitraryAudio: [], allowRemoteRequests }); let
clipStartTime = 0; const clip = clips.find((c) => { if (clipStartTime <= time && clipStartTime + c.duration > time) return true; @@ -439,7 +437,7 @@ export async function renderSingleFrame(config: RenderSingleFrameConfig): Promis }); assert(clip, 'No clip found at requested time'); const clipIndex = clips.indexOf(clip); - const frameSource = await createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr: '1' }); + const frameSource = await createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, framerateStr: '1' }); const rgba = await frameSource.readNextFrame({ time: time - clipStartTime }); // TODO converting rgba to png can be done more easily? diff --git a/src/parseConfig.ts b/src/parseConfig.ts index 6b00aacd..9bf7e8fc 100644 --- a/src/parseConfig.ts +++ b/src/parseConfig.ts @@ -3,13 +3,8 @@ import { basename, join } from 'path'; import flatMap from 'lodash-es/flatMap.js'; import assert from 'assert'; import { fileURLToPath } from 'url'; - -import { - readVideoFileInfo, - readAudioFileInfo, - assertFileValid, - checkTransition, -} from './util.js'; +import { assertFileValid, checkTransition } from './util.js'; +import { readVideoFileInfo, readDuration } from './ffmpeg.js'; import { registerFont } from 'canvas'; import { calcTransition, type CalculatedTransition } from './transitions.js'; import type { AudioTrack, CanvasLayer, EditlyBannerLayer, FabricLayer, GlLayer, ImageLayer, ImageOverlayLayer, Layer, LinearGradientLayer, NewsTitleLayer, SlideInTextLayer, SubtitleLayer, TitleBackgroundLayer, TitleLayer, DefaultOptions, Clip, VideoLayer } from './types.js'; @@ -47,11 +42,10 @@ type ParseConfigOptions = { backgroundAudioPath?: string; loopAudio?: boolean; allowRemoteRequests?: boolean; - ffprobePath: string; arbitraryAudio: AudioTrack[]; }; -export default async function parseConfig({ defaults: defaultsIn = {}, clips, arbitraryAudio: arbitraryAudioIn, 
backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests, ffprobePath }: ParseConfigOptions) { +export default async function parseConfig({ defaults: defaultsIn = {}, clips, arbitraryAudio: arbitraryAudioIn, backgroundAudioPath, backgroundAudioVolume, loopAudio, allowRemoteRequests }: ParseConfigOptions) { const defaults = { duration: 4, ...defaultsIn, @@ -160,7 +154,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar const layer: T = { ...globalLayerDefaults, ...thisLayerDefaults, ...layerIn }; if (layer.type === 'video') { - const { duration: fileDuration, width: widthIn, height: heightIn, framerateStr, rotation } = await readVideoFileInfo(ffprobePath, layer.path); + const { duration: fileDuration, width: widthIn, height: heightIn, framerateStr, rotation } = await readVideoFileInfo(layer.path); let { cutFrom, cutTo } = layer; if (!cutFrom) cutFrom = 0; cutFrom = Math.max(cutFrom, 0); @@ -205,7 +199,7 @@ export default async function parseConfig({ defaults: defaultsIn = {}, clips, ar const layer: T = { ...layerIn, layerDuration }; if (layer.type === 'audio') { - const { duration: fileDuration } = await readAudioFileInfo(ffprobePath, layer.path); + const fileDuration = await readDuration(layer.path); let { cutFrom, cutTo } = layer; // console.log({ cutFrom, cutTo, fileDuration, clipDuration }); diff --git a/src/sources/frameSource.ts b/src/sources/frameSource.ts index 2de0652d..033bde32 100644 --- a/src/sources/frameSource.ts +++ b/src/sources/frameSource.ts @@ -52,15 +52,13 @@ const frameSources: Record> = { type FrameSourceOptions = DebugOptions & { clip: ProcessedClip; clipIndex: number; - ffmpegPath: string; - ffprobePath: string; width: number, height: number, channels: number, framerateStr: string, } -export async function createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, framerateStr }: FrameSourceOptions) { +export async function 
createFrameSource({ clip, clipIndex, width, height, channels, verbose, logTimes, framerateStr }: FrameSourceOptions) { const { layers, duration } = clip; const visualLayers = layers.filter((layer) => layer.type !== 'audio'); @@ -80,7 +78,7 @@ export async function createFrameSource({ clip, clipIndex, width, height, channe assert(createFrameSourceFunc, `Invalid type ${type}`); - const frameSource = await createFrameSourceFunc({ ffmpegPath, ffprobePath, width, height, duration, channels, verbose, logTimes, enableFfmpegLog, framerateStr, params }); + const frameSource = await createFrameSourceFunc({ width, height, duration, channels, verbose, logTimes, framerateStr, params }); return { layer, frameSource }; }, { concurrency: 1 }); diff --git a/src/sources/videoFrameSource.ts b/src/sources/videoFrameSource.ts index c57f348b..6a5de966 100644 --- a/src/sources/videoFrameSource.ts +++ b/src/sources/videoFrameSource.ts @@ -1,16 +1,15 @@ -import { execa } from 'execa'; import assert from 'assert'; import * as fabric from 'fabric/node'; -import { getFfmpegCommonArgs } from '../ffmpeg.js'; -import { readFileStreams } from '../util.js'; +import { ffmpeg } from '../ffmpeg.js'; +import { readFileStreams } from '../ffmpeg.js'; import { rgbaToFabricImage, blurImage, } from './fabric.js'; import type { CreateFrameSourceOptions, VideoLayer } from '../types.js'; -export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, ffmpegPath, ffprobePath, enableFfmpegLog, params }: CreateFrameSourceOptions) => { +export default async ({ width: canvasWidth, height: canvasHeight, channels, framerateStr, verbose, logTimes, params }: CreateFrameSourceOptions) => { const { path, cutFrom, cutTo, resizeMode = 'contain-blur', speedFactor, inputWidth, inputHeight, width: requestedWidthRel, height: requestedHeightRel, left: leftRel = 0, top: topRel = 0, originX = 'left', originY = 'top', fabricImagePostProcessing = null } = params; const requestedWidth = 
requestedWidthRel ? Math.round(requestedWidthRel * canvasWidth) : canvasWidth; @@ -73,7 +72,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram // let inFrameCount = 0; // https://forum.unity.com/threads/settings-for-importing-a-video-with-an-alpha-channel.457657/ - const streams = await readFileStreams(ffprobePath, path); + const streams = await readFileStreams(path); const firstVideoStream = streams.find((s) => s.codec_type === 'video'); // https://superuser.com/a/1116905/658247 @@ -85,7 +84,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram // Testing: ffmpeg -i 'vid.mov' -t 1 -vcodec rawvideo -pix_fmt rgba -f image2pipe - | ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i - -vf format=yuv420p -vcodec libx264 -y out.mp4 // https://trac.ffmpeg.org/wiki/ChangingFrameRate const args = [ - ...getFfmpegCommonArgs({ enableFfmpegLog }), + '-nostdin', ...(inputCodec ? ['-vcodec', inputCodec] : []), ...(cutFrom ? 
['-ss', cutFrom.toString()] : []), '-i', path, @@ -97,9 +96,8 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram '-f', 'image2pipe', '-', ]; - if (verbose) console.log(args.join(' ')); - const ps = execa(ffmpegPath, args, { encoding: null, buffer: false, stdin: 'ignore', stdout: 'pipe', stderr: process.stderr }); + const ps = ffmpeg(args, { encoding: null, buffer: false, stdin: 'ignore', stdout: 'pipe', stderr: process.stderr }); const stream = ps.stdout!; @@ -138,16 +136,11 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram resolve(); return; } - // console.log('Reading new frame', path); - - function onEnd() { - resolve(); - } function cleanup() { stream.pause(); stream.removeListener('data', handleChunk); - stream.removeListener('end', onEnd); + stream.removeListener('end', resolve); stream.removeListener('error', reject); } @@ -184,7 +177,7 @@ export default async ({ width: canvasWidth, height: canvasHeight, channels, fram }, 60000); stream.on('data', handleChunk); - stream.on('end', onEnd); + stream.on('end', resolve); stream.on('error', reject); stream.resume(); }); diff --git a/src/types.ts b/src/types.ts index 2b3ddd1a..3392e1b1 100644 --- a/src/types.ts +++ b/src/types.ts @@ -2,6 +2,7 @@ import type * as Fabric from 'fabric/node'; import type { Canvas } from "canvas" +import type { FfmpegConfig } from './ffmpeg.js'; /** Little utility */ export type OptionalPromise = Promise | T; @@ -935,12 +936,11 @@ export interface AudioNormalizationOptions { } export interface DebugOptions { - enableFfmpegLog?: boolean; verbose?: boolean; logTimes?: boolean; } -export interface Config extends DebugOptions { +export interface Config extends DebugOptions, FfmpegConfig { /** * Output path (`.mp4` or `.mkv`, can also be a `.gif`). */ @@ -1061,16 +1061,6 @@ export interface Config extends DebugOptions { */ audioNorm?: AudioNormalizationOptions; - /** - * WARNING: Undocumented feature! 
- */ - ffmpegPath?: string; - - /** - * WARNING: Undocumented feature! - */ - ffprobePath?: string; - /** * WARNING: Undocumented feature! */ @@ -1093,20 +1083,6 @@ export interface RenderSingleFrameConfig extends Config { // Internal types -export type Stream = { - codec_type: string; - codec_name: string; - r_frame_rate: string; - width?: number; - height?: number; - tags?: { - rotate: string; - }; - side_data_list?: { - rotation: string; - }[]; -}; - export type Keyframe = { t: number; props: { [key: string]: number }; @@ -1118,8 +1094,6 @@ export interface FrameSource { } export type CreateFrameSourceOptions = DebugOptions & { - ffmpegPath: string; - ffprobePath: string; width: number, height: number, duration: number, diff --git a/src/util.ts b/src/util.ts index 54f44a52..bfc3a97a 100644 --- a/src/util.ts +++ b/src/util.ts @@ -1,70 +1,11 @@ -import { execa } from 'execa'; import assert from 'assert'; import { sortBy } from 'lodash-es'; import { pathExists } from 'fs-extra'; -import type { Keyframe, Stream } from './types.js'; +import type { Keyframe } from './types.js'; import type { Position, PositionObject, Transition } from './types.js'; import type { TOriginX, TOriginY } from 'fabric'; -export function parseFps(fps?: string) { - const match = typeof fps === 'string' && fps.match(/^([0-9]+)\/([0-9]+)$/); - if (match) { - const num = parseInt(match[1], 10); - const den = parseInt(match[2], 10); - if (den > 0) return num / den; - } - return undefined; -} - -export async function readDuration(ffprobePath: string, p: string) { - const { stdout } = await execa(ffprobePath, ['-v', 'error', '-show_entries', 'format=duration', '-of', 'default=noprint_wrappers=1:nokey=1', p]); - const parsed = parseFloat(stdout); - assert(!Number.isNaN(parsed)); - return parsed; -} - -export async function readFileStreams(ffprobePath: string, p: string) { - const { stdout } = await execa(ffprobePath, [ - '-show_entries', 'stream', '-of', 'json', p, - ]); - return 
JSON.parse(stdout).streams as Stream[]; -} - - -export async function readVideoFileInfo(ffprobePath: string, p: string) { - const streams = await readFileStreams(ffprobePath, p); - const stream = streams.find((s) => s.codec_type === 'video'); // TODO - - if (!stream) { - throw new Error(`Could not find a video stream in ${p}`); - } - - const duration = await readDuration(ffprobePath, p); - - let rotation = parseInt(stream.tags?.rotate ?? '', 10); - - // If we can't find rotation, try side_data_list - if (Number.isNaN(rotation) && stream.side_data_list?.[0]?.rotation) { - rotation = parseInt(stream.side_data_list[0].rotation, 10); - } - - return { - // numFrames: parseInt(stream.nb_frames, 10), - duration, - width: stream.width, // TODO coded_width? - height: stream.height, - framerateStr: stream.r_frame_rate, - rotation: !Number.isNaN(rotation) ? rotation : undefined, - }; -} - -export async function readAudioFileInfo(ffprobePath: string, p: string) { - const duration = await readDuration(ffprobePath, p); - - return { duration }; -} - export function toArrayInteger(buffer: Buffer) { if (buffer.length > 0) { const data = new Uint8ClampedArray(buffer.length);