packages/docs/docs/mediabunny/extract-frames.mdx
Extracting frames from a video file, for example to display a filmstrip in an editing interface, can be done using Mediabunny.
Here's an extractFrames() function you can copy and paste into your project:
import {ALL_FORMATS, Input, InputDisposedError, UrlSource, VideoSample, VideoSampleSink} from 'mediabunny';
/**
 * Metadata passed to a timestamp-generating callback.
 */
type Options = {
  // Display dimensions of the primary video track, in pixels.
  track: {width: number; height: number};
  // Name of the container format (from Input.getFormat()).
  container: string;
  // Duration of the media in seconds, or null when unknown.
  durationInSeconds: number | null;
};

/**
 * Callback that derives the timestamps to extract from track metadata.
 * May return the timestamps synchronously or as a promise.
 */
export type ExtractFramesTimestampsInSecondsFn = (options: Options) => Promise<number[]> | number[];

export type ExtractFramesProps = {
  // URL of the media file to read frames from.
  src: string;
  // Fixed timestamps (in seconds), or a callback that computes them.
  timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
  // Called once per extracted frame. The sample is disposed automatically
  // after the callback returns — do not keep a reference to it.
  onVideoSample: (sample: VideoSample) => void;
  // Optional signal to cancel the extraction.
  signal?: AbortSignal;
};

/**
 * Extracts video frames at the given timestamps and invokes
 * `onVideoSample` for each decoded frame.
 *
 * @throws Error if the input has no video track.
 * @throws Error ('Aborted') if `signal` is aborted before or during extraction.
 */
export async function extractFrames({src, timestampsInSeconds, onVideoSample, signal}: ExtractFramesProps): Promise<void> {
  // `using` disposes the Input when this function returns or throws.
  using input = new Input({
    formats: ALL_FORMATS,
    source: new UrlSource(src),
  });

  // Fetch duration, container format and track metadata in parallel.
  const [durationInSeconds, format, videoTrack] = await Promise.all([input.computeDuration(), input.getFormat(), input.getPrimaryVideoTrack()]);

  if (!videoTrack) {
    throw new Error('No video track found in the input');
  }

  if (signal?.aborted) {
    throw new Error('Aborted');
  }

  const timestamps =
    typeof timestampsInSeconds === 'function'
      ? await timestampsInSeconds({
          track: {
            width: videoTrack.displayWidth,
            height: videoTrack.displayHeight,
          },
          container: format.name,
          durationInSeconds,
        })
      : timestampsInSeconds;

  if (timestamps.length === 0) {
    return;
  }

  if (signal?.aborted) {
    throw new Error('Aborted');
  }

  const sink = new VideoSampleSink(videoTrack);

  // Each sample is disposed at the end of its iteration by `using`.
  for await (using videoSample of sink.samplesAtTimestamps(timestamps)) {
    if (signal?.aborted) {
      // Throw (instead of silently breaking) so callers that pass a signal
      // observe cancellation consistently with the pre-loop abort checks.
      throw new Error('Aborted');
    }
    if (!videoSample) {
      continue;
    }
    onVideoSample(videoSample);
  }
}
await extractFrames({
  src: 'https://remotion.media/video.mp4',
  timestampsInSeconds: [0, 1, 2, 3, 4],
  onVideoSample: (sample) => {
    // Draw the frame to a canvas sized to the sample's display dimensions.
    const canvas = document.createElement('canvas');
    canvas.width = sample.displayWidth;
    canvas.height = sample.displayHeight;
    const ctx = canvas.getContext('2d');
    // Guard instead of a non-null assertion: getContext() can return null.
    if (!ctx) {
      throw new Error('Could not get 2D canvas context');
    }
    sample.draw(ctx, 0, 0);
  },
});
Extract as many frames as fit in a canvas based on the video's aspect ratio:
import {ALL_FORMATS, Input, InputDisposedError, UrlSource, VideoSample, VideoSampleSink} from 'mediabunny';
/**
 * Metadata passed to a timestamp-generating callback.
 */
type Options = {
  // Display dimensions of the primary video track, in pixels.
  track: {width: number; height: number};
  // Name of the container format (from Input.getFormat()).
  container: string;
  // Duration of the media in seconds, or null when unknown.
  durationInSeconds: number | null;
};

/**
 * Callback that derives the timestamps to extract from track metadata.
 * May return the timestamps synchronously or as a promise.
 */
export type ExtractFramesTimestampsInSecondsFn = (options: Options) => Promise<number[]> | number[];

export type ExtractFramesProps = {
  // URL of the media file to read frames from.
  src: string;
  // Fixed timestamps (in seconds), or a callback that computes them.
  timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
  // Called once per extracted frame. The sample is disposed automatically
  // after the callback returns — do not keep a reference to it.
  onVideoSample: (sample: VideoSample) => void;
  // Optional signal to cancel the extraction.
  signal?: AbortSignal;
};

/**
 * Extracts video frames at the given timestamps and invokes
 * `onVideoSample` for each decoded frame.
 *
 * @throws Error if the input has no video track.
 * @throws Error ('Aborted') if `signal` is aborted before or during extraction.
 */
export async function extractFrames({src, timestampsInSeconds, onVideoSample, signal}: ExtractFramesProps): Promise<void> {
  // `using` disposes the Input when this function returns or throws.
  using input = new Input({
    formats: ALL_FORMATS,
    source: new UrlSource(src),
  });

  // Fetch duration, container format and track metadata in parallel.
  const [durationInSeconds, format, videoTrack] = await Promise.all([input.computeDuration(), input.getFormat(), input.getPrimaryVideoTrack()]);

  if (!videoTrack) {
    throw new Error('No video track found in the input');
  }

  if (signal?.aborted) {
    throw new Error('Aborted');
  }

  const timestamps =
    typeof timestampsInSeconds === 'function'
      ? await timestampsInSeconds({
          track: {
            width: videoTrack.displayWidth,
            height: videoTrack.displayHeight,
          },
          container: format.name,
          durationInSeconds,
        })
      : timestampsInSeconds;

  if (timestamps.length === 0) {
    return;
  }

  if (signal?.aborted) {
    throw new Error('Aborted');
  }

  const sink = new VideoSampleSink(videoTrack);

  // Each sample is disposed at the end of its iteration by `using`.
  for await (using videoSample of sink.samplesAtTimestamps(timestamps)) {
    if (signal?.aborted) {
      // Throw (instead of silently breaking) so callers that pass a signal
      // observe cancellation consistently with the pre-loop abort checks.
      throw new Error('Aborted');
    }
    if (!videoSample) {
      continue;
    }
    onVideoSample(videoSample);
  }
}
// ---cut---
const canvasWidth = 500;
const canvasHeight = 80;
const fromSeconds = 0;
const toSeconds = 10;

await extractFrames({
  src: 'https://remotion.media/video.mp4',
  timestampsInSeconds: async ({track}) => {
    // Each thumbnail, scaled to the canvas height, is
    // (canvasHeight * aspectRatio) pixels wide; fit as many as needed
    // to cover the canvas width.
    const aspectRatio = track.width / track.height;
    const amountOfFramesFit = Math.ceil(canvasWidth / (canvasHeight * aspectRatio));
    const segmentDuration = toSeconds - fromSeconds;
    // Sample at the midpoint of each equally-sized segment.
    const timestamps: number[] = [];
    for (let i = 0; i < amountOfFramesFit; i++) {
      timestamps.push(fromSeconds + (segmentDuration / amountOfFramesFit) * (i + 0.5));
    }
    return timestamps;
  },
  onVideoSample: (sample) => {
    // Process the sample
    console.log(`Frame at ${sample.timestamp}s`);
    // Draw to canvas or process as needed
    const canvas = document.createElement('canvas');
    canvas.width = sample.displayWidth;
    canvas.height = sample.displayHeight;
    const ctx = canvas.getContext('2d');
    // Guard instead of a non-null assertion: getContext() can return null.
    if (!ctx) {
      throw new Error('Could not get 2D canvas context');
    }
    sample.draw(ctx, 0, 0);
  },
});
In the example above, the using keyword automatically closes the VideoSample and Input objects when they go out of scope. Make sure not to keep any references to them after that point so they can be cleaned up properly.
Pass an AbortSignal to cancel frame extraction:
import {ALL_FORMATS, Input, InputDisposedError, UrlSource, VideoSample, VideoSampleSink} from 'mediabunny';
/**
 * Metadata passed to a timestamp-generating callback.
 */
type Options = {
  // Display dimensions of the primary video track, in pixels.
  track: {width: number; height: number};
  // Name of the container format (from Input.getFormat()).
  container: string;
  // Duration of the media in seconds, or null when unknown.
  durationInSeconds: number | null;
};

/**
 * Callback that derives the timestamps to extract from track metadata.
 * May return the timestamps synchronously or as a promise.
 */
export type ExtractFramesTimestampsInSecondsFn = (options: Options) => Promise<number[]> | number[];

export type ExtractFramesProps = {
  // URL of the media file to read frames from.
  src: string;
  // Fixed timestamps (in seconds), or a callback that computes them.
  timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
  // Called once per extracted frame. The sample is disposed automatically
  // after the callback returns — do not keep a reference to it.
  onVideoSample: (sample: VideoSample) => void;
  // Optional signal to cancel the extraction.
  signal?: AbortSignal;
};

/**
 * Extracts video frames at the given timestamps and invokes
 * `onVideoSample` for each decoded frame.
 *
 * @throws Error if the input has no video track.
 * @throws Error ('Aborted') if `signal` is aborted before or during extraction.
 */
export async function extractFrames({src, timestampsInSeconds, onVideoSample, signal}: ExtractFramesProps): Promise<void> {
  // `using` disposes the Input when this function returns or throws.
  using input = new Input({
    formats: ALL_FORMATS,
    source: new UrlSource(src),
  });

  // Fetch duration, container format and track metadata in parallel.
  const [durationInSeconds, format, videoTrack] = await Promise.all([input.computeDuration(), input.getFormat(), input.getPrimaryVideoTrack()]);

  if (!videoTrack) {
    throw new Error('No video track found in the input');
  }

  if (signal?.aborted) {
    throw new Error('Aborted');
  }

  const timestamps =
    typeof timestampsInSeconds === 'function'
      ? await timestampsInSeconds({
          track: {
            width: videoTrack.displayWidth,
            height: videoTrack.displayHeight,
          },
          container: format.name,
          durationInSeconds,
        })
      : timestampsInSeconds;

  if (timestamps.length === 0) {
    return;
  }

  if (signal?.aborted) {
    throw new Error('Aborted');
  }

  const sink = new VideoSampleSink(videoTrack);

  // Each sample is disposed at the end of its iteration by `using`.
  for await (using videoSample of sink.samplesAtTimestamps(timestamps)) {
    if (signal?.aborted) {
      // Throw (instead of silently breaking) so callers that pass a signal
      // observe cancellation consistently with the pre-loop abort checks.
      throw new Error('Aborted');
    }
    if (!videoSample) {
      continue;
    }
    onVideoSample(videoSample);
  }
}
// ---cut---
const controller = new AbortController();

// Cancel after 5 seconds.
const abortTimeout = setTimeout(() => controller.abort(), 5000);

try {
  await extractFrames({
    src: 'https://remotion.media/video.mp4',
    timestampsInSeconds: [0, 1, 2, 3, 4],
    onVideoSample: (sample) => {
      // Draw the frame; the extractFrames() loop disposes the sample
      // after this callback returns, so no `using` is needed here.
      const canvas = document.createElement('canvas');
      canvas.width = sample.displayWidth;
      canvas.height = sample.displayHeight;
      const ctx = canvas.getContext('2d');
      // Guard instead of a non-null assertion: getContext() can return null.
      if (!ctx) {
        throw new Error('Could not get 2D canvas context');
      }
      sample.draw(ctx, 0, 0);
    },
    signal: controller.signal,
  });
  console.log('Frame extraction complete!');
} catch (error) {
  console.error('Frame extraction was aborted or failed:', error);
} finally {
  // Don't leave the abort timer pending if extraction finished early.
  clearTimeout(abortTimeout);
}
Here is how you can set a maximum duration for extracting frames:
import {ALL_FORMATS, Input, InputDisposedError, UrlSource, VideoSample, VideoSampleSink} from 'mediabunny';
/**
 * Metadata passed to a timestamp-generating callback.
 */
type Options = {
  // Display dimensions of the primary video track, in pixels.
  track: {width: number; height: number};
  // Name of the container format (from Input.getFormat()).
  container: string;
  // Duration of the media in seconds, or null when unknown.
  durationInSeconds: number | null;
};

/**
 * Callback that derives the timestamps to extract from track metadata.
 * May return the timestamps synchronously or as a promise.
 */
export type ExtractFramesTimestampsInSecondsFn = (options: Options) => Promise<number[]> | number[];

export type ExtractFramesProps = {
  // URL of the media file to read frames from.
  src: string;
  // Fixed timestamps (in seconds), or a callback that computes them.
  timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
  // Called once per extracted frame. The sample is disposed automatically
  // after the callback returns — do not keep a reference to it.
  onVideoSample: (sample: VideoSample) => void;
  // Optional signal to cancel the extraction.
  signal?: AbortSignal;
};

/**
 * Extracts video frames at the given timestamps and invokes
 * `onVideoSample` for each decoded frame.
 *
 * @throws Error if the input has no video track.
 * @throws Error ('Aborted') if `signal` is aborted before or during extraction.
 */
export async function extractFrames({src, timestampsInSeconds, onVideoSample, signal}: ExtractFramesProps): Promise<void> {
  // `using` disposes the Input when this function returns or throws.
  using input = new Input({
    formats: ALL_FORMATS,
    source: new UrlSource(src),
  });

  // Fetch duration, container format and track metadata in parallel.
  const [durationInSeconds, format, videoTrack] = await Promise.all([input.computeDuration(), input.getFormat(), input.getPrimaryVideoTrack()]);

  if (!videoTrack) {
    throw new Error('No video track found in the input');
  }

  if (signal?.aborted) {
    throw new Error('Aborted');
  }

  const timestamps =
    typeof timestampsInSeconds === 'function'
      ? await timestampsInSeconds({
          track: {
            width: videoTrack.displayWidth,
            height: videoTrack.displayHeight,
          },
          container: format.name,
          durationInSeconds,
        })
      : timestampsInSeconds;

  if (timestamps.length === 0) {
    return;
  }

  if (signal?.aborted) {
    throw new Error('Aborted');
  }

  const sink = new VideoSampleSink(videoTrack);

  // Each sample is disposed at the end of its iteration by `using`.
  for await (using videoSample of sink.samplesAtTimestamps(timestamps)) {
    if (signal?.aborted) {
      // Throw (instead of silently breaking) so callers that pass a signal
      // observe cancellation consistently with the pre-loop abort checks.
      throw new Error('Aborted');
    }
    if (!videoSample) {
      continue;
    }
    onVideoSample(videoSample);
  }
}
// ---cut---
const controller = new AbortController();

// Abort extraction and fail the race if it takes longer than 10 seconds.
let timeoutId: ReturnType<typeof setTimeout> | undefined;
const timeoutPromise = new Promise<never>((_, reject) => {
  timeoutId = setTimeout(() => {
    controller.abort();
    reject(new Error('Frame extraction timed out after 10 seconds'));
  }, 10000);
});

try {
  await Promise.race([
    extractFrames({
      src: 'https://remotion.media/video.mp4',
      timestampsInSeconds: [0, 1, 2, 3, 4],
      onVideoSample: (sample) => {
        // Draw the frame; the extractFrames() loop disposes the sample
        // after this callback returns, so no `using` is needed here.
        const canvas = document.createElement('canvas');
        canvas.width = sample.displayWidth;
        canvas.height = sample.displayHeight;
        const ctx = canvas.getContext('2d');
        // Guard instead of a non-null assertion: getContext() can return null.
        if (!ctx) {
          throw new Error('Could not get 2D canvas context');
        }
        sample.draw(ctx, 0, 0);
      },
      signal: controller.signal,
    }),
    timeoutPromise,
  ]);
  console.log('Frame extraction complete!');
} catch (error) {
  console.error('Frame extraction was aborted or failed:', error);
} finally {
  // Clear the timer so it cannot fire after the race has settled — a late
  // rejection of timeoutPromise would otherwise be an unhandled rejection.
  clearTimeout(timeoutId);
}
VideoSampleSink API