mirror of
https://github.com/eggent-ai/eggent.git
synced 2026-03-07 01:53:08 +00:00
remotion skill
This commit is contained in:
@@ -1,328 +1,61 @@
|
||||
---
|
||||
name: remotion
|
||||
description: AI video production workflow using Remotion. Use when creating videos, short films, commercials, or motion graphics. Triggers on requests to make promotional videos, product demos, social media videos, animated explainers, or any programmatic video content. Produces polished motion graphics, not slideshows.
|
||||
name: remotion-best-practices
|
||||
description: Best practices for Remotion - Video creation in React
|
||||
metadata:
|
||||
tags: remotion, video, react, animation, composition
|
||||
---
|
||||
|
||||
# Video Generator (Remotion)
|
||||
|
||||
Create professional motion graphics videos programmatically with React and Remotion.
|
||||
|
||||
## Default Workflow (ALWAYS follow this)
|
||||
|
||||
1. **Scrape brand data** (if featuring a product) using Firecrawl
|
||||
2. **Create the project** in `output/<project-name>/`
|
||||
3. **Build all scenes** with proper motion graphics
|
||||
4. **Install dependencies** with `npm install`
|
||||
5. **Fix package.json scripts** to use `npx remotion` (not `bun`):
|
||||
```json
|
||||
"scripts": {
|
||||
"dev": "npx remotion studio",
|
||||
"build": "npx remotion bundle"
|
||||
}
|
||||
```
|
||||
6. **Start Remotion Studio** as a background process:
|
||||
```bash
|
||||
cd output/<project-name> && npm run dev
|
||||
```
|
||||
Wait for "Server ready" on port 3000.
|
||||
7. **Expose via Cloudflare tunnel** so user can access it:
|
||||
```bash
|
||||
bash skills/cloudflare-tunnel/scripts/tunnel.sh start 3000
|
||||
```
|
||||
8. **Send the user the public URL** (e.g. `https://xxx.trycloudflare.com`)
|
||||
|
||||
The user will preview in their browser, request changes, and you edit the source files. Remotion hot-reloads automatically.
|
||||
|
||||
### Rendering (only when user explicitly asks to export):
|
||||
```bash
|
||||
cd output/<project-name>
|
||||
npx remotion render CompositionName out/video.mp4
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# Scaffold project
|
||||
cd output && npx --yes create-video@latest my-video --template blank
|
||||
cd my-video && npm install
|
||||
|
||||
# Add motion libraries
|
||||
npm install lucide-react
|
||||
|
||||
# Fix scripts in package.json (replace any "bun" references with "npx remotion")
|
||||
|
||||
# Start dev server
|
||||
npm run dev
|
||||
|
||||
# Expose publicly
|
||||
bash skills/cloudflare-tunnel/scripts/tunnel.sh start 3000
|
||||
```
|
||||
|
||||
## Fetching Brand Data with Firecrawl
|
||||
|
||||
**MANDATORY:** When a video mentions or features any product/company, use Firecrawl to scrape the product's website for brand data, colors, screenshots, and copy BEFORE designing the video. This ensures visual accuracy and brand consistency.
|
||||
|
||||
API Key: Set `FIRECRAWL_API_KEY` in `.env` (see TOOLS.md).
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
bash scripts/firecrawl.sh "https://example.com"
|
||||
```
|
||||
|
||||
Returns structured brand data: brandName, tagline, headline, description, features, logoUrl, faviconUrl, primaryColors, ctaText, socialLinks, plus screenshot URL and OG image URL.
|
||||
|
||||
### Download Assets After Scraping
|
||||
|
||||
```bash
|
||||
mkdir -p public/images/brand
|
||||
curl -s "https://example.com/favicon.svg" -o public/images/brand/logo.svg
|
||||
curl -s "${OG_IMAGE_URL}" -o public/images/brand/og-image.png
|
||||
curl -sL "${SCREENSHOT_URL}" -o public/images/brand/screenshot.png
|
||||
```
|
||||
|
||||
## Core Architecture
|
||||
|
||||
### Scene Management
|
||||
|
||||
Use scene-based architecture with proper transitions:
|
||||
|
||||
```tsx
|
||||
const SCENE_DURATIONS: Record<string, number> = {
|
||||
intro: 3000, // 3s hook
|
||||
problem: 4000, // 4s dramatic
|
||||
solution: 3500, // 3.5s reveal
|
||||
features: 5000, // 5s showcase
|
||||
cta: 3000, // 3s close
|
||||
};
|
||||
```
|
||||
|
||||
### Video Structure Pattern
|
||||
|
||||
```tsx
|
||||
import {
|
||||
AbsoluteFill, Sequence, useCurrentFrame,
|
||||
useVideoConfig, interpolate, spring,
|
||||
Img, staticFile, Audio,
|
||||
} from "remotion";
|
||||
|
||||
export const MyVideo = () => {
|
||||
const frame = useCurrentFrame();
|
||||
const { fps, durationInFrames } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<AbsoluteFill>
|
||||
{/* Background music */}
|
||||
<Audio src={staticFile("audio/bg-music.mp3")} volume={0.35} />
|
||||
|
||||
{/* Persistent background layer - OUTSIDE sequences */}
|
||||
<AnimatedBackground frame={frame} />
|
||||
|
||||
{/* Scene sequences */}
|
||||
<Sequence from={0} durationInFrames={90}>
|
||||
<IntroScene />
|
||||
</Sequence>
|
||||
<Sequence from={90} durationInFrames={120}>
|
||||
<FeatureScene />
|
||||
</Sequence>
|
||||
</AbsoluteFill>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Motion Graphics Principles
|
||||
|
||||
### AVOID (Slideshow patterns)
|
||||
|
||||
- Fading to black between scenes
|
||||
- Centered text on solid backgrounds
|
||||
- Same transition for everything
|
||||
- Linear/robotic animations
|
||||
- Static screens
|
||||
- `slideLeft`, `slideRight`, `crossDissolve`, `fadeBlur` presets
|
||||
- Emoji icons — NEVER use emoji, always use Lucide React icons
|
||||
|
||||
### PURSUE (Motion graphics)
|
||||
|
||||
- Overlapping transitions (next starts BEFORE current ends)
|
||||
- Layered compositions (background/midground/foreground)
|
||||
- Spring physics for organic motion
|
||||
- Varied timing (2-5s scenes, mixed rhythms)
|
||||
- Continuous visual elements across scenes
|
||||
- Custom transitions with clipPath, 3D transforms, morphs
|
||||
- Lucide React for ALL icons (`npm install lucide-react`) — never emoji
|
||||
|
||||
## Transition Techniques
|
||||
|
||||
1. **Morph/Scale** - Element scales up to fill screen, becomes next scene's background
|
||||
2. **Wipe** - Colored shape sweeps across, revealing next scene
|
||||
3. **Zoom-through** - Camera pushes into element, emerges into new scene
|
||||
4. **Clip-path reveal** - Circle/polygon grows from point to reveal
|
||||
5. **Persistent anchor** - One element stays while surroundings change
|
||||
6. **Directional flow** - Scene 1 exits right, Scene 2 enters from right
|
||||
7. **Split/unfold** - Screen divides, panels slide apart
|
||||
8. **Perspective flip** - Scene rotates on Y-axis in 3D
|
||||
|
||||
## Animation Timing Reference
|
||||
|
||||
```tsx
|
||||
// Timing values (in seconds)
|
||||
const timing = {
|
||||
micro: 0.1-0.2, // Small shifts, subtle feedback
|
||||
snappy: 0.2-0.4, // Element entrances, position changes
|
||||
standard: 0.5-0.8, // Scene transitions, major reveals
|
||||
dramatic: 1.0-1.5, // Hero moments, cinematic reveals
|
||||
};
|
||||
|
||||
// Spring configs
|
||||
const springs = {
|
||||
snappy: { stiffness: 400, damping: 30 },
|
||||
bouncy: { stiffness: 300, damping: 15 },
|
||||
smooth: { stiffness: 120, damping: 25 },
|
||||
};
|
||||
```
|
||||
|
||||
## Visual Style Guidelines
|
||||
|
||||
### Typography
|
||||
- One display font + one body font max
|
||||
- Massive headlines, tight tracking
|
||||
- Mix weights for hierarchy
|
||||
- Keep text SHORT (viewers can't pause)
|
||||
|
||||
### Colors
|
||||
- **Use brand colors from Firecrawl scrape** as the primary palette — match the product's actual look
|
||||
- **Avoid purple/indigo gradients** unless the brand uses them or the user explicitly requests them
|
||||
- Simple, clean backgrounds are generally best — a single dark tone or subtle gradient beats layered textures
|
||||
- Intentional accent colors pulled from the brand
|
||||
|
||||
### Layout
|
||||
- Use asymmetric layouts, off-center type
|
||||
- Edge-aligned elements create visual tension
|
||||
- Generous whitespace as design element
|
||||
- Use depth sparingly — a subtle backdrop blur or single gradient, not stacked textures
|
||||
|
||||
## Remotion Essentials
|
||||
|
||||
### Interpolation
|
||||
|
||||
```tsx
|
||||
const opacity = interpolate(frame, [0, 30], [0, 1], {
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp"
|
||||
});
|
||||
|
||||
const scale = spring({
|
||||
frame, fps,
|
||||
from: 0.8, to: 1,
|
||||
durationInFrames: 30,
|
||||
config: { damping: 12 }
|
||||
});
|
||||
```
|
||||
|
||||
### Sequences with Overlap
|
||||
|
||||
```tsx
|
||||
<Sequence from={0} durationInFrames={100}>
|
||||
<Scene1 />
|
||||
</Sequence>
|
||||
<Sequence from={80} durationInFrames={100}>
|
||||
<Scene2 />
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
### Cross-Scene Continuity
|
||||
|
||||
Place persistent elements OUTSIDE Sequence blocks:
|
||||
|
||||
```tsx
|
||||
const PersistentShape = ({ currentScene }: { currentScene: number }) => {
|
||||
const positions = {
|
||||
0: { x: 100, y: 100, scale: 1, opacity: 0.3 },
|
||||
1: { x: 800, y: 200, scale: 2, opacity: 0.5 },
|
||||
2: { x: 400, y: 600, scale: 0.5, opacity: 1 },
|
||||
};
|
||||
|
||||
return (
|
||||
<motion.div
|
||||
animate={positions[currentScene]}
|
||||
transition={{ duration: 0.8, ease: "easeInOut" }}
|
||||
className="absolute w-32 h-32 rounded-full bg-gradient-to-r from-coral to-orange"
|
||||
/>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Quality Tests
|
||||
|
||||
Before delivering, verify:
|
||||
|
||||
- **Mute test:** Story follows visually without sound?
|
||||
- **Squint test:** Hierarchy visible when squinting?
|
||||
- **Timing test:** Motion feels natural, not robotic?
|
||||
- **Consistency test:** Similar elements behave similarly?
|
||||
- **Slideshow test:** Does NOT look like PowerPoint?
|
||||
- **Loop test:** Video loops smoothly back to start?
|
||||
|
||||
## Implementation Steps
|
||||
|
||||
1. **Firecrawl brand scrape** — If featuring a product, scrape its site first
|
||||
2. **Director's treatment** — Write vibe, camera style, emotional arc
|
||||
3. **Visual direction** — Colors, fonts, brand feel, animation style
|
||||
4. **Scene breakdown** — List every scene with description, duration, text, transitions
|
||||
5. **Plan assets** — User assets + generated images/videos + brand scrape assets
|
||||
6. **Define durations** — Vary pacing (2-3s punchy, 4-5s dramatic)
|
||||
7. **Build persistent layer** — Animated background outside scenes
|
||||
8. **Build scenes** — Each with enter/exit animations, 3-5 timed moments
|
||||
9. **Open with hook** — High-impact first scene
|
||||
10. **Develop narrative** — Content-driven middle scenes
|
||||
11. **Strong ending** — Intentional, resolved close
|
||||
12. **Start Remotion Studio** — `npm run dev` on port 3000
|
||||
13. **Expose via tunnel** — `bash skills/cloudflare-tunnel/scripts/tunnel.sh start 3000`
|
||||
14. **Send user the public URL** — They preview and request changes live
|
||||
15. **Iterate** — Edit source, hot-reload, repeat
|
||||
16. **Render** — Only when user says to export final video
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
my-video/
|
||||
├── src/
|
||||
│ ├── Root.tsx # Composition definitions
|
||||
│ ├── index.ts # Entry point
|
||||
│ ├── index.css # Global styles
|
||||
│ ├── MyVideo.tsx # Main video component
|
||||
│ └── scenes/ # Scene components (optional)
|
||||
├── public/
|
||||
│ ├── images/
|
||||
│ │ └── brand/ # Firecrawl-scraped assets
|
||||
│ └── audio/ # Background music
|
||||
├── remotion.config.ts
|
||||
└── package.json
|
||||
```
|
||||
|
||||
## Common Components
|
||||
|
||||
See `references/components.md` for reusable:
|
||||
- Animated backgrounds
|
||||
- Terminal windows
|
||||
- Feature cards
|
||||
- Stats displays
|
||||
- CTA buttons
|
||||
- Text reveal animations
|
||||
|
||||
## Tunnel Management
|
||||
|
||||
```bash
|
||||
# Start tunnel (exposes port 3000 publicly)
|
||||
bash skills/cloudflare-tunnel/scripts/tunnel.sh start 3000
|
||||
|
||||
# Check status
|
||||
bash skills/cloudflare-tunnel/scripts/tunnel.sh status 3000
|
||||
|
||||
# List all tunnels
|
||||
bash skills/cloudflare-tunnel/scripts/tunnel.sh list
|
||||
|
||||
# Stop tunnel
|
||||
bash skills/cloudflare-tunnel/scripts/tunnel.sh stop 3000
|
||||
```
|
||||
## When to use
|
||||
|
||||
Use this skill whenever you are dealing with Remotion code to obtain the domain-specific knowledge.
|
||||
|
||||
## Captions
|
||||
|
||||
When dealing with captions or subtitles, load the [./rules/subtitles.md](./rules/subtitles.md) file for more information.
|
||||
|
||||
## Using FFmpeg
|
||||
|
||||
For some video operations, such as trimming videos or detecting silence, FFmpeg should be used. Load the [./rules/ffmpeg.md](./rules/ffmpeg.md) file for more information.
|
||||
|
||||
## Audio visualization
|
||||
|
||||
When needing to visualize audio (spectrum bars, waveforms, bass-reactive effects), load the [./rules/audio-visualization.md](./rules/audio-visualization.md) file for more information.
|
||||
|
||||
## Sound effects
|
||||
|
||||
When needing to use sound effects, load the [./rules/sound-effects.md](./rules/sound-effects.md) file for more information.
|
||||
|
||||
## How to use
|
||||
|
||||
Read individual rule files for detailed explanations and code examples:
|
||||
|
||||
- [rules/3d.md](rules/3d.md) - 3D content in Remotion using Three.js and React Three Fiber
|
||||
- [rules/animations.md](rules/animations.md) - Fundamental animation skills for Remotion
|
||||
- [rules/assets.md](rules/assets.md) - Importing images, videos, audio, and fonts into Remotion
|
||||
- [rules/audio.md](rules/audio.md) - Using audio and sound in Remotion - importing, trimming, volume, speed, pitch
|
||||
- [rules/calculate-metadata.md](rules/calculate-metadata.md) - Dynamically set composition duration, dimensions, and props
|
||||
- [rules/can-decode.md](rules/can-decode.md) - Check if a video can be decoded by the browser using Mediabunny
|
||||
- [rules/charts.md](rules/charts.md) - Chart and data visualization patterns for Remotion (bar, pie, line, stock charts)
|
||||
- [rules/compositions.md](rules/compositions.md) - Defining compositions, stills, folders, default props and dynamic metadata
|
||||
- [rules/extract-frames.md](rules/extract-frames.md) - Extract frames from videos at specific timestamps using Mediabunny
|
||||
- [rules/fonts.md](rules/fonts.md) - Loading Google Fonts and local fonts in Remotion
|
||||
- [rules/get-audio-duration.md](rules/get-audio-duration.md) - Getting the duration of an audio file in seconds with Mediabunny
|
||||
- [rules/get-video-dimensions.md](rules/get-video-dimensions.md) - Getting the width and height of a video file with Mediabunny
|
||||
- [rules/get-video-duration.md](rules/get-video-duration.md) - Getting the duration of a video file in seconds with Mediabunny
|
||||
- [rules/gifs.md](rules/gifs.md) - Displaying GIFs synchronized with Remotion's timeline
|
||||
- [rules/images.md](rules/images.md) - Embedding images in Remotion using the Img component
|
||||
- [rules/light-leaks.md](rules/light-leaks.md) - Light leak overlay effects using @remotion/light-leaks
|
||||
- [rules/lottie.md](rules/lottie.md) - Embedding Lottie animations in Remotion
|
||||
- [rules/measuring-dom-nodes.md](rules/measuring-dom-nodes.md) - Measuring DOM element dimensions in Remotion
|
||||
- [rules/measuring-text.md](rules/measuring-text.md) - Measuring text dimensions, fitting text to containers, and checking overflow
|
||||
- [rules/sequencing.md](rules/sequencing.md) - Sequencing patterns for Remotion - delay, trim, limit duration of items
|
||||
- [rules/tailwind.md](rules/tailwind.md) - Using TailwindCSS in Remotion
|
||||
- [rules/text-animations.md](rules/text-animations.md) - Typography and text animation patterns for Remotion
|
||||
- [rules/timing.md](rules/timing.md) - Interpolation curves in Remotion - linear, easing, spring animations
|
||||
- [rules/transitions.md](rules/transitions.md) - Scene transition patterns for Remotion
|
||||
- [rules/transparent-videos.md](rules/transparent-videos.md) - Rendering out a video with transparency
|
||||
- [rules/trimming.md](rules/trimming.md) - Trimming patterns for Remotion - cut the beginning or end of animations
|
||||
- [rules/videos.md](rules/videos.md) - Embedding videos in Remotion - trimming, volume, speed, looping, pitch
|
||||
- [rules/parameters.md](rules/parameters.md) - Make a video parametrizable by adding a Zod schema
|
||||
- [rules/maps.md](rules/maps.md) - Add a map using Mapbox and animate it
|
||||
- [rules/voiceover.md](rules/voiceover.md) - Adding AI-generated voiceover to Remotion compositions using ElevenLabs TTS
|
||||
|
||||
@@ -1,217 +0,0 @@
|
||||
# Reusable Components
|
||||
|
||||
## Animated Background
|
||||
|
||||
```tsx
|
||||
import { useCurrentFrame, interpolate } from "remotion";
|
||||
|
||||
export const AnimatedBackground = ({ frame }: { frame: number }) => {
|
||||
const hueShift = interpolate(frame, [0, 300], [0, 360]);
|
||||
const gradientAngle = interpolate(frame, [0, 300], [0, 180]);
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
position: "absolute",
|
||||
inset: 0,
|
||||
background: `linear-gradient(${gradientAngle}deg,
|
||||
hsl(${hueShift}, 70%, 15%),
|
||||
hsl(${hueShift + 60}, 60%, 10%))`,
|
||||
}}
|
||||
/>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Terminal Window
|
||||
|
||||
```tsx
|
||||
export const TerminalWindow = ({
|
||||
lines,
|
||||
frame,
|
||||
fps,
|
||||
}: {
|
||||
lines: string[];
|
||||
frame: number;
|
||||
fps: number;
|
||||
}) => {
|
||||
const visibleLines = Math.floor(frame / (fps * 0.3));
|
||||
|
||||
return (
|
||||
<div className="bg-gray-900 rounded-xl p-6 font-mono text-sm shadow-2xl border border-gray-700">
|
||||
<div className="flex gap-2 mb-4">
|
||||
<div className="w-3 h-3 rounded-full bg-red-500" />
|
||||
<div className="w-3 h-3 rounded-full bg-yellow-500" />
|
||||
<div className="w-3 h-3 rounded-full bg-green-500" />
|
||||
</div>
|
||||
{lines.slice(0, visibleLines).map((line, i) => (
|
||||
<div key={i} className="text-green-400 leading-relaxed">
|
||||
<span className="text-gray-500">$ </span>{line}
|
||||
</div>
|
||||
))}
|
||||
{visibleLines <= lines.length && (
|
||||
<span className="inline-block w-2 h-5 bg-green-400 animate-pulse" />
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Feature Card
|
||||
|
||||
```tsx
|
||||
import { spring, useCurrentFrame, useVideoConfig, interpolate } from "remotion";
|
||||
|
||||
// icon should be a Lucide React component, NEVER an emoji string
|
||||
export const FeatureCard = ({
|
||||
icon: Icon,
|
||||
title,
|
||||
description,
|
||||
delay = 0,
|
||||
}: {
|
||||
icon: React.FC<{ size?: number; color?: string }>;
|
||||
title: string;
|
||||
description: string;
|
||||
delay?: number;
|
||||
}) => {
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
const scale = spring({
|
||||
frame: frame - delay,
|
||||
fps,
|
||||
config: { stiffness: 300, damping: 20 },
|
||||
});
|
||||
|
||||
const opacity = interpolate(frame - delay, [0, 15], [0, 1], {
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
});
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{ transform: `scale(${scale})`, opacity }}
|
||||
className="bg-white/10 backdrop-blur-md rounded-2xl p-8 border border-white/20"
|
||||
>
|
||||
<div className="mb-4"><Icon size={40} color="white" /></div>
|
||||
<h3 className="text-2xl font-bold text-white mb-2">{title}</h3>
|
||||
<p className="text-gray-300">{description}</p>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Stats Display
|
||||
|
||||
```tsx
|
||||
import { interpolate } from "remotion";
|
||||
|
||||
export const StatsDisplay = ({
|
||||
value,
|
||||
label,
|
||||
frame,
|
||||
fps,
|
||||
}: {
|
||||
value: number;
|
||||
label: string;
|
||||
frame: number;
|
||||
fps: number;
|
||||
}) => {
|
||||
const progress = interpolate(frame, [0, fps * 1.5], [0, 1], {
|
||||
extrapolateRight: "clamp",
|
||||
});
|
||||
const displayValue = Math.round(value * progress);
|
||||
|
||||
return (
|
||||
<div className="text-center">
|
||||
<div className="text-7xl font-black text-white tracking-tight">
|
||||
{displayValue.toLocaleString()}
|
||||
</div>
|
||||
<div className="text-lg text-gray-400 uppercase tracking-widest mt-2">
|
||||
{label}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## CTA Button
|
||||
|
||||
```tsx
|
||||
import { spring, useCurrentFrame, useVideoConfig, interpolate } from "remotion";
|
||||
|
||||
export const CTAButton = ({
|
||||
text,
|
||||
frame,
|
||||
fps,
|
||||
}: {
|
||||
text: string;
|
||||
frame: number;
|
||||
fps: number;
|
||||
}) => {
|
||||
const scale = spring({
|
||||
frame,
|
||||
fps,
|
||||
config: { stiffness: 200, damping: 15 },
|
||||
});
|
||||
|
||||
const shimmer = interpolate(frame, [0, fps * 2], [-100, 200]);
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{ transform: `scale(${scale})` }}
|
||||
className="relative inline-block px-12 py-5 bg-gradient-to-r from-blue-500 to-purple-600 rounded-full text-white text-2xl font-bold overflow-hidden"
|
||||
>
|
||||
{text}
|
||||
<div
|
||||
className="absolute inset-0 bg-gradient-to-r from-transparent via-white/30 to-transparent"
|
||||
style={{ transform: `translateX(${shimmer}%)` }}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Text Reveal
|
||||
|
||||
```tsx
|
||||
import { interpolate } from "remotion";
|
||||
|
||||
export const TextReveal = ({
|
||||
text,
|
||||
frame,
|
||||
fps,
|
||||
charDelay = 2,
|
||||
}: {
|
||||
text: string;
|
||||
frame: number;
|
||||
fps: number;
|
||||
charDelay?: number;
|
||||
}) => {
|
||||
return (
|
||||
<div className="flex flex-wrap">
|
||||
{text.split("").map((char, i) => {
|
||||
const charFrame = frame - i * charDelay;
|
||||
const opacity = interpolate(charFrame, [0, 8], [0, 1], {
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
});
|
||||
const y = interpolate(charFrame, [0, 8], [20, 0], {
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
});
|
||||
|
||||
return (
|
||||
<span
|
||||
key={i}
|
||||
style={{ opacity, transform: `translateY(${y}px)` }}
|
||||
className="text-6xl font-bold text-white"
|
||||
>
|
||||
{char === " " ? "\u00A0" : char}
|
||||
</span>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
```
|
||||
@@ -1,144 +0,0 @@
|
||||
# Remotion Composition Patterns
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
my-video/
|
||||
├── src/
|
||||
│ ├── index.ts # registerRoot entry point
|
||||
│ ├── Root.tsx # <Composition> declarations
|
||||
│ └── MyVideo/
|
||||
│ ├── index.tsx # Main component
|
||||
│ └── styles.ts # Optional styles
|
||||
├── public/ # Static assets (images, fonts, audio)
|
||||
├── remotion.config.ts # Remotion config
|
||||
├── package.json
|
||||
└── tsconfig.json
|
||||
```
|
||||
|
||||
## Basic Composition (Root.tsx)
|
||||
|
||||
```tsx
|
||||
import { Composition } from "remotion";
|
||||
import { MyVideo } from "./MyVideo";
|
||||
|
||||
export const RemotionRoot = () => (
|
||||
<>
|
||||
<Composition
|
||||
id="MyVideo"
|
||||
component={MyVideo}
|
||||
durationInFrames={300} // 10s at 30fps
|
||||
fps={30}
|
||||
width={1920}
|
||||
height={1080}
|
||||
defaultProps={{ title: "Hello World" }}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
```
|
||||
|
||||
## Common Aspect Ratios
|
||||
|
||||
- **16:9 landscape (YouTube):** 1920x1080 or 1280x720
|
||||
- **9:16 vertical (Reels/TikTok/Shorts):** 1080x1920
|
||||
- **4:5 Instagram feed:** 1080x1350
|
||||
- **1:1 square:** 1080x1080
|
||||
|
||||
## Key Remotion APIs
|
||||
|
||||
```tsx
|
||||
import {
|
||||
useCurrentFrame, // Current frame number
|
||||
useVideoConfig, // { fps, width, height, durationInFrames }
|
||||
interpolate, // Map frame ranges to values
|
||||
spring, // Physics-based spring animation
|
||||
Sequence, // Time-offset children
|
||||
AbsoluteFill, // Full-frame container
|
||||
Img, // Image component (preloads)
|
||||
Audio, // Audio component
|
||||
Video, // Video component
|
||||
staticFile, // Reference files in public/
|
||||
delayRender, // Hold render until async ready
|
||||
continueRender, // Resume after delayRender
|
||||
} from "remotion";
|
||||
```
|
||||
|
||||
## Animation Example
|
||||
|
||||
```tsx
|
||||
import { useCurrentFrame, interpolate, spring, useVideoConfig, AbsoluteFill } from "remotion";
|
||||
|
||||
export const FadeInText: React.FC<{ text: string }> = ({ text }) => {
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
const opacity = interpolate(frame, [0, 30], [0, 1], { extrapolateRight: "clamp" });
|
||||
const scale = spring({ frame, fps, config: { damping: 200 } });
|
||||
|
||||
return (
|
||||
<AbsoluteFill className="items-center justify-center bg-black">
|
||||
<h1
|
||||
style={{ opacity, transform: `scale(${scale})` }}
|
||||
className="text-white text-7xl font-bold"
|
||||
>
|
||||
{text}
|
||||
</h1>
|
||||
</AbsoluteFill>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Sequences (Timing)
|
||||
|
||||
```tsx
|
||||
<AbsoluteFill>
|
||||
<Sequence from={0} durationInFrames={60}>
|
||||
<Intro />
|
||||
</Sequence>
|
||||
<Sequence from={60} durationInFrames={120}>
|
||||
<MainContent />
|
||||
</Sequence>
|
||||
<Sequence from={180}>
|
||||
<Outro />
|
||||
</Sequence>
|
||||
</AbsoluteFill>
|
||||
```
|
||||
|
||||
## Input Props (Dynamic Data)
|
||||
|
||||
Pass data via `--props` flag or `defaultProps`:
|
||||
|
||||
```tsx
|
||||
// Component
|
||||
export const MyVideo: React.FC<{ title: string; items: string[] }> = ({ title, items }) => { ... };
|
||||
|
||||
// Render with props
|
||||
// npx remotion render MyVideo --props='{"title":"Demo","items":["a","b"]}'
|
||||
```
|
||||
|
||||
## Audio
|
||||
|
||||
```tsx
|
||||
import { Audio, staticFile, Sequence } from "remotion";
|
||||
|
||||
<Sequence from={0}>
|
||||
<Audio src={staticFile("bgm.mp3")} volume={0.5} />
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
## Fetching Data (delayRender)
|
||||
|
||||
```tsx
|
||||
const [data, setData] = useState(null);
|
||||
const [handle] = useState(() => delayRender());
|
||||
|
||||
useEffect(() => {
|
||||
fetch("https://api.example.com/data")
|
||||
.then((r) => r.json())
|
||||
.then((d) => { setData(d); continueRender(handle); });
|
||||
}, []);
|
||||
```
|
||||
|
||||
## TailwindCSS
|
||||
|
||||
Remotion supports Tailwind out of the box when scaffolded with `--tailwind`. Use className as normal on any element.
|
||||
@@ -1,98 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# remotion.sh - Wrapper for common Remotion operations
|
||||
# Usage: remotion.sh <command> [args...]
|
||||
#
|
||||
# Commands:
|
||||
# init <project-name> - Scaffold a new Remotion project (blank + tailwind)
|
||||
# render <project-dir> [comp] [output] [--props '{}'] [--width N] [--height N]
|
||||
# still <project-dir> [comp] [output] [--props '{}'] [--frame N]
|
||||
# preview <project-dir> - Start Remotion Studio dev server
|
||||
# list <project-dir> - List available compositions
|
||||
# upgrade <project-dir> - Upgrade Remotion packages to latest
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
CMD="${1:-help}"
|
||||
shift || true
|
||||
|
||||
case "$CMD" in
|
||||
init)
|
||||
PROJECT_NAME="${1:?Usage: remotion.sh init <project-name>}"
|
||||
echo "Creating Remotion project: $PROJECT_NAME"
|
||||
npx --yes create-video@latest --blank --tailwind --no-skills "$PROJECT_NAME" 2>&1 || {
|
||||
# Fallback: manual scaffold if create-video doesn't support flags
|
||||
echo "Trying interactive scaffold..."
|
||||
echo -e "blank\ny\nn" | npx --yes create-video@latest "$PROJECT_NAME" 2>&1
|
||||
}
|
||||
echo "Project created at ./$PROJECT_NAME"
|
||||
echo "Next: cd $PROJECT_NAME && npm install && npm run dev"
|
||||
;;
|
||||
|
||||
render)
|
||||
PROJECT_DIR="${1:?Usage: remotion.sh render <project-dir> [composition] [output] [flags...]}"
|
||||
shift
|
||||
COMP="${1:-}"
|
||||
shift 2>/dev/null || true
|
||||
OUTPUT="${1:-}"
|
||||
shift 2>/dev/null || true
|
||||
|
||||
cd "$PROJECT_DIR"
|
||||
|
||||
ARGS=()
|
||||
if [ -n "$COMP" ]; then ARGS+=("$COMP"); fi
|
||||
if [ -n "$OUTPUT" ]; then ARGS+=("$OUTPUT"); fi
|
||||
|
||||
# Pass remaining flags through
|
||||
ARGS+=("$@")
|
||||
|
||||
npx remotion render "${ARGS[@]}" 2>&1
|
||||
;;
|
||||
|
||||
still)
|
||||
PROJECT_DIR="${1:?Usage: remotion.sh still <project-dir> [composition] [output] [flags...]}"
|
||||
shift
|
||||
COMP="${1:-}"
|
||||
shift 2>/dev/null || true
|
||||
OUTPUT="${1:-}"
|
||||
shift 2>/dev/null || true
|
||||
|
||||
cd "$PROJECT_DIR"
|
||||
|
||||
ARGS=()
|
||||
if [ -n "$COMP" ]; then ARGS+=("$COMP"); fi
|
||||
if [ -n "$OUTPUT" ]; then ARGS+=("$OUTPUT"); fi
|
||||
ARGS+=("$@")
|
||||
|
||||
npx remotion still "${ARGS[@]}" 2>&1
|
||||
;;
|
||||
|
||||
preview)
|
||||
PROJECT_DIR="${1:?Usage: remotion.sh preview <project-dir>}"
|
||||
cd "$PROJECT_DIR"
|
||||
npm run dev 2>&1
|
||||
;;
|
||||
|
||||
list)
|
||||
PROJECT_DIR="${1:?Usage: remotion.sh list <project-dir>}"
|
||||
cd "$PROJECT_DIR"
|
||||
npx remotion compositions 2>&1
|
||||
;;
|
||||
|
||||
upgrade)
|
||||
PROJECT_DIR="${1:?Usage: remotion.sh upgrade <project-dir>}"
|
||||
cd "$PROJECT_DIR"
|
||||
npx remotion upgrade 2>&1
|
||||
;;
|
||||
|
||||
help|*)
|
||||
echo "remotion.sh - Remotion video toolkit"
|
||||
echo ""
|
||||
echo "Commands:"
|
||||
echo " init <name> Scaffold new project"
|
||||
echo " render <dir> [comp] [out] Render video (mp4/webm/gif)"
|
||||
echo " still <dir> [comp] [out] Render single frame"
|
||||
echo " preview <dir> Start dev server"
|
||||
echo " list <dir> List compositions"
|
||||
echo " upgrade <dir> Upgrade Remotion"
|
||||
;;
|
||||
esac
|
||||
86
bundled-skills/remotion/rules/3d.md
Normal file
86
bundled-skills/remotion/rules/3d.md
Normal file
@@ -0,0 +1,86 @@
|
||||
---
|
||||
name: 3d
|
||||
description: 3D content in Remotion using Three.js and React Three Fiber.
|
||||
metadata:
|
||||
tags: 3d, three, threejs
|
||||
---
|
||||
|
||||
# Using Three.js and React Three Fiber in Remotion
|
||||
|
||||
Follow React Three Fiber and Three.js best practices.
|
||||
Only the following Remotion-specific rules need to be followed:
|
||||
|
||||
## Prerequisites
|
||||
|
||||
First, the `@remotion/three` package needs to be installed.
|
||||
If it is not, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/three # If project uses npm
|
||||
bunx remotion add @remotion/three # If project uses bun
|
||||
yarn remotion add @remotion/three # If project uses yarn
|
||||
pnpm exec remotion add @remotion/three # If project uses pnpm
|
||||
```
|
||||
|
||||
## Using ThreeCanvas
|
||||
|
||||
You MUST wrap 3D content in `<ThreeCanvas>` and include proper lighting.
|
||||
`<ThreeCanvas>` MUST have a `width` and `height` prop.
|
||||
|
||||
```tsx
|
||||
import { ThreeCanvas } from "@remotion/three";
|
||||
import { useVideoConfig } from "remotion";
|
||||
|
||||
const { width, height } = useVideoConfig();
|
||||
|
||||
<ThreeCanvas width={width} height={height}>
|
||||
<ambientLight intensity={0.4} />
|
||||
<directionalLight position={[5, 5, 5]} intensity={0.8} />
|
||||
<mesh>
|
||||
<sphereGeometry args={[1, 32, 32]} />
|
||||
<meshStandardMaterial color="red" />
|
||||
</mesh>
|
||||
</ThreeCanvas>;
|
||||
```
|
||||
|
||||
## No animations not driven by `useCurrentFrame()`
|
||||
|
||||
Shaders, models etc MUST NOT animate by themselves.
|
||||
No animations are allowed unless they are driven by `useCurrentFrame()`.
|
||||
Otherwise, it will cause flickering during rendering.
|
||||
|
||||
Using `useFrame()` from `@react-three/fiber` is forbidden.
|
||||
|
||||
## Animate using `useCurrentFrame()`
|
||||
|
||||
Use `useCurrentFrame()` to perform animations.
|
||||
|
||||
```tsx
|
||||
const frame = useCurrentFrame();
|
||||
const rotationY = frame * 0.02;
|
||||
|
||||
<mesh rotation={[0, rotationY, 0]}>
|
||||
<boxGeometry args={[2, 2, 2]} />
|
||||
<meshStandardMaterial color="#4a9eff" />
|
||||
</mesh>;
|
||||
```
|
||||
|
||||
## Using `<Sequence>` inside `<ThreeCanvas>`
|
||||
|
||||
The `layout` prop of any `<Sequence>` inside a `<ThreeCanvas>` must be set to `none`.
|
||||
|
||||
```tsx
|
||||
import { Sequence } from "remotion";
|
||||
import { ThreeCanvas } from "@remotion/three";
|
||||
|
||||
const { width, height } = useVideoConfig();
|
||||
|
||||
<ThreeCanvas width={width} height={height}>
|
||||
<Sequence layout="none">
|
||||
<mesh>
|
||||
<boxGeometry args={[2, 2, 2]} />
|
||||
<meshStandardMaterial color="#4a9eff" />
|
||||
</mesh>
|
||||
</Sequence>
|
||||
</ThreeCanvas>;
|
||||
```
|
||||
27
bundled-skills/remotion/rules/animations.md
Normal file
27
bundled-skills/remotion/rules/animations.md
Normal file
@@ -0,0 +1,27 @@
|
||||
---
|
||||
name: animations
|
||||
description: Fundamental animation skills for Remotion
|
||||
metadata:
|
||||
tags: animations, transitions, frames, useCurrentFrame
|
||||
---
|
||||
|
||||
All animations MUST be driven by the `useCurrentFrame()` hook.
|
||||
Write animations in seconds and multiply them by the `fps` value from `useVideoConfig()`.
|
||||
|
||||
```tsx
|
||||
import { interpolate, useCurrentFrame, useVideoConfig } from "remotion";
|
||||
|
||||
export const FadeIn = () => {
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
const opacity = interpolate(frame, [0, 2 * fps], [0, 1], {
|
||||
extrapolateRight: "clamp",
|
||||
});
|
||||
|
||||
return <div style={{ opacity }}>Hello World!</div>;
|
||||
};
|
||||
```
|
||||
|
||||
CSS transitions or animations are FORBIDDEN - they will not render correctly.
|
||||
Tailwind animation class names are FORBIDDEN - they will not render correctly.
|
||||
78
bundled-skills/remotion/rules/assets.md
Normal file
78
bundled-skills/remotion/rules/assets.md
Normal file
@@ -0,0 +1,78 @@
|
||||
---
|
||||
name: assets
|
||||
description: Importing images, videos, audio, and fonts into Remotion
|
||||
metadata:
|
||||
tags: assets, staticFile, images, fonts, public
|
||||
---
|
||||
|
||||
# Importing assets in Remotion
|
||||
|
||||
## The public folder
|
||||
|
||||
Place assets in the `public/` folder at your project root.
|
||||
|
||||
## Using staticFile()
|
||||
|
||||
You MUST use `staticFile()` to reference files from the `public/` folder:
|
||||
|
||||
```tsx
|
||||
import { Img, staticFile } from "remotion";
|
||||
|
||||
export const MyComposition = () => {
|
||||
return <Img src={staticFile("logo.png")} />;
|
||||
};
|
||||
```
|
||||
|
||||
The function returns an encoded URL that works correctly when deploying to subdirectories.
|
||||
|
||||
## Using with components
|
||||
|
||||
**Images:**
|
||||
|
||||
```tsx
|
||||
import { Img, staticFile } from "remotion";
|
||||
|
||||
<Img src={staticFile("photo.png")} />;
|
||||
```
|
||||
|
||||
**Videos:**
|
||||
|
||||
```tsx
|
||||
import { Video } from "@remotion/media";
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
<Video src={staticFile("clip.mp4")} />;
|
||||
```
|
||||
|
||||
**Audio:**
|
||||
|
||||
```tsx
|
||||
import { Audio } from "@remotion/media";
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
<Audio src={staticFile("music.mp3")} />;
|
||||
```
|
||||
|
||||
**Fonts:**
|
||||
|
||||
```tsx
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
const fontFamily = new FontFace("MyFont", `url(${staticFile("font.woff2")})`);
|
||||
await fontFamily.load();
|
||||
document.fonts.add(fontFamily);
|
||||
```
|
||||
|
||||
## Remote URLs
|
||||
|
||||
Remote URLs can be used directly without `staticFile()`:
|
||||
|
||||
```tsx
|
||||
<Img src="https://example.com/image.png" />
|
||||
<Video src="https://remotion.media/video.mp4" />
|
||||
```
|
||||
|
||||
## Important notes
|
||||
|
||||
- Remotion components (`<Img>`, `<Video>`, `<Audio>`) ensure assets are fully loaded before rendering
|
||||
- Special characters in filenames (`#`, `?`, `&`) are automatically encoded
|
||||
173
bundled-skills/remotion/rules/assets/charts-bar-chart.tsx
Normal file
173
bundled-skills/remotion/rules/assets/charts-bar-chart.tsx
Normal file
@@ -0,0 +1,173 @@
|
||||
import {loadFont} from '@remotion/google-fonts/Inter';
|
||||
import {AbsoluteFill, spring, useCurrentFrame, useVideoConfig} from 'remotion';
|
||||
|
||||
const {fontFamily} = loadFont();
|
||||
|
||||
const COLOR_BAR = '#D4AF37';
|
||||
const COLOR_TEXT = '#ffffff';
|
||||
const COLOR_MUTED = '#888888';
|
||||
const COLOR_BG = '#0a0a0a';
|
||||
const COLOR_AXIS = '#333333';
|
||||
|
||||
// Ideal composition size: 1280x720
|
||||
|
||||
const Title: React.FC<{children: React.ReactNode}> = ({children}) => (
|
||||
<div style={{textAlign: 'center', marginBottom: 40}}>
|
||||
<div style={{color: COLOR_TEXT, fontSize: 48, fontWeight: 600}}>
|
||||
{children}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
const YAxis: React.FC<{steps: number[]; height: number}> = ({
|
||||
steps,
|
||||
height,
|
||||
}) => (
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
justifyContent: 'space-between',
|
||||
height,
|
||||
paddingRight: 16,
|
||||
}}
|
||||
>
|
||||
{steps
|
||||
.slice()
|
||||
.reverse()
|
||||
.map((step) => (
|
||||
<div
|
||||
key={step}
|
||||
style={{
|
||||
color: COLOR_MUTED,
|
||||
fontSize: 20,
|
||||
textAlign: 'right',
|
||||
}}
|
||||
>
|
||||
{step.toLocaleString()}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
|
||||
const Bar: React.FC<{
|
||||
height: number;
|
||||
progress: number;
|
||||
}> = ({height, progress}) => (
|
||||
<div
|
||||
style={{
|
||||
flex: 1,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
justifyContent: 'flex-end',
|
||||
}}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
width: '100%',
|
||||
height,
|
||||
backgroundColor: COLOR_BAR,
|
||||
borderRadius: '8px 8px 0 0',
|
||||
opacity: progress,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
const XAxis: React.FC<{
|
||||
children: React.ReactNode;
|
||||
labels: string[];
|
||||
height: number;
|
||||
}> = ({children, labels, height}) => (
|
||||
<div style={{flex: 1, display: 'flex', flexDirection: 'column'}}>
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
alignItems: 'flex-end',
|
||||
gap: 16,
|
||||
height,
|
||||
borderLeft: `2px solid ${COLOR_AXIS}`,
|
||||
borderBottom: `2px solid ${COLOR_AXIS}`,
|
||||
paddingLeft: 16,
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
</div>
|
||||
<div
|
||||
style={{
|
||||
display: 'flex',
|
||||
gap: 16,
|
||||
paddingLeft: 16,
|
||||
marginTop: 12,
|
||||
}}
|
||||
>
|
||||
{labels.map((label) => (
|
||||
<div
|
||||
key={label}
|
||||
style={{
|
||||
flex: 1,
|
||||
textAlign: 'center',
|
||||
color: COLOR_MUTED,
|
||||
fontSize: 20,
|
||||
}}
|
||||
>
|
||||
{label}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
export const MyAnimation = () => {
|
||||
const frame = useCurrentFrame();
|
||||
const {fps, height} = useVideoConfig();
|
||||
|
||||
const data = [
|
||||
{month: 'Jan', price: 2039},
|
||||
{month: 'Mar', price: 2160},
|
||||
{month: 'May', price: 2327},
|
||||
{month: 'Jul', price: 2426},
|
||||
{month: 'Sep', price: 2634},
|
||||
{month: 'Nov', price: 2672},
|
||||
];
|
||||
|
||||
const minPrice = 2000;
|
||||
const maxPrice = 2800;
|
||||
const priceRange = maxPrice - minPrice;
|
||||
const chartHeight = height - 280;
|
||||
const yAxisSteps = [2000, 2400, 2800];
|
||||
|
||||
return (
|
||||
<AbsoluteFill
|
||||
style={{
|
||||
backgroundColor: COLOR_BG,
|
||||
padding: 60,
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
fontFamily,
|
||||
}}
|
||||
>
|
||||
<Title>Gold Price 2024</Title>
|
||||
|
||||
<div style={{display: 'flex', flex: 1}}>
|
||||
<YAxis steps={yAxisSteps} height={chartHeight} />
|
||||
<XAxis height={chartHeight} labels={data.map((d) => d.month)}>
|
||||
{data.map((item, i) => {
|
||||
const progress = spring({
|
||||
frame: frame - i * 5 - 10,
|
||||
fps,
|
||||
config: {damping: 18, stiffness: 80},
|
||||
});
|
||||
|
||||
const barHeight =
|
||||
((item.price - minPrice) / priceRange) * chartHeight * progress;
|
||||
|
||||
return (
|
||||
<Bar key={item.month} height={barHeight} progress={progress} />
|
||||
);
|
||||
})}
|
||||
</XAxis>
|
||||
</div>
|
||||
</AbsoluteFill>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,100 @@
|
||||
import {
|
||||
AbsoluteFill,
|
||||
interpolate,
|
||||
useCurrentFrame,
|
||||
useVideoConfig,
|
||||
} from 'remotion';
|
||||
|
||||
const COLOR_BG = '#ffffff';
|
||||
const COLOR_TEXT = '#000000';
|
||||
const FULL_TEXT = 'From prompt to motion graphics. This is Remotion.';
|
||||
const PAUSE_AFTER = 'From prompt to motion graphics.';
|
||||
const FONT_SIZE = 72;
|
||||
const FONT_WEIGHT = 700;
|
||||
const CHAR_FRAMES = 2;
|
||||
const CURSOR_BLINK_FRAMES = 16;
|
||||
const PAUSE_SECONDS = 1;
|
||||
|
||||
// Ideal composition size: 1280x720
|
||||
|
||||
const getTypedText = ({
|
||||
frame,
|
||||
fullText,
|
||||
pauseAfter,
|
||||
charFrames,
|
||||
pauseFrames,
|
||||
}: {
|
||||
frame: number;
|
||||
fullText: string;
|
||||
pauseAfter: string;
|
||||
charFrames: number;
|
||||
pauseFrames: number;
|
||||
}): string => {
|
||||
const pauseIndex = fullText.indexOf(pauseAfter);
|
||||
const preLen =
|
||||
pauseIndex >= 0 ? pauseIndex + pauseAfter.length : fullText.length;
|
||||
|
||||
let typedChars = 0;
|
||||
if (frame < preLen * charFrames) {
|
||||
typedChars = Math.floor(frame / charFrames);
|
||||
} else if (frame < preLen * charFrames + pauseFrames) {
|
||||
typedChars = preLen;
|
||||
} else {
|
||||
const postPhase = frame - preLen * charFrames - pauseFrames;
|
||||
typedChars = Math.min(
|
||||
fullText.length,
|
||||
preLen + Math.floor(postPhase / charFrames),
|
||||
);
|
||||
}
|
||||
return fullText.slice(0, typedChars);
|
||||
};
|
||||
|
||||
// Blinking block cursor. Opacity dips from 1 to 0 at the middle of each
// blink cycle and back to 1 at the end, repeating every `blinkFrames`.
const Cursor: React.FC<{
  frame: number;
  blinkFrames: number;
  symbol?: string;
}> = ({frame, blinkFrames, symbol = '\u258C'}) => {
  const cycleFrame = frame % blinkFrames;
  const opacity = interpolate(
    cycleFrame,
    [0, blinkFrames / 2, blinkFrames],
    [1, 0, 1],
    {extrapolateLeft: 'clamp', extrapolateRight: 'clamp'},
  );

  return <span style={{opacity}}>{symbol}</span>;
};
// Typewriter scene: types FULL_TEXT with a one-second hold after
// PAUSE_AFTER, with a blinking block cursor at the insertion point.
export const MyAnimation = () => {
  const frame = useCurrentFrame();
  const {fps} = useVideoConfig();

  // Convert the pause duration from seconds to whole frames.
  const pauseFrames = Math.round(fps * PAUSE_SECONDS);

  const typedText = getTypedText({
    frame,
    fullText: FULL_TEXT,
    pauseAfter: PAUSE_AFTER,
    charFrames: CHAR_FRAMES,
    pauseFrames,
  });

  return (
    <AbsoluteFill style={{backgroundColor: COLOR_BG}}>
      <div
        style={{
          color: COLOR_TEXT,
          fontSize: FONT_SIZE,
          fontWeight: FONT_WEIGHT,
          fontFamily: 'sans-serif',
        }}
      >
        <span>{typedText}</span>
        <Cursor frame={frame} blinkFrames={CURSOR_BLINK_FRAMES} />
      </div>
    </AbsoluteFill>
  );
};
@@ -0,0 +1,103 @@
|
||||
import {loadFont} from '@remotion/google-fonts/Inter';
|
||||
import React from 'react';
|
||||
import {AbsoluteFill, spring, useCurrentFrame, useVideoConfig} from 'remotion';
|
||||
|
||||
/*
|
||||
* Highlight a word in a sentence with a spring-animated wipe effect.
|
||||
*/
|
||||
|
||||
// Ideal composition size: 1280x720
|
||||
|
||||
const COLOR_BG = '#ffffff';
|
||||
const COLOR_TEXT = '#000000';
|
||||
const COLOR_HIGHLIGHT = '#A7C7E7';
|
||||
const FULL_TEXT = 'This is Remotion.';
|
||||
const HIGHLIGHT_WORD = 'Remotion';
|
||||
const FONT_SIZE = 72;
|
||||
const FONT_WEIGHT = 700;
|
||||
const HIGHLIGHT_START_FRAME = 30;
|
||||
const HIGHLIGHT_WIPE_DURATION = 18;
|
||||
|
||||
const {fontFamily} = loadFont();
|
||||
|
||||
// Wraps `word` with a colored marker that wipes in from the left.
// The wipe is a spring-driven horizontal scale of an absolutely
// positioned background layer sitting behind the text.
const Highlight: React.FC<{
  word: string;
  color: string;
  delay: number;
  durationInFrames: number;
}> = ({word, color, delay, durationInFrames}) => {
  const frame = useCurrentFrame();
  const {fps} = useVideoConfig();

  const highlightProgress = spring({
    fps,
    frame,
    config: {damping: 200},
    delay,
    durationInFrames,
  });
  // Clamp to [0, 1] so the marker never over- or undershoots.
  const scaleX = Math.min(1, Math.max(0, highlightProgress));

  const markerStyle: React.CSSProperties = {
    position: 'absolute',
    left: 0,
    right: 0,
    top: '50%',
    height: '1.05em',
    transform: `translateY(-50%) scaleX(${scaleX})`,
    transformOrigin: 'left center',
    backgroundColor: color,
    borderRadius: '0.18em',
    zIndex: 0,
  };

  return (
    <span style={{position: 'relative', display: 'inline-block'}}>
      <span style={markerStyle} />
      <span style={{position: 'relative', zIndex: 1}}>{word}</span>
    </span>
  );
};
export const MyAnimation = () => {
|
||||
const highlightIndex = FULL_TEXT.indexOf(HIGHLIGHT_WORD);
|
||||
const hasHighlight = highlightIndex >= 0;
|
||||
const preText = hasHighlight ? FULL_TEXT.slice(0, highlightIndex) : FULL_TEXT;
|
||||
const postText = hasHighlight
|
||||
? FULL_TEXT.slice(highlightIndex + HIGHLIGHT_WORD.length)
|
||||
: '';
|
||||
|
||||
return (
|
||||
<AbsoluteFill
|
||||
style={{
|
||||
backgroundColor: COLOR_BG,
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
fontFamily,
|
||||
}}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
color: COLOR_TEXT,
|
||||
fontSize: FONT_SIZE,
|
||||
fontWeight: FONT_WEIGHT,
|
||||
}}
|
||||
>
|
||||
{hasHighlight ? (
|
||||
<>
|
||||
<span>{preText}</span>
|
||||
<Highlight
|
||||
word={HIGHLIGHT_WORD}
|
||||
color={COLOR_HIGHLIGHT}
|
||||
delay={HIGHLIGHT_START_FRAME}
|
||||
durationInFrames={HIGHLIGHT_WIPE_DURATION}
|
||||
/>
|
||||
<span>{postText}</span>
|
||||
</>
|
||||
) : (
|
||||
<span>{FULL_TEXT}</span>
|
||||
)}
|
||||
</div>
|
||||
</AbsoluteFill>
|
||||
);
|
||||
};
|
||||
198
bundled-skills/remotion/rules/audio-visualization.md
Normal file
198
bundled-skills/remotion/rules/audio-visualization.md
Normal file
@@ -0,0 +1,198 @@
|
||||
---
|
||||
name: audio-visualization
|
||||
description: Audio visualization patterns - spectrum bars, waveforms, bass-reactive effects
|
||||
metadata:
|
||||
tags: audio, visualization, spectrum, waveform, bass, music, audiogram, frequency
|
||||
---
|
||||
|
||||
# Audio Visualization in Remotion
|
||||
|
||||
## Prerequisites
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/media-utils
|
||||
```
|
||||
|
||||
## Loading Audio Data
|
||||
|
||||
Use `useWindowedAudioData()` (https://www.remotion.dev/docs/use-windowed-audio-data) to load audio data:
|
||||
|
||||
```tsx
|
||||
import { useWindowedAudioData } from "@remotion/media-utils";
|
||||
import { staticFile, useCurrentFrame, useVideoConfig } from "remotion";
|
||||
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
const { audioData, dataOffsetInSeconds } = useWindowedAudioData({
|
||||
src: staticFile("podcast.wav"),
|
||||
frame,
|
||||
fps,
|
||||
windowInSeconds: 30,
|
||||
});
|
||||
```
|
||||
|
||||
## Spectrum Bar Visualization
|
||||
|
||||
Use `visualizeAudio()` (https://www.remotion.dev/docs/visualize-audio) to get frequency data for bar charts:
|
||||
|
||||
```tsx
|
||||
import { useWindowedAudioData, visualizeAudio } from "@remotion/media-utils";
|
||||
import { staticFile, useCurrentFrame, useVideoConfig } from "remotion";
|
||||
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
const { audioData, dataOffsetInSeconds } = useWindowedAudioData({
|
||||
src: staticFile("music.mp3"),
|
||||
frame,
|
||||
fps,
|
||||
windowInSeconds: 30,
|
||||
});
|
||||
|
||||
if (!audioData) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const frequencies = visualizeAudio({
|
||||
fps,
|
||||
frame,
|
||||
audioData,
|
||||
numberOfSamples: 256,
|
||||
optimizeFor: "speed",
|
||||
dataOffsetInSeconds,
|
||||
});
|
||||
|
||||
return (
|
||||
<div style={{ display: "flex", alignItems: "flex-end", height: 200 }}>
|
||||
{frequencies.map((v, i) => (
|
||||
<div
|
||||
key={i}
|
||||
style={{
|
||||
flex: 1,
|
||||
height: `${v * 100}%`,
|
||||
backgroundColor: "#0b84f3",
|
||||
margin: "0 1px",
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
```
|
||||
|
||||
- `numberOfSamples` must be power of 2 (32, 64, 128, 256, 512, 1024)
|
||||
- Values range 0-1; left of array = bass, right = highs
|
||||
- Use `optimizeFor: "speed"` for Lambda or high sample counts
|
||||
|
||||
**Important:** When passing `audioData` to child components, also pass the `frame` from the parent. Do not call `useCurrentFrame()` in each child - this causes discontinuous visualization when children are inside `<Sequence>` with offsets.
|
||||
|
||||
## Waveform Visualization
|
||||
|
||||
Use `visualizeAudioWaveform()` (https://www.remotion.dev/docs/media-utils/visualize-audio-waveform) with `createSmoothSvgPath()` (https://www.remotion.dev/docs/media-utils/create-smooth-svg-path) for oscilloscope-style displays:
|
||||
|
||||
```tsx
|
||||
import {
|
||||
createSmoothSvgPath,
|
||||
useWindowedAudioData,
|
||||
visualizeAudioWaveform,
|
||||
} from "@remotion/media-utils";
|
||||
import { staticFile, useCurrentFrame, useVideoConfig } from "remotion";
|
||||
|
||||
const frame = useCurrentFrame();
|
||||
const { width, fps } = useVideoConfig();
|
||||
const HEIGHT = 200;
|
||||
|
||||
const { audioData, dataOffsetInSeconds } = useWindowedAudioData({
|
||||
src: staticFile("voice.wav"),
|
||||
frame,
|
||||
fps,
|
||||
windowInSeconds: 30,
|
||||
});
|
||||
|
||||
if (!audioData) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const waveform = visualizeAudioWaveform({
|
||||
fps,
|
||||
frame,
|
||||
audioData,
|
||||
numberOfSamples: 256,
|
||||
windowInSeconds: 0.5,
|
||||
dataOffsetInSeconds,
|
||||
});
|
||||
|
||||
const path = createSmoothSvgPath({
|
||||
points: waveform.map((y, i) => ({
|
||||
x: (i / (waveform.length - 1)) * width,
|
||||
y: HEIGHT / 2 + (y * HEIGHT) / 2,
|
||||
})),
|
||||
});
|
||||
|
||||
return (
|
||||
<svg width={width} height={HEIGHT}>
|
||||
<path d={path} fill="none" stroke="#0b84f3" strokeWidth={2} />
|
||||
</svg>
|
||||
);
|
||||
```
|
||||
|
||||
## Bass-Reactive Effects
|
||||
|
||||
Extract low frequencies for beat-reactive animations:
|
||||
|
||||
```tsx
|
||||
const frequencies = visualizeAudio({
|
||||
fps,
|
||||
frame,
|
||||
audioData,
|
||||
numberOfSamples: 128,
|
||||
optimizeFor: "speed",
|
||||
dataOffsetInSeconds,
|
||||
});
|
||||
|
||||
const lowFrequencies = frequencies.slice(0, 32);
|
||||
const bassIntensity =
|
||||
lowFrequencies.reduce((sum, v) => sum + v, 0) / lowFrequencies.length;
|
||||
|
||||
const scale = 1 + bassIntensity * 0.5;
|
||||
const opacity = Math.min(0.6, bassIntensity * 0.8);
|
||||
```
|
||||
|
||||
## Volume-Based Waveform
|
||||
|
||||
Use `getWaveformPortion()` (https://www.remotion.dev/docs/get-waveform-portion) when you need simplified volume data instead of frequency spectrum:
|
||||
|
||||
```tsx
|
||||
import { getWaveformPortion } from "@remotion/media-utils";
|
||||
import { useCurrentFrame, useVideoConfig } from "remotion";
|
||||
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
const currentTimeInSeconds = frame / fps;
|
||||
|
||||
const waveform = getWaveformPortion({
|
||||
audioData,
|
||||
startTimeInSeconds: currentTimeInSeconds,
|
||||
durationInSeconds: 5,
|
||||
numberOfSamples: 50,
|
||||
});
|
||||
|
||||
// Returns array of { index, amplitude } objects (amplitude: 0-1)
|
||||
waveform.map((bar) => (
|
||||
<div key={bar.index} style={{ height: bar.amplitude * 100 }} />
|
||||
));
|
||||
```
|
||||
|
||||
## Postprocessing
|
||||
|
||||
Low frequencies naturally dominate. Apply logarithmic scaling for visual balance:
|
||||
|
||||
```tsx
|
||||
const minDb = -100;
|
||||
const maxDb = -30;
|
||||
|
||||
const scaled = frequencies.map((value) => {
|
||||
const db = 20 * Math.log10(value);
|
||||
return (db - minDb) / (maxDb - minDb);
|
||||
});
|
||||
```
|
||||
169
bundled-skills/remotion/rules/audio.md
Normal file
169
bundled-skills/remotion/rules/audio.md
Normal file
@@ -0,0 +1,169 @@
|
||||
---
|
||||
name: audio
|
||||
description: Using audio and sound in Remotion - importing, trimming, volume, speed, pitch
|
||||
metadata:
|
||||
tags: audio, media, trim, volume, speed, loop, pitch, mute, sound, sfx
|
||||
---
|
||||
|
||||
# Using audio in Remotion
|
||||
|
||||
## Prerequisites
|
||||
|
||||
First, the @remotion/media package needs to be installed.
|
||||
If it is not installed, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/media
|
||||
```
|
||||
|
||||
## Importing Audio
|
||||
|
||||
Use `<Audio>` from `@remotion/media` to add audio to your composition.
|
||||
|
||||
```tsx
|
||||
import { Audio } from "@remotion/media";
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
export const MyComposition = () => {
|
||||
return <Audio src={staticFile("audio.mp3")} />;
|
||||
};
|
||||
```
|
||||
|
||||
Remote URLs are also supported:
|
||||
|
||||
```tsx
|
||||
<Audio src="https://remotion.media/audio.mp3" />
|
||||
```
|
||||
|
||||
By default, audio plays from the start, at full volume and full length.
|
||||
Multiple audio tracks can be layered by adding multiple `<Audio>` components.
|
||||
|
||||
## Trimming
|
||||
|
||||
Use `trimBefore` and `trimAfter` to remove portions of the audio. Values are in frames.
|
||||
|
||||
```tsx
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<Audio
|
||||
src={staticFile("audio.mp3")}
|
||||
trimBefore={2 * fps} // Skip the first 2 seconds
|
||||
trimAfter={10 * fps} // End at the 10 second mark
|
||||
/>
|
||||
);
|
||||
```
|
||||
|
||||
The audio still starts playing at the beginning of the composition - only the specified portion is played.
|
||||
|
||||
## Delaying
|
||||
|
||||
Wrap the audio in a `<Sequence>` to delay when it starts:
|
||||
|
||||
```tsx
|
||||
import { Sequence, staticFile } from "remotion";
|
||||
import { Audio } from "@remotion/media";
|
||||
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<Sequence from={1 * fps}>
|
||||
<Audio src={staticFile("audio.mp3")} />
|
||||
</Sequence>
|
||||
);
|
||||
```
|
||||
|
||||
The audio will start playing after 1 second.
|
||||
|
||||
## Volume
|
||||
|
||||
Set a static volume (0 to 1):
|
||||
|
||||
```tsx
|
||||
<Audio src={staticFile("audio.mp3")} volume={0.5} />
|
||||
```
|
||||
|
||||
Or use a callback for dynamic volume based on the current frame:
|
||||
|
||||
```tsx
|
||||
import { interpolate } from "remotion";
|
||||
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<Audio
|
||||
src={staticFile("audio.mp3")}
|
||||
volume={(f) =>
|
||||
interpolate(f, [0, 1 * fps], [0, 1], { extrapolateRight: "clamp" })
|
||||
}
|
||||
/>
|
||||
);
|
||||
```
|
||||
|
||||
The value of `f` starts at 0 when the audio begins to play, not the composition frame.
|
||||
|
||||
## Muting
|
||||
|
||||
Use `muted` to silence the audio. It can be set dynamically:
|
||||
|
||||
```tsx
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<Audio
|
||||
src={staticFile("audio.mp3")}
|
||||
muted={frame >= 2 * fps && frame <= 4 * fps} // Mute between 2s and 4s
|
||||
/>
|
||||
);
|
||||
```
|
||||
|
||||
## Speed
|
||||
|
||||
Use `playbackRate` to change the playback speed:
|
||||
|
||||
```tsx
|
||||
<Audio src={staticFile("audio.mp3")} playbackRate={2} /> {/* 2x speed */}
|
||||
<Audio src={staticFile("audio.mp3")} playbackRate={0.5} /> {/* Half speed */}
|
||||
```
|
||||
|
||||
Reverse playback is not supported.
|
||||
|
||||
## Looping
|
||||
|
||||
Use `loop` to loop the audio indefinitely:
|
||||
|
||||
```tsx
|
||||
<Audio src={staticFile("audio.mp3")} loop />
|
||||
```
|
||||
|
||||
Use `loopVolumeCurveBehavior` to control how the frame count behaves when looping:
|
||||
|
||||
- `"repeat"`: Frame count resets to 0 each loop (default)
|
||||
- `"extend"`: Frame count continues incrementing
|
||||
|
||||
```tsx
|
||||
<Audio
|
||||
src={staticFile("audio.mp3")}
|
||||
loop
|
||||
loopVolumeCurveBehavior="extend"
|
||||
volume={(f) => interpolate(f, [0, 300], [1, 0])} // Fade out over multiple loops
|
||||
/>
|
||||
```
|
||||
|
||||
## Pitch
|
||||
|
||||
Use `toneFrequency` to adjust the pitch without affecting speed. Values range from 0.01 to 2:
|
||||
|
||||
```tsx
|
||||
<Audio
|
||||
src={staticFile("audio.mp3")}
|
||||
toneFrequency={1.5} // Higher pitch
|
||||
/>
|
||||
<Audio
|
||||
src={staticFile("audio.mp3")}
|
||||
toneFrequency={0.8} // Lower pitch
|
||||
/>
|
||||
```
|
||||
|
||||
Pitch shifting only works during server-side rendering, not in the Remotion Studio preview or in the `<Player />`.
|
||||
134
bundled-skills/remotion/rules/calculate-metadata.md
Normal file
134
bundled-skills/remotion/rules/calculate-metadata.md
Normal file
@@ -0,0 +1,134 @@
|
||||
---
|
||||
name: calculate-metadata
|
||||
description: Dynamically set composition duration, dimensions, and props
|
||||
metadata:
|
||||
tags: calculateMetadata, duration, dimensions, props, dynamic
|
||||
---
|
||||
|
||||
# Using calculateMetadata
|
||||
|
||||
Use `calculateMetadata` on a `<Composition>` to dynamically set duration, dimensions, and transform props before rendering.
|
||||
|
||||
```tsx
|
||||
<Composition
|
||||
id="MyComp"
|
||||
component={MyComponent}
|
||||
durationInFrames={300}
|
||||
fps={30}
|
||||
width={1920}
|
||||
height={1080}
|
||||
defaultProps={{ videoSrc: "https://remotion.media/video.mp4" }}
|
||||
calculateMetadata={calculateMetadata}
|
||||
/>
|
||||
```
|
||||
|
||||
## Setting duration based on a video
|
||||
|
||||
Use the [`getVideoDuration`](./get-video-duration.md) and [`getVideoDimensions`](./get-video-dimensions.md) skills to get the video duration and dimensions:
|
||||
|
||||
```tsx
|
||||
import { CalculateMetadataFunction } from "remotion";
|
||||
import { getVideoDuration } from "./get-video-duration";
|
||||
|
||||
const calculateMetadata: CalculateMetadataFunction<Props> = async ({
|
||||
props,
|
||||
}) => {
|
||||
const durationInSeconds = await getVideoDuration(props.videoSrc);
|
||||
|
||||
return {
|
||||
durationInFrames: Math.ceil(durationInSeconds * 30),
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
## Matching dimensions of a video
|
||||
|
||||
Use the [`getVideoDimensions`](./get-video-dimensions.md) skill to get the video dimensions:
|
||||
|
||||
```tsx
|
||||
import { CalculateMetadataFunction } from "remotion";
|
||||
import { getVideoDuration } from "./get-video-duration";
|
||||
import { getVideoDimensions } from "./get-video-dimensions";
|
||||
|
||||
const calculateMetadata: CalculateMetadataFunction<Props> = async ({
|
||||
props,
|
||||
}) => {
|
||||
const dimensions = await getVideoDimensions(props.videoSrc);
|
||||
|
||||
return {
|
||||
width: dimensions.width,
|
||||
height: dimensions.height,
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
## Setting duration based on multiple videos
|
||||
|
||||
```tsx
|
||||
const calculateMetadata: CalculateMetadataFunction<Props> = async ({
|
||||
props,
|
||||
}) => {
|
||||
const metadataPromises = props.videos.map((video) =>
|
||||
getVideoDuration(video.src),
|
||||
);
|
||||
const allMetadata = await Promise.all(metadataPromises);
|
||||
|
||||
const totalDuration = allMetadata.reduce(
|
||||
(sum, durationInSeconds) => sum + durationInSeconds,
|
||||
0,
|
||||
);
|
||||
|
||||
return {
|
||||
durationInFrames: Math.ceil(totalDuration * 30),
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
## Setting a default outName
|
||||
|
||||
Set the default output filename based on props:
|
||||
|
||||
```tsx
|
||||
const calculateMetadata: CalculateMetadataFunction<Props> = async ({
|
||||
props,
|
||||
}) => {
|
||||
return {
|
||||
defaultOutName: `video-${props.id}.mp4`,
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
## Transforming props
|
||||
|
||||
Fetch data or transform props before rendering:
|
||||
|
||||
```tsx
|
||||
const calculateMetadata: CalculateMetadataFunction<Props> = async ({
|
||||
props,
|
||||
abortSignal,
|
||||
}) => {
|
||||
const response = await fetch(props.dataUrl, { signal: abortSignal });
|
||||
const data = await response.json();
|
||||
|
||||
return {
|
||||
props: {
|
||||
...props,
|
||||
fetchedData: data,
|
||||
},
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
The `abortSignal` cancels stale requests when props change in the Studio.
|
||||
|
||||
## Return value
|
||||
|
||||
All fields are optional. Returned values override the `<Composition>` props:
|
||||
|
||||
- `durationInFrames`: Number of frames
|
||||
- `width`: Composition width in pixels
|
||||
- `height`: Composition height in pixels
|
||||
- `fps`: Frames per second
|
||||
- `props`: Transformed props passed to the component
|
||||
- `defaultOutName`: Default output filename
|
||||
- `defaultCodec`: Default codec for rendering
|
||||
75
bundled-skills/remotion/rules/can-decode.md
Normal file
75
bundled-skills/remotion/rules/can-decode.md
Normal file
@@ -0,0 +1,75 @@
|
||||
---
|
||||
name: can-decode
|
||||
description: Check if a video can be decoded by the browser using Mediabunny
|
||||
metadata:
|
||||
tags: decode, validation, video, audio, compatibility, browser
|
||||
---
|
||||
|
||||
# Checking if a video can be decoded
|
||||
|
||||
Use Mediabunny to check if a video can be decoded by the browser before attempting to play it.
|
||||
|
||||
## The `canDecode()` function
|
||||
|
||||
This function can be copy-pasted into any project.
|
||||
|
||||
```tsx
|
||||
import { Input, ALL_FORMATS, UrlSource } from "mediabunny";
|
||||
|
||||
export const canDecode = async (src: string) => {
|
||||
const input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new UrlSource(src, {
|
||||
getRetryDelay: () => null,
|
||||
}),
|
||||
});
|
||||
|
||||
try {
|
||||
await input.getFormat();
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
|
||||
const videoTrack = await input.getPrimaryVideoTrack();
|
||||
if (videoTrack && !(await videoTrack.canDecode())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const audioTrack = await input.getPrimaryAudioTrack();
|
||||
if (audioTrack && !(await audioTrack.canDecode())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```tsx
|
||||
const src = "https://remotion.media/video.mp4";
|
||||
const isDecodable = await canDecode(src);
|
||||
|
||||
if (isDecodable) {
|
||||
console.log("Video can be decoded");
|
||||
} else {
|
||||
console.log("Video cannot be decoded by this browser");
|
||||
}
|
||||
```
|
||||
|
||||
## Using with Blob
|
||||
|
||||
For file uploads or drag-and-drop, use `BlobSource`:
|
||||
|
||||
```tsx
|
||||
import { Input, ALL_FORMATS, BlobSource } from "mediabunny";
|
||||
|
||||
export const canDecodeBlob = async (blob: Blob) => {
|
||||
const input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new BlobSource(blob),
|
||||
});
|
||||
|
||||
// Same validation logic as above
|
||||
};
|
||||
```
|
||||
120
bundled-skills/remotion/rules/charts.md
Normal file
120
bundled-skills/remotion/rules/charts.md
Normal file
@@ -0,0 +1,120 @@
|
||||
---
|
||||
name: charts
|
||||
description: Chart and data visualization patterns for Remotion. Use when creating bar charts, pie charts, line charts, stock graphs, or any data-driven animations.
|
||||
metadata:
|
||||
tags: charts, data, visualization, bar-chart, pie-chart, line-chart, stock-chart, svg-paths, graphs
|
||||
---
|
||||
|
||||
# Charts in Remotion
|
||||
|
||||
Create charts using React code - HTML, SVG, and D3.js are all supported.
|
||||
|
||||
Disable all animations from third party libraries - they cause flickering.
|
||||
Drive all animations from `useCurrentFrame()`.
|
||||
|
||||
## Bar Chart
|
||||
|
||||
```tsx
|
||||
const STAGGER_DELAY = 5;
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
const bars = data.map((item, i) => {
|
||||
const height = spring({
|
||||
frame,
|
||||
fps,
|
||||
delay: i * STAGGER_DELAY,
|
||||
config: { damping: 200 },
|
||||
});
|
||||
return <div style={{ height: height * item.value }} />;
|
||||
});
|
||||
```
|
||||
|
||||
## Pie Chart
|
||||
|
||||
Animate segments using stroke-dashoffset, starting from 12 o'clock:
|
||||
|
||||
```tsx
|
||||
const progress = interpolate(frame, [0, 100], [0, 1]);
|
||||
const circumference = 2 * Math.PI * radius;
|
||||
const segmentLength = (value / total) * circumference;
|
||||
const offset = interpolate(progress, [0, 1], [segmentLength, 0]);
|
||||
|
||||
<circle
|
||||
r={radius}
|
||||
cx={center}
|
||||
cy={center}
|
||||
fill="none"
|
||||
stroke={color}
|
||||
strokeWidth={strokeWidth}
|
||||
strokeDasharray={`${segmentLength} ${circumference}`}
|
||||
strokeDashoffset={offset}
|
||||
transform={`rotate(-90 ${center} ${center})`}
|
||||
/>;
|
||||
```
|
||||
|
||||
## Line Chart / Path Animation
|
||||
|
||||
Use `@remotion/paths` for animating SVG paths (line charts, stock graphs, signatures).
|
||||
|
||||
Install: `npx remotion add @remotion/paths`
|
||||
Docs: https://remotion.dev/docs/paths.md
|
||||
|
||||
### Convert data points to SVG path
|
||||
|
||||
```tsx
|
||||
type Point = { x: number; y: number };
|
||||
|
||||
const generateLinePath = (points: Point[]): string => {
|
||||
if (points.length < 2) return "";
|
||||
return points.map((p, i) => `${i === 0 ? "M" : "L"} ${p.x} ${p.y}`).join(" ");
|
||||
};
|
||||
```
|
||||
|
||||
### Draw path with animation
|
||||
|
||||
```tsx
|
||||
import { evolvePath } from "@remotion/paths";
|
||||
|
||||
const path = "M 100 200 L 200 150 L 300 180 L 400 100";
|
||||
const progress = interpolate(frame, [0, 2 * fps], [0, 1], {
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
easing: Easing.out(Easing.quad),
|
||||
});
|
||||
|
||||
const { strokeDasharray, strokeDashoffset } = evolvePath(progress, path);
|
||||
|
||||
<path
|
||||
d={path}
|
||||
fill="none"
|
||||
stroke="#FF3232"
|
||||
strokeWidth={4}
|
||||
strokeDasharray={strokeDasharray}
|
||||
strokeDashoffset={strokeDashoffset}
|
||||
/>;
|
||||
```
|
||||
|
||||
### Follow path with marker/arrow
|
||||
|
||||
```tsx
|
||||
import {
|
||||
getLength,
|
||||
getPointAtLength,
|
||||
getTangentAtLength,
|
||||
} from "@remotion/paths";
|
||||
|
||||
const pathLength = getLength(path);
|
||||
const point = getPointAtLength(path, progress * pathLength);
|
||||
const tangent = getTangentAtLength(path, progress * pathLength);
|
||||
const angle = Math.atan2(tangent.y, tangent.x);
|
||||
|
||||
<g
|
||||
style={{
|
||||
transform: `translate(${point.x}px, ${point.y}px) rotate(${angle}rad)`,
|
||||
transformOrigin: "0 0",
|
||||
}}
|
||||
>
|
||||
<polygon points="0,0 -20,-10 -20,10" fill="#FF3232" />
|
||||
</g>;
|
||||
```
|
||||
154
bundled-skills/remotion/rules/compositions.md
Normal file
154
bundled-skills/remotion/rules/compositions.md
Normal file
@@ -0,0 +1,154 @@
|
||||
---
|
||||
name: compositions
|
||||
description: Defining compositions, stills, folders, default props and dynamic metadata
|
||||
metadata:
|
||||
tags: composition, still, folder, props, metadata
|
||||
---
|
||||
|
||||
A `<Composition>` defines the component, width, height, fps and duration of a renderable video.
|
||||
|
||||
It normally is placed in the `src/Root.tsx` file.
|
||||
|
||||
```tsx
|
||||
import { Composition } from "remotion";
|
||||
import { MyComposition } from "./MyComposition";
|
||||
|
||||
export const RemotionRoot = () => {
|
||||
return (
|
||||
<Composition
|
||||
id="MyComposition"
|
||||
component={MyComposition}
|
||||
durationInFrames={100}
|
||||
fps={30}
|
||||
width={1080}
|
||||
height={1080}
|
||||
/>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Default Props
|
||||
|
||||
Pass `defaultProps` to provide initial values for your component.
|
||||
Values must be JSON-serializable (`Date`, `Map`, `Set`, and `staticFile()` are supported).
|
||||
|
||||
```tsx
|
||||
import { Composition } from "remotion";
|
||||
import { MyComposition, MyCompositionProps } from "./MyComposition";
|
||||
|
||||
export const RemotionRoot = () => {
|
||||
return (
|
||||
<Composition
|
||||
id="MyComposition"
|
||||
component={MyComposition}
|
||||
durationInFrames={100}
|
||||
fps={30}
|
||||
width={1080}
|
||||
height={1080}
|
||||
defaultProps={
|
||||
{
|
||||
title: "Hello World",
|
||||
color: "#ff0000",
|
||||
} satisfies MyCompositionProps
|
||||
}
|
||||
/>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
Use `type` declarations for props rather than `interface` to ensure `defaultProps` type safety.
|
||||
|
||||
## Folders
|
||||
|
||||
Use `<Folder>` to organize compositions in the sidebar.
|
||||
Folder names can only contain letters, numbers, and hyphens.
|
||||
|
||||
```tsx
|
||||
import { Composition, Folder } from "remotion";
|
||||
|
||||
export const RemotionRoot = () => {
|
||||
return (
|
||||
<>
|
||||
<Folder name="Marketing">
|
||||
<Composition id="Promo" /* ... */ />
|
||||
<Composition id="Ad" /* ... */ />
|
||||
</Folder>
|
||||
<Folder name="Social">
|
||||
<Folder name="Instagram">
|
||||
<Composition id="Story" /* ... */ />
|
||||
<Composition id="Reel" /* ... */ />
|
||||
</Folder>
|
||||
</Folder>
|
||||
</>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Stills
|
||||
|
||||
Use `<Still>` for single-frame images. It does not require `durationInFrames` or `fps`.
|
||||
|
||||
```tsx
|
||||
import { Still } from "remotion";
|
||||
import { Thumbnail } from "./Thumbnail";
|
||||
|
||||
export const RemotionRoot = () => {
|
||||
return (
|
||||
<Still id="Thumbnail" component={Thumbnail} width={1280} height={720} />
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Calculate Metadata
|
||||
|
||||
Use `calculateMetadata` to make dimensions, duration, or props dynamic based on data.
|
||||
|
||||
```tsx
|
||||
import { Composition, CalculateMetadataFunction } from "remotion";
|
||||
import { MyComposition, MyCompositionProps } from "./MyComposition";
|
||||
|
||||
const calculateMetadata: CalculateMetadataFunction<
|
||||
MyCompositionProps
|
||||
> = async ({ props, abortSignal }) => {
|
||||
const data = await fetch(`https://api.example.com/video/${props.videoId}`, {
|
||||
signal: abortSignal,
|
||||
}).then((res) => res.json());
|
||||
|
||||
return {
|
||||
durationInFrames: Math.ceil(data.duration * 30),
|
||||
props: {
|
||||
...props,
|
||||
videoUrl: data.url,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
export const RemotionRoot = () => {
|
||||
return (
|
||||
<Composition
|
||||
id="MyComposition"
|
||||
component={MyComposition}
|
||||
durationInFrames={100} // Placeholder, will be overridden
|
||||
fps={30}
|
||||
width={1080}
|
||||
height={1080}
|
||||
defaultProps={{ videoId: "abc123" }}
|
||||
calculateMetadata={calculateMetadata}
|
||||
/>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
The function can return `props`, `durationInFrames`, `width`, `height`, `fps`, and codec-related defaults. It runs once before rendering begins.
|
||||
|
||||
## Nesting compositions within another
|
||||
|
||||
To add a composition within another composition, you can use the `<Sequence>` component with a `width` and `height` prop to specify the size of the composition.
|
||||
|
||||
```tsx
|
||||
<AbsoluteFill>
|
||||
<Sequence width={COMPOSITION_WIDTH} height={COMPOSITION_HEIGHT}>
|
||||
<CompositionComponent />
|
||||
</Sequence>
|
||||
</AbsoluteFill>
|
||||
```
|
||||
184
bundled-skills/remotion/rules/display-captions.md
Normal file
184
bundled-skills/remotion/rules/display-captions.md
Normal file
@@ -0,0 +1,184 @@
|
||||
---
|
||||
name: display-captions
|
||||
description: Displaying captions in Remotion with TikTok-style pages and word highlighting
|
||||
metadata:
|
||||
tags: captions, subtitles, display, tiktok, highlight
|
||||
---
|
||||
|
||||
# Displaying captions in Remotion
|
||||
|
||||
This guide explains how to display captions in Remotion, assuming you already have captions in the [`Caption`](https://www.remotion.dev/docs/captions/caption) format.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Read [Transcribing audio](transcribe-captions.md) for how to generate captions.
|
||||
|
||||
First, the [`@remotion/captions`](https://www.remotion.dev/docs/captions) package needs to be installed.
|
||||
If it is not installed, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/captions
|
||||
```
|
||||
|
||||
## Fetching captions
|
||||
|
||||
First, fetch your captions JSON file. Use [`useDelayRender()`](https://www.remotion.dev/docs/use-delay-render) to hold the render until the captions are loaded:
|
||||
|
||||
```tsx
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { AbsoluteFill, staticFile, useDelayRender } from "remotion";
|
||||
import type { Caption } from "@remotion/captions";
|
||||
|
||||
export const MyComponent: React.FC = () => {
|
||||
const [captions, setCaptions] = useState<Caption[] | null>(null);
|
||||
const { delayRender, continueRender, cancelRender } = useDelayRender();
|
||||
const [handle] = useState(() => delayRender());
|
||||
|
||||
const fetchCaptions = useCallback(async () => {
|
||||
try {
|
||||
// Assuming captions.json is in the public/ folder.
|
||||
const response = await fetch(staticFile("captions.json"));
|
||||
const data = await response.json();
|
||||
setCaptions(data);
|
||||
continueRender(handle);
|
||||
} catch (e) {
|
||||
cancelRender(e);
|
||||
}
|
||||
}, [continueRender, cancelRender, handle]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchCaptions();
|
||||
}, [fetchCaptions]);
|
||||
|
||||
if (!captions) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return <AbsoluteFill>{/* Render captions here */}</AbsoluteFill>;
|
||||
};
|
||||
```
|
||||
|
||||
## Creating pages
|
||||
|
||||
Use `createTikTokStyleCaptions()` to group captions into pages. The `combineTokensWithinMilliseconds` option controls how many words appear at once:
|
||||
|
||||
```tsx
|
||||
import { useMemo } from "react";
|
||||
import { createTikTokStyleCaptions } from "@remotion/captions";
|
||||
import type { Caption } from "@remotion/captions";
|
||||
|
||||
// How often captions should switch (in milliseconds)
|
||||
// Higher values = more words per page
|
||||
// Lower values = fewer words (more word-by-word)
|
||||
const SWITCH_CAPTIONS_EVERY_MS = 1200;
|
||||
|
||||
const { pages } = useMemo(() => {
|
||||
return createTikTokStyleCaptions({
|
||||
captions,
|
||||
combineTokensWithinMilliseconds: SWITCH_CAPTIONS_EVERY_MS,
|
||||
});
|
||||
}, [captions]);
|
||||
```
|
||||
|
||||
## Rendering with Sequences
|
||||
|
||||
Map over the pages and render each one in a `<Sequence>`. Calculate the start frame and duration from the page timing:
|
||||
|
||||
```tsx
|
||||
import { Sequence, useVideoConfig, AbsoluteFill } from "remotion";
|
||||
import type { TikTokPage } from "@remotion/captions";
|
||||
|
||||
const CaptionedContent: React.FC = () => {
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<AbsoluteFill>
|
||||
{pages.map((page, index) => {
|
||||
const nextPage = pages[index + 1] ?? null;
|
||||
const startFrame = (page.startMs / 1000) * fps;
|
||||
const endFrame = Math.min(
|
||||
nextPage ? (nextPage.startMs / 1000) * fps : Infinity,
|
||||
startFrame + (SWITCH_CAPTIONS_EVERY_MS / 1000) * fps,
|
||||
);
|
||||
const durationInFrames = endFrame - startFrame;
|
||||
|
||||
if (durationInFrames <= 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Sequence
|
||||
key={index}
|
||||
from={startFrame}
|
||||
durationInFrames={durationInFrames}
|
||||
>
|
||||
<CaptionPage page={page} />
|
||||
</Sequence>
|
||||
);
|
||||
})}
|
||||
</AbsoluteFill>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## White-space preservation
|
||||
|
||||
The captions are whitespace sensitive. You should include spaces in the `text` field before each word. Use `whiteSpace: "pre"` to preserve the whitespace in the captions.
|
||||
|
||||
## Separate component for captions
|
||||
|
||||
Put captioning logic in a separate component.
|
||||
Make a new file for it.
|
||||
|
||||
## Word highlighting
|
||||
|
||||
A caption page contains `tokens` which you can use to highlight the currently spoken word:
|
||||
|
||||
```tsx
|
||||
import { AbsoluteFill, useCurrentFrame, useVideoConfig } from "remotion";
|
||||
import type { TikTokPage } from "@remotion/captions";
|
||||
|
||||
const HIGHLIGHT_COLOR = "#39E508";
|
||||
|
||||
const CaptionPage: React.FC<{ page: TikTokPage }> = ({ page }) => {
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
// Current time relative to the start of the sequence
|
||||
const currentTimeMs = (frame / fps) * 1000;
|
||||
// Convert to absolute time by adding the page start
|
||||
const absoluteTimeMs = page.startMs + currentTimeMs;
|
||||
|
||||
return (
|
||||
<AbsoluteFill style={{ justifyContent: "center", alignItems: "center" }}>
|
||||
<div style={{ fontSize: 80, fontWeight: "bold", whiteSpace: "pre" }}>
|
||||
{page.tokens.map((token) => {
|
||||
const isActive =
|
||||
token.fromMs <= absoluteTimeMs && token.toMs > absoluteTimeMs;
|
||||
|
||||
return (
|
||||
<span
|
||||
key={token.fromMs}
|
||||
style={{ color: isActive ? HIGHLIGHT_COLOR : "white" }}
|
||||
>
|
||||
{token.text}
|
||||
</span>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</AbsoluteFill>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
## Display captions alongside video content
|
||||
|
||||
By default, render the captions alongside the video content so that they stay in sync.
|
||||
For each video, make a new captions JSON file.
|
||||
|
||||
```tsx
|
||||
<AbsoluteFill>
|
||||
<Video src={staticFile("video.mp4")} />
|
||||
<CaptionPage page={page} />
|
||||
</AbsoluteFill>
|
||||
```
|
||||
229
bundled-skills/remotion/rules/extract-frames.md
Normal file
229
bundled-skills/remotion/rules/extract-frames.md
Normal file
@@ -0,0 +1,229 @@
|
||||
---
|
||||
name: extract-frames
|
||||
description: Extract frames from videos at specific timestamps using Mediabunny
|
||||
metadata:
|
||||
tags: frames, extract, video, thumbnail, filmstrip, canvas
|
||||
---
|
||||
|
||||
# Extracting frames from videos
|
||||
|
||||
Use Mediabunny to extract frames from videos at specific timestamps. This is useful for generating thumbnails, filmstrips, or processing individual frames.
|
||||
|
||||
## The `extractFrames()` function
|
||||
|
||||
This function can be copy-pasted into any project.
|
||||
|
||||
```tsx
|
||||
import {
|
||||
ALL_FORMATS,
|
||||
Input,
|
||||
UrlSource,
|
||||
VideoSample,
|
||||
VideoSampleSink,
|
||||
} from "mediabunny";
|
||||
|
||||
type Options = {
|
||||
track: { width: number; height: number };
|
||||
container: string;
|
||||
durationInSeconds: number | null;
|
||||
};
|
||||
|
||||
export type ExtractFramesTimestampsInSecondsFn = (
|
||||
options: Options,
|
||||
) => Promise<number[]> | number[];
|
||||
|
||||
export type ExtractFramesProps = {
|
||||
src: string;
|
||||
timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
|
||||
onVideoSample: (sample: VideoSample) => void;
|
||||
signal?: AbortSignal;
|
||||
};
|
||||
|
||||
export async function extractFrames({
|
||||
src,
|
||||
timestampsInSeconds,
|
||||
onVideoSample,
|
||||
signal,
|
||||
}: ExtractFramesProps): Promise<void> {
|
||||
using input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new UrlSource(src),
|
||||
});
|
||||
|
||||
const [durationInSeconds, format, videoTrack] = await Promise.all([
|
||||
input.computeDuration(),
|
||||
input.getFormat(),
|
||||
input.getPrimaryVideoTrack(),
|
||||
]);
|
||||
|
||||
if (!videoTrack) {
|
||||
throw new Error("No video track found in the input");
|
||||
}
|
||||
|
||||
if (signal?.aborted) {
|
||||
throw new Error("Aborted");
|
||||
}
|
||||
|
||||
const timestamps =
|
||||
typeof timestampsInSeconds === "function"
|
||||
? await timestampsInSeconds({
|
||||
track: {
|
||||
width: videoTrack.displayWidth,
|
||||
height: videoTrack.displayHeight,
|
||||
},
|
||||
container: format.name,
|
||||
durationInSeconds,
|
||||
})
|
||||
: timestampsInSeconds;
|
||||
|
||||
if (timestamps.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (signal?.aborted) {
|
||||
throw new Error("Aborted");
|
||||
}
|
||||
|
||||
const sink = new VideoSampleSink(videoTrack);
|
||||
|
||||
for await (using videoSample of sink.samplesAtTimestamps(timestamps)) {
|
||||
if (signal?.aborted) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (!videoSample) {
|
||||
continue;
|
||||
}
|
||||
|
||||
onVideoSample(videoSample);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Basic usage
|
||||
|
||||
Extract frames at specific timestamps:
|
||||
|
||||
```tsx
|
||||
await extractFrames({
|
||||
src: "https://remotion.media/video.mp4",
|
||||
timestampsInSeconds: [0, 1, 2, 3, 4],
|
||||
onVideoSample: (sample) => {
|
||||
const canvas = document.createElement("canvas");
|
||||
canvas.width = sample.displayWidth;
|
||||
canvas.height = sample.displayHeight;
|
||||
const ctx = canvas.getContext("2d");
|
||||
sample.draw(ctx!, 0, 0);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Creating a filmstrip
|
||||
|
||||
Use a callback function to dynamically calculate timestamps based on video metadata:
|
||||
|
||||
```tsx
|
||||
const canvasWidth = 500;
|
||||
const canvasHeight = 80;
|
||||
const fromSeconds = 0;
|
||||
const toSeconds = 10;
|
||||
|
||||
await extractFrames({
|
||||
src: "https://remotion.media/video.mp4",
|
||||
timestampsInSeconds: async ({ track, durationInSeconds }) => {
|
||||
const aspectRatio = track.width / track.height;
|
||||
const amountOfFramesFit = Math.ceil(
|
||||
canvasWidth / (canvasHeight * aspectRatio),
|
||||
);
|
||||
const segmentDuration = toSeconds - fromSeconds;
|
||||
const timestamps: number[] = [];
|
||||
|
||||
for (let i = 0; i < amountOfFramesFit; i++) {
|
||||
timestamps.push(
|
||||
fromSeconds + (segmentDuration / amountOfFramesFit) * (i + 0.5),
|
||||
);
|
||||
}
|
||||
|
||||
return timestamps;
|
||||
},
|
||||
onVideoSample: (sample) => {
|
||||
console.log(`Frame at ${sample.timestamp}s`);
|
||||
|
||||
const canvas = document.createElement("canvas");
|
||||
canvas.width = sample.displayWidth;
|
||||
canvas.height = sample.displayHeight;
|
||||
const ctx = canvas.getContext("2d");
|
||||
sample.draw(ctx!, 0, 0);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Cancellation with AbortSignal
|
||||
|
||||
Cancel frame extraction after a timeout:
|
||||
|
||||
```tsx
|
||||
const controller = new AbortController();
|
||||
|
||||
setTimeout(() => controller.abort(), 5000);
|
||||
|
||||
try {
|
||||
await extractFrames({
|
||||
src: "https://remotion.media/video.mp4",
|
||||
timestampsInSeconds: [0, 1, 2, 3, 4],
|
||||
onVideoSample: (sample) => {
|
||||
using frame = sample;
|
||||
const canvas = document.createElement("canvas");
|
||||
canvas.width = frame.displayWidth;
|
||||
canvas.height = frame.displayHeight;
|
||||
const ctx = canvas.getContext("2d");
|
||||
frame.draw(ctx!, 0, 0);
|
||||
},
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
console.log("Frame extraction complete!");
|
||||
} catch (error) {
|
||||
console.error("Frame extraction was aborted or failed:", error);
|
||||
}
|
||||
```
|
||||
|
||||
## Timeout with Promise.race
|
||||
|
||||
```tsx
|
||||
const controller = new AbortController();
|
||||
|
||||
const timeoutPromise = new Promise<never>((_, reject) => {
|
||||
const timeoutId = setTimeout(() => {
|
||||
controller.abort();
|
||||
reject(new Error("Frame extraction timed out after 10 seconds"));
|
||||
}, 10000);
|
||||
|
||||
controller.signal.addEventListener("abort", () => clearTimeout(timeoutId), {
|
||||
once: true,
|
||||
});
|
||||
});
|
||||
|
||||
try {
|
||||
await Promise.race([
|
||||
extractFrames({
|
||||
src: "https://remotion.media/video.mp4",
|
||||
timestampsInSeconds: [0, 1, 2, 3, 4],
|
||||
onVideoSample: (sample) => {
|
||||
using frame = sample;
|
||||
const canvas = document.createElement("canvas");
|
||||
canvas.width = frame.displayWidth;
|
||||
canvas.height = frame.displayHeight;
|
||||
const ctx = canvas.getContext("2d");
|
||||
frame.draw(ctx!, 0, 0);
|
||||
},
|
||||
signal: controller.signal,
|
||||
}),
|
||||
timeoutPromise,
|
||||
]);
|
||||
|
||||
console.log("Frame extraction complete!");
|
||||
} catch (error) {
|
||||
console.error("Frame extraction was aborted or failed:", error);
|
||||
}
|
||||
```
|
||||
38
bundled-skills/remotion/rules/ffmpeg.md
Normal file
38
bundled-skills/remotion/rules/ffmpeg.md
Normal file
@@ -0,0 +1,38 @@
|
||||
---
|
||||
name: ffmpeg
|
||||
description: Using FFmpeg and FFprobe in Remotion
|
||||
metadata:
|
||||
tags: ffmpeg, ffprobe, video, trimming
|
||||
---
|
||||
|
||||
## FFmpeg in Remotion
|
||||
|
||||
`ffmpeg` and `ffprobe` do not need to be installed. They are available via the `bunx remotion ffmpeg` and `bunx remotion ffprobe` commands:
|
||||
|
||||
```bash
|
||||
bunx remotion ffmpeg -i input.mp4 output.mp3
|
||||
bunx remotion ffprobe input.mp4
|
||||
```
|
||||
|
||||
### Trimming videos
|
||||
|
||||
You have 2 options for trimming videos:
|
||||
|
||||
1. Use the FFmpeg command line. You MUST re-encode the video to avoid frozen frames at the start of the video.
|
||||
|
||||
```bash
|
||||
# Re-encodes from the exact frame
|
||||
bunx remotion ffmpeg -ss 00:00:05 -i public/input.mp4 -to 00:00:10 -c:v libx264 -c:a aac public/output.mp4
|
||||
```
|
||||
|
||||
2. Use the `trimBefore` and `trimAfter` props of the `<Video>` component. The benefit is that this is non-destructive and you can change the trim at any time.
|
||||
|
||||
```tsx
|
||||
import { Video } from "@remotion/media";
|
||||
|
||||
<Video
|
||||
src={staticFile("video.mp4")}
|
||||
trimBefore={5 * fps}
|
||||
trimAfter={10 * fps}
|
||||
/>;
|
||||
```
|
||||
152
bundled-skills/remotion/rules/fonts.md
Normal file
152
bundled-skills/remotion/rules/fonts.md
Normal file
@@ -0,0 +1,152 @@
|
||||
---
|
||||
name: fonts
|
||||
description: Loading Google Fonts and local fonts in Remotion
|
||||
metadata:
|
||||
tags: fonts, google-fonts, typography, text
|
||||
---
|
||||
|
||||
# Using fonts in Remotion
|
||||
|
||||
## Google Fonts with @remotion/google-fonts
|
||||
|
||||
The recommended way to use Google Fonts. It's type-safe and automatically blocks rendering until the font is ready.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
First, the @remotion/google-fonts package needs to be installed.
|
||||
If it is not installed, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/google-fonts # If project uses npm
|
||||
bunx remotion add @remotion/google-fonts # If project uses bun
|
||||
yarn remotion add @remotion/google-fonts # If project uses yarn
|
||||
pnpm exec remotion add @remotion/google-fonts # If project uses pnpm
|
||||
```
|
||||
|
||||
```tsx
|
||||
import { loadFont } from "@remotion/google-fonts/Lobster";
|
||||
|
||||
const { fontFamily } = loadFont();
|
||||
|
||||
export const MyComposition = () => {
|
||||
return <div style={{ fontFamily }}>Hello World</div>;
|
||||
};
|
||||
```
|
||||
|
||||
Preferably, specify only needed weights and subsets to reduce file size:
|
||||
|
||||
```tsx
|
||||
import { loadFont } from "@remotion/google-fonts/Roboto";
|
||||
|
||||
const { fontFamily } = loadFont("normal", {
|
||||
weights: ["400", "700"],
|
||||
subsets: ["latin"],
|
||||
});
|
||||
```
|
||||
|
||||
### Waiting for font to load
|
||||
|
||||
Use `waitUntilDone()` if you need to know when the font is ready:
|
||||
|
||||
```tsx
|
||||
import { loadFont } from "@remotion/google-fonts/Lobster";
|
||||
|
||||
const { fontFamily, waitUntilDone } = loadFont();
|
||||
|
||||
await waitUntilDone();
|
||||
```
|
||||
|
||||
## Local fonts with @remotion/fonts
|
||||
|
||||
For local font files, use the `@remotion/fonts` package.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
First, install @remotion/fonts:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/fonts # If project uses npm
|
||||
bunx remotion add @remotion/fonts # If project uses bun
|
||||
yarn remotion add @remotion/fonts # If project uses yarn
|
||||
pnpm exec remotion add @remotion/fonts # If project uses pnpm
|
||||
```
|
||||
|
||||
### Loading a local font
|
||||
|
||||
Place your font file in the `public/` folder and use `loadFont()`:
|
||||
|
||||
```tsx
|
||||
import { loadFont } from "@remotion/fonts";
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
await loadFont({
|
||||
family: "MyFont",
|
||||
url: staticFile("MyFont-Regular.woff2"),
|
||||
});
|
||||
|
||||
export const MyComposition = () => {
|
||||
return <div style={{ fontFamily: "MyFont" }}>Hello World</div>;
|
||||
};
|
||||
```
|
||||
|
||||
### Loading multiple weights
|
||||
|
||||
Load each weight separately with the same family name:
|
||||
|
||||
```tsx
|
||||
import { loadFont } from "@remotion/fonts";
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
await Promise.all([
|
||||
loadFont({
|
||||
family: "Inter",
|
||||
url: staticFile("Inter-Regular.woff2"),
|
||||
weight: "400",
|
||||
}),
|
||||
loadFont({
|
||||
family: "Inter",
|
||||
url: staticFile("Inter-Bold.woff2"),
|
||||
weight: "700",
|
||||
}),
|
||||
]);
|
||||
```
|
||||
|
||||
### Available options
|
||||
|
||||
```tsx
|
||||
loadFont({
|
||||
family: "MyFont", // Required: name to use in CSS
|
||||
url: staticFile("font.woff2"), // Required: font file URL
|
||||
format: "woff2", // Optional: auto-detected from extension
|
||||
weight: "400", // Optional: font weight
|
||||
style: "normal", // Optional: normal or italic
|
||||
display: "block", // Optional: font-display behavior
|
||||
});
|
||||
```
|
||||
|
||||
## Using in components
|
||||
|
||||
Call `loadFont()` at the top level of your component or in a separate file that's imported early:
|
||||
|
||||
```tsx
|
||||
import { loadFont } from "@remotion/google-fonts/Montserrat";
|
||||
|
||||
const { fontFamily } = loadFont("normal", {
|
||||
weights: ["400", "700"],
|
||||
subsets: ["latin"],
|
||||
});
|
||||
|
||||
export const Title: React.FC<{ text: string }> = ({ text }) => {
|
||||
return (
|
||||
<h1
|
||||
style={{
|
||||
fontFamily,
|
||||
fontSize: 80,
|
||||
fontWeight: "bold",
|
||||
}}
|
||||
>
|
||||
{text}
|
||||
</h1>
|
||||
);
|
||||
};
|
||||
```
|
||||
58
bundled-skills/remotion/rules/get-audio-duration.md
Normal file
58
bundled-skills/remotion/rules/get-audio-duration.md
Normal file
@@ -0,0 +1,58 @@
|
||||
---
|
||||
name: get-audio-duration
|
||||
description: Getting the duration of an audio file in seconds with Mediabunny
|
||||
metadata:
|
||||
tags: duration, audio, length, time, seconds, mp3, wav
|
||||
---
|
||||
|
||||
# Getting audio duration with Mediabunny
|
||||
|
||||
Mediabunny can extract the duration of an audio file. It works in browser, Node.js, and Bun environments.
|
||||
|
||||
## Getting audio duration
|
||||
|
||||
```tsx title="get-audio-duration.ts"
|
||||
import { Input, ALL_FORMATS, UrlSource } from "mediabunny";
|
||||
|
||||
export const getAudioDuration = async (src: string) => {
|
||||
const input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new UrlSource(src, {
|
||||
getRetryDelay: () => null,
|
||||
}),
|
||||
});
|
||||
|
||||
const durationInSeconds = await input.computeDuration();
|
||||
return durationInSeconds;
|
||||
};
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```tsx
|
||||
const duration = await getAudioDuration("https://remotion.media/audio.mp3");
|
||||
console.log(duration); // e.g. 180.5 (seconds)
|
||||
```
|
||||
|
||||
## Using with staticFile in Remotion
|
||||
|
||||
Make sure to wrap the file path in `staticFile()`:
|
||||
|
||||
```tsx
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
const duration = await getAudioDuration(staticFile("audio.mp3"));
|
||||
```
|
||||
|
||||
## In Node.js and Bun
|
||||
|
||||
Use `FileSource` instead of `UrlSource`:
|
||||
|
||||
```tsx
|
||||
import { Input, ALL_FORMATS, FileSource } from "mediabunny";
|
||||
|
||||
const input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new FileSource(file), // File object from input or drag-drop
|
||||
});
|
||||
```
|
||||
68
bundled-skills/remotion/rules/get-video-dimensions.md
Normal file
68
bundled-skills/remotion/rules/get-video-dimensions.md
Normal file
@@ -0,0 +1,68 @@
|
||||
---
|
||||
name: get-video-dimensions
|
||||
description: Getting the width and height of a video file with Mediabunny
|
||||
metadata:
|
||||
tags: dimensions, width, height, resolution, size, video
|
||||
---
|
||||
|
||||
# Getting video dimensions with Mediabunny
|
||||
|
||||
Mediabunny can extract the width and height of a video file. It works in browser, Node.js, and Bun environments.
|
||||
|
||||
## Getting video dimensions
|
||||
|
||||
```tsx
|
||||
import { Input, ALL_FORMATS, UrlSource } from "mediabunny";
|
||||
|
||||
export const getVideoDimensions = async (src: string) => {
|
||||
const input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new UrlSource(src, {
|
||||
getRetryDelay: () => null,
|
||||
}),
|
||||
});
|
||||
|
||||
const videoTrack = await input.getPrimaryVideoTrack();
|
||||
if (!videoTrack) {
|
||||
throw new Error("No video track found");
|
||||
}
|
||||
|
||||
return {
|
||||
width: videoTrack.displayWidth,
|
||||
height: videoTrack.displayHeight,
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```tsx
|
||||
const dimensions = await getVideoDimensions("https://remotion.media/video.mp4");
|
||||
console.log(dimensions.width); // e.g. 1920
|
||||
console.log(dimensions.height); // e.g. 1080
|
||||
```
|
||||
|
||||
## Using with local files
|
||||
|
||||
For local files, use `FileSource` instead of `UrlSource`:
|
||||
|
||||
```tsx
|
||||
import { Input, ALL_FORMATS, FileSource } from "mediabunny";
|
||||
|
||||
const input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new FileSource(file), // File object from input or drag-drop
|
||||
});
|
||||
|
||||
const videoTrack = await input.getPrimaryVideoTrack();
|
||||
const width = videoTrack.displayWidth;
|
||||
const height = videoTrack.displayHeight;
|
||||
```
|
||||
|
||||
## Using with staticFile in Remotion
|
||||
|
||||
```tsx
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
const dimensions = await getVideoDimensions(staticFile("video.mp4"));
|
||||
```
|
||||
60
bundled-skills/remotion/rules/get-video-duration.md
Normal file
60
bundled-skills/remotion/rules/get-video-duration.md
Normal file
@@ -0,0 +1,60 @@
|
||||
---
|
||||
name: get-video-duration
|
||||
description: Getting the duration of a video file in seconds with Mediabunny
|
||||
metadata:
|
||||
tags: duration, video, length, time, seconds
|
||||
---
|
||||
|
||||
# Getting video duration with Mediabunny
|
||||
|
||||
Mediabunny can extract the duration of a video file. It works in browser, Node.js, and Bun environments.
|
||||
|
||||
## Getting video duration
|
||||
|
||||
```tsx
|
||||
import { Input, ALL_FORMATS, UrlSource } from "mediabunny";
|
||||
|
||||
export const getVideoDuration = async (src: string) => {
|
||||
const input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new UrlSource(src, {
|
||||
getRetryDelay: () => null,
|
||||
}),
|
||||
});
|
||||
|
||||
const durationInSeconds = await input.computeDuration();
|
||||
return durationInSeconds;
|
||||
};
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```tsx
|
||||
const duration = await getVideoDuration("https://remotion.media/video.mp4");
|
||||
console.log(duration); // e.g. 10.5 (seconds)
|
||||
```
|
||||
|
||||
## Video files from the public/ directory
|
||||
|
||||
Make sure to wrap the file path in `staticFile()`:
|
||||
|
||||
```tsx
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
const duration = await getVideoDuration(staticFile("video.mp4"));
|
||||
```
|
||||
|
||||
## In Node.js and Bun
|
||||
|
||||
Use `FileSource` instead of `UrlSource`:
|
||||
|
||||
```tsx
|
||||
import { Input, ALL_FORMATS, FileSource } from "mediabunny";
|
||||
|
||||
const input = new Input({
|
||||
formats: ALL_FORMATS,
|
||||
source: new FileSource(file), // File object from input or drag-drop
|
||||
});
|
||||
|
||||
const durationInSeconds = await input.computeDuration();
|
||||
```
|
||||
141
bundled-skills/remotion/rules/gifs.md
Normal file
141
bundled-skills/remotion/rules/gifs.md
Normal file
@@ -0,0 +1,141 @@
|
||||
---
|
||||
name: gif
|
||||
description: Displaying GIFs, APNG, AVIF and WebP in Remotion
|
||||
metadata:
|
||||
tags: gif, animation, images, animated, apng, avif, webp
|
||||
---
|
||||
|
||||
# Using Animated images in Remotion
|
||||
|
||||
## Basic usage
|
||||
|
||||
Use `<AnimatedImage>` to display a GIF, APNG, AVIF or WebP image synchronized with Remotion's timeline:
|
||||
|
||||
```tsx
|
||||
import { AnimatedImage, staticFile } from "remotion";
|
||||
|
||||
export const MyComposition = () => {
|
||||
return (
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={500} />
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
Remote URLs are also supported (must have CORS enabled):
|
||||
|
||||
```tsx
|
||||
<AnimatedImage
|
||||
src="https://example.com/animation.gif"
|
||||
width={500}
|
||||
height={500}
|
||||
/>
|
||||
```
|
||||
|
||||
## Sizing and fit
|
||||
|
||||
Control how the image fills its container with the `fit` prop:
|
||||
|
||||
```tsx
|
||||
// Stretch to fill (default)
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={300} fit="fill" />
|
||||
|
||||
// Maintain aspect ratio, fit inside container
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={300} fit="contain" />
|
||||
|
||||
// Fill container, crop if needed
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={300} fit="cover" />
|
||||
```
|
||||
|
||||
## Playback speed
|
||||
|
||||
Use `playbackRate` to control the animation speed:
|
||||
|
||||
```tsx
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={500} playbackRate={2} /> {/* 2x speed */}
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={500} playbackRate={0.5} /> {/* Half speed */}
|
||||
```
|
||||
|
||||
## Looping behavior
|
||||
|
||||
Control what happens when the animation finishes:
|
||||
|
||||
```tsx
|
||||
// Loop indefinitely (default)
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={500} loopBehavior="loop" />
|
||||
|
||||
// Play once, show final frame
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={500} loopBehavior="pause-after-finish" />
|
||||
|
||||
// Play once, then clear canvas
|
||||
<AnimatedImage src={staticFile("animation.gif")} width={500} height={500} loopBehavior="clear-after-finish" />
|
||||
```
|
||||
|
||||
## Styling
|
||||
|
||||
Use the `style` prop for additional CSS (use `width` and `height` props for sizing):
|
||||
|
||||
```tsx
|
||||
<AnimatedImage
|
||||
src={staticFile("animation.gif")}
|
||||
width={500}
|
||||
height={500}
|
||||
style={{
|
||||
borderRadius: 20,
|
||||
position: "absolute",
|
||||
top: 100,
|
||||
left: 50,
|
||||
}}
|
||||
/>
|
||||
```
|
||||
|
||||
## Getting GIF duration
|
||||
|
||||
Use `getGifDurationInSeconds()` from `@remotion/gif` to get the duration of a GIF.
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/gif
|
||||
```
|
||||
|
||||
```tsx
|
||||
import { getGifDurationInSeconds } from "@remotion/gif";
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
const duration = await getGifDurationInSeconds(staticFile("animation.gif"));
|
||||
console.log(duration); // e.g. 2.5
|
||||
```
|
||||
|
||||
This is useful for setting the composition duration to match the GIF:
|
||||
|
||||
```tsx
|
||||
import { getGifDurationInSeconds } from "@remotion/gif";
|
||||
import { staticFile, CalculateMetadataFunction } from "remotion";
|
||||
|
||||
const calculateMetadata: CalculateMetadataFunction = async () => {
|
||||
const duration = await getGifDurationInSeconds(staticFile("animation.gif"));
|
||||
return {
|
||||
durationInFrames: Math.ceil(duration * 30),
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
## Alternative
|
||||
|
||||
If `<AnimatedImage>` does not work (only supported in Chrome and Firefox), you can use `<Gif>` from `@remotion/gif` instead.
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/gif # If project uses npm
|
||||
bunx remotion add @remotion/gif # If project uses bun
|
||||
yarn remotion add @remotion/gif # If project uses yarn
|
||||
pnpm exec remotion add @remotion/gif # If project uses pnpm
|
||||
```
|
||||
|
||||
```tsx
|
||||
import { Gif } from "@remotion/gif";
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
export const MyComposition = () => {
|
||||
return <Gif src={staticFile("animation.gif")} width={500} height={500} />;
|
||||
};
|
||||
```
|
||||
|
||||
The `<Gif>` component has the same props as `<AnimatedImage>` but only supports GIF files.
|
||||
134
bundled-skills/remotion/rules/images.md
Normal file
134
bundled-skills/remotion/rules/images.md
Normal file
@@ -0,0 +1,134 @@
|
||||
---
|
||||
name: images
|
||||
description: Embedding images in Remotion using the <Img> component
|
||||
metadata:
|
||||
tags: images, img, staticFile, png, jpg, svg, webp
|
||||
---
|
||||
|
||||
# Using images in Remotion
|
||||
|
||||
## The `<Img>` component
|
||||
|
||||
Always use the `<Img>` component from `remotion` to display images:
|
||||
|
||||
```tsx
|
||||
import { Img, staticFile } from "remotion";
|
||||
|
||||
export const MyComposition = () => {
|
||||
return <Img src={staticFile("photo.png")} />;
|
||||
};
|
||||
```
|
||||
|
||||
## Important restrictions
|
||||
|
||||
**You MUST use the `<Img>` component from `remotion`.** Do not use:
|
||||
|
||||
- Native HTML `<img>` elements
|
||||
- Next.js `<Image>` component
|
||||
- CSS `background-image`
|
||||
|
||||
The `<Img>` component ensures images are fully loaded before rendering, preventing flickering and blank frames during video export.
|
||||
|
||||
## Local images with staticFile()
|
||||
|
||||
Place images in the `public/` folder and use `staticFile()` to reference them:
|
||||
|
||||
```
|
||||
my-video/
|
||||
├─ public/
|
||||
│ ├─ logo.png
|
||||
│ ├─ avatar.jpg
|
||||
│ └─ icon.svg
|
||||
├─ src/
|
||||
├─ package.json
|
||||
```
|
||||
|
||||
```tsx
|
||||
import { Img, staticFile } from "remotion";
|
||||
|
||||
<Img src={staticFile("logo.png")} />;
|
||||
```
|
||||
|
||||
## Remote images
|
||||
|
||||
Remote URLs can be used directly without `staticFile()`:
|
||||
|
||||
```tsx
|
||||
<Img src="https://example.com/image.png" />
|
||||
```
|
||||
|
||||
Ensure remote images have CORS enabled.
|
||||
|
||||
For animated GIFs, use the `<Gif>` component from `@remotion/gif` instead.
|
||||
|
||||
## Sizing and positioning
|
||||
|
||||
Use the `style` prop to control size and position:
|
||||
|
||||
```tsx
|
||||
<Img
|
||||
src={staticFile("photo.png")}
|
||||
style={{
|
||||
width: 500,
|
||||
height: 300,
|
||||
position: "absolute",
|
||||
top: 100,
|
||||
left: 50,
|
||||
objectFit: "cover",
|
||||
}}
|
||||
/>
|
||||
```
|
||||
|
||||
## Dynamic image paths
|
||||
|
||||
Use template literals for dynamic file references:
|
||||
|
||||
```tsx
|
||||
import { Img, staticFile, useCurrentFrame } from "remotion";
|
||||
|
||||
const frame = useCurrentFrame();
|
||||
|
||||
// Image sequence
|
||||
<Img src={staticFile(`frames/frame${frame}.png`)} />
|
||||
|
||||
// Selecting based on props
|
||||
<Img src={staticFile(`avatars/${props.userId}.png`)} />
|
||||
|
||||
// Conditional images
|
||||
<Img src={staticFile(`icons/${isActive ? "active" : "inactive"}.svg`)} />
|
||||
```
|
||||
|
||||
This pattern is useful for:
|
||||
|
||||
- Image sequences (frame-by-frame animations)
|
||||
- User-specific avatars or profile images
|
||||
- Theme-based icons
|
||||
- State-dependent graphics
|
||||
|
||||
## Getting image dimensions
|
||||
|
||||
Use `getImageDimensions()` to get the dimensions of an image:
|
||||
|
||||
```tsx
|
||||
import { getImageDimensions, staticFile } from "remotion";
|
||||
|
||||
const { width, height } = await getImageDimensions(staticFile("photo.png"));
|
||||
```
|
||||
|
||||
This is useful for calculating aspect ratios or sizing compositions:
|
||||
|
||||
```tsx
|
||||
import {
|
||||
getImageDimensions,
|
||||
staticFile,
|
||||
CalculateMetadataFunction,
|
||||
} from "remotion";
|
||||
|
||||
const calculateMetadata: CalculateMetadataFunction = async () => {
|
||||
const { width, height } = await getImageDimensions(staticFile("photo.png"));
|
||||
return {
|
||||
width,
|
||||
height,
|
||||
};
|
||||
};
|
||||
```
|
||||
69
bundled-skills/remotion/rules/import-srt-captions.md
Normal file
69
bundled-skills/remotion/rules/import-srt-captions.md
Normal file
@@ -0,0 +1,69 @@
|
||||
---
|
||||
name: import-srt-captions
|
||||
description: Importing .srt subtitle files into Remotion using @remotion/captions
|
||||
metadata:
|
||||
tags: captions, subtitles, srt, import, parse
|
||||
---
|
||||
|
||||
# Importing .srt subtitles into Remotion
|
||||
|
||||
If you have an existing `.srt` subtitle file, you can import it into Remotion using `parseSrt()` from `@remotion/captions`.
|
||||
|
||||
If you don't have a .srt file, read [Transcribing audio](transcribe-captions.md) for how to generate captions instead.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
First, the @remotion/captions package needs to be installed.
|
||||
If it is not installed, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/captions # If project uses npm
|
||||
bunx remotion add @remotion/captions # If project uses bun
|
||||
yarn remotion add @remotion/captions # If project uses yarn
|
||||
pnpm exec remotion add @remotion/captions # If project uses pnpm
|
||||
```
|
||||
|
||||
## Reading an .srt file
|
||||
|
||||
Use `staticFile()` to reference an `.srt` file in your `public` folder, then fetch and parse it:
|
||||
|
||||
```tsx
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { AbsoluteFill, staticFile, useDelayRender } from "remotion";
|
||||
import { parseSrt } from "@remotion/captions";
|
||||
import type { Caption } from "@remotion/captions";
|
||||
|
||||
export const MyComponent: React.FC = () => {
|
||||
const [captions, setCaptions] = useState<Caption[] | null>(null);
|
||||
const { delayRender, continueRender, cancelRender } = useDelayRender();
|
||||
const [handle] = useState(() => delayRender());
|
||||
|
||||
const fetchCaptions = useCallback(async () => {
|
||||
try {
|
||||
const response = await fetch(staticFile("subtitles.srt"));
|
||||
const text = await response.text();
|
||||
const { captions: parsed } = parseSrt({ input: text });
|
||||
setCaptions(parsed);
|
||||
continueRender(handle);
|
||||
} catch (e) {
|
||||
cancelRender(e);
|
||||
}
|
||||
}, [continueRender, cancelRender, handle]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchCaptions();
|
||||
}, [fetchCaptions]);
|
||||
|
||||
if (!captions) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return <AbsoluteFill>{/* Use captions here */}</AbsoluteFill>;
|
||||
};
|
||||
```
|
||||
|
||||
Remote URLs are also supported - you can `fetch()` a remote file via URL instead of using `staticFile()`.
|
||||
|
||||
## Using imported captions
|
||||
|
||||
Once parsed, the captions are in the `Caption` format and can be used with all `@remotion/captions` utilities.
|
||||
73
bundled-skills/remotion/rules/light-leaks.md
Normal file
73
bundled-skills/remotion/rules/light-leaks.md
Normal file
@@ -0,0 +1,73 @@
|
||||
---
|
||||
name: light-leaks
|
||||
description: Light leak overlay effects for Remotion using @remotion/light-leaks.
|
||||
metadata:
|
||||
tags: light-leaks, overlays, effects, transitions
|
||||
---
|
||||
|
||||
## Light Leaks
|
||||
|
||||
This only works from Remotion 4.0.415 and up. Use `npx remotion versions` to check your Remotion version and `npx remotion upgrade` to upgrade your Remotion version.
|
||||
|
||||
`<LightLeak>` from `@remotion/light-leaks` renders a WebGL-based light leak effect. It reveals during the first half of its duration and retracts during the second half.
|
||||
|
||||
Typically used inside a `<TransitionSeries.Overlay>` to play over the cut point between two scenes. See the **transitions** rule for `<TransitionSeries>` and overlay usage.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/light-leaks
|
||||
```
|
||||
|
||||
## Basic usage with TransitionSeries
|
||||
|
||||
```tsx
|
||||
import { TransitionSeries } from "@remotion/transitions";
|
||||
import { LightLeak } from "@remotion/light-leaks";
|
||||
|
||||
<TransitionSeries>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneA />
|
||||
</TransitionSeries.Sequence>
|
||||
<TransitionSeries.Overlay durationInFrames={30}>
|
||||
<LightLeak />
|
||||
</TransitionSeries.Overlay>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneB />
|
||||
</TransitionSeries.Sequence>
|
||||
</TransitionSeries>;
|
||||
```
|
||||
|
||||
## Props
|
||||
|
||||
- `durationInFrames?` — defaults to the parent sequence/composition duration. The effect reveals during the first half and retracts during the second half.
|
||||
- `seed?` — determines the shape of the light leak pattern. Different seeds produce different patterns. Default: `0`.
|
||||
- `hueShift?` — rotates the hue in degrees (`0`–`360`). Default: `0` (yellow-to-orange). `120` = green, `240` = blue.
|
||||
|
||||
## Customizing the look
|
||||
|
||||
```tsx
|
||||
import { LightLeak } from "@remotion/light-leaks";
|
||||
|
||||
// Blue-tinted light leak with a different pattern
|
||||
<LightLeak seed={5} hueShift={240} />;
|
||||
|
||||
// Green-tinted light leak
|
||||
<LightLeak seed={2} hueShift={120} />;
|
||||
```
|
||||
|
||||
## Standalone usage
|
||||
|
||||
`<LightLeak>` can also be used outside of `<TransitionSeries>`, for example as a decorative overlay in any composition:
|
||||
|
||||
```tsx
|
||||
import { AbsoluteFill } from "remotion";
|
||||
import { LightLeak } from "@remotion/light-leaks";
|
||||
|
||||
const MyComp: React.FC = () => (
|
||||
<AbsoluteFill>
|
||||
<MyContent />
|
||||
<LightLeak durationInFrames={60} seed={3} />
|
||||
</AbsoluteFill>
|
||||
);
|
||||
```
|
||||
70
bundled-skills/remotion/rules/lottie.md
Normal file
70
bundled-skills/remotion/rules/lottie.md
Normal file
@@ -0,0 +1,70 @@
|
||||
---
|
||||
name: lottie
|
||||
description: Embedding Lottie animations in Remotion.
|
||||
metadata:
|
||||
category: Animation
|
||||
---
|
||||
|
||||
# Using Lottie Animations in Remotion
|
||||
|
||||
## Prerequisites
|
||||
|
||||
First, the @remotion/lottie package needs to be installed.
|
||||
If it is not installed, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/lottie # If project uses npm
|
||||
bunx remotion add @remotion/lottie # If project uses bun
|
||||
yarn remotion add @remotion/lottie # If project uses yarn
|
||||
pnpm exec remotion add @remotion/lottie # If project uses pnpm
|
||||
```
|
||||
|
||||
## Displaying a Lottie file
|
||||
|
||||
To import a Lottie animation:
|
||||
|
||||
- Fetch the Lottie asset
|
||||
- Wrap the loading process in `delayRender()` and `continueRender()`
|
||||
- Save the animation data in a state
|
||||
- Render the Lottie animation using the `Lottie` component from the `@remotion/lottie` package
|
||||
|
||||
```tsx
|
||||
import { Lottie, LottieAnimationData } from "@remotion/lottie";
|
||||
import { useEffect, useState } from "react";
|
||||
import { cancelRender, continueRender, delayRender } from "remotion";
|
||||
|
||||
export const MyAnimation = () => {
|
||||
const [handle] = useState(() => delayRender("Loading Lottie animation"));
|
||||
|
||||
const [animationData, setAnimationData] =
|
||||
useState<LottieAnimationData | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
fetch("https://assets4.lottiefiles.com/packages/lf20_zyquagfl.json")
|
||||
.then((data) => data.json())
|
||||
.then((json) => {
|
||||
setAnimationData(json);
|
||||
continueRender(handle);
|
||||
})
|
||||
.catch((err) => {
|
||||
cancelRender(err);
|
||||
});
|
||||
}, [handle]);
|
||||
|
||||
if (!animationData) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return <Lottie animationData={animationData} />;
|
||||
};
|
||||
```
|
||||
|
||||
## Styling and animating
|
||||
|
||||
Lottie supports the `style` prop to allow styles and animations:
|
||||
|
||||
```tsx
|
||||
return (
|
||||
<Lottie animationData={animationData} style={{ width: 400, height: 400 }} />
|
||||
);
|
||||
```
|
||||
412
bundled-skills/remotion/rules/maps.md
Normal file
412
bundled-skills/remotion/rules/maps.md
Normal file
@@ -0,0 +1,412 @@
|
||||
---
|
||||
name: maps
|
||||
description: Make map animations with Mapbox
|
||||
metadata:
|
||||
tags: map, map animation, mapbox
|
||||
---
|
||||
|
||||
Maps can be added to a Remotion video with Mapbox.
|
||||
The [Mapbox documentation](https://docs.mapbox.com/mapbox-gl-js/api/) has the API reference.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Mapbox and `@turf/turf` need to be installed.
|
||||
|
||||
Search the project for lockfiles and run the correct command depending on the package manager:
|
||||
|
||||
If `package-lock.json` is found, use the following command:
|
||||
|
||||
```bash
|
||||
npm i mapbox-gl @turf/turf @types/mapbox-gl
|
||||
```
|
||||
|
||||
If `bun.lock` is found, use the following command:
|
||||
|
||||
```bash
|
||||
bun i mapbox-gl @turf/turf @types/mapbox-gl
|
||||
```
|
||||
|
||||
If `yarn.lock` is found, use the following command:
|
||||
|
||||
```bash
|
||||
yarn add mapbox-gl @turf/turf @types/mapbox-gl
|
||||
```
|
||||
|
||||
If `pnpm-lock.yaml` is found, use the following command:
|
||||
|
||||
```bash
|
||||
pnpm i mapbox-gl @turf/turf @types/mapbox-gl
|
||||
```
|
||||
|
||||
The user needs to create a free Mapbox account and create an access token by visiting https://console.mapbox.com/account/access-tokens/.
|
||||
|
||||
The mapbox token needs to be added to the `.env` file:
|
||||
|
||||
```txt title=".env"
|
||||
REMOTION_MAPBOX_TOKEN=pk.your-mapbox-access-token
|
||||
```
|
||||
|
||||
## Adding a map
|
||||
|
||||
Here is a basic example of a map in Remotion.
|
||||
|
||||
```tsx
|
||||
import { useEffect, useMemo, useRef, useState } from "react";
|
||||
import { AbsoluteFill, useDelayRender, useVideoConfig } from "remotion";
|
||||
import mapboxgl, { Map } from "mapbox-gl";
|
||||
|
||||
export const lineCoordinates = [
|
||||
[6.56158447265625, 46.059891147620725],
|
||||
[6.5691375732421875, 46.05679376154153],
|
||||
[6.5842437744140625, 46.05059898938315],
|
||||
[6.594886779785156, 46.04702502069337],
|
||||
[6.601066589355469, 46.0460718554722],
|
||||
[6.6089630126953125, 46.0365370783104],
|
||||
[6.6185760498046875, 46.018420689207964],
|
||||
];
|
||||
|
||||
mapboxgl.accessToken = process.env.REMOTION_MAPBOX_TOKEN as string;
|
||||
|
||||
export const MyComposition = () => {
|
||||
const ref = useRef<HTMLDivElement>(null);
|
||||
const { delayRender, continueRender } = useDelayRender();
|
||||
|
||||
const { width, height } = useVideoConfig();
|
||||
const [handle] = useState(() => delayRender("Loading map..."));
|
||||
const [map, setMap] = useState<Map | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const _map = new Map({
|
||||
container: ref.current!,
|
||||
zoom: 11.53,
|
||||
center: [6.5615, 46.0598],
|
||||
pitch: 65,
|
||||
bearing: 0,
|
||||
style: "mapbox://styles/mapbox/standard",
|
||||
interactive: false,
|
||||
fadeDuration: 0,
|
||||
});
|
||||
|
||||
_map.on("style.load", () => {
|
||||
// Hide all features from the Mapbox Standard style
|
||||
const hideFeatures = [
|
||||
"showRoadsAndTransit",
|
||||
"showRoads",
|
||||
"showTransit",
|
||||
"showPedestrianRoads",
|
||||
"showRoadLabels",
|
||||
"showTransitLabels",
|
||||
"showPlaceLabels",
|
||||
"showPointOfInterestLabels",
|
||||
"showPointsOfInterest",
|
||||
"showAdminBoundaries",
|
||||
"showLandmarkIcons",
|
||||
"showLandmarkIconLabels",
|
||||
"show3dObjects",
|
||||
"show3dBuildings",
|
||||
"show3dTrees",
|
||||
"show3dLandmarks",
|
||||
"show3dFacades",
|
||||
];
|
||||
for (const feature of hideFeatures) {
|
||||
_map.setConfigProperty("basemap", feature, false);
|
||||
}
|
||||
|
||||
_map.setConfigProperty("basemap", "colorTrunks", "rgba(0, 0, 0, 0)");
|
||||
|
||||
_map.addSource("trace", {
|
||||
type: "geojson",
|
||||
data: {
|
||||
type: "Feature",
|
||||
properties: {},
|
||||
geometry: {
|
||||
type: "LineString",
|
||||
coordinates: lineCoordinates,
|
||||
},
|
||||
},
|
||||
});
|
||||
_map.addLayer({
|
||||
type: "line",
|
||||
source: "trace",
|
||||
id: "line",
|
||||
paint: {
|
||||
"line-color": "black",
|
||||
"line-width": 5,
|
||||
},
|
||||
layout: {
|
||||
"line-cap": "round",
|
||||
"line-join": "round",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
_map.on("load", () => {
|
||||
continueRender(handle);
|
||||
setMap(_map);
|
||||
});
|
||||
}, [handle, lineCoordinates]);
|
||||
|
||||
const style: React.CSSProperties = useMemo(
|
||||
() => ({ width, height, position: "absolute" }),
|
||||
[width, height],
|
||||
);
|
||||
|
||||
return <AbsoluteFill ref={ref} style={style} />;
|
||||
};
|
||||
```
|
||||
|
||||
The following is important in Remotion:
|
||||
|
||||
- Animations must be driven by `useCurrentFrame()` and animations that Mapbox brings itself should be disabled. For example, the `fadeDuration` prop should be set to `0`, `interactive` should be set to `false`, etc.
|
||||
- Loading the map should be delayed using `useDelayRender()` and the map should be set to `null` until it is loaded.
|
||||
- The element containing the ref MUST have an explicit width and height and `position: "absolute"`.
|
||||
- Do not add a `_map.remove();` cleanup function.
|
||||
|
||||
## Drawing lines
|
||||
|
||||
Unless I request it, do not add a glow effect to the lines.
|
||||
Unless I request it, do not add additional points to the lines.
|
||||
|
||||
## Map style
|
||||
|
||||
By default, use the `mapbox://styles/mapbox/standard` style.
|
||||
Hide the labels from the base map style.
|
||||
|
||||
Unless I request otherwise, remove all features from the Mapbox Standard style.
|
||||
|
||||
```tsx
|
||||
// Hide all features from the Mapbox Standard style
|
||||
const hideFeatures = [
|
||||
"showRoadsAndTransit",
|
||||
"showRoads",
|
||||
"showTransit",
|
||||
"showPedestrianRoads",
|
||||
"showRoadLabels",
|
||||
"showTransitLabels",
|
||||
"showPlaceLabels",
|
||||
"showPointOfInterestLabels",
|
||||
"showPointsOfInterest",
|
||||
"showAdminBoundaries",
|
||||
"showLandmarkIcons",
|
||||
"showLandmarkIconLabels",
|
||||
"show3dObjects",
|
||||
"show3dBuildings",
|
||||
"show3dTrees",
|
||||
"show3dLandmarks",
|
||||
"show3dFacades",
|
||||
];
|
||||
for (const feature of hideFeatures) {
|
||||
_map.setConfigProperty("basemap", feature, false);
|
||||
}
|
||||
|
||||
_map.setConfigProperty("basemap", "colorMotorways", "transparent");
|
||||
_map.setConfigProperty("basemap", "colorRoads", "transparent");
|
||||
_map.setConfigProperty("basemap", "colorTrunks", "transparent");
|
||||
```
|
||||
|
||||
## Animating the camera
|
||||
|
||||
You can animate the camera along the line by adding a `useEffect` hook that updates the camera position based on the current frame.
|
||||
|
||||
Unless I ask for it, do not jump between camera angles.
|
||||
|
||||
```tsx
|
||||
import * as turf from "@turf/turf";
|
||||
import { interpolate } from "remotion";
|
||||
import { Easing } from "remotion";
|
||||
import { useCurrentFrame, useVideoConfig, useDelayRender } from "remotion";
|
||||
|
||||
const animationDuration = 20;
|
||||
const cameraAltitude = 4000;
|
||||
```
|
||||
|
||||
```tsx
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
const { delayRender, continueRender } = useDelayRender();
|
||||
|
||||
useEffect(() => {
|
||||
if (!map) {
|
||||
return;
|
||||
}
|
||||
const handle = delayRender("Moving point...");
|
||||
|
||||
const routeDistance = turf.length(turf.lineString(lineCoordinates));
|
||||
|
||||
const progress = interpolate(
|
||||
frame / fps,
|
||||
[0.00001, animationDuration],
|
||||
[0, 1],
|
||||
{
|
||||
easing: Easing.inOut(Easing.sin),
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
},
|
||||
);
|
||||
|
||||
const camera = map.getFreeCameraOptions();
|
||||
|
||||
const alongRoute = turf.along(
|
||||
turf.lineString(lineCoordinates),
|
||||
routeDistance * progress,
|
||||
).geometry.coordinates;
|
||||
|
||||
camera.lookAtPoint({
|
||||
lng: alongRoute[0],
|
||||
lat: alongRoute[1],
|
||||
});
|
||||
|
||||
map.setFreeCameraOptions(camera);
|
||||
map.once("idle", () => continueRender(handle));
|
||||
}, [lineCoordinates, fps, frame, handle, map]);
|
||||
```
|
||||
|
||||
Notes:
|
||||
|
||||
IMPORTANT: By default, keep the camera oriented so that north is up.
|
||||
IMPORTANT: For multi-step animations, set all properties at all stages (zoom, position, line progress) to prevent jumps. Override initial values.
|
||||
|
||||
- The progress is clamped to a minimum value to avoid the line being empty, which can lead to turf errors
|
||||
- See [Timing](./timing.md) for more options for timing.
|
||||
- Consider the dimensions of the composition and make the lines thick enough and the label font size large enough to be legible for when the composition is scaled down.
|
||||
|
||||
## Animating lines
|
||||
|
||||
### Straight lines (linear interpolation)
|
||||
|
||||
To animate a line that appears straight on the map, use linear interpolation between coordinates. Do NOT use turf's `lineSliceAlong` or `along` functions, as they use geodesic (great circle) calculations which appear curved on a Mercator projection.
|
||||
|
||||
```tsx
|
||||
const frame = useCurrentFrame();
|
||||
const { durationInFrames } = useVideoConfig();
|
||||
|
||||
useEffect(() => {
|
||||
if (!map) return;
|
||||
|
||||
const animationHandle = delayRender("Animating line...");
|
||||
|
||||
const progress = interpolate(frame, [0, durationInFrames - 1], [0, 1], {
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
easing: Easing.inOut(Easing.cubic),
|
||||
});
|
||||
|
||||
// Linear interpolation for a straight line on the map
|
||||
const start = lineCoordinates[0];
|
||||
const end = lineCoordinates[1];
|
||||
const currentLng = start[0] + (end[0] - start[0]) * progress;
|
||||
const currentLat = start[1] + (end[1] - start[1]) * progress;
|
||||
|
||||
const lineData: GeoJSON.Feature<GeoJSON.LineString> = {
|
||||
type: "Feature",
|
||||
properties: {},
|
||||
geometry: {
|
||||
type: "LineString",
|
||||
coordinates: [start, [currentLng, currentLat]],
|
||||
},
|
||||
};
|
||||
|
||||
const source = map.getSource("trace") as mapboxgl.GeoJSONSource;
|
||||
if (source) {
|
||||
source.setData(lineData);
|
||||
}
|
||||
|
||||
map.once("idle", () => continueRender(animationHandle));
|
||||
}, [frame, map, durationInFrames]);
|
||||
```
|
||||
|
||||
### Curved lines (geodesic/great circle)
|
||||
|
||||
To animate a line that follows the geodesic (great circle) path between two points, use turf's `lineSliceAlong`. This is useful for showing flight paths or the actual shortest distance on Earth.
|
||||
|
||||
```tsx
|
||||
import * as turf from "@turf/turf";
|
||||
|
||||
const routeLine = turf.lineString(lineCoordinates);
|
||||
const routeDistance = turf.length(routeLine);
|
||||
|
||||
const currentDistance = Math.max(0.001, routeDistance * progress);
|
||||
const slicedLine = turf.lineSliceAlong(routeLine, 0, currentDistance);
|
||||
|
||||
const source = map.getSource("route") as mapboxgl.GeoJSONSource;
|
||||
if (source) {
|
||||
source.setData(slicedLine);
|
||||
}
|
||||
```
|
||||
|
||||
## Markers
|
||||
|
||||
Add labels, and markers where appropriate.
|
||||
|
||||
```tsx
|
||||
_map.addSource("markers", {
|
||||
type: "geojson",
|
||||
data: {
|
||||
type: "FeatureCollection",
|
||||
features: [
|
||||
{
|
||||
type: "Feature",
|
||||
properties: { name: "Point 1" },
|
||||
geometry: { type: "Point", coordinates: [-118.2437, 34.0522] },
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
_map.addLayer({
|
||||
id: "city-markers",
|
||||
type: "circle",
|
||||
source: "markers",
|
||||
paint: {
|
||||
"circle-radius": 40,
|
||||
"circle-color": "#FF4444",
|
||||
"circle-stroke-width": 4,
|
||||
"circle-stroke-color": "#FFFFFF",
|
||||
},
|
||||
});
|
||||
|
||||
_map.addLayer({
|
||||
id: "labels",
|
||||
type: "symbol",
|
||||
source: "markers",
|
||||
layout: {
|
||||
"text-field": ["get", "name"],
|
||||
"text-font": ["DIN Pro Bold", "Arial Unicode MS Bold"],
|
||||
"text-size": 50,
|
||||
"text-offset": [0, 0.5],
|
||||
"text-anchor": "top",
|
||||
},
|
||||
paint: {
|
||||
"text-color": "#FFFFFF",
|
||||
"text-halo-color": "#000000",
|
||||
"text-halo-width": 2,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Make sure they are big enough. Check the composition dimensions and scale the labels accordingly.
|
||||
For a composition size of 1920x1080, the label font size should be at least 40px.
|
||||
|
||||
IMPORTANT: Keep the `text-offset` small enough so it is close to the marker. Consider the marker circle radius. For a circle radius of 40, this is a good offset:
|
||||
|
||||
```tsx
|
||||
"text-offset": [0, 0.5],
|
||||
```
|
||||
|
||||
## 3D buildings
|
||||
|
||||
To enable 3D buildings, use the following code:
|
||||
|
||||
```tsx
|
||||
_map.setConfigProperty("basemap", "show3dObjects", true);
|
||||
_map.setConfigProperty("basemap", "show3dLandmarks", true);
|
||||
_map.setConfigProperty("basemap", "show3dBuildings", true);
|
||||
```
|
||||
|
||||
## Rendering
|
||||
|
||||
When rendering a map animation, make sure to render with the following flags:
|
||||
|
||||
```
|
||||
npx remotion render --gl=angle --concurrency=1
|
||||
```
|
||||
34
bundled-skills/remotion/rules/measuring-dom-nodes.md
Normal file
34
bundled-skills/remotion/rules/measuring-dom-nodes.md
Normal file
@@ -0,0 +1,34 @@
|
||||
---
|
||||
name: measuring-dom-nodes
|
||||
description: Measuring DOM element dimensions in Remotion
|
||||
metadata:
|
||||
tags: measure, layout, dimensions, getBoundingClientRect, scale
|
||||
---
|
||||
|
||||
# Measuring DOM nodes in Remotion
|
||||
|
||||
Remotion applies a `scale()` transform to the video container, which affects values from `getBoundingClientRect()`. Use `useCurrentScale()` to get correct measurements.
|
||||
|
||||
## Measuring element dimensions
|
||||
|
||||
```tsx
|
||||
import { useCurrentScale } from "remotion";
|
||||
import { useRef, useEffect, useState } from "react";
|
||||
|
||||
export const MyComponent = () => {
|
||||
const ref = useRef<HTMLDivElement>(null);
|
||||
const scale = useCurrentScale();
|
||||
const [dimensions, setDimensions] = useState({ width: 0, height: 0 });
|
||||
|
||||
useEffect(() => {
|
||||
if (!ref.current) return;
|
||||
const rect = ref.current.getBoundingClientRect();
|
||||
setDimensions({
|
||||
width: rect.width / scale,
|
||||
height: rect.height / scale,
|
||||
});
|
||||
}, [scale]);
|
||||
|
||||
return <div ref={ref}>Content to measure</div>;
|
||||
};
|
||||
```
|
||||
140
bundled-skills/remotion/rules/measuring-text.md
Normal file
140
bundled-skills/remotion/rules/measuring-text.md
Normal file
@@ -0,0 +1,140 @@
|
||||
---
|
||||
name: measuring-text
|
||||
description: Measuring text dimensions, fitting text to containers, and checking overflow
|
||||
metadata:
|
||||
tags: measure, text, layout, dimensions, fitText, fillTextBox
|
||||
---
|
||||
|
||||
# Measuring text in Remotion
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Install @remotion/layout-utils if it is not already installed:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/layout-utils
|
||||
```
|
||||
|
||||
## Measuring text dimensions
|
||||
|
||||
Use `measureText()` to calculate the width and height of text:
|
||||
|
||||
```tsx
|
||||
import { measureText } from "@remotion/layout-utils";
|
||||
|
||||
const { width, height } = measureText({
|
||||
text: "Hello World",
|
||||
fontFamily: "Arial",
|
||||
fontSize: 32,
|
||||
fontWeight: "bold",
|
||||
});
|
||||
```
|
||||
|
||||
Results are cached - duplicate calls return the cached result.
|
||||
|
||||
## Fitting text to a width
|
||||
|
||||
Use `fitText()` to find the optimal font size for a container:
|
||||
|
||||
```tsx
|
||||
import { fitText } from "@remotion/layout-utils";
|
||||
|
||||
const { fontSize } = fitText({
|
||||
text: "Hello World",
|
||||
withinWidth: 600,
|
||||
fontFamily: "Inter",
|
||||
fontWeight: "bold",
|
||||
});
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
fontSize: Math.min(fontSize, 80), // Cap at 80px
|
||||
fontFamily: "Inter",
|
||||
fontWeight: "bold",
|
||||
}}
|
||||
>
|
||||
Hello World
|
||||
</div>
|
||||
);
|
||||
```
|
||||
|
||||
## Checking text overflow
|
||||
|
||||
Use `fillTextBox()` to check if text exceeds a box:
|
||||
|
||||
```tsx
|
||||
import { fillTextBox } from "@remotion/layout-utils";
|
||||
|
||||
const box = fillTextBox({ maxBoxWidth: 400, maxLines: 3 });
|
||||
|
||||
const words = ["Hello", "World", "This", "is", "a", "test"];
|
||||
for (const word of words) {
|
||||
const { exceedsBox } = box.add({
|
||||
text: word + " ",
|
||||
fontFamily: "Arial",
|
||||
fontSize: 24,
|
||||
});
|
||||
if (exceedsBox) {
|
||||
// Text would overflow, handle accordingly
|
||||
break;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Best practices
|
||||
|
||||
**Load fonts first:** Only call measurement functions after fonts are loaded.
|
||||
|
||||
```tsx
|
||||
import { loadFont } from "@remotion/google-fonts/Inter";
|
||||
|
||||
const { fontFamily, waitUntilDone } = loadFont("normal", {
|
||||
weights: ["400"],
|
||||
subsets: ["latin"],
|
||||
});
|
||||
|
||||
waitUntilDone().then(() => {
|
||||
// Now safe to measure
|
||||
const { width } = measureText({
|
||||
text: "Hello",
|
||||
fontFamily,
|
||||
fontSize: 32,
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
**Use validateFontIsLoaded:** Catch font loading issues early:
|
||||
|
||||
```tsx
|
||||
measureText({
|
||||
text: "Hello",
|
||||
fontFamily: "MyCustomFont",
|
||||
fontSize: 32,
|
||||
validateFontIsLoaded: true, // Throws if font not loaded
|
||||
});
|
||||
```
|
||||
|
||||
**Match font properties:** Use the same properties for measurement and rendering:
|
||||
|
||||
```tsx
|
||||
const fontStyle = {
|
||||
fontFamily: "Inter",
|
||||
fontSize: 32,
|
||||
fontWeight: "bold" as const,
|
||||
letterSpacing: "0.5px",
|
||||
};
|
||||
|
||||
const { width } = measureText({
|
||||
text: "Hello",
|
||||
...fontStyle,
|
||||
});
|
||||
|
||||
return <div style={fontStyle}>Hello</div>;
|
||||
```
|
||||
|
||||
**Avoid padding and border:** Use `outline` instead of `border` to prevent layout differences:
|
||||
|
||||
```tsx
|
||||
<div style={{ outline: "2px solid red" }}>Text</div>
|
||||
```
|
||||
109
bundled-skills/remotion/rules/parameters.md
Normal file
109
bundled-skills/remotion/rules/parameters.md
Normal file
@@ -0,0 +1,109 @@
|
||||
---
|
||||
name: parameters
|
||||
description: Make a video parametrizable by adding a Zod schema
|
||||
metadata:
|
||||
tags: parameters, zod, schema
|
||||
---
|
||||
|
||||
To make a video parametrizable, a Zod schema can be added to a composition.
|
||||
|
||||
First, `zod` must be installed.
|
||||
|
||||
Search the project for lockfiles and run the correct command depending on the package manager:
|
||||
|
||||
If `package-lock.json` is found, use the following command:
|
||||
|
||||
```bash
|
||||
npm i zod
|
||||
```
|
||||
|
||||
If `bun.lockb` is found, use the following command:
|
||||
|
||||
```bash
|
||||
bun i zod
|
||||
```
|
||||
|
||||
If `yarn.lock` is found, use the following command:
|
||||
|
||||
```bash
|
||||
yarn add zod
|
||||
```
|
||||
|
||||
If `pnpm-lock.yaml` is found, use the following command:
|
||||
|
||||
```bash
|
||||
pnpm i zod
|
||||
```
|
||||
|
||||
Then, a Zod schema can be defined alongside the component:
|
||||
|
||||
```tsx title="src/MyComposition.tsx"
|
||||
import { z } from "zod";
|
||||
|
||||
export const MyCompositionSchema = z.object({
|
||||
title: z.string(),
|
||||
});
|
||||
|
||||
const MyComponent: React.FC<z.infer<typeof MyCompositionSchema>> = (props) => {
|
||||
return (
|
||||
<div>
|
||||
<h1>{props.title}</h1>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
In the root file, the schema can be passed to the composition:
|
||||
|
||||
```tsx title="src/Root.tsx"
|
||||
import { Composition } from "remotion";
|
||||
import { MyComponent, MyCompositionSchema } from "./MyComposition";
|
||||
|
||||
export const RemotionRoot = () => {
|
||||
return (
|
||||
<Composition
|
||||
id="MyComposition"
|
||||
component={MyComponent}
|
||||
durationInFrames={100}
|
||||
fps={30}
|
||||
width={1080}
|
||||
height={1080}
|
||||
defaultProps={{ title: "Hello World" }}
|
||||
schema={MyCompositionSchema}
|
||||
/>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
Now, the user can edit the parameter visually in the sidebar.
|
||||
|
||||
All schemas that are supported by Zod are supported by Remotion.
|
||||
|
||||
Remotion requires that the top-level type is a z.object(), because the collection of props of a React component is always an object.
|
||||
|
||||
## Color picker
|
||||
|
||||
For adding a color picker, use `zColor()` from `@remotion/zod-types`.
|
||||
|
||||
If it is not installed, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/zod-types # If project uses npm
|
||||
bunx remotion add @remotion/zod-types # If project uses bun
|
||||
yarn remotion add @remotion/zod-types # If project uses yarn
|
||||
pnpm exec remotion add @remotion/zod-types # If project uses pnpm
|
||||
```
|
||||
|
||||
Then import `zColor` from `@remotion/zod-types`:
|
||||
|
||||
```tsx
|
||||
import { zColor } from "@remotion/zod-types";
|
||||
```
|
||||
|
||||
Then use it in the schema:
|
||||
|
||||
```tsx
|
||||
export const MyCompositionSchema = z.object({
|
||||
color: zColor(),
|
||||
});
|
||||
```
|
||||
118
bundled-skills/remotion/rules/sequencing.md
Normal file
118
bundled-skills/remotion/rules/sequencing.md
Normal file
@@ -0,0 +1,118 @@
|
||||
---
|
||||
name: sequencing
|
||||
description: Sequencing patterns for Remotion - delay, trim, limit duration of items
|
||||
metadata:
|
||||
tags: sequence, series, timing, delay, trim
|
||||
---
|
||||
|
||||
Use `<Sequence>` to delay when an element appears in the timeline.
|
||||
|
||||
```tsx
|
||||
import { Sequence } from "remotion";
|
||||
|
||||
const {fps} = useVideoConfig();
|
||||
|
||||
<Sequence from={1 * fps} durationInFrames={2 * fps} premountFor={1 * fps}>
|
||||
<Title />
|
||||
</Sequence>
|
||||
<Sequence from={2 * fps} durationInFrames={2 * fps} premountFor={1 * fps}>
|
||||
<Subtitle />
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
This will by default wrap the component in an absolute fill element.
|
||||
If the items should not be wrapped, use the `layout` prop:
|
||||
|
||||
```tsx
|
||||
<Sequence layout="none">
|
||||
<Title />
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
## Premounting
|
||||
|
||||
This loads the component in the timeline before it is actually played.
|
||||
Always premount any `<Sequence>`!
|
||||
|
||||
```tsx
|
||||
<Sequence premountFor={1 * fps}>
|
||||
<Title />
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
## Series
|
||||
|
||||
Use `<Series>` when elements should play one after another without overlap.
|
||||
|
||||
```tsx
|
||||
import { Series } from "remotion";
|
||||
|
||||
<Series>
|
||||
<Series.Sequence durationInFrames={45}>
|
||||
<Intro />
|
||||
</Series.Sequence>
|
||||
<Series.Sequence durationInFrames={60}>
|
||||
<MainContent />
|
||||
</Series.Sequence>
|
||||
<Series.Sequence durationInFrames={30}>
|
||||
<Outro />
|
||||
</Series.Sequence>
|
||||
</Series>;
|
||||
```
|
||||
|
||||
Same as with `<Sequence>`, the items will be wrapped in an absolute fill element by default when using `<Series.Sequence>`, unless the `layout` prop is set to `none`.
|
||||
|
||||
### Series with overlaps
|
||||
|
||||
Use negative offset for overlapping sequences:
|
||||
|
||||
```tsx
|
||||
<Series>
|
||||
<Series.Sequence durationInFrames={60}>
|
||||
<SceneA />
|
||||
</Series.Sequence>
|
||||
<Series.Sequence offset={-15} durationInFrames={60}>
|
||||
{/* Starts 15 frames before SceneA ends */}
|
||||
<SceneB />
|
||||
</Series.Sequence>
|
||||
</Series>
|
||||
```
|
||||
|
||||
## Frame References Inside Sequences
|
||||
|
||||
Inside a Sequence, `useCurrentFrame()` returns the local frame (starting from 0):
|
||||
|
||||
```tsx
|
||||
<Sequence from={60} durationInFrames={30}>
|
||||
<MyComponent />
|
||||
{/* Inside MyComponent, useCurrentFrame() returns 0-29, not 60-89 */}
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
## Nested Sequences
|
||||
|
||||
Sequences can be nested for complex timing:
|
||||
|
||||
```tsx
|
||||
<Sequence from={0} durationInFrames={120}>
|
||||
<Background />
|
||||
<Sequence from={15} durationInFrames={90} layout="none">
|
||||
<Title />
|
||||
</Sequence>
|
||||
<Sequence from={45} durationInFrames={60} layout="none">
|
||||
<Subtitle />
|
||||
</Sequence>
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
## Nesting compositions within another
|
||||
|
||||
To add a composition within another composition, you can use the `<Sequence>` component with a `width` and `height` prop to specify the size of the composition.
|
||||
|
||||
```tsx
|
||||
<AbsoluteFill>
|
||||
<Sequence width={COMPOSITION_WIDTH} height={COMPOSITION_HEIGHT}>
|
||||
<CompositionComponent />
|
||||
</Sequence>
|
||||
</AbsoluteFill>
|
||||
```
|
||||
26
bundled-skills/remotion/rules/sfx.md
Normal file
26
bundled-skills/remotion/rules/sfx.md
Normal file
@@ -0,0 +1,26 @@
|
||||
---
|
||||
name: sfx
|
||||
description: Including sound effects
|
||||
metadata:
|
||||
tags: sfx, sound, effect, audio
|
||||
---
|
||||
|
||||
To include a sound effect, use the `<Audio>` tag:
|
||||
|
||||
```tsx
|
||||
import { Audio } from "@remotion/sfx";
|
||||
|
||||
<Audio src={"https://remotion.media/whoosh.wav"} />;
|
||||
```
|
||||
|
||||
The following sound effects are available:
|
||||
|
||||
- `https://remotion.media/whoosh.wav`
|
||||
- `https://remotion.media/whip.wav`
|
||||
- `https://remotion.media/page-turn.wav`
|
||||
- `https://remotion.media/switch.wav`
|
||||
- `https://remotion.media/mouse-click.wav`
|
||||
- `https://remotion.media/shutter-modern.wav`
|
||||
- `https://remotion.media/shutter-old.wav`
|
||||
|
||||
For more sound effects, search the internet. A good resource is https://github.com/kapishdima/soundcn/tree/main/assets.
|
||||
36
bundled-skills/remotion/rules/subtitles.md
Normal file
36
bundled-skills/remotion/rules/subtitles.md
Normal file
@@ -0,0 +1,36 @@
|
||||
---
|
||||
name: subtitles
|
||||
description: subtitles and caption rules
|
||||
metadata:
|
||||
tags: subtitles, captions, remotion, json
|
||||
---
|
||||
|
||||
All captions must be processed in JSON. The captions must use the `Caption` type which is the following:
|
||||
|
||||
```ts
|
||||
import type { Caption } from "@remotion/captions";
|
||||
```
|
||||
|
||||
This is the definition:
|
||||
|
||||
```ts
|
||||
type Caption = {
|
||||
text: string;
|
||||
startMs: number;
|
||||
endMs: number;
|
||||
timestampMs: number | null;
|
||||
confidence: number | null;
|
||||
};
|
||||
```
|
||||
|
||||
## Generating captions
|
||||
|
||||
To transcribe video and audio files to generate captions, load the [./transcribe-captions.md](./transcribe-captions.md) file for more instructions.
|
||||
|
||||
## Displaying captions
|
||||
|
||||
To display captions in your video, load the [./display-captions.md](./display-captions.md) file for more instructions.
|
||||
|
||||
## Importing captions
|
||||
|
||||
To import captions from a .srt file, load the [./import-srt-captions.md](./import-srt-captions.md) file for more instructions.
|
||||
11
bundled-skills/remotion/rules/tailwind.md
Normal file
11
bundled-skills/remotion/rules/tailwind.md
Normal file
@@ -0,0 +1,11 @@
|
||||
---
|
||||
name: tailwind
|
||||
description: Using TailwindCSS in Remotion.
|
||||
metadata:
|
||||
---
|
||||
|
||||
You can and should use TailwindCSS in Remotion, if TailwindCSS is installed in the project.
|
||||
|
||||
Don't use `transition-*` or `animate-*` classes - always animate using the `useCurrentFrame()` hook.
|
||||
|
||||
Tailwind must be installed and enabled first in a Remotion project - fetch https://www.remotion.dev/docs/tailwind using WebFetch for instructions.
|
||||
20
bundled-skills/remotion/rules/text-animations.md
Normal file
20
bundled-skills/remotion/rules/text-animations.md
Normal file
@@ -0,0 +1,20 @@
|
||||
---
|
||||
name: text-animations
|
||||
description: Typography and text animation patterns for Remotion.
|
||||
metadata:
|
||||
  tags: typography, text, typewriter, highlighter
|
||||
---
|
||||
|
||||
## Text animations
|
||||
|
||||
Based on `useCurrentFrame()`, reduce the string character by character to create a typewriter effect.
|
||||
|
||||
## Typewriter Effect
|
||||
|
||||
See [Typewriter](assets/text-animations-typewriter.tsx) for an advanced example with a blinking cursor and a pause after the first sentence.
|
||||
|
||||
Always use string slicing for typewriter effects. Never use per-character opacity.
|
||||
|
||||
## Word Highlighting
|
||||
|
||||
See [Word Highlight](assets/text-animations-word-highlight.tsx) for an example for how a word highlight is animated, like with a highlighter pen.
|
||||
179
bundled-skills/remotion/rules/timing.md
Normal file
179
bundled-skills/remotion/rules/timing.md
Normal file
@@ -0,0 +1,179 @@
|
||||
---
|
||||
name: timing
|
||||
description: Interpolation curves in Remotion - linear, easing, spring animations
|
||||
metadata:
|
||||
tags: spring, bounce, easing, interpolation
|
||||
---
|
||||
|
||||
A simple linear interpolation is done using the `interpolate` function.
|
||||
|
||||
```ts title="Going from 0 to 1 over 100 frames"
|
||||
import { interpolate } from "remotion";
|
||||
|
||||
const opacity = interpolate(frame, [0, 100], [0, 1]);
|
||||
```
|
||||
|
||||
By default, the values are not clamped, so the value can go outside the range [0, 1].
|
||||
Here is how they can be clamped:
|
||||
|
||||
```ts title="Going from 0 to 1 over 100 frames with extrapolation"
|
||||
const opacity = interpolate(frame, [0, 100], [0, 1], {
|
||||
extrapolateRight: "clamp",
|
||||
extrapolateLeft: "clamp",
|
||||
});
|
||||
```
|
||||
|
||||
## Spring animations
|
||||
|
||||
Spring animations have a more natural motion.
|
||||
They go from 0 to 1 over time.
|
||||
|
||||
```ts title="Spring animation from 0 to 1 over 100 frames"
|
||||
import { spring, useCurrentFrame, useVideoConfig } from "remotion";
|
||||
|
||||
const frame = useCurrentFrame();
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
const scale = spring({
|
||||
frame,
|
||||
fps,
|
||||
});
|
||||
```
|
||||
|
||||
### Physical properties
|
||||
|
||||
The default configuration is: `mass: 1, damping: 10, stiffness: 100`.
|
||||
This leads to the animation having a bit of bounce before it settles.
|
||||
|
||||
The config can be overwritten like this:
|
||||
|
||||
```ts
|
||||
const scale = spring({
|
||||
frame,
|
||||
fps,
|
||||
config: { damping: 200 },
|
||||
});
|
||||
```
|
||||
|
||||
The recommended configuration for a natural motion without a bounce is: `{ damping: 200 }`.
|
||||
|
||||
Here are some common configurations:
|
||||
|
||||
```tsx
|
||||
const smooth = { damping: 200 }; // Smooth, no bounce (subtle reveals)
|
||||
const snappy = { damping: 20, stiffness: 200 }; // Snappy, minimal bounce (UI elements)
|
||||
const bouncy = { damping: 8 }; // Bouncy entrance (playful animations)
|
||||
const heavy = { damping: 15, stiffness: 80, mass: 2 }; // Heavy, slow, small bounce
|
||||
```
|
||||
|
||||
### Delay
|
||||
|
||||
The animation starts immediately by default.
|
||||
Use the `delay` parameter to delay the animation by a number of frames.
|
||||
|
||||
```tsx
|
||||
const entrance = spring({
|
||||
  frame,
|
||||
fps,
|
||||
delay: 20,
|
||||
});
|
||||
```
|
||||
|
||||
### Duration
|
||||
|
||||
A `spring()` has a natural duration based on the physical properties.
|
||||
To stretch the animation to a specific duration, use the `durationInFrames` parameter.
|
||||
|
||||
```tsx
|
||||
const stretched = spring({
|
||||
frame,
|
||||
fps,
|
||||
durationInFrames: 40,
|
||||
});
|
||||
```
|
||||
|
||||
### Combining spring() with interpolate()
|
||||
|
||||
Map spring output (0-1) to custom ranges:
|
||||
|
||||
```tsx
|
||||
const springProgress = spring({
|
||||
frame,
|
||||
fps,
|
||||
});
|
||||
|
||||
// Map to rotation
|
||||
const rotation = interpolate(springProgress, [0, 1], [0, 360]);
|
||||
|
||||
<div style={{ rotate: rotation + "deg" }} />;
|
||||
```
|
||||
|
||||
### Adding springs
|
||||
|
||||
Springs return just numbers, so math can be performed:
|
||||
|
||||
```tsx
|
||||
const frame = useCurrentFrame();
|
||||
const { fps, durationInFrames } = useVideoConfig();
|
||||
|
||||
const inAnimation = spring({
|
||||
frame,
|
||||
fps,
|
||||
});
|
||||
const outAnimation = spring({
|
||||
frame,
|
||||
fps,
|
||||
durationInFrames: 1 * fps,
|
||||
delay: durationInFrames - 1 * fps,
|
||||
});
|
||||
|
||||
const scale = inAnimation - outAnimation;
|
||||
```
|
||||
|
||||
## Easing
|
||||
|
||||
Easing can be added to the `interpolate` function:
|
||||
|
||||
```ts
|
||||
import { interpolate, Easing } from "remotion";
|
||||
|
||||
const value1 = interpolate(frame, [0, 100], [0, 1], {
|
||||
easing: Easing.inOut(Easing.quad),
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
});
|
||||
```
|
||||
|
||||
The default easing is `Easing.linear`.
|
||||
There are various other convexities:
|
||||
|
||||
- `Easing.in` for starting slow and accelerating
|
||||
- `Easing.out` for starting fast and slowing down
|
||||
- `Easing.inOut`
|
||||
|
||||
and curves (sorted from most linear to most curved):
|
||||
|
||||
- `Easing.quad`
|
||||
- `Easing.sin`
|
||||
- `Easing.exp`
|
||||
- `Easing.circle`
|
||||
|
||||
Convexities and curves need to be combined for an easing function:
|
||||
|
||||
```ts
|
||||
const value1 = interpolate(frame, [0, 100], [0, 1], {
|
||||
easing: Easing.inOut(Easing.quad),
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
});
|
||||
```
|
||||
|
||||
Cubic bezier curves are also supported:
|
||||
|
||||
```ts
|
||||
const value1 = interpolate(frame, [0, 100], [0, 1], {
|
||||
easing: Easing.bezier(0.8, 0.22, 0.96, 0.65),
|
||||
extrapolateLeft: "clamp",
|
||||
extrapolateRight: "clamp",
|
||||
});
|
||||
```
|
||||
70
bundled-skills/remotion/rules/transcribe-captions.md
Normal file
70
bundled-skills/remotion/rules/transcribe-captions.md
Normal file
@@ -0,0 +1,70 @@
|
||||
---
|
||||
name: transcribe-captions
|
||||
description: Transcribing audio to generate captions in Remotion
|
||||
metadata:
|
||||
tags: captions, transcribe, whisper, audio, speech-to-text
|
||||
---
|
||||
|
||||
# Transcribing audio
|
||||
|
||||
To transcribe audio to generate captions in Remotion, you can use the [`transcribe()`](https://www.remotion.dev/docs/install-whisper-cpp/transcribe) function from the [`@remotion/install-whisper-cpp`](https://www.remotion.dev/docs/install-whisper-cpp) package.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
First, the @remotion/install-whisper-cpp package needs to be installed.
|
||||
If it is not installed, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/install-whisper-cpp
|
||||
```
|
||||
|
||||
## Transcribing
|
||||
|
||||
Make a Node.js script to download Whisper.cpp and a model, and transcribe the audio.
|
||||
|
||||
```ts
|
||||
import path from "path";
|
||||
import {
|
||||
downloadWhisperModel,
|
||||
installWhisperCpp,
|
||||
transcribe,
|
||||
toCaptions,
|
||||
} from "@remotion/install-whisper-cpp";
|
||||
import fs from "fs";
|
||||
|
||||
const to = path.join(process.cwd(), "whisper.cpp");
|
||||
|
||||
await installWhisperCpp({
|
||||
to,
|
||||
version: "1.5.5",
|
||||
});
|
||||
|
||||
await downloadWhisperModel({
|
||||
model: "medium.en",
|
||||
folder: to,
|
||||
});
|
||||
|
||||
// Convert the audio to a 16KHz wav file first if needed:
|
||||
// import {execSync} from 'child_process';
|
||||
// execSync('ffmpeg -i /path/to/audio.mp4 -ar 16000 /path/to/audio.wav -y');
|
||||
|
||||
const whisperCppOutput = await transcribe({
|
||||
model: "medium.en",
|
||||
whisperPath: to,
|
||||
whisperCppVersion: "1.5.5",
|
||||
inputPath: "/path/to/audio123.wav",
|
||||
tokenLevelTimestamps: true,
|
||||
});
|
||||
|
||||
// Optional: Apply our recommended postprocessing
|
||||
const { captions } = toCaptions({
|
||||
whisperCppOutput,
|
||||
});
|
||||
|
||||
// Write it to the public/ folder so it can be fetched from Remotion
|
||||
fs.writeFileSync("captions123.json", JSON.stringify(captions, null, 2));
|
||||
```
|
||||
|
||||
Transcribe each clip individually and create multiple JSON files.
|
||||
|
||||
See [Displaying captions](display-captions.md) for how to display the captions in Remotion.
|
||||
197
bundled-skills/remotion/rules/transitions.md
Normal file
197
bundled-skills/remotion/rules/transitions.md
Normal file
@@ -0,0 +1,197 @@
|
||||
---
|
||||
name: transitions
|
||||
description: Scene transitions and overlays for Remotion using TransitionSeries.
|
||||
metadata:
|
||||
tags: transitions, overlays, fade, slide, wipe, scenes
|
||||
---
|
||||
|
||||
## TransitionSeries
|
||||
|
||||
`<TransitionSeries>` arranges scenes and supports two ways to enhance the cut point between them:
|
||||
|
||||
- **Transitions** (`<TransitionSeries.Transition>`) — crossfade, slide, wipe, etc. between two scenes. Shortens the timeline because both scenes play simultaneously during the transition.
|
||||
- **Overlays** (`<TransitionSeries.Overlay>`) — render an effect (e.g. a light leak) on top of the cut point without shortening the timeline.
|
||||
|
||||
Children are absolutely positioned.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/transitions
|
||||
```
|
||||
|
||||
## Transition example
|
||||
|
||||
```tsx
|
||||
import { TransitionSeries, linearTiming } from "@remotion/transitions";
|
||||
import { fade } from "@remotion/transitions/fade";
|
||||
|
||||
<TransitionSeries>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneA />
|
||||
</TransitionSeries.Sequence>
|
||||
<TransitionSeries.Transition
|
||||
presentation={fade()}
|
||||
timing={linearTiming({ durationInFrames: 15 })}
|
||||
/>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneB />
|
||||
</TransitionSeries.Sequence>
|
||||
</TransitionSeries>;
|
||||
```
|
||||
|
||||
## Overlay example
|
||||
|
||||
Any React component can be used as an overlay. For a ready-made effect, see the **light-leaks** rule.
|
||||
|
||||
```tsx
|
||||
import { TransitionSeries } from "@remotion/transitions";
|
||||
import { LightLeak } from "@remotion/light-leaks";
|
||||
|
||||
<TransitionSeries>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneA />
|
||||
</TransitionSeries.Sequence>
|
||||
<TransitionSeries.Overlay durationInFrames={20}>
|
||||
<LightLeak />
|
||||
</TransitionSeries.Overlay>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneB />
|
||||
</TransitionSeries.Sequence>
|
||||
</TransitionSeries>;
|
||||
```
|
||||
|
||||
## Mixing transitions and overlays
|
||||
|
||||
Transitions and overlays can coexist in the same `<TransitionSeries>`, but an overlay cannot be adjacent to a transition or another overlay.
|
||||
|
||||
```tsx
|
||||
import { TransitionSeries, linearTiming } from "@remotion/transitions";
|
||||
import { fade } from "@remotion/transitions/fade";
|
||||
import { LightLeak } from "@remotion/light-leaks";
|
||||
|
||||
<TransitionSeries>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneA />
|
||||
</TransitionSeries.Sequence>
|
||||
<TransitionSeries.Overlay durationInFrames={30}>
|
||||
<LightLeak />
|
||||
</TransitionSeries.Overlay>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneB />
|
||||
</TransitionSeries.Sequence>
|
||||
<TransitionSeries.Transition
|
||||
presentation={fade()}
|
||||
timing={linearTiming({ durationInFrames: 15 })}
|
||||
/>
|
||||
<TransitionSeries.Sequence durationInFrames={60}>
|
||||
<SceneC />
|
||||
</TransitionSeries.Sequence>
|
||||
</TransitionSeries>;
|
||||
```
|
||||
|
||||
## Transition props
|
||||
|
||||
`<TransitionSeries.Transition>` requires:
|
||||
|
||||
- `presentation` — the visual effect (e.g. `fade()`, `slide()`, `wipe()`).
|
||||
- `timing` — controls speed and easing (e.g. `linearTiming()`, `springTiming()`).
|
||||
|
||||
## Overlay props
|
||||
|
||||
`<TransitionSeries.Overlay>` accepts:
|
||||
|
||||
- `durationInFrames` — how long the overlay is visible (positive integer).
|
||||
- `offset?` — shifts the overlay relative to the cut point center. Positive = later, negative = earlier. Default: `0`.
|
||||
|
||||
## Available transition types
|
||||
|
||||
Import transitions from their respective modules:
|
||||
|
||||
```tsx
|
||||
import { fade } from "@remotion/transitions/fade";
|
||||
import { slide } from "@remotion/transitions/slide";
|
||||
import { wipe } from "@remotion/transitions/wipe";
|
||||
import { flip } from "@remotion/transitions/flip";
|
||||
import { clockWipe } from "@remotion/transitions/clock-wipe";
|
||||
```
|
||||
|
||||
## Slide transition with direction
|
||||
|
||||
```tsx
|
||||
import { slide } from "@remotion/transitions/slide";
|
||||
|
||||
<TransitionSeries.Transition
|
||||
presentation={slide({ direction: "from-left" })}
|
||||
timing={linearTiming({ durationInFrames: 20 })}
|
||||
/>;
|
||||
```
|
||||
|
||||
Directions: `"from-left"`, `"from-right"`, `"from-top"`, `"from-bottom"`
|
||||
|
||||
## Timing options
|
||||
|
||||
```tsx
|
||||
import { linearTiming, springTiming } from "@remotion/transitions";
|
||||
|
||||
// Linear timing - constant speed
|
||||
linearTiming({ durationInFrames: 20 });
|
||||
|
||||
// Spring timing - organic motion
|
||||
springTiming({ config: { damping: 200 }, durationInFrames: 25 });
|
||||
```
|
||||
|
||||
## Duration calculation
|
||||
|
||||
Transitions overlap adjacent scenes, so the total composition length is **shorter** than the sum of all sequence durations. Overlays do **not** affect the total duration.
|
||||
|
||||
For example, with two 60-frame sequences and a 15-frame transition:
|
||||
|
||||
- Without transitions: `60 + 60 = 120` frames
|
||||
- With transition: `60 + 60 - 15 = 105` frames
|
||||
|
||||
Adding an overlay between two other sequences does not change the total.
|
||||
|
||||
### Getting the duration of a transition
|
||||
|
||||
Use the `getDurationInFrames()` method on the timing object:
|
||||
|
||||
```tsx
|
||||
import { linearTiming, springTiming } from "@remotion/transitions";
|
||||
|
||||
const linearDuration = linearTiming({
|
||||
durationInFrames: 20,
|
||||
}).getDurationInFrames({ fps: 30 });
|
||||
// Returns 20
|
||||
|
||||
const springDuration = springTiming({
|
||||
config: { damping: 200 },
|
||||
}).getDurationInFrames({ fps: 30 });
|
||||
// Returns calculated duration based on spring physics
|
||||
```
|
||||
|
||||
For `springTiming` without an explicit `durationInFrames`, the duration depends on `fps` because it calculates when the spring animation settles.
|
||||
|
||||
### Calculating total composition duration
|
||||
|
||||
```tsx
|
||||
import { linearTiming } from "@remotion/transitions";
|
||||
|
||||
const scene1Duration = 60;
|
||||
const scene2Duration = 60;
|
||||
const scene3Duration = 60;
|
||||
|
||||
const timing1 = linearTiming({ durationInFrames: 15 });
|
||||
const timing2 = linearTiming({ durationInFrames: 20 });
|
||||
|
||||
const transition1Duration = timing1.getDurationInFrames({ fps: 30 });
|
||||
const transition2Duration = timing2.getDurationInFrames({ fps: 30 });
|
||||
|
||||
const totalDuration =
|
||||
scene1Duration +
|
||||
scene2Duration +
|
||||
scene3Duration -
|
||||
transition1Duration -
|
||||
transition2Duration;
|
||||
// 60 + 60 + 60 - 15 - 20 = 145 frames
|
||||
```
|
||||
106
bundled-skills/remotion/rules/transparent-videos.md
Normal file
106
bundled-skills/remotion/rules/transparent-videos.md
Normal file
@@ -0,0 +1,106 @@
|
||||
---
|
||||
name: transparent-videos
|
||||
description: Rendering transparent videos in Remotion
|
||||
metadata:
|
||||
tags: transparent, alpha, codec, vp9, prores, webm
|
||||
---
|
||||
|
||||
# Rendering Transparent Videos
|
||||
|
||||
Remotion can render transparent videos in two ways: as a ProRes video or as a WebM video.
|
||||
|
||||
## Transparent ProRes
|
||||
|
||||
Ideal for when importing into video editing software.
|
||||
|
||||
**CLI:**
|
||||
|
||||
```bash
|
||||
npx remotion render --image-format=png --pixel-format=yuva444p10le --codec=prores --prores-profile=4444 MyComp out.mov
|
||||
```
|
||||
|
||||
**Default in Studio** (restart Studio after changing):
|
||||
|
||||
```ts
|
||||
// remotion.config.ts
|
||||
import { Config } from "@remotion/cli/config";
|
||||
|
||||
Config.setVideoImageFormat("png");
|
||||
Config.setPixelFormat("yuva444p10le");
|
||||
Config.setCodec("prores");
|
||||
Config.setProResProfile("4444");
|
||||
```
|
||||
|
||||
**Setting it as the default export settings for a composition** (using `calculateMetadata`):
|
||||
|
||||
```tsx
|
||||
import { CalculateMetadataFunction } from "remotion";
|
||||
|
||||
const calculateMetadata: CalculateMetadataFunction<Props> = async ({
|
||||
props,
|
||||
}) => {
|
||||
return {
|
||||
defaultCodec: "prores",
|
||||
defaultVideoImageFormat: "png",
|
||||
defaultPixelFormat: "yuva444p10le",
|
||||
defaultProResProfile: "4444",
|
||||
};
|
||||
};
|
||||
|
||||
<Composition
|
||||
id="my-video"
|
||||
component={MyVideo}
|
||||
durationInFrames={150}
|
||||
fps={30}
|
||||
width={1920}
|
||||
height={1080}
|
||||
calculateMetadata={calculateMetadata}
|
||||
/>;
|
||||
```
|
||||
|
||||
## Transparent WebM (VP9)
|
||||
|
||||
Ideal for when playing in a browser.
|
||||
|
||||
**CLI:**
|
||||
|
||||
```bash
|
||||
npx remotion render --image-format=png --pixel-format=yuva420p --codec=vp9 MyComp out.webm
|
||||
```
|
||||
|
||||
**Default in Studio** (restart Studio after changing):
|
||||
|
||||
```ts
|
||||
// remotion.config.ts
|
||||
import { Config } from "@remotion/cli/config";
|
||||
|
||||
Config.setVideoImageFormat("png");
|
||||
Config.setPixelFormat("yuva420p");
|
||||
Config.setCodec("vp9");
|
||||
```
|
||||
|
||||
**Setting it as the default export settings for a composition** (using `calculateMetadata`):
|
||||
|
||||
```tsx
|
||||
import { CalculateMetadataFunction } from "remotion";
|
||||
|
||||
const calculateMetadata: CalculateMetadataFunction<Props> = async ({
|
||||
props,
|
||||
}) => {
|
||||
return {
|
||||
defaultCodec: "vp9",
|
||||
defaultVideoImageFormat: "png",
|
||||
defaultPixelFormat: "yuva420p",
|
||||
};
|
||||
};
|
||||
|
||||
<Composition
|
||||
id="my-video"
|
||||
component={MyVideo}
|
||||
durationInFrames={150}
|
||||
fps={30}
|
||||
width={1920}
|
||||
height={1080}
|
||||
calculateMetadata={calculateMetadata}
|
||||
/>;
|
||||
```
|
||||
51
bundled-skills/remotion/rules/trimming.md
Normal file
51
bundled-skills/remotion/rules/trimming.md
Normal file
@@ -0,0 +1,51 @@
|
||||
---
|
||||
name: trimming
|
||||
description: Trimming patterns for Remotion - cut the beginning or end of animations
|
||||
metadata:
|
||||
tags: sequence, trim, clip, cut, offset
|
||||
---
|
||||
|
||||
Use `<Sequence>` with a negative `from` value to trim the start of an animation.
|
||||
|
||||
## Trim the Beginning
|
||||
|
||||
A negative `from` value shifts time backwards, making the animation start partway through:
|
||||
|
||||
```tsx
|
||||
import { Sequence, useVideoConfig } from "remotion";
|
||||
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
<Sequence from={-0.5 * fps}>
|
||||
<MyAnimation />
|
||||
</Sequence>;
|
||||
```
|
||||
|
||||
The animation appears 15 frames into its progress - the first 15 frames are trimmed off.
|
||||
Inside `<MyAnimation>`, `useCurrentFrame()` starts at 15 instead of 0.
|
||||
|
||||
## Trim the End
|
||||
|
||||
Use `durationInFrames` to unmount content after a specified duration:
|
||||
|
||||
```tsx
|
||||
<Sequence durationInFrames={1.5 * fps}>
|
||||
<MyAnimation />
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
The animation plays for 45 frames, then the component unmounts.
|
||||
|
||||
## Trim and Delay
|
||||
|
||||
Nest sequences to both trim the beginning and delay when it appears:
|
||||
|
||||
```tsx
|
||||
<Sequence from={30}>
|
||||
<Sequence from={-15}>
|
||||
<MyAnimation />
|
||||
</Sequence>
|
||||
</Sequence>
|
||||
```
|
||||
|
||||
The inner sequence trims 15 frames from the start, and the outer sequence delays the result by 30 frames.
|
||||
171
bundled-skills/remotion/rules/videos.md
Normal file
171
bundled-skills/remotion/rules/videos.md
Normal file
@@ -0,0 +1,171 @@
|
||||
---
|
||||
name: videos
|
||||
description: Embedding videos in Remotion - trimming, volume, speed, looping, pitch
|
||||
metadata:
|
||||
tags: video, media, trim, volume, speed, loop, pitch
|
||||
---
|
||||
|
||||
# Using videos in Remotion
|
||||
|
||||
## Prerequisites
|
||||
|
||||
First, the @remotion/media package needs to be installed.
|
||||
If it is not, use the following command:
|
||||
|
||||
```bash
|
||||
npx remotion add @remotion/media # If project uses npm
|
||||
bunx remotion add @remotion/media # If project uses bun
|
||||
yarn remotion add @remotion/media # If project uses yarn
|
||||
pnpm exec remotion add @remotion/media # If project uses pnpm
|
||||
```
|
||||
|
||||
Use `<Video>` from `@remotion/media` to embed videos into your composition.
|
||||
|
||||
```tsx
|
||||
import { Video } from "@remotion/media";
|
||||
import { staticFile } from "remotion";
|
||||
|
||||
export const MyComposition = () => {
|
||||
return <Video src={staticFile("video.mp4")} />;
|
||||
};
|
||||
```
|
||||
|
||||
Remote URLs are also supported:
|
||||
|
||||
```tsx
|
||||
<Video src="https://remotion.media/video.mp4" />
|
||||
```
|
||||
|
||||
## Trimming
|
||||
|
||||
Use `trimBefore` and `trimAfter` to remove portions of the video. Values are in frames.
|
||||
|
||||
```tsx
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<Video
|
||||
src={staticFile("video.mp4")}
|
||||
trimBefore={2 * fps} // Skip the first 2 seconds
|
||||
trimAfter={10 * fps} // End at the 10 second mark
|
||||
/>
|
||||
);
|
||||
```
|
||||
|
||||
## Delaying
|
||||
|
||||
Wrap the video in a `<Sequence>` to delay when it appears:
|
||||
|
||||
```tsx
|
||||
import { Sequence, staticFile, useVideoConfig } from "remotion";
|
||||
import { Video } from "@remotion/media";
|
||||
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<Sequence from={1 * fps}>
|
||||
<Video src={staticFile("video.mp4")} />
|
||||
</Sequence>
|
||||
);
|
||||
```
|
||||
|
||||
The video will appear after 1 second.
|
||||
|
||||
## Sizing and Position
|
||||
|
||||
Use the `style` prop to control size and position:
|
||||
|
||||
```tsx
|
||||
<Video
|
||||
src={staticFile("video.mp4")}
|
||||
style={{
|
||||
width: 500,
|
||||
height: 300,
|
||||
position: "absolute",
|
||||
top: 100,
|
||||
left: 50,
|
||||
objectFit: "cover",
|
||||
}}
|
||||
/>
|
||||
```
|
||||
|
||||
## Volume
|
||||
|
||||
Set a static volume (0 to 1):
|
||||
|
||||
```tsx
|
||||
<Video src={staticFile("video.mp4")} volume={0.5} />
|
||||
```
|
||||
|
||||
Or use a callback for dynamic volume based on the current frame:
|
||||
|
||||
```tsx
|
||||
import { interpolate, useVideoConfig } from "remotion";
|
||||
|
||||
const { fps } = useVideoConfig();
|
||||
|
||||
return (
|
||||
<Video
|
||||
src={staticFile("video.mp4")}
|
||||
volume={(f) =>
|
||||
interpolate(f, [0, 1 * fps], [0, 1], { extrapolateRight: "clamp" })
|
||||
}
|
||||
/>
|
||||
);
|
||||
```
|
||||
|
||||
Use `muted` to silence the video entirely:
|
||||
|
||||
```tsx
|
||||
<Video src={staticFile("video.mp4")} muted />
|
||||
```
|
||||
|
||||
## Speed
|
||||
|
||||
Use `playbackRate` to change the playback speed:
|
||||
|
||||
```tsx
|
||||
<Video src={staticFile("video.mp4")} playbackRate={2} /> {/* 2x speed */}
|
||||
<Video src={staticFile("video.mp4")} playbackRate={0.5} /> {/* Half speed */}
|
||||
```
|
||||
|
||||
Reverse playback is not supported.
|
||||
|
||||
## Looping
|
||||
|
||||
Use `loop` to loop the video indefinitely:
|
||||
|
||||
```tsx
|
||||
<Video src={staticFile("video.mp4")} loop />
|
||||
```
|
||||
|
||||
Use `loopVolumeCurveBehavior` to control how the frame count behaves when looping:
|
||||
|
||||
- `"repeat"`: Frame count resets to 0 each loop (for `volume` callback)
|
||||
- `"extend"`: Frame count continues incrementing
|
||||
|
||||
```tsx
|
||||
<Video
|
||||
src={staticFile("video.mp4")}
|
||||
loop
|
||||
loopVolumeCurveBehavior="extend"
|
||||
volume={(f) => interpolate(f, [0, 300], [1, 0])} // Fade out over multiple loops
|
||||
/>
|
||||
```
|
||||
|
||||
## Pitch
|
||||
|
||||
Use `toneFrequency` to adjust the pitch without affecting speed. Values range from 0.01 to 2:
|
||||
|
||||
```tsx
|
||||
<Video
|
||||
src={staticFile("video.mp4")}
|
||||
toneFrequency={1.5} // Higher pitch
|
||||
/>
|
||||
<Video
|
||||
src={staticFile("video.mp4")}
|
||||
toneFrequency={0.8} // Lower pitch
|
||||
/>
|
||||
```
|
||||
|
||||
Pitch shifting only works during server-side rendering, not in the Remotion Studio preview or in the `<Player />`.
|
||||
103
bundled-skills/remotion/rules/voiceover.md
Normal file
103
bundled-skills/remotion/rules/voiceover.md
Normal file
@@ -0,0 +1,103 @@
|
||||
---
|
||||
name: voiceover
|
||||
description: Adding AI-generated voiceover to Remotion compositions using ElevenLabs TTS
|
||||
metadata:
|
||||
tags: voiceover, audio, elevenlabs, tts, speech, calculateMetadata, dynamic duration
|
||||
---
|
||||
|
||||
# Adding AI voiceover to a Remotion composition
|
||||
|
||||
Use ElevenLabs TTS to generate speech audio per scene, then use [`calculateMetadata`](./calculate-metadata.md) to dynamically size the composition to match the audio.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
An **ElevenLabs API key** is required. Store it in a `.env` file at the project root:
|
||||
|
||||
```
|
||||
ELEVENLABS_API_KEY=your_key_here
|
||||
```
|
||||
|
||||
**MUST** ask the user for their ElevenLabs API key if no `.env` file exists or `ELEVENLABS_API_KEY` is not set. **MUST NOT** fall back to other TTS tools.
|
||||
|
||||
When running the generation script, use the `--env-file` flag to load the `.env` file:
|
||||
|
||||
```bash
|
||||
node --env-file=.env --strip-types generate-voiceover.ts
|
||||
```
|
||||
|
||||
## Generating audio with ElevenLabs
|
||||
|
||||
Create a script that reads the config, calls the ElevenLabs API for each scene, and writes MP3 files to the `public/` directory so Remotion can access them via `staticFile()`.
|
||||
|
||||
The core API call for a single scene:
|
||||
|
||||
```ts title="generate-voiceover.ts"
|
||||
const response = await fetch(
|
||||
`https://api.elevenlabs.io/v1/text-to-speech/${voiceId}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
"xi-api-key": process.env.ELEVENLABS_API_KEY!,
|
||||
"Content-Type": "application/json",
|
||||
Accept: "audio/mpeg",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
text: "Welcome to the show.",
|
||||
model_id: "eleven_multilingual_v2",
|
||||
voice_settings: {
|
||||
stability: 0.5,
|
||||
similarity_boost: 0.75,
|
||||
style: 0.3,
|
||||
},
|
||||
}),
|
||||
},
|
||||
);
|
||||
|
||||
const audioBuffer = Buffer.from(await response.arrayBuffer());
|
||||
writeFileSync(`public/voiceover/${compositionId}/${scene.id}.mp3`, audioBuffer);
|
||||
```
|
||||
|
||||
## Dynamic composition duration with calculateMetadata
|
||||
|
||||
Use [`calculateMetadata`](./calculate-metadata.md) to measure the [audio durations](./get-audio-duration.md) and set the composition length accordingly.
|
||||
|
||||
```tsx
|
||||
import { CalculateMetadataFunction, staticFile } from "remotion";
|
||||
import { getAudioDuration } from "./get-audio-duration";
|
||||
|
||||
const FPS = 30;
|
||||
|
||||
const SCENE_AUDIO_FILES = [
|
||||
"voiceover/my-comp/scene-01-intro.mp3",
|
||||
"voiceover/my-comp/scene-02-main.mp3",
|
||||
"voiceover/my-comp/scene-03-outro.mp3",
|
||||
];
|
||||
|
||||
export const calculateMetadata: CalculateMetadataFunction<Props> = async ({
|
||||
props,
|
||||
}) => {
|
||||
const durations = await Promise.all(
|
||||
SCENE_AUDIO_FILES.map((file) => getAudioDuration(staticFile(file))),
|
||||
);
|
||||
|
||||
const sceneDurations = durations.map((durationInSeconds) => {
|
||||
return durationInSeconds * FPS;
|
||||
});
|
||||
|
||||
return {
|
||||
durationInFrames: Math.ceil(sceneDurations.reduce((sum, d) => sum + d, 0)),
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
The computed `sceneDurations` are passed into the component via a `voiceover` prop so the component knows how long each scene should be.
|
||||
|
||||
If the composition uses [`<TransitionSeries>`](./transitions.md), subtract the overlap from total duration: [./transitions.md#calculating-total-composition-duration](./transitions.md#calculating-total-composition-duration)
|
||||
|
||||
## Rendering audio in the component
|
||||
|
||||
See [audio.md](./audio.md) for more information on how to render audio in the component.
|
||||
|
||||
## Delaying audio start
|
||||
|
||||
See [audio.md#delaying](./audio.md#delaying) for more information on how to delay the audio start.
|
||||
Reference in New Issue
Block a user