{
81 | ref.current = r;
82 | if (r) r.style.left = `${framesToPixels(frame.current)}px`;
83 | }}/>}
84 | >
85 | );
86 | }
87 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/Timeline.module.scss:
--------------------------------------------------------------------------------
1 | .timeline {
2 | background-color: var(--background-color);
3 | overflow: visible;
4 | position: relative;
5 | display: grid;
6 | height: 100%;
7 | grid-template-columns: auto auto 1fr;
8 |
9 | > * {
10 | max-height: 100%;
11 | }
12 | }
13 |
14 | .timeline_wrapper {
15 | width: 100%;
16 | position: relative;
17 | overflow-x: scroll;
18 | overflow-y: auto;
19 |
20 | &.scrubbing { cursor: none; }
21 |
22 | &::-webkit-scrollbar { background-color: var(--background-color); }
23 | &::-webkit-scrollbar-thumb { border-color: var(--background-color); }
24 | }
25 |
26 | .timeline_content {
27 | position: relative;
28 | overflow: visible;
29 | height: 100%;
30 | width: max-content;
31 | background-color: color-mix(in hsl, var(--surface-color) 30%, var(--background-color));
32 | }
33 |
34 | .timestamp {
35 | user-select: none;
36 | pointer-events: none;
37 | position: absolute;
38 | top: 0px;
39 | bottom: 0;
40 | width: 0;
41 | border-left: 1px solid var(--surface-color);
42 | border-right: 1px solid var(--surface-color);
43 | display: flex;
44 | opacity: 0.5;
45 | justify-content: center;
46 |
47 | &.odd { opacity: 0.25; }
48 |
49 | &:not(.odd)::after {
50 | position: absolute;
51 | font-family: var(--font-family-mono);
52 | font-size: var(--font-size-small);
53 | top: 0;
54 | text-align: center;
55 | display: block;
56 | content: attr(data-frame);
57 | font-weight: 600;
58 | color: rgba(255, 255, 255, 0.4);
59 | }
60 | }
61 |
62 | .timeline_track {
63 | display: grid;
64 | grid-template-columns: 1fr;
65 |
66 | &.clips_track {
67 | height: 76px;
68 | margin-top: 4px;
69 | padding-bottom: 40px;
70 | }
71 |
72 | &.audio_track {
73 | height: 36px;
74 | padding-bottom: 4px;
75 | }
76 | }
77 |
78 | .playhead {
79 | position: absolute;
80 | width: 2px;
81 | top: 4px;
82 | bottom: 0;
83 | z-index: 200;
84 | background-color: var(--theme);
85 | pointer-events: none;
86 | display: flex;
87 | align-items: flex-start;
88 | justify-content: center;
89 |
90 | &::before {
91 | display: block;
92 | padding: 0 3px;
93 | border-radius: 3px;
94 | background-color: var(--theme);
95 | font-family: var(--font-family-mono);
96 | font-size: var(--font-size-small);
97 | line-height: 16px;
98 | font-weight: bold;
99 | color: #000000;
100 | content: attr(data-frame);
101 | }
102 | }
103 |
104 | .scrub_line {
105 | position: absolute;
106 | width: 2px;
107 | top: 0;
108 | bottom: 0;
109 | background-color: white;
110 | opacity: 0;
111 | z-index: 40;
112 | pointer-events: none;
113 |
114 | .timeline:hover & {
115 | opacity: 0.05;
116 | }
117 | }
118 |
119 | .range_track {
120 | height: 24px;
121 | cursor: pointer;
122 |
123 | &:active .handle {
124 | fill: transparent !important;
125 | }
126 |
127 | .range {
128 | border-radius: var(--radius);
129 | cursor: grab;
130 | position: absolute;
131 | height: 24px;
132 | background-color: rgba(255,255,255,0.03);
133 | border: 2px solid var(--surface-color-light);
134 | display: flex;
135 | z-index: 5;
136 | align-items: center;
137 | justify-content: center;
138 |
139 | &:active {
140 | cursor: grabbing;
141 | }
142 | }
143 |
144 | &:is(:hover,:active):not(:has(.handle:active)) .handle {
145 | fill: rgba(255,255,255,0.3);
146 | }
147 |
148 | .handle {
149 | color: rgba(255, 255, 255, 0);
150 | width: 20px;
151 | height: 20px;
152 |
153 | .range.active > & {
154 | color: rgba(255, 255, 255, 0.24);
155 | z-index: 1;
156 | min-width: 24px;
157 | }
158 |
159 | .range > &:hover,
160 | .range > &:active {
161 | color: rgba(255, 255, 255, 0.6);
162 | }
163 | }
164 |
165 | .spacer {
166 | flex-grow: 1;
167 | flex-shrink: 1;
168 | }
169 | }
170 |
171 | .toolbar {
172 | --input-background: var(--surface-color);
173 | background-color: var(--surface-color-dark);
174 | border-right: 2px solid var(--background-color-dark);
175 | display: flex;
176 | max-height: 100%;
177 | padding: 8px 8px;
178 | gap: 8px;
179 | flex-direction: column;
180 |
181 | .button_group {
182 | display: flex;
183 | flex-direction: column;
184 |
185 | button {
186 | padding: 0;
187 | width: 28px;
188 | height: unset;
189 | color: #aaa;
190 | aspect-ratio: 1;
191 | border-radius: 0;
192 | box-sizing: content-box;
193 | box-shadow: none !important;
194 |
195 | &[class*="main"] {
196 | background-color: var(--theme-overlay) !important;
197 | color: var(--theme);
198 | }
199 |
200 | // &:hover {
201 | // filter: brightness(1.2);
202 | // }
203 | // flex-shrink: 0;
204 | // background-color: var(--input-background);
205 |
206 | &:not(:last-child) { border-bottom: 1px solid var(--background-color); }
207 |
208 | &:first-child { border-top-left-radius: 4px; border-top-right-radius: 4px; }
209 | &:last-child { border-bottom-left-radius: 4px; border-bottom-right-radius: 4px; }
210 | }
211 | }
212 | }
213 |
214 | .timeline_labels {
215 | display: grid;
216 | width: 136px;
217 | grid-template-columns: 1fr;
218 | grid-template-rows: 102px;
219 | grid-auto-rows: 36px;
220 | border-right: 2px solid var(--background-color-dark);
221 | background-color: color-mix(in hsl, var(--surface-color) 50%, var(--background-color));
222 | }
223 |
224 | .timeline_track_label {
225 | display: flex;
226 | border-bottom: 2px solid var(--background-color-dark);
227 | justify-content: space-between;
228 | align-items: center;
229 | position: relative;
230 | padding: 8px;
231 | width: 100%;
232 | overflow: hidden;
233 |
234 | .label {
235 | overflow: hidden;
236 | text-overflow: ellipsis;
237 | white-space: nowrap;
238 | flex-shrink: 1;
239 | padding-right: 6px;
240 | }
241 |
242 | .buttons {
243 | --input-background: var(--surface-color);
244 | display: flex;
245 | flex-direction: row-reverse;
246 | gap: 4px;
247 | position: absolute;
248 | top: 0;
249 | right: 0;
250 | bottom: 0;
251 | align-items: center;
252 | padding-right: 4px;
253 | padding-left: 28px;
254 | background: linear-gradient(to right, transparent, color-mix(in hsl, var(--surface-color) 50%, var(--background-color)) 24px);
255 | opacity: 0;
256 | z-index: 2;
257 | transform-origin: right;
258 | transition: opacity 0.07s, scale 0.07s;
259 | scale: 0.95;
260 |
261 | .button {
262 | // opacity: 0.7;
263 |
264 | &.active {
265 |
266 | background-color: var(--theme-overlay) !important;
267 | color: var(--theme);
268 | // filter: brightness(1.4);
269 | opacity: 1;
270 | }
271 | }
272 | }
273 |
274 | &:hover .buttons {
275 | opacity: 1;
276 | scale: 1;
277 | }
278 |
279 | .statuses {
280 | display: flex;
281 | flex-direction: row-reverse;
282 | gap: 4px;
283 | flex-shrink: 0;
284 |
285 | .status.solo {
286 | margin: 0 -2px;
287 | scale: 1.1;
288 | }
289 |
290 | .status.muted {
291 | margin-left: 1px;
292 | }
293 |
294 | .status.target {
295 | scale: 0.87;
296 | }
297 | }
298 |
299 | .status {
300 | width: 16px;
301 | color: #666;
302 | }
303 |
304 | }
305 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/Timeline.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import clsx from 'clsx';
4 | import { useSignal } from '@preact/signals';
5 | import { useLayoutEffect, useMemo, useRef } from 'preact/hooks';
6 | import { MouseButton, MouseMask, borderHighlight, clamp, useApplication, useDuration, usePlayerTime, usePreviewSettings, useSharedSettings, useSize, useStateChange, useStorage } from '@motion-canvas/ui';
7 |
8 | import styles from './Timeline.module.scss';
9 |
10 | import Toolbar from './Toolbar';
11 | import { Playhead } from './Playhead';
12 | import { Timestamps } from './Timestamps';
13 | import ScrubPreview from './ScrubPreview';
14 | import TimelineTrack from './TimelineTrack';
15 | import MotionComposer from '../MotionComposer';
16 | import { useClips, useTracks } from '../Hooks';
17 | import { RangeSelector } from './RangeSelector';
18 | import TimelineTrackLabel from './TimelineTrackLabel';
19 | import * as Shortcut from '../shortcut/ShortcutMappings';
20 | import useShortcutHover from '../shortcut/useShortcutHover';
21 | import { useShortcut, useStore, useStoredState } from '../Hooks';
22 | import { Clip, EditorMode, EditorTool, copyClip } from '../Types';
23 | import { TimelineContext, TimelineContextData } from '../Contexts';
24 |
25 | const NUM_SNAP_FRAMES = 3;
26 |
27 | const ZOOM_SPEED = 0.1;
28 |
29 | const ZOOM_MIN = 0.5;
30 |
31 | const MAX_FRAME_SIZE = 128;
32 |
33 | const TIMESTAMP_SPACING = 32;
34 |
35 | /** If the mouse is less than this many pixels from the left edge of the timeline,
36 | * the timeline start will not shift on zoom. */
37 | const ZOOM_START_THRESHOLD = 48;
38 |
39 | export default function Timeline() {
40 | const [ shortcutRef ] = useShortcutHover<HTMLDivElement>('timeline');
41 |
42 | const [ scale, setScale ] = useStorage('composer-scale', 1);
43 | const [ viewOffset, setViewOffset ] = useStorage('composer-offset', 0);
44 | const { range } = useSharedSettings();
45 | const { fps } = usePreviewSettings();
46 | const { player } = useApplication();
47 | const time = usePlayerTime();
48 | const clips = useClips();
49 | const { tracks, targetTrack } = useTracks();
50 | const duration = useDuration();
51 |
52 | const wrapperRef = useRef();
53 | const rect = useSize(wrapperRef);
54 | const rangeRef = useRef();
55 | const setScrubFrame = useRef<(frame: number, pixels?: number) => void>();
56 |
57 | const [ tool, setTool ] = useStoredState('shift', 'editor-tool');
58 | const [ mode, setMode ] = useStoredState('compose', 'editor-mode');
59 | const [ snap, setSnap ] = useStoredState(true, 'editor-snap');
60 |
61 | const modifiedClips = useStore(() => clips.map(arr => [ ...arr ]));
62 | useLayoutEffect(() => void modifiedClips(clips.map(arr => [ ...arr ])), [ clips ]);
63 |
64 | const warnedAboutRange = useRef(false);
65 | const seeking = useSignal(null);
66 |
67 | /** Whether the scene information has been loaded. */
68 | const isReady = duration > 0;
69 |
70 | /** Set the initial scroll position once everything loads. */
71 | useLayoutEffect(() => void(wrapperRef.current.scrollLeft = viewOffset), [ rect.width > 0 && isReady ]);
72 |
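// `scale` is measured in viewport widths: the full duration maps onto `visibleLength`
// (rect.width * scale) pixels, and `fullLength` adds one extra viewport width of slack past the end.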
73 | const sizes = useMemo(() => ({
74 | viewLength: rect.width,
75 | fullLength: rect.width * scale + rect.width,
76 | visibleLength: rect.width * scale,
77 | }), [rect.width, scale]);
78 |
79 | const zoomMax = (MAX_FRAME_SIZE / sizes.viewLength) * duration;
80 |
81 | const conversion = useMemo(() => ({
82 | framesToPixels: (value: number) => (value / duration) * sizes.visibleLength,
83 | framesToPercents: (value: number) => (value / duration) * 100,
84 | pixelsToFrames: (value: number) => (value / sizes.visibleLength) * duration,
85 | }), [ duration, sizes ]);
86 |
87 | const ctx = useMemo(() => {
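// `density` is roughly frames-per-pixel, rounded to the nearest power of two, so the
// timestamp spacing below only changes at discrete zoom steps instead of on every resize.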
88 | const density = Math.pow(2, Math.round(Math.log2(duration / sizes.visibleLength)));
89 | const firstFrame = conversion.pixelsToFrames(viewOffset);
90 | const lastFrame = conversion.pixelsToFrames(viewOffset + sizes.viewLength);
91 |
92 | return {
93 | viewOffset,
94 | firstFrame, lastFrame,
95 | density,
96 | ...sizes,
97 | ...conversion,
98 | pointerToFrames: (value: number) => conversion.pixelsToFrames(value - rect.x + viewOffset),
99 | tool, setTool,
100 | mode, setMode,
101 | snap, setSnap
102 | };
103 | }, [ sizes, conversion, viewOffset, duration, tool, mode, snap, rect.x ]);
104 |
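// When the scene duration or the viewport width changes, rescale proportionally so the
// effective pixels-per-frame stays the same, clamped to the allowed zoom range.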
105 | useStateChange(([ prevDuration, prevWidth ]) => {
106 | const newDuration = duration / fps;
107 | let newScale = scale;
108 |
109 |
110 | if (prevDuration !== 0 && newDuration !== 0) newScale *= newDuration / prevDuration;
111 | if (prevWidth !== 0 && rect.width !== 0) newScale *= prevWidth / rect.width;
112 | if (!isNaN(newScale) && duration > 0) setScale(clamp(ZOOM_MIN, zoomMax, newScale));
113 | },
114 | [ duration / fps, rect.width ],
115 | );
116 |
117 | const timestampDensity = Math.max(1, Math.floor(TIMESTAMP_SPACING * ctx.density));
118 | const timestampFirstFrame = Math.floor(conversion.pixelsToFrames(
119 | viewOffset) / timestampDensity) * timestampDensity;
120 | const timestampLastFrame = Math.ceil(conversion.pixelsToFrames(
121 | viewOffset + sizes.viewLength + TIMESTAMP_SPACING) / timestampDensity) * timestampDensity;
122 |
123 | /** Shortcuts. */
124 |
125 | const releaseTool = useRef(tool);
126 |
127 | useShortcut(Shortcut.RazorTool, {
128 | press: () => setTool(tool => (releaseTool.current = tool, 'cut')),
129 | holdRelease: () => setTool(releaseTool.current),
130 | holdTimeout: 300
131 | }, []);
132 |
133 | useShortcut(Shortcut.ShiftTool, {
134 | press: () => setTool(tool => (releaseTool.current = tool, 'shift')),
135 | holdRelease: () => setTool(releaseTool.current),
136 | holdTimeout: 300
137 | }, []);
138 |
139 | useShortcut(Shortcut.SelectTool, {
140 | press: () => setTool(tool => (releaseTool.current = tool, 'select')),
141 | holdRelease: () => setTool(releaseTool.current),
142 | holdTimeout: 300
143 | }, []);
144 |
145 | useShortcut(Shortcut.ToggleSnapping, {
146 | press: () => setSnap(s => !s),
147 | holdRelease: () => setSnap(s => !s),
148 | holdTimeout: 300
149 | }, []);
150 |
151 | useShortcut(Shortcut.SwapTimelineMode, {
152 | press: () => setMode(mode => mode === 'compose' ? 'clip' : 'compose'),
153 | holdRelease: () => setMode(mode => mode === 'compose' ? 'clip' : 'compose'),
154 | holdTimeout: 300
155 | }, []);
156 |
157 | useShortcut(Shortcut.HoldTimelineMode, {
158 | press: () => setMode(mode => mode === 'compose' ? 'clip' : 'compose'),
159 | release: () => setMode(mode => mode === 'compose' ? 'clip' : 'compose'),
160 | holdTimeout: 300
161 | }, []);
162 |
163 | useLayoutEffect(() => {
164 | wrapperRef.current.scrollLeft = viewOffset;
165 | }, [scale]);
166 |
167 | /** Updates the offset for horizontal scrolling. */
168 | const handleScroll = (evt: UIEvent) => {
169 | setViewOffset((evt.target as HTMLElement).scrollLeft);
170 | };
171 |
172 | /** Updates the scale of the timeline. */
173 | const handleWheel = (evt: WheelEvent) => {
174 | evt.stopPropagation();
175 |
176 | const isHorizontal = Math.abs(evt.deltaX) > Math.abs(evt.deltaY);
177 | if (evt.shiftKey || isHorizontal) return;
178 |
179 | evt.preventDefault();
180 |
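// Zoom toward the pointer: change the scale by ZOOM_SPEED per wheel step, clamp it to
// [ZOOM_MIN, zoomMax], then shift the scroll offset so the frame under the cursor keeps
// its screen position. Near the left edge (ZOOM_START_THRESHOLD) the view start is pinned instead.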
181 | let ratio = 1 - Math.sign(evt.deltaY) * ZOOM_SPEED;
182 | let newScale = scale * ratio;
183 | if (newScale < ZOOM_MIN) {
184 | newScale = ZOOM_MIN;
185 | ratio = newScale / scale;
186 | }
187 | if (newScale > zoomMax) {
188 | newScale = zoomMax;
189 | ratio = newScale / scale;
190 | }
191 | if (newScale === scale) {
192 | return;
193 | }
194 |
195 | let pointer = viewOffset + evt.x - rect.x;
196 | if (evt.x - rect.x < ZOOM_START_THRESHOLD) pointer = viewOffset;
197 |
198 | const newTrackSize = rect.width * newScale + rect.width;
199 | const maxOffset = newTrackSize - rect.width;
200 | const newOffset = clamp(0, maxOffset, viewOffset - pointer + pointer * ratio);
201 |
202 | wrapperRef.current.scrollLeft = newOffset;
203 | if (!isNaN(newScale)) setScale(newScale);
204 | if (!isNaN(newOffset)) setViewOffset(newOffset);
205 | };
206 |
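// Seek to the frame under the pointer, clamped to the user playback range; the first
// attempt to scrub outside that range briefly highlights the range selector as a hint.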
207 | function scrub(pos: number) {
208 | const frame = Math.round(ctx.pointerToFrames(pos));
209 | const minFrame = player.status.secondsToFrames(range[0]);
210 | const maxFrame = Math.min(player.status.secondsToFrames(range[1]), duration);
211 |
212 | seeking.value = clamp(minFrame, maxFrame, frame);
213 | if (time.frame !== seeking.value) player.requestSeek(seeking.value);
214 |
215 | const isInUserRange = player.isInUserRange(frame);
216 | const isOutOfRange = player.isInRange(frame) && !isInUserRange;
217 | if (!warnedAboutRange.current && isOutOfRange) {
218 | warnedAboutRange.current = true;
219 | rangeRef.current?.animate(borderHighlight(), { duration: 200 });
220 | }
221 | if (isInUserRange) warnedAboutRange.current = false;
222 | }
223 |
224 | function handleScrubStart(evt: PointerEvent) {
225 | if (evt.button === MouseButton.Left) {
226 | evt.preventDefault();
227 | (evt.currentTarget as any).setPointerCapture(evt.pointerId);
228 | scrub(evt.x);
229 | }
230 | else if (evt.button === MouseButton.Middle) {
231 | evt.preventDefault();
232 | (evt.currentTarget as any).setPointerCapture(evt.pointerId);
233 | }
234 | }
235 |
236 | function handleScrubMove(evt: PointerEvent) {
237 | if (!(evt.currentTarget as any).hasPointerCapture(evt.pointerId)) return;
238 | if (evt.buttons & MouseMask.Primary) {
239 | scrub(evt.x);
240 | }
241 | else if (evt.buttons & MouseMask.Auxiliary) {
242 | const newOffset = clamp(
243 | 0,
244 | sizes.visibleLength,
245 | viewOffset - evt.movementX,
246 | );
247 | setViewOffset(newOffset);
248 | wrapperRef.current.scrollLeft = newOffset;
249 | }
250 | }
251 |
252 | function handleScrubEnd(evt: PointerEvent) {
253 | if (evt.button === MouseButton.Left || evt.button === MouseButton.Middle) {
254 | seeking.value = null;
255 | warnedAboutRange.current = false;
256 | (evt.currentTarget as any).releasePointerCapture(evt.pointerId);
257 | }
258 | }
259 |
260 | function recomputeFromCache(clip: Clip) {
261 | clip.length = player.status.framesToSeconds(clip.cache.lengthFrames);
262 | clip.offset = player.status.framesToSeconds(clip.cache.clipRange[0]);
263 | clip.start = player.status.framesToSeconds(clip.cache.startFrames);
264 | }
265 |
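// Resolves overlaps after a clip was moved or resized. In 'clip' mode, neighbours the edited
// clip now covers are trimmed at its edges (and dropped if nothing remains). Otherwise,
// contiguous later clips ripple left by however far the edited clip's start moved left,
// and any remaining overlaps are pushed right.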
266 | function fixOverlap(channel: Clip[], newClip: Clip, oldClip: Clip) {
267 | let toDelete = [];
268 |
269 | if (mode === 'clip') {
270 | for (let i = 0; i < channel.length; i++) {
271 | let clip = channel[i];
272 |
273 | // Left side overlapping.
274 | if (newClip.cache.clipRange[1] > clip.cache.clipRange[0] &&
275 | newClip.cache.clipRange[0] < clip.cache.clipRange[0]) {
276 |
277 | const repl = channel[i] = copyClip(clip);
278 | const diff = newClip.cache.clipRange[1] - clip.cache.clipRange[0];
279 | repl.cache.clipRange[0] = newClip.cache.clipRange[1];
280 | repl.cache.startFrames += diff;
281 | repl.cache.lengthFrames = repl.cache.clipRange[1] - repl.cache.clipRange[0];
282 | recomputeFromCache(repl);
283 |
284 | if (repl.cache.lengthFrames <= 0) toDelete.push(repl);
285 | }
286 |
287 | // Right side overlapping.
288 | if (newClip.cache.clipRange[0] < clip.cache.clipRange[1] &&
289 | newClip.cache.clipRange[1] > clip.cache.clipRange[1]) {
290 |
291 | const repl = channel[i] = copyClip(clip);
292 | repl.cache.clipRange[1] = newClip.cache.clipRange[0];
293 | repl.cache.lengthFrames = repl.cache.clipRange[1] - repl.cache.clipRange[0];
294 | recomputeFromCache(repl);
295 |
296 | if (repl.cache.lengthFrames <= 0) toDelete.push(repl);
297 | }
298 | }
299 | }
300 | else {
301 | // Bring things back left.
302 | const backAmount = oldClip.cache.clipRange[0] - newClip.cache.clipRange[0];
303 | if (backAmount > 0) {
304 | let oldRange1 = oldClip.cache.clipRange[1];
305 | for (let i = 1; i < channel.length; i++) {
306 | let clip = channel[i];
307 | if (clip.cache.clipRange[0] < oldRange1) continue;
308 | if (clip.cache.clipRange[0] > oldRange1) break;
309 |
310 | oldRange1 = clip.cache.clipRange[1];
311 | const repl = channel[i] = copyClip(clip);
312 | repl.cache.clipRange[0] -= backAmount;
313 | repl.cache.clipRange[1] -= backAmount;
314 | recomputeFromCache(repl);
315 | }
316 | }
317 |
318 | // Push things right.
319 | for (let i = 1; i < channel.length; i++) {
320 | let clip = channel[i];
321 | let prevClip = channel[i - 1];
322 |
323 | if (prevClip.cache.clipRange[1] > clip.cache.clipRange[0]) {
324 | const repl = channel[i] = copyClip(clip);
325 | const diff = prevClip.cache.clipRange[1] - clip.cache.clipRange[0];
326 | repl.cache.clipRange[0] += diff;
327 | repl.cache.clipRange[1] += diff;
328 | recomputeFromCache(repl);
329 | }
330 | }
331 | }
332 |
333 | for (let clip of toDelete) {
334 | const ind = channel.indexOf(clip);
335 | if (ind !== -1) channel.splice(ind, 1);
336 | }
337 | }
338 |
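// Applies a resize drag to a copy of the clip list using the cached frame ranges. With
// snapping enabled, the dragged edge snaps to clip edges in channel 0 when they are within
// NUM_SNAP_FRAMES frames.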
339 | function handleClipResizeStart(clip: Clip, side: 'left' | 'right', offset: number) {
340 | const newClips = clips.map(arr => [ ...arr ]);
341 | const newClipInd = newClips[clip.cache.channel].findIndex(c => c.uuid === clip.uuid);
342 | if (newClipInd === -1) return;
343 | const oldClip = newClips[clip.cache.channel][newClipInd];
344 | const newClip = newClips[clip.cache.channel][newClipInd] = copyClip(oldClip);
345 |
346 | if (side === 'right') {
347 | const maxNewPos = newClip.cache.sourceFrames - newClip.cache.startFrames + newClip.cache.clipRange[0];
348 | let newPos = newClip.cache.clipRange[0] + newClip.cache.lengthFrames + offset;
349 |
350 | if (snap) {
351 | const nearbyClip = newClips[0].find(
352 | c => Math.abs(c.cache.clipRange[0] - newPos) <= NUM_SNAP_FRAMES &&
353 | c.cache.clipRange[0] <= maxNewPos);
354 | if (nearbyClip) newPos = nearbyClip.cache.clipRange[0];
355 | }
356 |
357 | newPos = Math.min(newPos, maxNewPos);
358 | newClip.cache.clipRange[1] = newPos;
359 | newClip.cache.lengthFrames = newClip.cache.clipRange[1] - newClip.cache.clipRange[0];
360 | }
361 | else if (side === 'left') {
362 | const oldPos = newClip.cache.clipRange[0];
363 | const minNewPos = newClip.cache.clipRange[0] - newClip.cache.startFrames;
364 | let newPos = newClip.cache.clipRange[0] + offset;
365 |
366 | if (snap) {
367 | const nearbyClip = newClips[0].find(c =>
368 | Math.abs(c.cache.clipRange[1] - newPos) <= NUM_SNAP_FRAMES &&
369 | c.cache.clipRange[1] >= minNewPos);
370 | if (nearbyClip) newPos = nearbyClip.cache.clipRange[1];
371 | }
372 |
373 | newPos = Math.max(newPos, minNewPos);
374 | const diff = newPos - oldPos;
375 | newClip.cache.clipRange[0] = newPos;
376 | newClip.cache.lengthFrames = newClip.cache.clipRange[1] - newClip.cache.clipRange[0];
377 | newClip.cache.startFrames = Math.max(newClip.cache.startFrames + diff, 0);
378 | }
379 |
380 | recomputeFromCache(newClip);
381 | fixOverlap(newClips[clip.cache.channel], newClip, oldClip);
382 | modifiedClips(newClips);
383 | }
384 |
385 | function handleClipResizeMove(clip: Clip, offset: number) {
386 | const newClips = clips.map(arr => [ ...arr ]);
387 | const newClipInd = newClips[clip.cache.channel].findIndex(c => c.uuid === clip.uuid);
388 | if (newClipInd === -1) return;
389 | const oldClip = newClips[clip.cache.channel][newClipInd];
390 | const newClip = newClips[clip.cache.channel][newClipInd] = copyClip(oldClip);
391 |
392 | let newPos = Math.max(newClip.cache.clipRange[0] + offset, 0);
393 |
394 | if (snap) {
395 | const snapRight = newClips[clip.cache.channel].find(c =>
396 | Math.abs(c.cache.clipRange[0] - (newPos + newClip.cache.lengthFrames)) <= NUM_SNAP_FRAMES);
397 | if (snapRight) newPos = snapRight.cache.clipRange[0] - newClip.cache.lengthFrames;
398 | else {
399 | const snapLeft = newClips[clip.cache.channel].find(c =>
400 | Math.abs(c.cache.clipRange[1] - newPos) <= NUM_SNAP_FRAMES);
401 | if (snapLeft) newPos = snapLeft.cache.clipRange[1];
402 | }
403 | }
404 |
405 | newPos = Math.max(newPos, 0);
406 |
407 | newClip.cache.clipRange[0] = newPos;
408 | newClip.cache.clipRange[1] = newPos + newClip.cache.lengthFrames;
409 |
410 | recomputeFromCache(newClip);
411 | fixOverlap(newClips[clip.cache.channel], newClip, oldClip);
412 | modifiedClips(newClips);
413 | }
414 |
415 | function handleDragClip(clip: Clip, side: 'left' | 'right' | 'replace') {
416 | console.log(clip, side);
417 | }
418 |
419 | function handleClipCommit() {
420 | MotionComposer.setClips(modifiedClips());
421 | }
422 |
423 | function handleSetTrackSolo(channel: number, solo: boolean) {
424 | const newTracks = [ ...tracks ];
425 | newTracks[channel] = { ...newTracks[channel], solo };
426 | MotionComposer.setTracks(newTracks);
427 | }
428 |
429 | function handleSetTrackMuted(channel: number, muted: boolean) {
430 | const newTracks = [ ...tracks ];
431 | newTracks[channel] = { ...newTracks[channel], muted };
432 | MotionComposer.setTracks(newTracks);
433 | }
434 |
435 | function handleSetTrackLocked(channel: number, locked: boolean) {
436 | const newTracks = [ ...tracks ];
437 | newTracks[channel] = { ...newTracks[channel], locked };
438 | MotionComposer.setTracks(newTracks);
439 | }
440 |
441 | function handleSetTargetTrack(channel: number) {
442 | MotionComposer.setTargetTrack(channel);
443 | }
444 |
445 | return (
446 |
447 |
448 |
449 |
450 |
451 | {(tracks.map((track, i) => <TimelineTrackLabel key={i}
452 | channel={i}
453 | solo={track.solo}
454 | muted={track.muted}
455 | locked={track.locked}
456 | target={targetTrack === i}
457 | setSolo={(solo: boolean) => handleSetTrackSolo(i, solo)}
458 | setLocked={(locked: boolean) => handleSetTrackLocked(i, locked)}
459 | setMuted={(muted: boolean) => handleSetTrackMuted(i, muted)}
460 | setAsTarget={() => handleSetTargetTrack(i)}
461 | />))}
462 |
463 |
472 |
477 |
481 |
482 |
483 |
484 |
485 |
493 |
494 | {(modifiedClips().slice(1).map(track => <TimelineTrack
495 | type='audio'
496 | clips={track}
497 | onClipResizeStart={handleClipResizeStart}
498 | onClipResizeMove={handleClipResizeMove}
499 | onClipCommit={handleClipCommit}
500 | onClipDrag={handleDragClip}
501 | />))}
502 |
503 |
504 |
505 |
506 |
507 |
508 |
509 | )
510 | }
511 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/TimelineTrack.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import clsx from 'clsx';
4 | import { useApplication, useSharedSettings } from '@motion-canvas/ui';
5 |
6 | import styles from './Timeline.module.scss';
7 |
8 | import { Clip } from '../Types';
9 | import { ensure } from '../Util';
10 | import { useTimeline } from '../Contexts';
11 | import { ClipComponents, MissingClip } from './clip/Clip';
12 |
13 | interface Props {
14 | clips: Clip[];
15 | type: 'video' | 'audio';
16 |
17 | onClipResizeStart: (clip: Clip, side: 'left' | 'right', diff: number) => void;
18 | onClipResizeMove: (clip: Clip, diff: number) => void;
19 | onClipCommit: (clip: Clip) => void;
20 | onClipDrag: (clip: Clip, side: 'left' | 'right' | 'replace') => void;
21 | }
22 |
23 | export default function TimelineTrack({ clips, type, ...props }: Props) {
24 | const { player } = useApplication();
25 | const ctx = useTimeline();
26 | const { range } = useSharedSettings();
27 |
28 | return (
29 | <div class={clsx(styles.timeline_track,
30 | type === 'audio' ? styles.audio_track : styles.clips_track)}>
31 | {clips.map(clip => {
32 | ensure((clip.type === 'audio' && type === 'audio') || (clip.type !== 'audio' && type !== 'audio'),
33 | 'Invalid clip type in timeline track!');
34 |
35 | const Component = ClipComponents[clip.type];
36 | const clipProps = {
37 | key: clip.uuid,
38 | clip: clip,
39 |
40 | onResize: (side: 'left' | 'right', diff: number) => props.onClipResizeStart(clip, side, diff),
41 | onMove: (diff: number) => props.onClipResizeMove(clip, diff),
42 | onCommit: () => props.onClipCommit(clip),
43 | onDragClip: (side: 'left' | 'right') => props.onClipDrag(clip, side)
44 | }
45 |
46 | if (!clip.cache.source || !Component) return <MissingClip {...clipProps}/>;
47 | return <Component {...clipProps}/>;
48 | })}
49 | </div>
50 | )
51 | }
52 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/TimelineTrackLabel.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import { Button as ButtonElem } from '@motion-canvas/ui';
4 | const Button = ButtonElem as any;
5 |
6 | import styles from './Timeline.module.scss';
7 |
8 | import * as Icon from '../icon';
9 | import clsx from 'clsx';
10 |
11 | interface Props {
12 | channel: number;
13 |
14 | muted: boolean;
15 | locked: boolean;
16 | solo: boolean;
17 | target: boolean;
18 |
19 | setMuted: (muted: boolean) => void;
20 | setLocked: (locked: boolean) => void;
21 | setSolo: (solo: boolean) => void;
22 | setAsTarget: () => void;
23 | }
24 |
25 | export default function TimelineTrackLabel(props: Props) {
26 | return (
27 |
28 |
{props.channel === 0 ? 'Clips' : `Audio ${props.channel}`}
29 |
30 |
31 | {props.channel !== 0 && }
33 |
35 |
37 |
39 |
40 |
41 |
42 | {props.target && }
43 | {props.locked && }
44 | {props.muted && }
45 | {props.solo && }
46 |
47 |
48 | );
49 | }
50 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/Timestamps.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import clsx from 'clsx';
4 | import { useMemo, useContext } from 'preact/hooks';
5 |
6 | import styles from './Timeline.module.scss';
7 |
8 | import { useTimeline } from '../Contexts';
9 |
10 | interface Props {
11 | firstFrame: number;
12 | lastFrame: number;
13 | density: number;
14 | }
15 |
16 | export function Timestamps({ firstFrame, lastFrame, density }: Props) {
17 | const { framesToPixels } = useTimeline();
18 |
19 | const timestamps = useMemo(() => {
20 | const timestamps = [];
21 | const clamped = Math.max(1, density);
22 | for (let i = firstFrame; i < lastFrame; i += clamped) {
23 | if (i === 0) continue;
24 | timestamps.push(
25 | <div key={i} data-frame={i}
26 | class={clsx(styles.timestamp, (density > 0 && (i / density) % 2 !== 0) && styles.odd)}
27 | style={{ left: `${framesToPixels(i)}px` }}
28 | />,
29 | );
30 | }
31 | return timestamps;
32 | }, [ firstFrame, lastFrame, framesToPixels, density ]);
33 |
34 | return <>{timestamps}</>;
35 | }
36 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/Toolbar.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import { Button } from '@motion-canvas/ui';
4 |
5 | import styles from './Timeline.module.scss';
6 |
7 | import * as Icon from '../icon';
8 | import { useTimeline } from '../Contexts';
9 |
10 | export default function Toolbar() {
11 | const ctx = useTimeline();
12 |
13 | return (
14 |
36 | )
37 | }
38 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/AudioClip.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import { useRef } from 'preact/hooks';
4 |
5 | import styles from './Clip.module.scss';
6 |
7 | import Waveform from './Waveform';
8 | import * as Icon from '../../icon';
9 | import { useAudio } from '../../Contexts';
10 | import Clip, { ClipChildProps } from './Clip';
11 |
12 |
13 | export default function AudioClip({ clip, ...props }: ClipChildProps) {
14 | const audio = useAudio();
15 | const clipRef = useRef<HTMLDivElement>();
16 | const audioData = audio.getAudioData(clip.cache.source).value;
17 |
18 | return (
19 |
26 |
27 |
28 | (e.preventDefault(), e.stopPropagation())}
31 | >
32 | {clip.cache.source?.name ?? clip.path}
33 |
34 |
35 | >
36 | }
37 | staticChildren={
38 |
39 | }
40 | />
41 | );
42 | }
43 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/Clip.module.scss:
--------------------------------------------------------------------------------
1 | .clip {
2 | --clip-gap: 4px;
3 | --clip-pad-left: calc(var(--clip-gap) / 2);
4 | &:first-child { --clip-pad-left: calc(var(--clip-gap)); }
5 | --clip-pad-right: calc(var(--clip-gap) / 2);
6 | &:last-child { --clip-pad-right: calc(var(--clip-gap)); }
7 |
8 | grid-area: 1 / 1 / 2 / 2;
9 | pointer-events: none;
10 | padding-left: var(--clip-pad-left);
11 | padding-right: var(--clip-pad-right);
12 |
13 | .hide_overflow {
14 | position: absolute;
15 | width: 100%;
16 | height: 100%;
17 | inset: 0;
18 | margin-bottom: -100%;
19 | border-radius: 4px;
20 | overflow: hidden;
21 | }
22 |
23 | .invert_gap {
24 | margin-left: calc(-1 * var(--clip-pad-left));
25 | margin-right: calc(-1 * var(--clip-pad-right));
26 | width: calc(100% + var(--clip-pad-left) + var(--clip-pad-right));
27 | height: 100%;
28 | }
29 |
30 | .relative {
31 | position: relative;
32 | }
33 |
34 | .drop_targets {
35 | --target-max-width: 12px;
36 |
37 | display: flex;
38 | position: absolute;
39 | left: 0;
40 | right: 0;
41 | top: -4px;
42 | bottom: -4px;
43 | pointer-events: auto;
44 |
45 | .drop_target {
46 | z-index: 1000;
47 | opacity: 0.3;
48 |
49 | &:hover { opacity: 1; }
50 |
51 | &.left, &.right {
52 | max-width: var(--target-max-width);
53 | flex-grow: 1.5;
54 |
55 | &::after {
56 | content: ' ';
57 | position: absolute;
58 | height: calc(100% - 8px);
59 | width: 2px;
60 | background-color: var(--theme);
61 | z-index: 10000;
62 | top: 4px;
63 | opacity: 0;
64 | border-radius: 4px;
65 | pointer-events: none;
66 | }
67 |
68 | &:hover::after {
69 | opacity: 0.6;
70 | }
71 |
72 | &.left::after {
73 | left: -1px;
74 | }
75 |
76 | &.right::after {
77 | right: -1px;
78 | }
79 | }
80 |
81 | &.replace {
82 | flex-grow: 1;
83 |
84 | &::after {
85 | content: ' ';
86 | position: absolute;
87 | height: calc(100% - 8px);
88 | width: calc(100% - 8px);
89 | border: 2px solid var(--theme);
90 | z-index: 10000;
91 | box-sizing: border-box;
92 | top: 4px;
93 | left: 4px;
94 | opacity: 0;
95 | border-radius: 4px;
96 | pointer-events: none;
97 | }
98 |
99 | &:hover::after {
100 | opacity: 0.6;
101 | }
102 | }
103 | }
104 | }
105 |
106 | &:not(.add_source) .clip_wrapper:hover {
107 | z-index: 100;
108 |
109 | > .clip_inner > .label::before {
110 | background-color: var(--theme);
111 | font-weight: bold;
112 | color: var(--surface-color);
113 | }
114 |
115 | > .clip_inner > .label_backdrop {
116 | background-color: var(--theme-overlay);
117 | }
118 |
119 | // .clip_inner {
120 | // // background-color: color-mix(in hsl, var(--surface-color-hover) 40%, var(--surface-color-light)) !important;
121 | // }
122 | }
123 |
124 | .clip_wrapper {
125 | position: relative;
126 | cursor: pointer;
127 | height: 100%;
128 | pointer-events: auto;
129 |
130 | &:active {
131 | cursor: grabbing;
132 | }
133 |
134 | .clip_inner {
135 | height: 100%;
136 | background-color: var(--surface-color-hover);
137 | border-radius: var(--radius);
138 | transition: background-color 0.15s;
139 | }
140 |
141 | .clip_container {
142 | --label-padding: 16px;
143 | --label-buffer: 512px;
144 | --label-fade-dist: 32px;
145 |
146 | pointer-events: none;
147 | width: calc(100% + var(--label-buffer));
148 | mask-image: linear-gradient(to right,
149 | black 0px,
150 | black calc(100% - var(--label-buffer) - var(--label-fade-dist) - var(--label-padding)),
151 | transparent calc(100% - var(--label-buffer) - var(--label-padding)));
152 | height: 100%;
153 | display: flex;
154 | margin-left: 8px;
155 | align-items: center;
156 | will-change: transform;
157 | }
158 |
159 | .clip_drag {
160 | position: absolute;
161 | width: 4px;
162 | height: 100%;
163 | top: 0;
164 | bottom: 0;
165 | opacity: 0;
166 | border-radius: 0px 4px 4px 0px;
167 | transition: background-color 0.15s, opacity 0.15s;
168 |
169 | &::before {
170 | content: '';
171 | display: block;
172 | position: absolute;
173 | top: 0;
174 | bottom: 0;
175 | left: -6px;
176 | right: -3px;
177 | }
178 |
179 | background-color: rgba(255, 255, 255, 0.15);
180 |
181 | &.can_extend {
182 | background-color: color-mix(in hsl, var(--theme) 40%, transparent);
183 | }
184 |
185 | &:hover {
186 | opacity: 1 !important;
187 | // background-color: var(--theme);
188 | }
189 |
190 | &.left { transform: scaleX(-1); left: 0; cursor: w-resize; }
191 | &.right { right: 0; cursor: e-resize; }
192 |
193 | &:active {
194 | opacity: 1;
195 |
196 | background-color: var(--surface-color-light);
197 | &.can_extend { background-color: var(--theme); }
198 | }
199 | }
200 |
201 | &:hover .clip_drag {
202 | opacity: 0.4;
203 | }
204 | }
205 | }
206 |
207 | .label {
208 | position: absolute;
209 | height: 32px;
210 | padding: 2px;
211 | top: 100%;
212 | cursor: pointer;
213 | z-index: 5;
214 |
215 | &:active {
216 | cursor: grabbing;
217 | }
218 |
219 | &::before {
220 | content: attr(data-name);
221 | display: block;
222 | border-radius: 0 14px 14px 14px;
223 | padding: 0 8px;
224 | font-weight: 500;
225 | font-size: 14px;
226 | background-color: color-mix(in hsl, white 18%, black);
227 | color: color-mix(in hsl, white 40%, black);
228 | border: 2px solid color-mix(in hsl, var(--surface-color) 30%, var(--background-color));
229 | transition: color 0.15s, background-color 0.15s;
230 | }
231 |
232 | &:hover,
233 | &:active {
234 | z-index: 10;
235 |
236 | &::before {
237 | box-shadow: 0 0 0 2px white inset;
238 | }
239 | }
240 | }
241 |
242 | .label_backdrop {
243 | box-sizing: content-box;
244 | position: absolute;
245 | height: 24px;
246 | top: 100%;
247 | margin-top: 4px;
248 | margin-left: 4px;
249 | padding-right: 24px;
250 | cursor: pointer;
251 | border-radius: 0 0 12px 12px;
252 | background-color: color-mix(in hsl, white 4%, transparent);
253 | pointer-events: none;
254 | transition: color 0.15s, background-color 0.15s;
255 | }
256 |
257 | .scene_clip, .missing_clip, .image_clip, .video_clip {
258 | --color: white;
259 | height: 40px;
260 | }
261 |
262 | .clip_label {
263 | display: flex;
264 | gap: 6px;
265 | width: max-content;
266 | position: sticky;
267 | left: 12px;
268 | z-index: 10;
269 | width: max-content;
270 |
271 | svg {
272 | width: 16px;
273 | color: #fff;
274 | opacity: 0.3;
275 | flex-shrink: 0;
276 | }
277 |
278 | .name {
279 | margin: 0;
280 | flex-grow: 1;
281 | overflow: hidden;
282 | white-space: nowrap;
283 |
284 | .source {
285 | cursor: pointer;
286 | position: relative;
287 | height: 100%;
288 |
289 | &:hover {
290 | text-decoration: underline;
291 | }
292 | }
293 | }
294 | }
295 |
296 | .waveform {
297 | position: absolute;
298 | z-index: 0;
299 | opacity: 0.2;
300 | pointer-events: none;
301 | height: 100%;
302 | image-rendering: pixelated;
303 | image-rendering: crisp-edges;
304 | }
305 |
306 | .audio_clip {
307 | height: 32px;
308 | --color: var(--theme);
309 | --background: color-mix(in oklab, var(--theme) 17%, var(--background-color-dark));
310 |
311 | .clip_wrapper {
312 | .clip_inner {
313 | background-color: var(--background);
314 | }
315 | }
316 |
317 | .name {
318 | color: color-mix(in oklab, var(--theme) 40%, white) !important;
319 | text-shadow:
320 | 0px 0px 7px var(--background),
321 | // 0px 0px 8px var(--background),
322 | // 0px 0px 8px var(--background),
323 | // 0px 0px 8px var(--background),
324 | 0px 0px 7px var(--background),
325 | 0px 0px 8px var(--background),
326 | 0px 0px 9px var(--background),
327 | 0px 0px 10px var(--background);
328 | // font-weight: bold;
329 | // background-color: color-mix(in oklab, var(--theme) 15%, var(--background-color-dark));
330 | // padding: 6px 12px 6px 0px !important;
331 | // display: block;
332 | z-index: 20;
333 | }
334 |
335 | .clip_label {
336 | transition: opacity 0.35s;
337 | }
338 |
339 | &:hover .clip_label {
340 | opacity: 0;
341 | }
342 |
343 | svg {
344 | color: color-mix(in oklab, white 30%, var(--theme));
345 | opacity: 0.7;
346 | }
347 |
348 |
349 | .audio_container {
350 | display: grid;
351 | position: absolute;
352 | top: 0;
353 | }
354 | }
355 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/Clip.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import clsx from 'clsx';
4 | import { useRef } from 'preact/hooks';
5 | import { forwardRef } from 'preact/compat';
6 | import { ComponentChildren } from 'preact';
7 | import { useApplication } from '@motion-canvas/ui';
8 |
9 | import styles from './Clip.module.scss';
10 |
11 | import ImageClip from './ImageClip';
12 | import SceneClip from './SceneClip';
13 | import VideoClip from './VideoClip';
14 | import AudioClip from './AudioClip';
15 | import MissingClip from './MissingClip';
16 | import { Clip, ClipType } from '../../Types';
17 | import { useTimeline, useUIContext } from '../../Contexts';
18 |
19 | export interface ClipChildProps {
20 | clip: Clip;
21 |
22 | onMove: (frames: number) => void;
23 | onResize: (side: 'left' | 'right', offset: number) => void;
24 | onCommit: () => void;
25 | onDragClip: (side: 'left' | 'right' | 'replace') => void;
26 | }
27 |
28 | interface ClipProps extends ClipChildProps {
29 | staticChildren?: ComponentChildren;
30 | stickyChildren?: ComponentChildren;
31 |
32 | class?: string;
33 | }
34 |
35 | export default forwardRef(function Clip({ clip, ...props }, ref) {
36 | const { addSource: dragging } = useUIContext();
37 | const { player, meta } = useApplication();
38 | const { framesToPixels, pixelsToFrames } = useTimeline();
39 | const { addSource } = useUIContext();
40 |
41 | const moveSide = useRef<'left' | 'right'>('left');
42 | const moveOffset = useRef(0);
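// Move and resize both use pointer capture: horizontal movement is accumulated in frames
// in moveOffset and reported to the parent on every pointer move; the parent commits the
// pending change when the pointer is released (onCommit).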
43 |
44 | function handleSeek(e: MouseEvent) {
45 | e.stopPropagation();
46 | meta.shared.range.set([
47 | player.status.framesToSeconds(clip.cache.clipRange[0]),
48 | player.status.framesToSeconds(clip.cache.clipRange[1]),
49 | ]);
50 | }
51 |
52 | function handleMoveStart(e: PointerEvent) {
53 | e.preventDefault();
54 | e.stopPropagation();
55 | (e.currentTarget as HTMLElement).setPointerCapture(e.pointerId);
56 | moveOffset.current = 0;
57 | }
58 |
59 | function handleMoveMove(e: PointerEvent) {
60 | if (!(e.currentTarget as HTMLElement).hasPointerCapture(e.pointerId)) return;
61 | e.preventDefault();
62 | e.stopPropagation();
63 |
64 | moveOffset.current += pixelsToFrames(e.movementX);
65 | props.onMove(Math.round(moveOffset.current));
66 | }
67 |
68 | function handleMoveEnd(e: PointerEvent) {
69 | props.onCommit();
70 | (e.currentTarget as HTMLElement).releasePointerCapture(e.pointerId);
71 | }
72 |
73 | function handleResizeStart(e: PointerEvent, side: 'left' | 'right') {
74 | e.preventDefault();
75 | e.stopPropagation();
76 | (e.currentTarget as HTMLElement).setPointerCapture(e.pointerId);
77 | moveSide.current = side;
78 | moveOffset.current = 0;
79 | }
80 |
81 | function handleResizeMove(e: PointerEvent) {
82 | if (!(e.currentTarget as HTMLElement).hasPointerCapture(e.pointerId)) return;
83 | e.preventDefault();
84 | e.stopPropagation();
85 |
86 | moveOffset.current += pixelsToFrames(e.movementX);
87 | props.onResize(moveSide.current, Math.round(moveOffset.current));
88 | }
89 |
90 | function handleResizeEnd(e: PointerEvent) {
91 | props.onCommit();
92 | (e.currentTarget as HTMLElement).releasePointerCapture(e.pointerId);
93 | }
94 |
95 | const width = framesToPixels(clip.cache.clipRange[1] - clip.cache.clipRange[0]);
96 |
97 | const croppedLeft = clip.start > 0;
98 | const croppedRight = clip.cache.sourceFrames
99 | ? clip.cache.lengthFrames < clip.cache.sourceFrames - clip.cache.startFrames
100 | : false;
101 |
102 | return (
103 |
112 |
113 |
119 | {props.staticChildren}
120 |
121 |
122 |
{props.stickyChildren}
123 |
124 |
125 |
handleResizeStart(e, 'left')}
127 | onPointerMove={handleResizeMove}
128 | onPointerUp={handleResizeEnd}
129 | />
130 |
handleResizeStart(e, 'right')}
132 | onPointerMove={handleResizeMove}
133 | onPointerUp={handleResizeEnd}
134 | />
135 |
136 |
137 |
138 | {addSource.value &&
139 |
console.warn('dragOver')}
141 | onMouseOver={dragging && (() => props.onDragClip('left'))}/>
142 |
props.onDragClip('replace'))}/>
144 |
props.onDragClip('right'))}/>
146 |
}
147 |
148 | );
149 | });
150 |
151 | export { default as SceneClip } from './SceneClip';
152 | export { default as ImageClip } from './ImageClip';
153 | export { default as VideoClip } from './VideoClip';
154 | export { default as MissingClip } from './MissingClip';
155 | export { default as AudioClip } from './AudioClip';
156 |
157 | export const ClipComponents: Record<ClipType, any> = {
158 | image: ImageClip,
159 | scene: SceneClip,
160 | video: VideoClip,
161 | audio: AudioClip,
162 | }
163 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/EventLabel.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import { useLayoutEffect, useState } from 'preact/hooks';
4 | import { TimeEvent } from '@motion-canvas/core/lib/scenes/timeEvents';
5 | import { findAndOpenFirstUserFile, labelClipDraggingLeftSignal, useApplication } from '@motion-canvas/ui';
6 |
7 | import styles from './Clip.module.scss';
8 |
9 | import { Clip } from '../../Types';
10 | import { useTimeline } from '../../Contexts';
11 |
12 | interface Props {
13 | clip: Clip;
14 | event: TimeEvent;
15 | }
16 |
17 | export default function EventLabel({ clip, event }: Props) {
18 | const { player } = useApplication();
19 | const { framesToPixels, pixelsToFrames } = useTimeline();
20 |
21 | // How long the event waits before firing.
22 | const [ eventTime, setEventTime ] = useState(event.offset);
23 | useLayoutEffect(() => setEventTime(event.offset), [ event.offset ]);
24 |
25 | // If the mouse is down on this element, whether or not the event has been moved yet.
26 | const [ moved, setMoved ] = useState(false);
27 |
28 | async function handleGoToSource() {
29 | if (!event.stack) return;
30 | await findAndOpenFirstUserFile(event.stack);
31 | }
32 |
33 | function handleSeek() {
34 | player.requestSeek(clip.cache.clipRange[0] - clip.cache.startFrames + player.status.secondsToFrames(event.initialTime + event.offset));
35 | }
36 |
37 | async function handlePointerDown(e: PointerEvent) {
38 | e.preventDefault();
39 |
40 | // Left click to drag.
41 | if (e.button === 0) {
42 | e.stopPropagation();
43 | (e.currentTarget as HTMLElement).setPointerCapture(e.pointerId);
44 | labelClipDraggingLeftSignal.value = event.initialTime + Math.max(0, eventTime);
45 | setMoved(false);
46 | }
47 |
48 | // Middle click to open the source.
49 | else if (e.button === 1) {
50 | await handleGoToSource();
51 | }
52 |
53 | // Right click to seek.
54 | else if (e.button === 2) {
55 | handleSeek();
56 | }
57 | }
58 |
59 | function handlePointerMove(e: PointerEvent) {
60 | setMoved(true);
61 | if (!(e.currentTarget as HTMLElement).hasPointerCapture(e.pointerId)) return;
62 |
63 | e.stopPropagation();
64 | const newTime = eventTime + player.status.framesToSeconds(pixelsToFrames(e.movementX));
65 | labelClipDraggingLeftSignal.value = event.initialTime + Math.max(0, newTime);
66 | setEventTime(newTime);
67 | }
68 |
69 | function handlePointerUp(e: PointerEvent) {
70 | if (e.button !== 0) return;
71 |
72 | (e.currentTarget as HTMLElement).releasePointerCapture(e.pointerId);
73 | labelClipDraggingLeftSignal.value = null;
74 | e.stopPropagation();
75 |
76 | // If the event was moved, update the time.
77 | if (moved) {
78 | const newFrame = Math.max(0, eventTime);
79 | setEventTime(newFrame);
80 | if (event.offset !== newFrame) clip.cache.source.scene!.timeEvents.set(event.name, newFrame, e.shiftKey);
81 | }
82 |
83 | // Else, seek to it.
84 | else {
85 | handleSeek();
86 | }
87 | }
88 |
89 | return (
90 | <>
91 |
102 |
103 |
111 | >
112 | );
113 | }
114 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/ImageClip.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import styles from './Clip.module.scss';
4 |
5 | import * as Icon from '../../icon';
6 | import Clip, { ClipChildProps } from './Clip';
7 |
8 | export default function ImageClip({ clip, ...props }: ClipChildProps) {
9 | return (
10 |
16 |
17 |
18 | (e.preventDefault(), e.stopPropagation())}
21 | >{clip.cache.source.name ?? clip.path}
22 |
23 | >
24 | }
25 | />
26 | );
27 | }
28 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/MissingClip.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import styles from './Clip.module.scss';
4 |
5 | import * as Icon from '../../icon';
6 | import Clip, { ClipChildProps } from './Clip';
7 |
8 | export default function MissingClip({ clip, ...props }: ClipChildProps) {
9 | return (
10 |
16 |
17 |
18 | (e.preventDefault(), e.stopPropagation())}
21 | >Missing '{clip.cache.source?.name ?? clip.path}'
22 |
23 | >
24 | }
25 | />
26 | );
27 | }
28 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/SceneClip.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import { findAndOpenFirstUserFile, useApplication, usePlayerState, useSubscribableValue } from '@motion-canvas/ui';
4 |
5 | import styles from './Clip.module.scss';
6 |
7 | import * as Icon from '../../icon';
8 | import { ensure } from '../../Util';
9 | import EventLabel from './EventLabel';
10 | import Clip, { ClipChildProps } from './Clip';
11 |
12 | export default function SceneClip({ clip, ...props }: ClipChildProps) {
13 | const scene = clip.cache.source?.scene;
14 | ensure(scene, 'SceneClip without scene.');
15 |
16 | const { player } = useApplication();
17 | const events = useSubscribableValue(scene.timeEvents.onChanged);
18 |
19 | async function handleGoToSource(e: MouseEvent) {
20 | e.stopPropagation();
21 | if (!scene.creationStack) return;
22 | await findAndOpenFirstUserFile(scene.creationStack);
23 | }
24 |
25 | return (
26 | event.initialTime < clip.start + clip.length - player.status.framesToSeconds(1))
34 | .map(event => )
35 | }
36 |
37 | stickyChildren={
38 | <>
39 |
40 |
41 | (e.preventDefault(), e.stopPropagation())}
45 | >{scene.name}
46 |
47 | >
48 | }
49 | />
50 | );
51 | }
52 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/VideoClip.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import styles from './Clip.module.scss';
4 |
5 | import * as Icon from '../../icon';
6 | import Clip, { ClipChildProps } from './Clip';
7 | import { useAudio } from '../../Contexts';
8 | import { useRef } from 'preact/hooks';
9 | import Waveform from './Waveform';
10 |
11 | export default function VideoClip({ clip, ...props }: ClipChildProps) {
12 | const audio = useAudio();
13 | const clipRef = useRef();
14 | const audioData = audio.getAudioData(clip.cache.source).value;
15 |
16 | return (
17 |
24 |
25 |
26 | (e.preventDefault(), e.stopPropagation())}
29 | >{clip.cache.source.name ?? clip.path}
30 |
31 | >
32 | }
33 | staticChildren={
34 |
35 | }
36 | />
37 | );
38 | }
39 |
--------------------------------------------------------------------------------
/src/plugin/client/timeline/clip/Waveform.tsx:
--------------------------------------------------------------------------------
1 | /* @jsxImportSource preact */
2 |
3 | import clsx from 'clsx';
4 | import { useApplication } from '@motion-canvas/ui';
5 | import { useRef, useMemo, useEffect, useLayoutEffect } from 'preact/hooks';
6 |
7 | import styles from './Clip.module.scss';
8 |
9 | import { Clip } from '../../Types';
10 | import { useTimeline } from '../../Contexts';
11 | import { AudioData } from '../../audio/AudioController';
12 |
13 | const CANVAS = document.createElement('canvas');
14 | const CTX = CANVAS.getContext('2d');
15 |
16 | const CHUNKINESS = 3;
17 | const OVERFLOW = 256;
18 | const WAVEFORM_AMP = 6;
19 | const WAVEFORM_EXP = 1.7;
20 | const BLANK_IMG_SRC = 'data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==';
21 |
22 | interface Props {
23 | clip: Clip;
24 | audio: AudioData;
25 | height: number;
26 | }
27 |
28 | export default function Waveform({ audio, clip, height }: Props) {
29 | const { player } = useApplication();
30 |
31 | const imgRef = useRef();
32 | const imgSrc = useRef(BLANK_IMG_SRC);
33 | const imgLeft = useRef(0);
34 | const imgWidth = useRef(0);
35 |
36 | const {
37 | firstFrame: viewFirstFrame,
38 | lastFrame: viewLastFrame,
39 | density,
40 | framesToPixels,
41 | pixelsToFrames
42 | } = useTimeline();
43 |
44 | const lastWaveformProps = useRef<[number, number, number, number]>([ 0, 0, 0, 0 ]);
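// The visible window is snapped to OVERFLOW-pixel steps, staggered per clip by uuid, so the
// waveform is only re-rendered when the snapped window changes rather than on every scroll.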
45 | const recomputeOffset = useMemo(() => clip.uuid % OVERFLOW, [clip.uuid]);
46 |
47 | const [ viewFirstPx, viewLastPx ] = useMemo(() => [
48 | Math.floor((framesToPixels(viewFirstFrame) + recomputeOffset) / OVERFLOW) * OVERFLOW - recomputeOffset,
49 | Math.ceil((framesToPixels(viewLastFrame) + recomputeOffset) / OVERFLOW) * OVERFLOW - recomputeOffset,
50 | ], [ viewFirstFrame, viewLastFrame, framesToPixels, recomputeOffset ]);
51 |
52 |
53 | useLayoutEffect(() => {
54 | if (!audio) return;
55 |
56 | const rawClipLeftPx = framesToPixels(clip.cache.clipRange[0]);
57 | const rawClipRightPx = framesToPixels(clip.cache.clipRange[1]);
58 |
59 | if (rawClipRightPx < viewFirstPx || rawClipLeftPx > viewLastPx) return;
60 |
61 | const imgFirstPx = Math.max(viewFirstPx, rawClipLeftPx);
62 | const imgLastPx = Math.min(viewLastPx, rawClipRightPx);
63 | const imgWidthPx = Math.ceil(imgLastPx - imgFirstPx);
64 |
65 | if (!imgWidthPx) {
66 | imgSrc.current = BLANK_IMG_SRC;
67 | if (imgRef.current) imgRef.current.src = BLANK_IMG_SRC;
68 | return;
69 | }
70 |
71 | const drawFirstPx = Math.floor(imgFirstPx / CHUNKINESS) * CHUNKINESS;
72 | const drawLastPx = Math.ceil(imgLastPx / CHUNKINESS) * CHUNKINESS;
73 | const drawWidth = drawLastPx - drawFirstPx;
74 | const drawInsetPx = Math.max(0, drawFirstPx - rawClipLeftPx);
75 |
76 | const drawFirstFrame = pixelsToFrames(drawInsetPx) + clip.cache.startFrames;
77 | const drawLastFrame = pixelsToFrames(drawWidth) + drawFirstFrame;
78 | const drawLengthFrames = drawLastFrame - drawFirstFrame;
79 |
80 | if (lastWaveformProps.current[0] === imgFirstPx &&
81 | lastWaveformProps.current[1] === imgLastPx &&
82 | lastWaveformProps.current[2] === drawWidth &&
83 | lastWaveformProps.current[3] === drawLengthFrames) return;
84 |
85 | lastWaveformProps.current = [ imgFirstPx, imgLastPx, drawWidth, drawLengthFrames ];
86 |
87 | CANVAS.width = imgWidthPx;
88 | CANVAS.height = height;
89 |
90 | CTX.clearRect(0, 0, imgWidthPx, height);
91 | CTX.fillStyle = getComputedStyle(imgRef.current).getPropertyValue('--color');
92 |
93 | const startSec = player.status.framesToSeconds(drawFirstFrame);
94 | const endSec = player.status.framesToSeconds(drawLastFrame);
95 | const lenSecs = endSec - startSec;
96 |
97 | const pixelsPerFrame = 1/density;
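// Pick a peak level: step through audio.peaks until a level yields fewer than ~2 samples
// per rendered pixel (or the last level is reached). The samples-per-frame math assumes 30 fps.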
98 |
99 | let waveformInd = 0;
100 | while (true) {
101 | const waveform = audio.peaks[waveformInd];
102 | const samplesPerFrame = waveform.sampleRate / 30;
103 | if (samplesPerFrame / pixelsPerFrame < 2 || waveformInd === audio.peaks.length - 1) break;
104 | waveformInd++;
105 | }
106 |
107 | const numChunks = Math.ceil(drawWidth / CHUNKINESS);
108 | const waveform = audio.peaks[waveformInd];
109 | const numSamples = (lenSecs) * waveform.sampleRate;
110 | const sampleStep = numSamples / numChunks;
111 | const startSample = startSec * waveform.sampleRate;
112 |
113 | const startOffsetPx = drawFirstPx - imgFirstPx;
114 |
115 | for (let i = 0; i < numChunks; i++) {
116 | let amp = 0;
117 | const numSamples = Math.ceil(startSample + (i + 1) * sampleStep) - Math.floor(startSample + i * sampleStep);
118 | for (let j = Math.floor(startSample + i * sampleStep); j < Math.ceil(startSample + (i + 1) * sampleStep); j++)
119 | amp += waveform.peaks[j] / 0xffff;
120 | amp /= numSamples;
121 | amp = Math.pow(amp, WAVEFORM_EXP);
122 | amp *= audio.absoluteMax * clip.volume * WAVEFORM_AMP * height / 2;
123 |
124 | CTX.rect(
125 | i * CHUNKINESS + startOffsetPx,
126 | height / 2 - Math.abs(amp),
127 | CHUNKINESS - 1,
128 | Math.abs(amp) * 2
129 | );
130 | }
131 |
132 | CTX.fill();
133 |
134 | imgSrc.current = CANVAS.toDataURL();
135 | imgLeft.current = imgFirstPx - rawClipLeftPx;
136 | imgWidth.current = imgWidthPx;
137 |
138 | if (imgRef.current) {
139 | imgRef.current.src = imgSrc.current;
140 | imgRef.current.style.left = `${imgLeft.current}px`;
141 | imgRef.current.style.width = `${imgWidth.current}px`;
142 | }
143 | }, [
144 | clip.cache.clipRange[0],
145 | clip.cache.clipRange[1],
146 | viewFirstPx,
147 | viewLastPx,
148 | framesToPixels,
149 | pixelsToFrames,
150 | audio,
151 | imgRef.current
152 | ]);
153 |
154 | return (
155 |
156 |
157 |

159 |
160 |
161 | )
162 | }
163 |
--------------------------------------------------------------------------------
/src/plugin/common/FileTypes.ts:
--------------------------------------------------------------------------------
1 | export const Video = [ 'mp4', 'mkv', 'webm' ];
2 | export const Audio = [ 'wav', 'mp3', 'ogg' ];
3 | export const Image = [ 'png', 'jpg', 'jpeg', 'webp' ];
4 |
--------------------------------------------------------------------------------
/src/plugin/vite/index.ts:
--------------------------------------------------------------------------------
1 | // @ts-ignore
2 | import { exec, spawn } from 'child_process';
3 | // @ts-ignore
4 | import { promises as fs } from 'fs';
5 |
6 | import * as FileTypes from '../common/FileTypes';
7 | import sharp from 'sharp';
8 |
9 | async function audio(path: string) {
10 | const SAMPLE_INTERVAL = 1000;
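// ffmpeg resamples the file to SAMPLE_INTERVAL Hz mono 16-bit PCM and streams the raw samples
// to stdout; every little-endian int16 becomes one "peak", so the duration in seconds is
// peaks.length / SAMPLE_INTERVAL.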
11 | const peaks = await new Promise<number[]>((res, rej) => {
12 | const ffmpeg = spawn('ffmpeg', [ '-i', path, '-ac', '1', '-filter:a', `aresample=${SAMPLE_INTERVAL}`,
13 | '-map', '0:a', '-c:a', 'pcm_s16le', '-f', 'data', '-' ]);
14 |
15 | // @ts-ignore
16 | let bufferData = Buffer.alloc(0);
17 |
18 | ffmpeg.stdout.on('data', (data: any) => {
19 | // @ts-ignore
20 | bufferData = Buffer.concat([bufferData, data]);
21 | });
22 |
23 | ffmpeg.on('close', (code: any) => {
24 | if (code === 0) {
25 | const peaks = [];
26 | for (let i = 0; i < bufferData.length; i += 2) peaks.push(bufferData.readInt16LE(i));
27 | res(peaks);
28 | }
29 | else {
30 | rej(`ffmpeg exited with code ${code}`);
31 | }
32 | });
33 | });
34 |
35 | return {
36 | type: 'audio',
37 | peaks,
38 | name: path.slice(path.lastIndexOf('/') + 1),
39 | path,
40 | duration: peaks.length / SAMPLE_INTERVAL
41 | };
42 | }
43 |
44 | async function video(path: string) {
45 | const audioData = await audio(path);
46 |
47 | const outputPath = `/tmp/${audioData.name}.thumbnail.png`;
48 |
49 |   await new Promise<void>((res, rej) => {
50 | const ffmpeg = spawn('ffmpeg', [ '-i', path, '-y', '-vf', 'thumbnail,scale=240:-1', '-frames:v', '1', outputPath ]);
51 |
52 | ffmpeg.on('close', (code: any) => {
53 | if (code === 0) res();
54 | else rej(`ffmpeg exited with code ${code}`);
55 | });
56 | });
57 |
58 | const thumbFile = await fs.readFile(outputPath);
59 |   const thumbnail = `data:image/png;base64,${thumbFile.toString('base64')}`;
60 | await fs.unlink(outputPath);
61 |
62 | return {
63 | ...audioData,
64 | type: 'video',
65 | thumbnail
66 | };
67 | }
68 |
69 | async function image(path: string) {
70 | const buffer = await sharp(path)
71 | .resize(240, 180, { fit: 'cover' })
72 | .png()
73 | .toBuffer()
74 |
75 |   const thumbnail = `data:image/png;base64,${buffer.toString('base64')}`;
76 |
77 | return {
78 | type: 'image',
79 | name: path.slice(path.lastIndexOf('/') + 1),
80 | path,
81 | thumbnail,
82 | duration: 0
83 | };
84 | }
85 |
86 | export default function Plugin() {
87 | return {
88 | name: 'motion-composer-media-importer',
89 | transform: async (_: string, id: string) => {
90 | if (!id.endsWith('?meta')) return;
91 | const ext = id.slice(id.lastIndexOf('.') + 1, id.lastIndexOf('?'));
92 | const path = id.slice(0, id.lastIndexOf('?meta'));
93 |
94 | if (FileTypes.Audio.indexOf(ext) !== -1) {
95 | const data = await audio(path);
96 | return `export default JSON.parse(\`${JSON.stringify(data)}\`)`;
97 | }
98 |
99 | if (FileTypes.Video.indexOf(ext) !== -1) {
100 | const data = await video(path);
101 | return `export default JSON.parse(\`${JSON.stringify(data)}\`)`;
102 | }
103 |
104 | if (FileTypes.Image.indexOf(ext) !== -1) {
105 | const data = await image(path);
106 | return `export default JSON.parse(\`${JSON.stringify(data)}\`)`;
107 | }
108 | }
109 | }
110 | }
111 |
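The `transform` hook above rewrites any import ending in `?meta` into a module whose default export is the metadata object produced by `audio`, `video`, or `image`. A hedged sketch of the consuming side follows; the ambient module declaration and the import path are assumptions, not files in this repository:

// Assumed ambient declaration (e.g. in a media.d.ts) so TypeScript accepts `?meta` imports.
declare module '*?meta' {
  const meta: {
    type: 'audio' | 'video' | 'image';
    name: string;
    path: string;
    duration: number;   // seconds; 0 for images
    peaks?: number[];   // signed 16-bit mono peaks at 1000 Hz (audio/video)
    thumbnail?: string; // base64 PNG data URL (video/image)
  };
  export default meta;
}

// Hypothetical usage in client code:
import music from '../media/music.mp3?meta';
console.log(music.duration, music.peaks?.length);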
--------------------------------------------------------------------------------
/src/project.meta:
--------------------------------------------------------------------------------
1 | {
2 | "version": 0,
3 | "shared": {
4 | "background": "rgba(0,0,0,0)",
5 | "range": [
6 | 0,
7 | null
8 | ],
9 | "size": {
10 | "x": 1920,
11 | "y": 1080
12 | },
13 | "audioOffset": 0.01061988304090554
14 | },
15 | "preview": {
16 | "fps": 30,
17 | "resolutionScale": 1
18 | },
19 | "rendering": {
20 | "fps": 30,
21 | "resolutionScale": 1,
22 | "colorSpace": "srgb",
23 | "exporter": {
24 | "name": "@motion-canvas/ffmpeg",
25 | "options": {
26 | "fastStart": true,
27 | "includeAudio": true
28 | }
29 | }
30 | },
31 | "motion-composer": {
32 | "uuidNext": 0,
33 | "clips": [
34 | [
35 | {
36 | "uuid": 0,
37 | "type": "scene",
38 | "path": "Square",
39 | "offset": 0,
40 | "start": 0,
41 | "length": 2,
42 | "volume": 1
43 | },
44 | {
45 | "uuid": 1,
46 | "type": "scene",
47 | "path": "Circle",
48 | "offset": 2.3333333333333335,
49 | "start": 0.8,
50 | "length": 1.6333333333333333,
51 | "volume": 1
52 | },
53 | {
54 | "uuid": 2,
55 | "type": "scene",
56 | "path": "Circle",
57 | "offset": 3.966666666666667,
58 | "start": 0,
59 | "length": 2.966666666666667,
60 | "volume": 1
61 | },
62 | {
63 | "uuid": 6,
64 | "type": "video",
65 | "path": "cobalt.mkv",
66 | "offset": 6.933333333333334,
67 | "start": 23.033333333333335,
68 | "length": 4.333333333333333,
69 | "volume": 1
70 | },
71 | {
72 | "uuid": 5,
73 | "type": "video",
74 | "path": "bindless_audio.mkv",
75 | "offset": 12.133333333333333,
76 | "start": 3,
77 | "length": 2.966666666666667,
78 | "volume": 1
79 | },
80 | {
81 | "uuid": 7,
82 | "type": "image",
83 | "path": "okami_baiku_cropped.png",
84 | "offset": 15.666666666666666,
85 | "start": 0,
86 | "length": 1.4666666666666666,
87 | "volume": 1
88 | },
89 | {
90 | "uuid": 3,
91 | "type": "scene",
92 | "path": "Rectangle",
93 | "offset": 17.533333333333335,
94 | "start": 0,
95 | "length": 2.433333333333333,
96 | "volume": 1
97 | },
98 | {
99 | "uuid": 4,
100 | "type": "scene",
101 | "path": "Square",
102 | "offset": 20.866666666666667,
103 | "start": 0,
104 | "length": 2.466666666666667,
105 | "volume": 1
106 | },
107 | {
108 | "uuid": 8,
109 | "type": "scene",
110 | "path": "Video2",
111 | "offset": 39.56666666666667,
112 | "start": 0,
113 | "length": 5,
114 | "volume": 1
115 | }
116 | ],
117 | [
118 | {
119 | "uuid": 12,
120 | "type": "audio",
121 | "path": "sphinx_of_black_quartz.wav",
122 | "offset": 0,
123 | "start": 0,
124 | "length": 2.6666666666666665,
125 | "volume": 2
126 | },
127 | {
128 | "uuid": 13,
129 | "type": "audio",
130 | "path": "the_quick_brown_fox.wav",
131 | "offset": 4.866666666666666,
132 | "start": 0,
133 | "length": 3.3,
134 | "volume": 2
135 | },
136 | {
137 | "uuid": 24,
138 | "type": "audio",
139 | "path": "test_recording.wav",
140 | "offset": 9,
141 | "start": 0.7333333333333333,
142 | "length": 0.7666666666666667,
143 | "volume": 6
144 | },
145 | {
146 | "uuid": 25,
147 | "type": "audio",
148 | "path": "test_recording.wav",
149 | "offset": 9.766666666666667,
150 | "start": 0.7333333333333333,
151 | "length": 0.7666666666666667,
152 | "volume": 6
153 | },
154 | {
155 | "uuid": 26,
156 | "type": "audio",
157 | "path": "test_recording.wav",
158 | "offset": 10.533333333333333,
159 | "start": 0.7333333333333333,
160 | "length": 0.7666666666666667,
161 | "volume": 6
162 | }
163 | ],
164 | [
165 | {
166 | "uuid": 14,
167 | "type": "audio",
168 | "path": "astronaut_rain.mp3",
169 | "offset": 0,
170 | "start": 29.333333333333332,
171 | "length": 287.3,
172 | "volume": 0.4
173 | }
174 | ],
175 | [
176 | {
177 | "uuid": 18,
178 | "type": "audio",
179 | "path": "astronaut_rain.mp3",
180 | "offset": 12.2,
181 | "start": 41.53333333333333,
182 | "length": 17.6,
183 | "volume": 0.4
184 | }
185 | ]
186 | ],
187 | "tracks": [
188 | {
189 | "solo": false,
190 | "muted": false,
191 | "locked": false
192 | },
193 | {
194 | "solo": false,
195 | "muted": false,
196 | "locked": false
197 | },
198 | {
199 | "solo": false,
200 | "muted": false,
201 | "locked": false
202 | },
203 | {
204 | "solo": false,
205 | "muted": false,
206 | "locked": false
207 | }
208 | ],
209 | "targetTrack": 2
210 | }
211 | }
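For reference, every clip record stored under `motion-composer.clips` in this file shares the same shape. Below is a sketch of that shape as inferred from the JSON above; the type names are mine, and the field meanings are hedged from how the timeline code uses them:

// Inferred from project.meta: one entry per clip, grouped into per-track arrays.
interface StoredClip {
  uuid: number;                                // numeric clip identifier
  type: 'scene' | 'video' | 'image' | 'audio'; // clip kinds present in this project
  path: string;                                // scene name or media file name
  offset: number;                              // position on the timeline, in seconds
  start: number;                               // trim into the source, in seconds
  length: number;                              // duration on the timeline, in seconds
  volume: number;                              // linear gain; the waveform renderer scales peaks by it
}

interface StoredTrack {
  solo: boolean;
  muted: boolean;
  locked: boolean;
}

// "clips" is StoredClip[][] (one inner array per track);
// "tracks" is StoredTrack[] in the same order, with "targetTrack" as an index into it.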
--------------------------------------------------------------------------------
/src/project.ts:
--------------------------------------------------------------------------------
1 | import { makeProject } from '@motion-canvas/core';
2 |
3 | import Circle from './scenes/Circle?scene';
4 | import Square from './scenes/Square?scene';
5 | import Rectangle from './scenes/Rectangle?scene';
6 | import Video from './scenes/Video?scene';
7 |
8 | import MotionComposer from './plugin/client';
9 |
10 | export default makeProject({
11 | scenes: [
12 | Circle,
13 | Square,
14 | Video,
15 | Rectangle,
16 | ],
17 | plugins: [ MotionComposer() ],
18 | experimentalFeatures: true
19 | });
20 |
--------------------------------------------------------------------------------
/src/scenes/Circle.meta:
--------------------------------------------------------------------------------
1 | {
2 | "version": 0,
3 | "timeEvents": [
4 | {
5 | "name": "almostEnd",
6 | "targetTime": 5.644535945857457
7 | }
8 | ],
9 | "seed": 2790190071
10 | }
--------------------------------------------------------------------------------
/src/scenes/Circle.tsx:
--------------------------------------------------------------------------------
1 | import {Circle, makeScene2D} from '@motion-canvas/2d';
2 | import {createRef, waitFor, waitUntil} from '@motion-canvas/core';
3 |
4 | export default makeScene2D(function* (view) {
5 | view.fill('#111111');
6 |
7 |   const circle = createRef<Circle>();
8 |
9 |   view.add(<Circle ref={circle} size={240} fill={'#e13238'} />); // props assumed; the original JSX was stripped
10 |
11 | yield* circle().scale(2, 1).to(1, 1);
12 | yield* waitUntil('almostEnd');
13 | yield* waitFor(0.1);
14 | });
15 |
--------------------------------------------------------------------------------
/src/scenes/Rectangle.meta:
--------------------------------------------------------------------------------
1 | {
2 | "version": 0,
3 | "timeEvents": [
4 | {
5 | "name": "start",
6 | "targetTime": 0.4496054689846114
7 | },
8 | {
9 | "name": "end",
10 | "targetTime": 3.449605468984611
11 | }
12 | ],
13 | "seed": 3337110269
14 | }
--------------------------------------------------------------------------------
/src/scenes/Rectangle.tsx:
--------------------------------------------------------------------------------
1 | import {Rect, makeScene2D} from '@motion-canvas/2d';
2 | import {createRef, waitUntil} from '@motion-canvas/core';
3 |
4 | export default makeScene2D(function* (view) {
5 | view.fill('#111111');
6 |
7 |   const rect = createRef<Rect>();
8 |
9 |   view.add(<Rect ref={rect} size={[ 300, 300 ]} fill={'#e13238'} />); // props assumed; the original JSX was stripped
10 |
11 | yield* waitUntil('start');
12 | yield* rect().size([ 200, 500 ], 1).to([ 500, 200 ], 1).to([ 300, 300 ], 1);
13 | yield* waitUntil('end');
14 | });
15 |
--------------------------------------------------------------------------------
/src/scenes/Square.meta:
--------------------------------------------------------------------------------
1 | {
2 | "version": 0,
3 | "timeEvents": [
4 | {
5 | "name": "start",
6 | "targetTime": 0.2476332877199803
7 | },
8 | {
9 | "name": "back",
10 | "targetTime": 2.883768543405991
11 | }
12 | ],
13 | "seed": 504328287
14 | }
--------------------------------------------------------------------------------
/src/scenes/Square.tsx:
--------------------------------------------------------------------------------
1 | import {Rect, makeScene2D} from '@motion-canvas/2d';
2 | import {createRef, waitUntil} from '@motion-canvas/core';
3 |
4 | export default makeScene2D(function* (view) {
5 | view.fill('#111111');
6 |
7 |   const rect = createRef<Rect>();
8 |
9 |   view.add(<Rect ref={rect} size={300} fill={'#e13238'} />); // props assumed; the original JSX was stripped
10 |
11 | yield* waitUntil('start');
12 | yield* rect().rotation(360, 1);
13 | yield* waitUntil('back');
14 | yield* rect().rotation(0, 1);
15 | });
16 |
--------------------------------------------------------------------------------
/src/scenes/Video.meta:
--------------------------------------------------------------------------------
1 | {
2 | "version": 0,
3 | "timeEvents": [],
4 | "seed": 2529886763
5 | }
--------------------------------------------------------------------------------
/src/scenes/Video.tsx:
--------------------------------------------------------------------------------
1 | import { Video, makeScene2D} from '@motion-canvas/2d';
2 | import { waitFor } from '@motion-canvas/core';
3 |
4 | export default makeScene2D(function* (view) {
5 | view.fill('#111111');
6 |
7 |   yield view.add(<Video src={'example.mp4'} />); // hypothetical src; the original JSX was stripped
8 |
9 | yield* waitFor(5);
10 | });
11 |
--------------------------------------------------------------------------------
/src/scenes/example.meta:
--------------------------------------------------------------------------------
1 | {
2 | "version": 0,
3 | "timeEvents": [],
4 | "seed": 1354941888
5 | }
--------------------------------------------------------------------------------
/src/test.svg:
--------------------------------------------------------------------------------
(SVG markup not recoverable; the element tags were stripped during extraction)
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "@motion-canvas/2d/tsconfig.project.json",
3 | "include": ["src"]
4 | }
5 |
--------------------------------------------------------------------------------
/vite.config.ts:
--------------------------------------------------------------------------------
1 | import {defineConfig} from 'vite';
2 | import motionCanvas from '@motion-canvas/vite-plugin';
3 | import ffmpeg from '@motion-canvas/ffmpeg';
4 |
5 | import MotionComposer from './src/plugin/vite';
6 |
7 | export default defineConfig({
8 | plugins: [
9 | motionCanvas(),
10 | ffmpeg(),
11 | MotionComposer(),
12 | ],
13 | });
14 |
--------------------------------------------------------------------------------