import React, {
  useRef,
  useEffect,
  useState,
  useCallback,
  forwardRef,
} from "react";
import {AlignedSegment} from "../services/transcriptionApi";
import {useTranscriptionStore} from "../stores/transcriptionStore";
import {formatTime} from "../utils/subtitleUtils";
import {assignTracksToSegments, getMaxTrackCount} from "../utils/trackUtils";
import {useTimelineGeometry} from "../hooks/useTimelineGeometry";
import {useTimelineDragControls} from "../hooks/useTimelineDragControls";
import {useTimelineRenderer} from "../hooks/useTimelineRenderer";
import SegmentEditor from "./SegmentEditor";
import MediaDownloadControls from "./MediaDownloadControls";
import MediaEditControls from "./MediaEditControls";
interface CanvasTimelineProps {
  audioRef: React.RefObject<HTMLAudioElement>;
  videoRef: React.RefObject<HTMLVideoElement>;
  onSeekToSegment: (segment: AlignedSegment) => void;
  onTimeUpdate: () => void;
  viewport?: {start: number; end: number};
}
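/**
 * Canvas-based timeline for the transcription editor. Renders segments on
 * stacked tracks, wires up hover/drag interaction via useTimelineDragControls,
 * and auto-scrolls the container so the playhead stays visible during playback
 * and after seeks.
 */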
const CanvasTimeline = forwardRef<HTMLDivElement, CanvasTimelineProps>(
  ({audioRef, videoRef}, ref) => {
    const canvasRef = useRef<HTMLCanvasElement>(null);
    const containerRef = useRef<HTMLDivElement>(null);

    // Combine the forwarded ref with our internal container ref
    const combinedRef = useCallback(
      (node: HTMLDivElement | null) => {
        // Cast past the readonly RefObject typing to update our internal ref
        (containerRef as React.MutableRefObject<HTMLDivElement | null>).current =
          node;
        // Forward to the external ref, which may be a callback or an object ref
        if (typeof ref === "function") {
          ref(node);
        } else if (ref) {
          (ref as React.MutableRefObject<HTMLDivElement | null>).current = node;
        }
      },
      [ref]
    );
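    // Canvas dimensions in CSS pixels; recomputed whenever the timeline geometry changes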
    const [canvasSize, setCanvasSize] = useState({width: 1200, height: 200});

    const {
      transcription,
      currentTime,
      activeSegmentIndex,
      selectedSegmentIndex,
      currentSegments,
      setSelectedSegmentIndex,
      updateSegmentText,
      deleteSegment,
    } = useTranscriptionStore();
    // Layout constants
    const constants = {
      TRACK_HEIGHT: 32,
      TRACK_PADDING: 4,
      TIMELINE_PADDING: 0,
      PIXELS_PER_SECOND: 300, // Increased from 200 to give segments more space
    };
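    // Assign each segment to a track row so overlapping segments render on separate rows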
    const displaySegments =
      currentSegments || transcription?.aligned_segments || [];
    const segmentsWithTracks = assignTracksToSegments(displaySegments);
    const trackCount = getMaxTrackCount(segmentsWithTracks);
    // Get the actual media duration from the audio/video elements
    const getMediaDuration = useCallback(() => {
      const audioElement = audioRef.current;
      const videoElement = videoRef.current;
      if (audioElement && !isNaN(audioElement.duration)) {
        return audioElement.duration;
      }
      if (videoElement && !isNaN(videoElement.duration)) {
        return videoElement.duration;
      }
      // Fall back to the transcription duration if media metadata is not available yet
      return transcription?.total_duration ?? 0;
    }, [audioRef, videoRef, transcription?.total_duration]);

    const mediaDuration = getMediaDuration();
    // Calculate canvas dimensions based on full media duration
    const timelineWidth = mediaDuration * constants.PIXELS_PER_SECOND;
    const timelineHeight =
      constants.TIMELINE_PADDING * 2 +
      trackCount * (constants.TRACK_HEIGHT + constants.TRACK_PADDING);

    // Update canvas size when needed
    useEffect(() => {
      setCanvasSize({
        width: timelineWidth, // Canvas internal resolution
        height: Math.max(timelineHeight, 200),
      });
    }, [timelineWidth, timelineHeight, trackCount]);
    // Initialize geometry utilities
    const geometryUtils = useTimelineGeometry({
      mediaDuration,
      constants,
    });

    // Initialize drag controls
    const dragControls = useTimelineDragControls({
      segmentsWithTracks,
      displaySegments,
      geometryUtils,
      canvasRef,
      containerRef,
      mediaDuration,
      constants: {
        TRACK_HEIGHT: constants.TRACK_HEIGHT,
        TIMELINE_PADDING: constants.TIMELINE_PADDING,
      },
    });

    // Initialize renderer
    const {draw} = useTimelineRenderer({
      canvasRef,
      canvasSize,
      segmentsWithTracks,
      displaySegments,
      currentTime,
      activeSegmentIndex,
      selectedSegmentIndex,
      hoveredSegment: dragControls.hoveredSegment,
      isDragging: dragControls.isDragging,
      dragSegmentIndex: dragControls.dragSegmentIndex,
      mediaDuration,
      geometryUtils,
      constants,
    });
    // State for smooth scrolling animation
    const scrollAnimationRef = useRef<number | null>(null);

    // Smooth scroll implementation using requestAnimationFrame
    const smoothScrollTo = useCallback(
      (
        container: HTMLDivElement,
        targetScrollLeft: number,
        duration = 500
      ): Promise<void> => {
        return new Promise((resolve) => {
          const startScrollLeft = container.scrollLeft;
          const scrollDistance = targetScrollLeft - startScrollLeft;
          const startTime = Date.now();

          const animate = () => {
            const currentTime = Date.now();
            const elapsedTime = currentTime - startTime;
            const progress = Math.min(elapsedTime / duration, 1);

            // Use easeOutQuart for smooth deceleration
            const easeOutQuart = 1 - Math.pow(1 - progress, 4);
            container.scrollLeft =
              startScrollLeft + scrollDistance * easeOutQuart;

            if (progress < 1) {
              scrollAnimationRef.current = requestAnimationFrame(animate);
            } else {
              scrollAnimationRef.current = null;
              resolve();
            }
          };

          // Cancel any existing animation
          if (scrollAnimationRef.current) {
            cancelAnimationFrame(scrollAnimationRef.current);
          }
          animate();
        });
      },
      []
    );

    // Cleanup animation on unmount
    useEffect(() => {
      return () => {
        if (scrollAnimationRef.current) {
          cancelAnimationFrame(scrollAnimationRef.current);
        }
      };
    }, []);
    // Determine if media is playing for auto-scroll behavior
    const isMediaPlaying = useCallback(() => {
      const audioElement = audioRef.current;
      const videoElement = videoRef.current;
      const mediaElement = audioElement || videoElement;
      return mediaElement && !mediaElement.paused && !mediaElement.ended;
    }, [audioRef, videoRef]);

    // Track if we're currently animating scroll to avoid re-triggering
    const isScrollingRef = useRef(false);
    const prevCurrentTimeRef = useRef(currentTime);

    // Auto-scroll during playback: only when playing and playhead gets near edges (20%)
    useEffect(() => {
      const container = containerRef.current;
      if (!container || !isMediaPlaying() || isScrollingRef.current) return;

      const timeX = geometryUtils.timeToX(currentTime);
      const containerWidth = container.clientWidth;
      const currentScrollLeft = container.scrollLeft;
      const maxScrollLeft = Math.max(0, container.scrollWidth - containerWidth);

      // Calculate 20% edge boundaries
      const leftEdge = currentScrollLeft + containerWidth * 0.2;
      const rightEdge =
        currentScrollLeft + containerWidth - containerWidth * 0.2;

      // Only scroll if playhead is near edges
      if (timeX < leftEdge || timeX > rightEdge) {
        isScrollingRef.current = true;
        // Center the playhead position
        const targetScrollLeft = Math.max(
          0,
          Math.min(maxScrollLeft, timeX - containerWidth / 2)
        );
        smoothScrollTo(container, targetScrollLeft, 800).then(() => {
          isScrollingRef.current = false;
        });
      }
    }, [currentTime, geometryUtils, isMediaPlaying, smoothScrollTo]);

    // Handle manual seeking (scrubbing, keyboard shortcuts, etc.)
    useEffect(() => {
      const container = containerRef.current;
      if (!container || isScrollingRef.current) return;

      const timeDifference = Math.abs(currentTime - prevCurrentTimeRef.current);
      const isSeekOperation = timeDifference > 0.5; // Significant time jump indicates seeking

      if (isSeekOperation) {
        const timeX = geometryUtils.timeToX(currentTime);
        const containerWidth = container.clientWidth;
        const currentScrollLeft = container.scrollLeft;
        const maxScrollLeft = Math.max(
          0,
          container.scrollWidth - containerWidth
        );

        // Check if the seek position is outside the visible area
        const visibleStart = currentScrollLeft;
        const visibleEnd = currentScrollLeft + containerWidth;

        if (timeX < visibleStart || timeX > visibleEnd) {
          isScrollingRef.current = true;
          // Center the seek position
          const targetScrollLeft = Math.max(
            0,
            Math.min(maxScrollLeft, timeX - containerWidth / 2)
          );
          smoothScrollTo(container, targetScrollLeft, 600).then(() => {
            isScrollingRef.current = false;
          });
        }
      }
      prevCurrentTimeRef.current = currentTime;
    }, [currentTime, geometryUtils, smoothScrollTo]);

    // Redraw on scroll
    useEffect(() => {
      const container = containerRef.current;
      if (!container) return;

      const handleScroll = () => {
        draw();
      };
      container.addEventListener("scroll", handleScroll);
      return () => container.removeEventListener("scroll", handleScroll);
    }, [draw]);
    // No transcription yet: render nothing. The guard sits after all hook calls
    // so the hook order stays stable across renders (Rules of Hooks).
    if (!transcription) {
      return null;
    }

    return (
      <div className="flex-1 flex flex-col bg-gray-900 border-t border-gray-700 min-h-32">
        {/* Header */}
        <div className="px-4 py-2 bg-gray-800 border-b border-gray-700">
          {/* Download Buttons - Centered above edit controls */}
          {/* <div className="flex justify-center mb-2">
            <MediaDownloadControls />
          </div> */}
          {/* Edit Controls */}
          {/* <MediaEditControls /> */}
        </div>

        {/* Canvas Container */}
        <div
          ref={combinedRef}
          className="flex-1 overflow-auto bg-black border-t border-slate-700"
          style={{
            minHeight: "200px",
            scrollBehavior: "auto", // Changed from 'smooth' to 'auto' for responsive following
          }}
        >
          <canvas
            ref={canvasRef}
            onMouseMove={dragControls.handleMouseMove}
            onMouseDown={dragControls.handleMouseDown}
            className="block"
            style={{
              width: `${canvasSize.width}px`,
              height: `${canvasSize.height}px`,
            }}
          />
        </div>

        {/* Tooltip for hovered segment */}
        {dragControls.hoveredSegment !== null &&
          !dragControls.isDragging &&
          !dragControls.isTimelineDragging &&
          (() => {
            // Find the segment in segmentsWithTracks that corresponds to the hovered original segment
            const originalSegment =
              displaySegments[dragControls.hoveredSegment];
            // Safety check: ensure the segment exists
            if (!originalSegment) return null;

            const hoveredSegmentWithTrack = segmentsWithTracks.find(
              (s) =>
                s.start === originalSegment.start &&
                s.end === originalSegment.end &&
                s.text === originalSegment.text
            );
            if (!hoveredSegmentWithTrack) return null;

            return (
              <div className="absolute bottom-4 left-4 bg-gray-800 text-white text-xs rounded px-2 py-1 pointer-events-none z-30 max-w-xs">
                <div className="whitespace-normal break-words">
                  {hoveredSegmentWithTrack.text}
                </div>
                <div className="text-gray-400 mt-1">
                  {formatTime(hoveredSegmentWithTrack.start)} -{" "}
                  {formatTime(hoveredSegmentWithTrack.end)} (
                  {hoveredSegmentWithTrack.duration.toFixed(1)}s)
                </div>
                <div className="text-yellow-400 mt-1 text-xs">
                  Click to select • Drag to move • Drag edges to resize
                </div>
              </div>
            );
          })()}

        {/* Segment Editor at Bottom */}
        {selectedSegmentIndex !== null &&
          displaySegments[selectedSegmentIndex] && (
            <SegmentEditor
              segment={displaySegments[selectedSegmentIndex]}
              segmentIndex={selectedSegmentIndex}
              onUpdateText={updateSegmentText}
              onDeleteSegment={deleteSegment}
              onClose={() => setSelectedSegmentIndex(null)}
            />
          )}
      </div>
    );
  }
);

CanvasTimeline.displayName = "CanvasTimeline";

export default CanvasTimeline;