"use client" import { useState, useRef, useEffect } from "react" import type { Track } from "@/lib/types" interface AudioPlayerProps { track: Track | null isPlaying: boolean onPlayingChange: (playing: boolean) => void } export default function AudioPlayer({ track, isPlaying, onPlayingChange }: AudioPlayerProps) { const [currentTime, setCurrentTime] = useState(0) const [duration, setDuration] = useState(0) const [volume, setVolume] = useState(1) const [isMuted, setIsMuted] = useState(false) const [waveformPeaks, setWaveformPeaks] = useState([]) const [isLoadingWaveform, setIsLoadingWaveform] = useState(false) const audioRef = useRef(null) const progressRef = useRef(null) // Load audio and waveform when track changes useEffect(() => { if (!track) { onPlayingChange(false) setCurrentTime(0) setWaveformPeaks([]) return } setCurrentTime(0) loadWaveform(track.id) if (audioRef.current) { audioRef.current.load() // Autoplay when track loads if isPlaying is true if (isPlaying) { audioRef.current.play().catch((error: unknown) => { console.error("Autoplay failed:", error) onPlayingChange(false) }) } } }, [track?.id]) // Update current time as audio plays useEffect(() => { const audio = audioRef.current if (!audio) return const updateTime = () => setCurrentTime(audio.currentTime) const updateDuration = () => { if (audio.duration && isFinite(audio.duration)) { setDuration(audio.duration) } } const handleEnded = () => onPlayingChange(false) audio.addEventListener("timeupdate", updateTime) audio.addEventListener("loadedmetadata", updateDuration) audio.addEventListener("durationchange", updateDuration) audio.addEventListener("ended", handleEnded) // Initialize duration if already loaded if (audio.duration && isFinite(audio.duration)) { setDuration(audio.duration) } return () => { audio.removeEventListener("timeupdate", updateTime) audio.removeEventListener("loadedmetadata", updateDuration) audio.removeEventListener("durationchange", updateDuration) audio.removeEventListener("ended", handleEnded) } }, [track?.id]) const loadWaveform = async (trackId: string) => { setIsLoadingWaveform(true) try { const response = await fetch( `${process.env.NEXT_PUBLIC_API_URL}/api/audio/waveform/${trackId}` ) if (response.ok) { const data = await response.json() setWaveformPeaks(data.peaks || []) } } catch (error) { console.error("Failed to load waveform:", error) } finally { setIsLoadingWaveform(false) } } // Sync playing state with audio element useEffect(() => { const audio = audioRef.current if (!audio) return if (isPlaying) { audio.play().catch((error: unknown) => { console.error("Play failed:", error) onPlayingChange(false) }) } else { audio.pause() } }, [isPlaying, onPlayingChange]) const togglePlay = () => { if (!audioRef.current || !track) return onPlayingChange(!isPlaying) } const handleVolumeChange = (e: React.ChangeEvent) => { const newVolume = parseFloat(e.target.value) setVolume(newVolume) if (audioRef.current) { audioRef.current.volume = newVolume } if (newVolume === 0) { setIsMuted(true) } else if (isMuted) { setIsMuted(false) } } const toggleMute = () => { if (!audioRef.current) return if (isMuted) { audioRef.current.volume = volume setIsMuted(false) } else { audioRef.current.volume = 0 setIsMuted(true) } } const handleWaveformClick = (e: React.MouseEvent) => { if (!audioRef.current || !progressRef.current || !track) return const rect = progressRef.current.getBoundingClientRect() const x = e.clientX - rect.left const percentage = x / rect.width const newTime = percentage * duration 
    audioRef.current.currentTime = newTime
    setCurrentTime(newTime)
  }

  const formatTime = (seconds: number) => {
    if (!isFinite(seconds)) return "0:00"
    const mins = Math.floor(seconds / 60)
    const secs = Math.floor(seconds % 60)
    return `${mins}:${secs.toString().padStart(2, "0")}`
  }

  const progress = duration > 0 ? (currentTime / duration) * 100 : 0

  return (
    {/* Hidden audio element */}
    {track &&