Audio-Classifier/frontend/components/AudioPlayer.tsx
Benoit 4d8fa57ab2
Fix all API calls to use getApiUrl() instead of process.env

Problem: the previous commit only fixed api.ts, but AudioPlayer
and page.tsx still used process.env.NEXT_PUBLIC_API_URL directly,
which ignored the runtime config.

Files fixed:
1. lib/api.ts:
   - Export getApiUrl() for external use

2. app/page.tsx:
   - Import getApiUrl
   - /api/library/scan: process.env → getApiUrl()
   - /api/library/scan/status: process.env → getApiUrl()

3. components/AudioPlayer.tsx:
   - Import getApiUrl
   - /api/audio/waveform: process.env → getApiUrl()
   - /api/audio/stream: process.env → getApiUrl()
   - /api/audio/download: process.env → getApiUrl()

ALL client-side API calls now go through the runtime config
(window.__RUNTIME_CONFIG__).

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-24 10:54:38 +01:00
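For context, getApiUrl() is the helper in lib/api.ts that the commit routes every request through. That file is not shown on this page, so the sketch below is only an illustration of the pattern the commit message describes: on the client, prefer a config object injected at runtime (window.__RUNTIME_CONFIG__), and otherwise fall back to the build-time NEXT_PUBLIC_API_URL. The apiUrl field name and the localhost fallback are assumptions, not taken from the repository.

// Sketch of lib/api.ts (hypothetical) — illustrates the runtime-config lookup
// described in the commit; the field name and fallback URL are assumed.
declare global {
  interface Window {
    __RUNTIME_CONFIG__?: { apiUrl?: string }
  }
}

export function getApiUrl(): string {
  // Client side: prefer the config injected when the container starts,
  // so the image does not need to be rebuilt to change the API URL.
  if (typeof window !== "undefined" && window.__RUNTIME_CONFIG__?.apiUrl) {
    return window.__RUNTIME_CONFIG__.apiUrl
  }
  // Server side, or no runtime config present: fall back to the build-time value.
  return process.env.NEXT_PUBLIC_API_URL ?? "http://localhost:8000"
}

AudioPlayer.tsx below consumes the helper through template literals such as `${getApiUrl()}/api/audio/stream/${track.id}`, which is exactly the call pattern the commit switched over.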


"use client"
import { useState, useRef, useEffect } from "react"
import type { Track } from "@/lib/types"
import { getApiUrl } from "@/lib/api"
interface AudioPlayerProps {
track: Track | null
isPlaying: boolean
onPlayingChange: (playing: boolean) => void
}
export default function AudioPlayer({ track, isPlaying, onPlayingChange }: AudioPlayerProps) {
const [currentTime, setCurrentTime] = useState(0)
const [duration, setDuration] = useState(0)
const [volume, setVolume] = useState(1)
const [isMuted, setIsMuted] = useState(false)
const [waveformPeaks, setWaveformPeaks] = useState<number[]>([])
const [isLoadingWaveform, setIsLoadingWaveform] = useState(false)
const audioRef = useRef<HTMLAudioElement>(null)
const progressRef = useRef<HTMLDivElement>(null)
// Load audio and waveform when track changes
useEffect(() => {
if (!track) {
onPlayingChange(false)
setCurrentTime(0)
setWaveformPeaks([])
return
}
setCurrentTime(0)
loadWaveform(track.id)
if (audioRef.current) {
audioRef.current.load()
// Autoplay when track loads if isPlaying is true
if (isPlaying) {
audioRef.current.play().catch((error: unknown) => {
console.error("Autoplay failed:", error)
onPlayingChange(false)
})
}
}
}, [track?.id])
// Update current time as audio plays
useEffect(() => {
const audio = audioRef.current
if (!audio) return
const updateTime = () => setCurrentTime(audio.currentTime)
const updateDuration = () => {
if (audio.duration && isFinite(audio.duration)) {
setDuration(audio.duration)
}
}
const handleEnded = () => onPlayingChange(false)
audio.addEventListener("timeupdate", updateTime)
audio.addEventListener("loadedmetadata", updateDuration)
audio.addEventListener("durationchange", updateDuration)
audio.addEventListener("ended", handleEnded)
// Initialize duration if already loaded
if (audio.duration && isFinite(audio.duration)) {
setDuration(audio.duration)
}
return () => {
audio.removeEventListener("timeupdate", updateTime)
audio.removeEventListener("loadedmetadata", updateDuration)
audio.removeEventListener("durationchange", updateDuration)
audio.removeEventListener("ended", handleEnded)
}
}, [track?.id])
const loadWaveform = async (trackId: string) => {
setIsLoadingWaveform(true)
try {
const response = await fetch(
`${getApiUrl()}/api/audio/waveform/${trackId}`
)
if (response.ok) {
const data = await response.json()
setWaveformPeaks(data.peaks || [])
}
} catch (error) {
console.error("Failed to load waveform:", error)
} finally {
setIsLoadingWaveform(false)
}
}
// Sync playing state with audio element
useEffect(() => {
const audio = audioRef.current
if (!audio) return
if (isPlaying) {
audio.play().catch((error: unknown) => {
console.error("Play failed:", error)
onPlayingChange(false)
})
} else {
audio.pause()
}
}, [isPlaying, onPlayingChange])
const togglePlay = () => {
if (!audioRef.current || !track) return
onPlayingChange(!isPlaying)
}
const handleVolumeChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const newVolume = parseFloat(e.target.value)
setVolume(newVolume)
if (audioRef.current) {
audioRef.current.volume = newVolume
}
if (newVolume === 0) {
setIsMuted(true)
} else if (isMuted) {
setIsMuted(false)
}
}
const toggleMute = () => {
if (!audioRef.current) return
if (isMuted) {
audioRef.current.volume = volume
setIsMuted(false)
} else {
audioRef.current.volume = 0
setIsMuted(true)
}
}
const handleWaveformClick = (e: React.MouseEvent<HTMLDivElement>) => {
if (!audioRef.current || !progressRef.current || !track) return
const rect = progressRef.current.getBoundingClientRect()
const x = e.clientX - rect.left
const percentage = x / rect.width
const newTime = percentage * duration
audioRef.current.currentTime = newTime
setCurrentTime(newTime)
}
const formatTime = (seconds: number) => {
if (!isFinite(seconds)) return "0:00"
const mins = Math.floor(seconds / 60)
const secs = Math.floor(seconds % 60)
return `${mins}:${secs.toString().padStart(2, "0")}`
}
const progress = duration > 0 ? (currentTime / duration) * 100 : 0
return (
<div className="bg-gray-50 border-t border-gray-300 shadow-lg" style={{ height: '80px' }}>
{/* Hidden audio element */}
{track && <audio ref={audioRef} src={`${getApiUrl()}/api/audio/stream/${track.id}`} />}
<div className="h-full flex items-center gap-3 px-4">
{/* Play/Pause button */}
<button
onClick={togglePlay}
disabled={!track}
className="w-10 h-10 flex items-center justify-center bg-orange-500 hover:bg-orange-600 disabled:bg-gray-300 disabled:cursor-not-allowed rounded-full transition-colors flex-shrink-0"
aria-label={isPlaying ? "Pause" : "Play"}
>
{isPlaying ? (
<svg className="w-4 h-4 text-white" fill="currentColor" viewBox="0 0 24 24">
<path d="M6 4h4v16H6V4zm8 0h4v16h-4V4z"/>
</svg>
) : (
<svg className="w-4 h-4 text-white ml-0.5" fill="currentColor" viewBox="0 0 24 24">
<path d="M8 5v14l11-7z"/>
</svg>
)}
</button>
{/* Track info */}
<div className="flex-shrink-0 w-48">
{track ? (
<>
<div className="text-sm font-medium text-gray-900 truncate">
{track.filename}
</div>
<div className="text-xs text-gray-500">
{track.classification.genre.primary.split("---")[0]} {Math.round(track.features.tempo_bpm)} BPM
</div>
</>
) : (
<div className="text-sm text-gray-400">No track selected</div>
)}
</div>
{/* Time */}
<div className="text-xs text-gray-500 flex-shrink-0 w-16">
{formatTime(currentTime)}
</div>
{/* Waveform */}
<div className="flex-1 min-w-0">
<div
ref={progressRef}
className="relative h-12 cursor-pointer overflow-hidden flex items-center bg-gray-100 rounded"
onClick={handleWaveformClick}
>
{isLoadingWaveform ? (
<div className="flex items-center justify-center h-full w-full">
<span className="text-xs text-gray-400">Loading...</span>
</div>
) : waveformPeaks.length > 0 ? (
<div className="flex items-center h-full w-full gap-[1px] px-1">
{waveformPeaks
.filter((_: number, index: number) => index % 4 === 0) // Take every 4th peak to reduce from 800 to 200
.map((peak: number, index: number) => {
const originalIndex = index * 4
const isPlayed = (originalIndex / waveformPeaks.length) * 100 <= progress
return (
<div
key={index}
className="flex-1 flex items-center justify-center"
style={{
minWidth: "1px",
maxWidth: "4px",
height: "100%",
}}
>
<div
className={`w-full rounded-sm transition-colors ${
isPlayed ? "bg-orange-500" : "bg-gray-400"
}`}
style={{
height: `${Math.max(peak * 70, 4)}%`,
}}
/>
</div>
)
})}
</div>
) : (
<div className="flex items-center h-full w-full px-2">
<div className="w-full h-1 bg-gray-300 rounded-full">
<div
className="h-full bg-orange-500 rounded-full transition-all"
style={{ width: `${progress}%` }}
/>
</div>
</div>
)}
</div>
</div>
{/* Time remaining */}
<div className="text-xs text-gray-500 flex-shrink-0 w-16 text-right">
{formatTime(duration)}
</div>
{/* Volume control */}
<div className="flex items-center gap-2 flex-shrink-0">
<button
onClick={toggleMute}
className="w-8 h-8 flex items-center justify-center text-gray-600 hover:text-gray-900 transition-colors rounded hover:bg-gray-200"
aria-label={isMuted ? "Unmute" : "Mute"}
>
{isMuted || volume === 0 ? (
<svg className="w-5 h-5" fill="currentColor" viewBox="0 0 24 24">
<path d="M16.5 12c0-1.77-1.02-3.29-2.5-4.03v2.21l2.45 2.45c.03-.2.05-.41.05-.63zm2.5 0c0 .94-.2 1.82-.54 2.64l1.51 1.51C20.63 14.91 21 13.5 21 12c0-4.28-2.99-7.86-7-8.77v2.06c2.89.86 5 3.54 5 6.71zM4.27 3L3 4.27 7.73 9H3v6h4l5 5v-6.73l4.25 4.25c-.67.52-1.42.93-2.25 1.18v2.06c1.38-.31 2.63-.95 3.69-1.81L19.73 21 21 19.73l-9-9L4.27 3zM12 4L9.91 6.09 12 8.18V4z"/>
</svg>
) : volume < 0.5 ? (
<svg className="w-5 h-5" fill="currentColor" viewBox="0 0 24 24">
<path d="M7 9v6h4l5 5V4l-5 5H7z"/>
</svg>
) : (
<svg className="w-5 h-5" fill="currentColor" viewBox="0 0 24 24">
<path d="M3 9v6h4l5 5V4L7 9H3zm13.5 3c0-1.77-1.02-3.29-2.5-4.03v8.05c1.48-.73 2.5-2.25 2.5-4.02zM14 3.23v2.06c2.89.86 5 3.54 5 6.71s-2.11 5.85-5 6.71v2.06c4.01-.91 7-4.49 7-8.77s-2.99-7.86-7-8.77z"/>
</svg>
)}
</button>
<input
type="range"
min="0"
max="1"
step="0.01"
value={isMuted ? 0 : volume}
onChange={handleVolumeChange}
className="w-20 h-1 bg-gray-300 rounded-lg appearance-none cursor-pointer accent-orange-500"
style={{
background: `linear-gradient(to right, #f97316 0%, #f97316 ${(isMuted ? 0 : volume) * 100}%, #d1d5db ${(isMuted ? 0 : volume) * 100}%, #d1d5db 100%)`
}}
aria-label="Volume"
/>
</div>
{/* Download button */}
{track && (
<a
href={`${getApiUrl()}/api/audio/download/${track.id}`}
download
className="w-8 h-8 flex items-center justify-center text-gray-600 hover:text-gray-900 transition-colors rounded hover:bg-gray-200 flex-shrink-0"
aria-label="Download"
>
<svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4" />
</svg>
</a>
)}
</div>
</div>
)
}