'use client';

import React, { useState } from 'react';
import {
  Fab,
  Tooltip,
  Snackbar,
  Alert,
  Dialog,
  DialogTitle,
  DialogContent,
  DialogActions,
  Button,
  Box,
  Typography,
  CircularProgress,
  Chip,
  IconButton,
} from '@mui/material';
import MicIcon from '@mui/icons-material/Mic';
import MicOffIcon from '@mui/icons-material/MicOff';
import { useRouter } from 'next/navigation';
import { useVoiceInput } from '@/hooks/useVoiceInput';
import { useAuth } from '@/lib/auth/AuthContext';
import { trackingApi } from '@/lib/api/tracking';
import { childrenApi } from '@/lib/api/children';

/**
 * Classification payload shape, inferred from how it is used in this
 * component (returned by the backend classifier and by useVoiceInput).
 */
interface ClassificationResult {
  error?: boolean;
  message?: string;
  type?: string;
  intent?: string;
  confidence?: number;
  timestamp?: string;
  details?: { notes?: string; [key: string]: unknown };
}

/**
 * Floating voice input button.
 *
 * An always-visible floating action button for quick voice commands,
 * positioned in the bottom-right corner for easy thumb access.
 */
export function VoiceFloatingButton() {
  const router = useRouter();
  const { user } = useAuth();
  const [open, setOpen] = useState(false);
  const [isProcessing, setIsProcessing] = useState(false);
  const [classificationResult, setClassificationResult] =
    useState<ClassificationResult | null>(null);
  const [lastClassifiedTranscript, setLastClassifiedTranscript] = useState('');
  const [snackbar, setSnackbar] = useState<{
    open: boolean;
    message: string;
    severity: 'success' | 'info' | 'warning' | 'error';
  }>({
    open: false,
    message: '',
    severity: 'info',
  });

  const familyId = user?.families?.[0]?.familyId;

  const {
    isListening,
    isSupported,
    transcript,
    classification,
    error,
    usesFallback,
    startListening,
    stopListening,
    reset,
  } = useVoiceInput();

  // Auto-use the classification from the backend when transcription completes
  // (MediaRecorder fallback). The identity check against the previous result
  // prevents re-submitting the same classification when isProcessing toggles.
  React.useEffect(() => {
    if (
      classification &&
      classification !== classificationResult &&
      !isListening &&
      !isProcessing &&
      open
    ) {
      setClassificationResult(classification);
      handleClassifiedIntent(classification);
    }
  }, [classification, classificationResult, isListening, isProcessing, open]);

  // For the Web Speech API (desktop), classify the transcript client-side.
  React.useEffect(() => {
    if (
      !usesFallback &&
      transcript &&
      !isListening &&
      !isProcessing &&
      open &&
      transcript !== lastClassifiedTranscript
    ) {
      classifyTranscript(transcript);
    }
  }, [usesFallback, transcript, isListening, isProcessing, open, lastClassifiedTranscript]);

  const handleOpen = () => {
    if (!isSupported) {
      setSnackbar({
        open: true,
        message: 'Voice input is not supported in this browser. Please use Chrome, Edge, or Safari.',
        severity: 'error',
      });
      return;
    }
    setOpen(true);
    reset();
    setClassificationResult(null);
    setLastClassifiedTranscript('');
  };

  const handleClose = () => {
    if (isListening) {
      stopListening();
    }
    setOpen(false);
    reset();
    setClassificationResult(null);
    setLastClassifiedTranscript('');
  };

  const handleStartListening = () => {
    reset();
    setClassificationResult(null);
    setLastClassifiedTranscript('');
    startListening();
  };

  const handleStopListening = () => {
    stopListening();
  };

  /**
   * Classify a transcript via the backend. POST /api/voice/transcribe is
   * expected to accept `{ text }` and respond with
   * `{ success: true, classification }` on success or `{ message }` on failure.
   */
  const classifyTranscript = async (text: string) => {
    // Mark this transcript as classified to prevent duplicate calls.
    setLastClassifiedTranscript(text);
    setIsProcessing(true);

    try {
      const response = await fetch('/api/voice/transcribe', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ text }),
      });

      const data = await response.json();

      if (response.ok && data.success) {
        setClassificationResult(data.classification);
        handleClassifiedIntent(data.classification);
      } else {
        setClassificationResult({
          error: true,
          message: data.message || 'Could not understand command',
        });
      }
    } catch (error) {
      console.error('[Voice] Classification error:', error);
      setClassificationResult({
        error: true,
        message: 'Failed to process command',
      });
    } finally {
      setIsProcessing(false);
    }
  };

  const handleClassifiedIntent = async (result: any) => {
    if (result.error) {
      setSnackbar({ open: true, message: result.message, severity: 'error' });
      return;
    }

    // Handle unknown or low-confidence classifications.
    if (result.type === 'unknown' || (result.confidence && result.confidence < 0.3)) {
      setSnackbar({
        open: true,
        message: 'Could not understand the command. Please try again or use manual entry.',
        severity: 'warning',
      });
      return;
    }

    if (!familyId) {
      setSnackbar({
        open: true,
        message: 'No family found. Please set up your profile first.',
        severity: 'error',
      });
      return;
    }

    try {
      setIsProcessing(true);

      // Fetch the family's children.
      const children = await childrenApi.getChildren(familyId);

      if (children.length === 0) {
        setSnackbar({
          open: true,
          message: 'No children found. Please add a child first.',
          severity: 'error',
        });
        setIsProcessing(false);
        return;
      }

      // Use the first child (this could be extended to match a child by name).
      const childId = children[0].id;

      // Create the activity.
      const activityData = {
        type: result.type,
        timestamp: result.timestamp || new Date().toISOString(),
        data: result.details || {},
        notes: result.details?.notes || undefined,
      };

      console.log('[Voice] Creating activity:', activityData);
      await trackingApi.createActivity(childId, activityData);

      // Show a success message.
      const activityLabel = result.type.charAt(0).toUpperCase() + result.type.slice(1);
      setSnackbar({
        open: true,
        message: `${activityLabel} activity saved successfully!`,
        severity: 'success',
      });

      // Auto-close the dialog.
      setTimeout(() => {
        handleClose();
      }, 1500);
    } catch (error: any) {
      console.error('[Voice] Failed to create activity:', error);
      setSnackbar({
        open: true,
        message: error.response?.data?.message || 'Failed to save activity. Please try again.',
        severity: 'error',
      });
    } finally {
      setIsProcessing(false);
    }
  };

  const handleCloseSnackbar = () => {
    setSnackbar(prev => ({ ...prev, open: false }));
  };

  return (
    <>
      {/* Floating button positioned in bottom-right */}
      <Tooltip title="Voice command" placement="left">
        <Fab
          color="primary"
          aria-label="voice command"
          onClick={handleOpen}
          sx={{ position: 'fixed', bottom: 24, right: 24 }}
        >
          <MicIcon />
        </Fab>
      </Tooltip>

      {/* Voice input dialog */}
      <Dialog open={open} onClose={handleClose} fullWidth maxWidth="xs">
        <DialogTitle>
          Voice Command
          {classificationResult && !classificationResult.error && (
            <Chip label={classificationResult.intent} size="small" sx={{ ml: 1 }} />
          )}
        </DialogTitle>
        <DialogContent>
          <Box sx={{ display: 'flex', flexDirection: 'column', alignItems: 'center', gap: 2 }}>
            {/* Microphone animation */}
            <IconButton
              color={isListening ? 'error' : 'primary'}
              size="large"
              onClick={isListening ? handleStopListening : handleStartListening}
            >
              {isListening ? <MicOffIcon fontSize="large" /> : <MicIcon fontSize="large" />}
            </IconButton>

            {/* Status text */}
            <Typography variant="body2" color="text.secondary">
              {isListening ? 'Listening... Speak now' : 'Click the microphone to start'}
            </Typography>

            {/* Transcript */}
            {transcript && (
              <Typography variant="body1">
                <strong>Transcript:</strong> {transcript}
              </Typography>
            )}

            {/* Processing indicator */}
            {isProcessing && (
              <Box sx={{ display: 'flex', alignItems: 'center', gap: 1 }}>
                <CircularProgress size={20} />
                <Typography variant="body2">Processing command...</Typography>
              </Box>
            )}

            {/* Classification result */}
            {classificationResult && !classificationResult.error && (
              <Alert severity="success" sx={{ width: '100%' }}>
                Understood: {classificationResult.intent}
              </Alert>
            )}

            {/* Error messages */}
            {(error || (classificationResult && classificationResult.error)) && (
              <Alert severity="error" sx={{ width: '100%' }}>
                {error || classificationResult?.message}
              </Alert>
            )}

            {/* Examples */}
            {!transcript && !isListening && (
              <Typography variant="body2" color="text.secondary" component="div">
                Example commands:
                <br />• "Fed baby 120 ml"
                <br />• "Nursed on left breast for 15 minutes"
                <br />• "Changed wet diaper"
                <br />• "Baby napped for 45 minutes"
              </Typography>
            )}
          </Box>
        </DialogContent>
        <DialogActions>
          <Button onClick={handleClose}>Close</Button>
        </DialogActions>
      </Dialog>

      {/* Snackbar for feedback */}
      <Snackbar
        open={snackbar.open}
        autoHideDuration={4000}
        onClose={handleCloseSnackbar}
        anchorOrigin={{ vertical: 'bottom', horizontal: 'center' }}
      >
        <Alert onClose={handleCloseSnackbar} severity={snackbar.severity} sx={{ width: '100%' }}>
          {snackbar.message}
        </Alert>
      </Snackbar>
    </>
  );
}
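
// Usage sketch: render the button once in a shared layout so the FAB is
// available on every page. The file path and import path below are
// assumptions for illustration (a Next.js App Router layout); adjust them
// to wherever this component actually lives in the project.
//
// // app/layout.tsx (hypothetical)
// import { VoiceFloatingButton } from '@/components/VoiceFloatingButton';
//
// export default function RootLayout({ children }: { children: React.ReactNode }) {
//   return (
//     <html lang="en">
//       <body>
//         {children}
//         <VoiceFloatingButton />
//       </body>
//     </html>
//   );
// }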