'use client';

import React, { useState } from 'react';
import {
  Fab,
  Tooltip,
  Snackbar,
  Alert,
  Dialog,
  DialogTitle,
  DialogContent,
  DialogActions,
  Button,
  Box,
  Typography,
  CircularProgress,
  Chip,
  IconButton,
} from '@mui/material';
import MicIcon from '@mui/icons-material/Mic';
import MicOffIcon from '@mui/icons-material/MicOff';
import { useRouter } from 'next/navigation';
import { useVoiceInput } from '@/hooks/useVoiceInput';
import { useAuth } from '@/lib/auth/AuthContext';
import { trackingApi } from '@/lib/api/tracking';
import { childrenApi } from '@/lib/api/children';

/**
 * Floating voice input button.
 *
 * Always-visible floating action button for quick voice commands,
 * positioned in the bottom-right corner for easy thumb access.
 */
export function VoiceFloatingButton() {
  const router = useRouter();
  const { user } = useAuth();
  const [open, setOpen] = useState(false);
  const [isProcessing, setIsProcessing] = useState(false);
  const [processingStatus, setProcessingStatus] = useState<
    'listening' | 'understanding' | 'saving' | null
  >(null);
  const [identifiedActivity, setIdentifiedActivity] = useState('');
  const [classificationResult, setClassificationResult] = useState<any>(null);
  const [processedClassificationId, setProcessedClassificationId] = useState<string | null>(null);
  const [snackbar, setSnackbar] = useState<{
    open: boolean;
    message: string;
    severity: 'success' | 'info' | 'warning' | 'error';
  }>({
    open: false,
    message: '',
    severity: 'info',
  });

  const familyId = user?.families?.[0]?.familyId;

  const {
    isListening,
    isSupported,
    transcript,
    classification,
    error,
    usesFallback,
    startListening,
    stopListening,
    reset,
  } = useVoiceInput();

  // Track the processing stage as listening starts/stops
  React.useEffect(() => {
    if (isListening) {
      setProcessingStatus('listening');
    } else if (processingStatus === 'listening' && transcript) {
      // Transition from listening to understanding once we have a transcript
      setProcessingStatus('understanding');
    }
  }, [isListening, transcript]);

  // Auto-use the classification from the backend when transcription completes.
  // MediaRecorder sends audio to the backend, which transcribes + classifies in one call.
  React.useEffect(() => {
    // Derive a unique ID for this classification from transcript + type + timestamp
    const classificationId = classification
      ? `${transcript}-${classification.type}-${classification.timestamp}`
      : null;

    // Only process a classification we haven't already handled
    if (
      classification &&
      !isListening &&
      !isProcessing &&
      open &&
      classificationId !== processedClassificationId
    ) {
      console.log('[Voice] New classification detected, processing...', classificationId);
      setProcessedClassificationId(classificationId);
      setClassificationResult(classification);
      handleClassifiedIntent(classification);
    }
  }, [classification, isListening, isProcessing, open, transcript, processedClassificationId]);

  const handleOpen = () => {
    if (!isSupported) {
      setSnackbar({
        open: true,
        message: 'Voice input is not supported in this browser. Please use Chrome, Edge, or Safari.',
        severity: 'error',
      });
      return;
    }
    setOpen(true);
    reset();
    setClassificationResult(null);
    setProcessingStatus(null);
    setIdentifiedActivity('');
    setProcessedClassificationId(null);
  };

  const handleClose = () => {
    if (isListening) {
      stopListening();
    }
    setOpen(false);
    reset();
    setClassificationResult(null);
    setProcessingStatus(null);
    setIdentifiedActivity('');
    setProcessedClassificationId(null);
  };

  const handleStartListening = () => {
    reset();
    setClassificationResult(null);
    setProcessedClassificationId(null);
    startListening();
  };

  const handleStopListening = () => {
    stopListening();
  };
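  // Optional enhancement (sketch, not wired in below): pick the child whose name is
  // mentioned in the transcript instead of always defaulting to the first child.
  // Assumes each child record has a `firstName` string field; adjust to the actual
  // shape returned by childrenApi.getChildren.
  const matchChildByName = <T extends { id: string; firstName?: string }>(
    spoken: string,
    children: T[],
  ): T | undefined => {
    const lower = spoken.toLowerCase();
    // Find the first child whose name appears anywhere in the spoken text,
    // falling back to the first child when no name is mentioned
    return (
      children.find((child) => child.firstName && lower.includes(child.firstName.toLowerCase())) ??
      children[0]
    );
  };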
  const handleClassifiedIntent = async (result: any) => {
    console.log('[Voice] handleClassifiedIntent called with result:', result);

    if (result.error) {
      console.log('[Voice] Result has error:', result.message);
      setProcessingStatus(null);
      setSnackbar({
        open: true,
        message: result.message,
        severity: 'error',
      });
      return;
    }

    // Support both formats: the backend returns 'type', the local classifier returns 'intent'
    const activityType = result.type || result.intent;
    console.log('[Voice] Activity type:', activityType);

    // Surface the identified activity in the status display
    setIdentifiedActivity(activityType);

    // Handle unknown intent or low confidence
    if (activityType === 'unknown' || (result.confidence && result.confidence < 0.3)) {
      console.log('[Voice] Unknown or low confidence:', activityType, result.confidence);
      setProcessingStatus(null);
      setSnackbar({
        open: true,
        message: 'Could not understand the command. Please try again or use manual entry.',
        severity: 'warning',
      });
      return;
    }

    // Require a family before looking up children
    if (!familyId) {
      console.log('[Voice] No familyId found');
      setProcessingStatus(null);
      setSnackbar({
        open: true,
        message: 'No family found. Please set up your profile first.',
        severity: 'error',
      });
      return;
    }
    console.log('[Voice] Family ID:', familyId);

    try {
      setIsProcessing(true);
      setProcessingStatus('saving');

      // Fetch the family's children
      console.log('[Voice] Fetching children for family:', familyId);
      const children = await childrenApi.getChildren(familyId);
      console.log('[Voice] Children found:', children.length, children);

      if (children.length === 0) {
        setSnackbar({
          open: true,
          message: 'No children found. Please add a child first.',
          severity: 'error',
        });
        setIsProcessing(false);
        return;
      }

      // Use the first child (see the matchChildByName sketch above for optional name matching)
      const childId = children[0].id;
      console.log('[Voice] Using child ID:', childId);

      // Create the activity
      const activityData = {
        type: activityType,
        timestamp: result.timestamp || new Date().toISOString(),
        data: result.details || result.structuredData || {},
        notes: result.details?.notes || result.structuredData?.notes || undefined,
      };
      console.log('[Voice] Creating activity with data:', JSON.stringify(activityData, null, 2));

      const createdActivity = await trackingApi.createActivity(childId, activityData);
      console.log('[Voice] Activity created successfully:', createdActivity);

      // Show a success message
      const activityLabel = activityType.charAt(0).toUpperCase() + activityType.slice(1);
      setSnackbar({
        open: true,
        message: `${activityLabel} activity saved successfully!`,
        severity: 'success',
      });

      // Auto-close the dialog after a short delay
      setTimeout(() => {
        handleClose();
      }, 1500);
    } catch (error: any) {
      console.error('[Voice] Failed to create activity - Full error:', error);
      console.error('[Voice] Error response:', error.response);
      console.error('[Voice] Error data:', error.response?.data);
      setSnackbar({
        open: true,
        message: error.response?.data?.message || 'Failed to save activity. Please try again.',
        severity: 'error',
      });
    } finally {
      setIsProcessing(false);
    }
  };

  const handleCloseSnackbar = () => {
    setSnackbar((prev) => ({ ...prev, open: false }));
  };

  return (
    <>
      {/* Floating button positioned in bottom-right */}
      <Tooltip title="Voice command">
        <Fab
          color="primary"
          aria-label="voice command"
          onClick={handleOpen}
          sx={{ position: 'fixed', bottom: 24, right: 24 }}
        >
          <MicIcon />
        </Fab>
      </Tooltip>

      {/* Voice input dialog */}
      <Dialog open={open} onClose={handleClose} maxWidth="sm" fullWidth>
        <DialogTitle>
          Voice Command
          {classificationResult && !classificationResult.error && (
            <Chip
              label={classificationResult.type || classificationResult.intent}
              color="success"
              size="small"
              sx={{ ml: 1 }}
            />
          )}
        </DialogTitle>
        <DialogContent>
          {/* Microphone animation */}
          <Box sx={{ display: 'flex', justifyContent: 'center', my: 2 }}>
            <IconButton
              color={isListening ? 'error' : 'primary'}
              size="large"
              onClick={isListening ? handleStopListening : handleStartListening}
            >
              {isListening ? <MicOffIcon fontSize="large" /> : <MicIcon fontSize="large" />}
            </IconButton>
          </Box>

          {/* Status text with detailed processing stages */}
          <Typography align="center" color="text.secondary">
            {processingStatus === 'listening' && 'Listening... Speak now'}
            {processingStatus === 'understanding' && 'Understanding your request...'}
            {processingStatus === 'saving' &&
              identifiedActivity &&
              `Adding to ${identifiedActivity.charAt(0).toUpperCase() + identifiedActivity.slice(1)} tracker...`}
            {!processingStatus && !isListening && 'Click the microphone to start'}
          </Typography>

          {/* Transcript */}
          {transcript && (
            <Box sx={{ mt: 2 }}>
              <Typography variant="subtitle2">Transcript:</Typography>
              <Typography variant="body2">{transcript}</Typography>
            </Box>
          )}

          {/* Processing indicator with status */}
          {processingStatus && (
            <Box sx={{ display: 'flex', alignItems: 'center', gap: 1, mt: 2 }}>
              <CircularProgress size={20} />
              <Typography variant="body2">
                {processingStatus === 'listening' && 'Listening...'}
                {processingStatus === 'understanding' && 'Understanding...'}
                {processingStatus === 'saving' && 'Saving...'}
              </Typography>
            </Box>
          )}

          {/* Classification result */}
          {classificationResult && !classificationResult.error && (
            <Alert severity="success" sx={{ mt: 2 }}>
              Understood: {classificationResult.type || classificationResult.intent}
            </Alert>
          )}

          {/* Error messages */}
          {(error || (classificationResult && classificationResult.error)) && (
            <Alert severity="error" sx={{ mt: 2 }}>
              {error || classificationResult.message}
            </Alert>
          )}

          {/* Examples */}
          {!transcript && !isListening && (
            <Box sx={{ mt: 2 }}>
              <Typography variant="subtitle2">Example commands:</Typography>
              <Typography variant="body2">• "Fed baby 120 ml"</Typography>
              <Typography variant="body2">• "Nursed on left breast for 15 minutes"</Typography>
              <Typography variant="body2">• "Changed wet diaper"</Typography>
              <Typography variant="body2">• "Baby napped for 45 minutes"</Typography>
            </Box>
          )}
        </DialogContent>
        <DialogActions>
          <Button onClick={handleClose}>Close</Button>
        </DialogActions>
      </Dialog>

      {/* Snackbar for feedback */}
      <Snackbar
        open={snackbar.open}
        autoHideDuration={4000}
        onClose={handleCloseSnackbar}
        anchorOrigin={{ vertical: 'bottom', horizontal: 'center' }}
      >
        <Alert onClose={handleCloseSnackbar} severity={snackbar.severity}>
          {snackbar.message}
        </Alert>
      </Snackbar>
    </>
  );
}
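// Usage sketch (assumptions: a Next.js App Router layout at app/layout.tsx and a
// component path of '@/components/VoiceFloatingButton'; adjust both to your project).
// Mounting the button once in the root layout keeps the FAB available on every page:
//
//   import { VoiceFloatingButton } from '@/components/VoiceFloatingButton';
//
//   export default function RootLayout({ children }: { children: React.ReactNode }) {
//     return (
//       <html lang="en">
//         <body>
//           {children}
//           <VoiceFloatingButton />
//         </body>
//       </html>
//     );
//   }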