/*
 * NOTE(review): this copy of the file appears flattened/garbled by extraction:
 *  - Many statements share one physical line, so the existing `//` line comments
 *    (e.g. "// Set status when listening starts/stops") would comment out the code
 *    that follows them on the same stored line — the original file presumably had
 *    real newlines; verify against the repository copy before building.
 *  - Several single-quoted string literals are split across physical lines
 *    (the "Voice input not supported..." / "Failed to save activity..." /
 *    "Voice command rejected..." snackbar messages), which is invalid for
 *    single-quoted JS strings — wrap artifacts, not intentional newlines.
 *  - The JSX inside `return (...)` has lost its element tags (dangling
 *    `{isListening ? `, orphaned `)}`, attribute fragments with no opening tag),
 *    so the render markup cannot be reconstructed from this copy.
 *
 * Concrete code issues to address once the markup is restored:
 *  - `useState(null)` for `classificationResult` and `processedClassificationId`
 *    infers type `null` under strict TS; the later calls
 *    `setClassificationResult(classification)` and
 *    `setProcessedClassificationId(classificationId)` (a string) will not
 *    type-check. Annotate, e.g. `useState<string | null>(null)` for the id and an
 *    explicit classification type (or `unknown`) for the result.
 *  - `Record` is used with no type arguments (`activityDetails: Record`,
 *    `finalData?: Record`) — invalid TS (`Record` requires two type arguments);
 *    use `Record<string, unknown>` or a concrete shape.
 *  - `saveFeedback` reads `classificationResult?.language` with optional chaining
 *    but then dereferences `classificationResult.type` / `.details` /
 *    `.confidence` unguarded — inconsistent, and it would throw if ever invoked
 *    while `classificationResult` is null.
 *  - `catch (error: any)` in handleApprove/handleEdit — prefer `catch (error:
 *    unknown)` with narrowing (`error instanceof Error`).
 *  - The `setTimeout(...)` calls scheduling `startListening()` (300 ms) and
 *    `handleClose()` (1500 ms) are never cleared; presumably they can fire after
 *    unmount — TODO confirm and clear them in a cleanup effect.
 */
'use client'; import React, { useState } from 'react'; import { Fab, Tooltip, Snackbar, Alert, Dialog, DialogTitle, DialogContent, DialogActions, Button, Box, Typography, CircularProgress, Chip, IconButton, Select, MenuItem, FormControl, InputLabel, } from '@mui/material'; import MicIcon from '@mui/icons-material/Mic'; import MicOffIcon from '@mui/icons-material/MicOff'; import AddIcon from '@mui/icons-material/Add'; import { useRouter } from 'next/navigation'; import { useVoiceInput } from '@/hooks/useVoiceInput'; import { useAuth } from '@/lib/auth/AuthContext'; import { trackingApi } from '@/lib/api/tracking'; import { childrenApi } from '@/lib/api/children'; import { voiceApi } from '@/lib/api/voice'; import { VoiceActivityReview } from './VoiceActivityReview'; /** * Floating voice input button * * Always visible floating action button for quick voice commands. * Positioned in bottom-right corner for easy thumb access. */ export function VoiceFloatingButton() { const router = useRouter(); const { user } = useAuth(); const [open, setOpen] = useState(false); const [isProcessing, setIsProcessing] = useState(false); const [processingStatus, setProcessingStatus] = useState<'listening' | 'understanding' | 'saving' | null>(null); const [identifiedActivity, setIdentifiedActivity] = useState(''); const [classificationResult, setClassificationResult] = useState(null); const [processedClassificationId, setProcessedClassificationId] = useState(null); const [showReview, setShowReview] = useState(false); const [showUnknownDialog, setShowUnknownDialog] = useState(false); const [manualTrackingType, setManualTrackingType] = useState('feeding'); const [snackbar, setSnackbar] = useState<{ open: boolean; message: string; severity: 'success' | 'info' | 'warning' | 'error'; }>({ open: false, message: '', severity: 'info', }); const familyId = user?.families?.[0]?.familyId; const { isListening, isSupported, transcript, classification, error, usesFallback, startListening, 
// NOTE(review): `usesFallback` is destructured above but never referenced in the
// visible code; it may have been used in the stripped JSX — verify before removing.
stopListening, reset } = useVoiceInput(); // Set status when listening starts/stops React.useEffect(() => { console.log('[VoiceFloatingButton] isListening changed:', isListening, 'processingStatus:', processingStatus); if (isListening && processingStatus !== 'listening') { console.log('[VoiceFloatingButton] Setting status to listening'); setProcessingStatus('listening'); } else if (!isListening && processingStatus === 'listening') { // When listening stops, transition to understanding console.log('[VoiceFloatingButton] Setting status to understanding'); setProcessingStatus('understanding'); } }, [isListening, processingStatus]); // Auto-use classification from backend when transcription completes // MediaRecorder sends audio to backend, which transcribes + classifies in one call React.useEffect(() => { // Create a unique ID for this classification based on transcript + type + timestamp const classificationId = classification ? `${transcript}-${classification.type}-${classification.timestamp}` : null; // Only process if we haven't already processed this exact classification if (classification && !isListening && !isProcessing && open && classificationId !== processedClassificationId) { console.log('[Voice] New classification detected, showing review...', classificationId); setProcessedClassificationId(classificationId); setClassificationResult(classification); // Show review dialog instead of immediately creating activity if (classification.type !== 'unknown' && classification.confidence >= 0.3) { setProcessingStatus(null); setShowReview(true); } else { // For unknown or low confidence, show unknown dialog setProcessingStatus(null); setOpen(false); setShowUnknownDialog(true); } } }, [classification, isListening, isProcessing, open, transcript, processedClassificationId]); const handleOpen = () => { if (!isSupported) { setSnackbar({ open: true, message: 'Voice input not supported in this browser. 
Please use Chrome, Edge, or Safari.', severity: 'error', }); return; } setOpen(true); reset(); setClassificationResult(null); setProcessingStatus(null); setIdentifiedActivity(''); setProcessedClassificationId(null); // Auto-start listening after dialog opens setTimeout(() => { startListening(); }, 300); }; const handleClose = () => { if (isListening) { stopListening(); } setOpen(false); reset(); setClassificationResult(null); setProcessingStatus(null); setIdentifiedActivity(''); setProcessedClassificationId(null); }; const handleStartListening = () => { reset(); setClassificationResult(null); setProcessedClassificationId(null); startListening(); }; const handleStopListening = () => { stopListening(); }; const createActivity = async (activityType: string, activityDetails: Record, activityTimestamp?: Date) => { // Get the first child from the family if (!familyId) { console.log('[Voice] No familyId found'); throw new Error('No family found. Please set up your profile first.'); } console.log('[Voice] Family ID:', familyId); // Fetch children console.log('[Voice] Fetching children for family:', familyId); const children = await childrenApi.getChildren(familyId); console.log('[Voice] Children found:', children.length, children); if (children.length === 0) { throw new Error('No children found. Please add a child first.'); } // Use the first child const childId = children[0].id; console.log('[Voice] Using child ID:', childId); // Create the activity - use frontend API format (trackingApi transforms to backend DTO) const activityData = { type: activityType, timestamp: activityTimestamp ? 
// NOTE(review): timestamp falls back to "now" when the classifier supplied none.
new Date(activityTimestamp).toISOString() : new Date().toISOString(), data: activityDetails, notes: activityDetails.notes || undefined, }; console.log('[Voice] Creating activity with data:', JSON.stringify(activityData, null, 2)); const createdActivity = await trackingApi.createActivity(childId, activityData); console.log('[Voice] Activity created successfully:', createdActivity); return { childId, activity: createdActivity }; }; const saveFeedback = async (action: 'approved' | 'edited' | 'rejected', finalData?: Record, childId?: string, activityId?: string) => { try { await voiceApi.saveFeedback({ childId, activityId, transcript, language: classificationResult?.language || 'en', extractedType: classificationResult.type, extractedData: classificationResult.details, confidence: classificationResult.confidence, action, finalType: action === 'edited' ? classificationResult.type : undefined, finalData: action === 'edited' ? finalData : undefined, }); console.log(`[Voice] Feedback saved: ${action}`); } catch (error) { console.error('[Voice] Failed to save feedback:', error); // Don't throw - feedback is nice-to-have, not critical } }; const handleApprove = async (data: any) => { try { setIsProcessing(true); setIdentifiedActivity(data.type); // Set the activity type for display setProcessingStatus('saving'); setShowReview(false); const { childId, activity } = await createActivity(data.type, data.details, data.timestamp); // Save feedback await saveFeedback('approved', undefined, childId, activity.id); // Show success message const activityLabel = data.type.charAt(0).toUpperCase() + data.type.slice(1); setSnackbar({ open: true, message: `${activityLabel} activity saved successfully!`, severity: 'success', }); // Auto-close dialog setTimeout(() => { handleClose(); }, 1500); } catch (error: any) { console.error('[Voice] Failed to create activity:', error); setSnackbar({ open: true, message: error.message || 'Failed to save activity. 
Please try again.', severity: 'error', }); } finally { setIsProcessing(false); setProcessingStatus(null); } }; const handleEdit = async (editedData: any) => { try { setIsProcessing(true); setIdentifiedActivity(editedData.type); // Set the activity type for display setProcessingStatus('saving'); setShowReview(false); const { childId, activity } = await createActivity(editedData.type, editedData.details, editedData.timestamp); // Save feedback with edited data await saveFeedback('edited', editedData.details, childId, activity.id); // Show success message const activityLabel = editedData.type.charAt(0).toUpperCase() + editedData.type.slice(1); setSnackbar({ open: true, message: `${activityLabel} activity saved with your edits!`, severity: 'success', }); // Auto-close dialog setTimeout(() => { handleClose(); }, 1500); } catch (error: any) { console.error('[Voice] Failed to create edited activity:', error); setSnackbar({ open: true, message: error.message || 'Failed to save activity. Please try again.', severity: 'error', }); } finally { setIsProcessing(false); setProcessingStatus(null); } }; const handleReject = async () => { try { setShowReview(false); // Save feedback await saveFeedback('rejected'); setSnackbar({ open: true, message: 'Voice command rejected. 
Try again or use manual entry.', severity: 'info', }); } catch (error) { console.error('[Voice] Failed to save rejection feedback:', error); } }; const handleCloseSnackbar = () => { setSnackbar(prev => ({ ...prev, open: false })); }; const handleRetry = () => { setShowUnknownDialog(false); setOpen(true); reset(); setClassificationResult(null); setProcessingStatus(null); setProcessedClassificationId(null); // Auto-start listening setTimeout(() => { startListening(); }, 300); }; const handleManualTracking = () => { setShowUnknownDialog(false); router.push(`/track/${manualTrackingType}`); }; return ( <> {/* Floating button positioned in bottom-right - Hidden on desktop since we have TabBar center button */} {/* Voice input dialog */} Voice Command {classificationResult && !classificationResult.error && ( )} {/* Microphone animation */} {isListening ? {/* Status text with detailed processing stages */} {processingStatus === 'listening' && 'Listening... Speak now'} {processingStatus === 'understanding' && 'Understanding your request...'} {processingStatus === 'saving' && identifiedActivity && `Adding to ${identifiedActivity.charAt(0).toUpperCase() + identifiedActivity.slice(1)} tracker...`} {!processingStatus && !isListening && 'Click the microphone to start'} {/* Transcript */} {transcript && ( Transcript: {transcript} )} {/* Processing indicator with status */} {processingStatus && ( )} {/* Classification result */} {classificationResult && !classificationResult.error && ( Understood: {classificationResult.type || classificationResult.intent} )} {/* Error messages */} {(error || (classificationResult && classificationResult.error)) && ( {error || classificationResult.message} )} {/* Examples */} {!transcript && !isListening && ( Example commands: • "Fed baby 120 ml"
• "Nursed on left breast for 15 minutes"
• "Changed wet diaper"
• "Baby napped for 45 minutes"
)}
{/* Review Dialog */} {showReview && classificationResult && ( setShowReview(false)} /> )} {/* Unknown Intent Dialog */} setShowUnknownDialog(false)} maxWidth="sm" fullWidth aria-labelledby="unknown-command-dialog-title" aria-describedby="unknown-command-dialog-description" > Could Not Understand Command You said: "{transcript}" I couldn't identify a specific activity from your command. You can either try again or manually add an activity. Activity Type {/* Snackbar for feedback */} {snackbar.message} ); }