From b00e75f679d4377f57f4a17d18aa5f9d05d2a213 Mon Sep 17 00:00:00 2001
From: Andrei
Date: Wed, 1 Oct 2025 20:27:21 +0000
Subject: [PATCH] Fix hydration error in VoiceFloatingButton - remove nested buttons

---
 .../components/voice/VoiceFloatingButton.tsx | 231 ++++++++++++++++--
 1 file changed, 207 insertions(+), 24 deletions(-)

diff --git a/maternal-web/components/voice/VoiceFloatingButton.tsx b/maternal-web/components/voice/VoiceFloatingButton.tsx
index 581a97a..776362f 100644
--- a/maternal-web/components/voice/VoiceFloatingButton.tsx
+++ b/maternal-web/components/voice/VoiceFloatingButton.tsx
@@ -1,10 +1,26 @@
 'use client';
 
 import React, { useState } from 'react';
-import { Fab, Tooltip, Snackbar, Alert } from '@mui/material';
+import {
+  Fab,
+  Tooltip,
+  Snackbar,
+  Alert,
+  Dialog,
+  DialogTitle,
+  DialogContent,
+  DialogActions,
+  Button,
+  Box,
+  Typography,
+  CircularProgress,
+  Chip,
+  IconButton,
+} from '@mui/material';
 import MicIcon from '@mui/icons-material/Mic';
-import { VoiceInputButton } from './VoiceInputButton';
+import MicOffIcon from '@mui/icons-material/MicOff';
 import { useRouter } from 'next/navigation';
+import { useVoiceInput } from '@/hooks/useVoiceInput';
 
 /**
  * Floating voice input button
@@ -14,6 +30,9 @@ import { useRouter } from 'next/navigation';
  */
 export function VoiceFloatingButton() {
   const router = useRouter();
+  const [open, setOpen] = useState(false);
+  const [isProcessing, setIsProcessing] = useState(false);
+  const [classificationResult, setClassificationResult] = useState<any>(null);
   const [snackbar, setSnackbar] = useState<{
     open: boolean;
     message: string;
@@ -24,18 +43,83 @@ export function VoiceFloatingButton() {
     severity: 'info',
   });
 
-  const handleTranscript = (transcript: string) => {
-    console.log('[Voice] Transcript:', transcript);
-    setSnackbar({
-      open: true,
-      message: `Command received: "${transcript}"`,
-      severity: 'info',
-    });
+  const { isListening, isSupported, transcript, error, startListening, stopListening, reset } =
+    useVoiceInput();
+
+  // Auto-classify when we get a final transcript
+  React.useEffect(() => {
+    if (transcript && !isListening && !isProcessing && open) {
+      classifyTranscript(transcript);
+    }
+  }, [transcript, isListening, isProcessing, open]);
+
+  const handleOpen = () => {
+    if (!isSupported) {
+      setSnackbar({
+        open: true,
+        message: 'Voice input not supported in this browser. Please use Chrome, Edge, or Safari.',
+        severity: 'error',
+      });
+      return;
+    }
+    setOpen(true);
+    reset();
+    setClassificationResult(null);
+  };
+
+  const handleClose = () => {
+    if (isListening) {
+      stopListening();
+    }
+    setOpen(false);
+    reset();
+    setClassificationResult(null);
+  };
+
+  const handleStartListening = () => {
+    reset();
+    setClassificationResult(null);
+    startListening();
+  };
+
+  const handleStopListening = () => {
+    stopListening();
+  };
+
+  const classifyTranscript = async (text: string) => {
+    setIsProcessing(true);
+    try {
+      const response = await fetch('/api/voice/transcribe', {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+        },
+        body: JSON.stringify({ text }),
+      });
+
+      const data = await response.json();
+
+      if (response.ok && data.success) {
+        setClassificationResult(data.classification);
+        handleClassifiedIntent(data.classification);
+      } else {
+        setClassificationResult({
+          error: true,
+          message: data.message || 'Could not understand command',
+        });
+      }
+    } catch (error) {
+      console.error('[Voice] Classification error:', error);
+      setClassificationResult({
+        error: true,
+        message: 'Failed to process command',
+      });
+    } finally {
+      setIsProcessing(false);
+    }
   };
 
   const handleClassifiedIntent = (result: any) => {
-    console.log('[Voice] Classification:', result);
-
     if (result.error) {
       setSnackbar({
         open: true,
@@ -52,9 +136,9 @@ export function VoiceFloatingButton() {
       severity: 'success',
     });
 
-    // Navigate to appropriate page based on intent
-    // This is a placeholder - in production, you'd create the activity
+    // Auto-close dialog and navigate
    setTimeout(() => {
+      handleClose();
       if (result.intent === 'feeding') {
         router.push('/track/feeding');
       } else if (result.intent === 'sleep') {
@@ -76,6 +160,8 @@
     <>
       <Tooltip title="Voice command" placement="left">
-        <VoiceInputButton onTranscript={handleTranscript} />
+        <Fab color="primary" aria-label="voice command" onClick={handleOpen}><MicIcon /></Fab>
       </Tooltip>
+
+      {/* Voice input dialog */}
+      <Dialog open={open} onClose={handleClose} maxWidth="sm" fullWidth>
+        <DialogTitle>
+          Voice Command
+          {classificationResult && !classificationResult.error && (
+            <Chip label={classificationResult.intent} color="success" size="small" sx={{ ml: 1 }} />
+          )}
+        </DialogTitle>
+        <DialogContent>
+          <Box sx={{ textAlign: 'center', py: 2 }}>
+            {/* Microphone animation */}
+            <Box sx={{ display: 'flex', justifyContent: 'center', mb: 2 }}>
+              <IconButton size="large" color={isListening ? 'error' : 'primary'} onClick={isListening ? handleStopListening : handleStartListening}>
+                {isListening ? <MicOffIcon /> : <MicIcon />}
+              </IconButton>
+            </Box>
+
+            {/* Status text */}
+            <Typography variant="body2" color="text.secondary">
+              {isListening ? 'Listening... Speak now' : 'Click the microphone to start'}
+            </Typography>
+
+            {/* Transcript */}
+            {transcript && (
+              <Box sx={{ mt: 2 }}>
+                <Typography variant="caption" color="text.secondary">
+                  Transcript:
+                </Typography>
+                <Typography variant="body1">{transcript}</Typography>
+              </Box>
+            )}
+
+            {/* Processing indicator */}
+            {isProcessing && (
+              <Box sx={{ display: 'flex', alignItems: 'center', justifyContent: 'center', gap: 1, mt: 2 }}>
+                <CircularProgress size={20} />
+                <Typography variant="body2">
+                  Processing command...
+                </Typography>
+              </Box>
+            )}
+
+            {/* Classification result */}
+            {classificationResult && !classificationResult.error && (
+              <Alert severity="success" sx={{ mt: 2 }}>
+                <Typography variant="body2">
+                  Understood: {classificationResult.intent}
+                </Typography>
+              </Alert>
+            )}
+
+            {/* Error messages */}
+            {(error || (classificationResult && classificationResult.error)) && (
+              <Alert severity="error" sx={{ mt: 2 }}>
+                {error || classificationResult.message}
+              </Alert>
+            )}
+
+            {/* Examples */}
+            {!transcript && !isListening && (
+              <Box sx={{ mt: 3 }}>
+                <Typography variant="caption" color="text.secondary">
+                  Example commands:
+                </Typography>
+                <Typography variant="body2" component="div">
+                  • "Fed baby 120 ml"
+                  <br />
+                  • "Nursed on left breast for 15 minutes"
+                  <br />
+                  • "Changed wet diaper"
+                  <br />
+                  • "Baby napped for 45 minutes"
+                </Typography>
+              </Box>
+            )}
+          </Box>
+        </DialogContent>
+        <DialogActions>
+          <Button onClick={handleClose}>Close</Button>
+        </DialogActions>
+      </Dialog>
+
+      {/* Snackbar for feedback */}
-      <Snackbar open={snackbar.open} onClose={() => setSnackbar({ ...snackbar, open: false })}>
+      <Snackbar open={snackbar.open} autoHideDuration={4000} onClose={() => setSnackbar({ ...snackbar, open: false })}>
         <Alert severity={snackbar.severity}>
           {snackbar.message}
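Why this fixes the hydration error: MUI's `Fab` renders a native `<button>`, and the old `VoiceInputButton` rendered its own `<button>` inside it. Nested buttons are invalid HTML, so the browser re-parents the inner button while parsing the server-rendered markup, and the DOM that React hydrates against no longer matches what it rendered on the server. A minimal sketch of the broken and fixed shapes (props illustrative, not the exact ones from this repo):

```tsx
import { Fab } from '@mui/material';
import MicIcon from '@mui/icons-material/Mic';

// Broken: <button> inside <button>. The browser hoists the inner button out
// during HTML parsing, so the hydrated React tree no longer matches the
// server-rendered DOM and Next.js reports a hydration mismatch.
export function Broken() {
  return (
    <Fab color="primary">
      <button>mic</button>
    </Fab>
  );
}

// Fixed shape used by this patch: the Fab is the only button and carries the
// click handler itself; its child is a plain icon.
export function Fixed({ onOpen }: { onOpen: () => void }) {
  return (
    <Fab color="primary" aria-label="voice command" onClick={onOpen}>
      <MicIcon />
    </Fab>
  );
}
```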
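The patch consumes a `useVoiceInput` hook from `@/hooks/useVoiceInput` that is not part of this diff; only its destructured return value is visible. A minimal sketch of the shape the component relies on, assuming a Web Speech API backing (the real hook may differ in details):

```tsx
import { useCallback, useRef, useState } from 'react';

// Shape inferred from the destructuring in VoiceFloatingButton.
export interface UseVoiceInput {
  isListening: boolean;
  isSupported: boolean;
  transcript: string;
  error: string | null;
  startListening: () => void;
  stopListening: () => void;
  reset: () => void;
}

export function useVoiceInput(): UseVoiceInput {
  const [isListening, setIsListening] = useState(false);
  const [transcript, setTranscript] = useState('');
  const [error, setError] = useState<string | null>(null);
  const recognitionRef = useRef<any>(null);

  // The Web Speech API is only available in some browsers (Chrome, Edge, Safari),
  // which is why the component shows the "not supported" snackbar elsewhere.
  const SpeechRecognition =
    typeof window !== 'undefined'
      ? (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition
      : undefined;
  const isSupported = Boolean(SpeechRecognition);

  const startListening = useCallback(() => {
    if (!SpeechRecognition) return;
    const recognition = new SpeechRecognition();
    recognition.onresult = (event: any) => {
      // Join all recognized segments into a single transcript string.
      setTranscript(
        Array.from(event.results)
          .map((r: any) => r[0].transcript)
          .join(' ')
      );
    };
    recognition.onerror = (event: any) => setError(event.error);
    recognition.onend = () => setIsListening(false);
    recognitionRef.current = recognition;
    recognition.start();
    setIsListening(true);
  }, [SpeechRecognition]);

  const stopListening = useCallback(() => {
    recognitionRef.current?.stop();
    setIsListening(false);
  }, []);

  const reset = useCallback(() => {
    setTranscript('');
    setError(null);
  }, []);

  return { isListening, isSupported, transcript, error, startListening, stopListening, reset };
}
```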
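`classifyTranscript` POSTs `{ text }` to `/api/voice/transcribe` and reads `success`, `classification`, and `message` from the JSON body; `handleClassifiedIntent` then routes on `classification.intent`. The contract implied by that usage, as a sketch (any field not visible in the diff above is an assumption about the route's payload):

```ts
// Inferred request/response contract for POST /api/voice/transcribe.
interface TranscribeRequest {
  text: string; // final transcript captured in the voice dialog
}

interface Classification {
  // 'feeding' and 'sleep' are routed explicitly in handleClassifiedIntent;
  // further intents are handled by the else-if branches elided from the hunk.
  intent: string;
}

interface TranscribeResponse {
  success: boolean;
  classification?: Classification; // present when success is true
  message?: string; // human-readable error when success is false
}
```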