import React, { useState, useEffect, useRef } from 'react';
import { Mic, Loader2, Brain } from 'lucide-react';
import { transcribeWithWhisper } from '../utils/whisper';
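/**
 * Push-to-talk microphone button. Records audio with MediaRecorder, sends it
 * to Whisper for transcription, and delivers the resulting text through the
 * onTranscript callback.
 */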
export function MicButton({ onTranscript, className = '' }) {
const [state, setState] = useState('idle'); // idle, recording, transcribing, processing
const [error, setError] = useState(null);
const [isSupported, setIsSupported] = useState(true);
const mediaRecorderRef = useRef(null);
const streamRef = useRef(null);
const chunksRef = useRef([]);
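  // Timestamp of the last tap; handleClick uses it to debounce accidental
  // double-taps on mobile.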
const lastTapRef = useRef(0);
// Check microphone support on mount
useEffect(() => {
const checkSupport = () => {
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
setIsSupported(false);
setError('Microphone not supported. Please use HTTPS or a modern browser.');
return;
}
// Additional check for secure context
if (location.protocol !== 'https:' && location.hostname !== 'localhost') {
setIsSupported(false);
setError('Microphone requires HTTPS. Please use a secure connection.');
return;
}
setIsSupported(true);
setError(null);
};
checkSupport();
}, []);
// Start recording
const startRecording = async () => {
try {
console.log('Starting recording...');
setError(null);
chunksRef.current = [];
      // Check that the recording APIs are available before requesting the stream
      if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
        throw new Error('Microphone access not available. Please use HTTPS or a supported browser.');
      }
      if (typeof MediaRecorder === 'undefined') {
        throw new Error('Audio recording is not supported by this browser.');
      }
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
streamRef.current = stream;
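      // Prefer webm (Chromium, Firefox); Safari's MediaRecorder can't record
      // webm, so fall back to mp4 there.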
const mimeType = MediaRecorder.isTypeSupported('audio/webm') ? 'audio/webm' : 'audio/mp4';
const recorder = new MediaRecorder(stream, { mimeType });
mediaRecorderRef.current = recorder;
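      // Buffer audio data as the recorder produces it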
recorder.ondataavailable = (e) => {
if (e.data.size > 0) {
chunksRef.current.push(e.data);
}
};
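      // When recording stops: assemble the blob, release the mic, then transcribe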
recorder.onstop = async () => {
console.log('Recording stopped, creating blob...');
const blob = new Blob(chunksRef.current, { type: mimeType });
// Clean up stream
if (streamRef.current) {
streamRef.current.getTracks().forEach(track => track.stop());
streamRef.current = null;
}
// Start transcribing
setState('transcribing');
// Check if we're in an enhancement mode
const whisperMode = window.localStorage.getItem('whisperMode') || 'default';
        const isEnhancementMode = ['prompt', 'vibe', 'instructions', 'architect'].includes(whisperMode);
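        // Enhancement modes do extra post-processing on the transcript, so the UI
        // gets a distinct "processing" state if that step runs long.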
// Set up a timer to switch to processing state for enhancement modes
let processingTimer;
if (isEnhancementMode) {
processingTimer = setTimeout(() => {
setState('processing');
}, 2000); // Switch to processing after 2 seconds
}
try {
const text = await transcribeWithWhisper(blob);
if (text && onTranscript) {
onTranscript(text);
}
} catch (err) {
console.error('Transcription error:', err);
setError(err.message);
} finally {
if (processingTimer) {
clearTimeout(processingTimer);
}
setState('idle');
}
};
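      // Begin capture; chunks stream into ondataavailable until stop() is called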
recorder.start();
setState('recording');
console.log('Recording started successfully');
    } catch (err) {
      console.error('Failed to start recording:', err);
      // Release the stream if it was acquired before the failure (e.g. the
      // MediaRecorder constructor threw) so the mic is not left open
      if (streamRef.current) {
        streamRef.current.getTracks().forEach(track => track.stop());
        streamRef.current = null;
      }
      // Provide specific error messages based on error type
      let errorMessage = 'Microphone access failed';
if (err.name === 'NotAllowedError') {
errorMessage = 'Microphone access denied. Please allow microphone permissions.';
} else if (err.name === 'NotFoundError') {
errorMessage = 'No microphone found. Please check your audio devices.';
} else if (err.name === 'NotSupportedError') {
errorMessage = 'Microphone not supported by this browser.';
} else if (err.name === 'NotReadableError') {
errorMessage = 'Microphone is being used by another application.';
} else if (err.message.includes('HTTPS')) {
errorMessage = err.message;
}
setError(errorMessage);
setState('idle');
}
};
// Stop recording
const stopRecording = () => {
console.log('Stopping recording...');
if (mediaRecorderRef.current && mediaRecorderRef.current.state === 'recording') {
mediaRecorderRef.current.stop();
// Don't set state here - let the onstop handler do it
} else {
// If recorder isn't in recording state, force cleanup
console.log('Recorder not in recording state, forcing cleanup');
if (streamRef.current) {
streamRef.current.getTracks().forEach(track => track.stop());
streamRef.current = null;
}
setState('idle');
}
};
// Handle button click
const handleClick = (e) => {
// Prevent double firing on mobile
if (e) {
e.preventDefault();
e.stopPropagation();
}
// Don't proceed if microphone is not supported
if (!isSupported) {
return;
}
// Debounce for mobile double-tap issue
const now = Date.now();
if (now - lastTapRef.current < 300) {
console.log('Ignoring rapid tap');
return;
}
lastTapRef.current = now;
console.log('Button clicked, current state:', state);
if (state === 'idle') {
startRecording();
} else if (state === 'recording') {
stopRecording();
}
// Do nothing if transcribing or processing
};
  // Clean up on unmount: stop any active recording and release the microphone
  useEffect(() => {
    return () => {
      const recorder = mediaRecorderRef.current;
      if (recorder && recorder.state === 'recording') {
        // Drop the handler so stopping after unmount doesn't trigger setState
        recorder.onstop = null;
        recorder.stop();
      }
      if (streamRef.current) {
        streamRef.current.getTracks().forEach(track => track.stop());
      }
    };
  }, []);
// Button appearance based on state
const getButtonAppearance = () => {
if (!isSupported) {
return {
icon: