navyseal4000
committed on
Commit · 36b7d94
1 Parent(s): 95776af
Added speech to text capability
- app/components/chat/BaseChat.tsx +79 -0
- app/types/global.d.ts +2 -0
app/components/chat/BaseChat.tsx
CHANGED
@@ -87,6 +87,35 @@ interface BaseChatProps {
   enhancePrompt?: () => void;
 }
 
+const SpeechRecognitionButton = ({
+  isListening,
+  onStart,
+  onStop,
+  disabled
+}: {
+  isListening: boolean;
+  onStart: () => void;
+  onStop: () => void;
+  disabled: boolean;
+}) => {
+  return (
+    <IconButton
+      title={isListening ? "Stop listening" : "Start speech recognition"}
+      disabled={disabled}
+      className={classNames('transition-all', {
+        'text-bolt-elements-item-contentAccent': isListening,
+      })}
+      onClick={isListening ? onStop : onStart}
+    >
+      {isListening ? (
+        <div className="i-ph:microphone-slash text-xl" />
+      ) : (
+        <div className="i-ph:microphone text-xl" />
+      )}
+    </IconButton>
+  );
+};
+
 export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
   (
     {
@@ -114,6 +143,8 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
     const TEXTAREA_MAX_HEIGHT = chatStarted ? 400 : 200;
     const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
     const [modelList, setModelList] = useState(MODEL_LIST);
+    const [isListening, setIsListening] = useState(false);
+    const [recognition, setRecognition] = useState<SpeechRecognition | null>(null);
 
     useEffect(() => {
       // Load API keys from cookies on component mount
@@ -134,8 +165,49 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
       initializeModelList().then((modelList) => {
        setModelList(modelList);
      });
+      if (typeof window !== 'undefined' && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window)) {
+        const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
+        const recognition = new SpeechRecognition();
+        recognition.continuous = true;
+        recognition.interimResults = true;
+
+        recognition.onresult = (event) => {
+          const transcript = Array.from(event.results)
+            .map(result => result[0])
+            .map(result => result.transcript)
+            .join('');
+
+          if (handleInputChange) {
+            const syntheticEvent = {
+              target: { value: transcript },
+            } as React.ChangeEvent<HTMLTextAreaElement>;
+            handleInputChange(syntheticEvent);
+          }
+        };
+
+        recognition.onerror = (event) => {
+          console.error('Speech recognition error:', event.error);
+          setIsListening(false);
+        };
+
+        setRecognition(recognition);
+      }
     }, []);
 
+    const startListening = () => {
+      if (recognition) {
+        recognition.start();
+        setIsListening(true);
+      }
+    };
+
+    const stopListening = () => {
+      if (recognition) {
+        recognition.stop();
+        setIsListening(false);
+      }
+    };
+
     const updateApiKey = (provider: string, key: string) => {
       try {
         const updatedApiKeys = { ...apiKeys, [provider]: key };
@@ -284,6 +356,13 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
               </>
             )}
           </IconButton>
+
+          <SpeechRecognitionButton
+            isListening={isListening}
+            onStart={startListening}
+            onStop={stopListening}
+            disabled={isStreaming}
+          />
         </div>
         {input.length > 3 ? (
           <div className="text-xs text-bolt-elements-textTertiary">
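For reference, the pattern the diff relies on is the browser Web Speech API: feature-detect the constructor, enable continuous listening with interim results, and join the accumulated transcripts into one string. Below is a minimal standalone sketch of that pattern. The helper name createSpeechRecognition and the onTranscript callback are illustrative only (the commit inlines this logic in BaseChat's useEffect and pushes the transcript into the chat input via a synthetic change event), and the sketch assumes the same ambient SpeechRecognition typings this commit declares in global.d.ts.

// Hypothetical helper, not part of the commit: distills the wiring done inside BaseChat's useEffect.
function createSpeechRecognition(onTranscript: (text: string) => void): SpeechRecognition | null {
  // Feature-detect: Chromium exposes webkitSpeechRecognition; other engines may expose SpeechRecognition.
  if (typeof window === 'undefined' || !('SpeechRecognition' in window || 'webkitSpeechRecognition' in window)) {
    return null;
  }

  const SpeechRecognitionImpl = window.SpeechRecognition || window.webkitSpeechRecognition;
  const recognition = new SpeechRecognitionImpl();

  recognition.continuous = true;     // keep listening across pauses instead of stopping after one phrase
  recognition.interimResults = true; // surface partial transcripts while the user is still speaking

  recognition.onresult = (event) => {
    // Each result holds alternatives; take the top alternative of every result and join them.
    const transcript = Array.from(event.results)
      .map((result) => result[0].transcript)
      .join('');
    onTranscript(transcript);
  };

  recognition.onerror = (event) => {
    console.error('Speech recognition error:', event.error);
  };

  return recognition;
}

// Usage mirrors the component's startListening/stopListening handlers:
// const recognition = createSpeechRecognition((text) => console.log(text));
// recognition?.start();
// recognition?.stop();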
app/types/global.d.ts
CHANGED
@@ -1,3 +1,5 @@
 interface Window {
   showDirectoryPicker(): Promise<FileSystemDirectoryHandle>;
+  webkitSpeechRecognition: typeof SpeechRecognition;
+  SpeechRecognition: typeof SpeechRecognition;
 }
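These ambient declarations are what let the feature-detection code in BaseChat.tsx type-check: both window properties resolve to the SpeechRecognition constructor type, so no casts are needed at the call site. A brief sketch, assuming an ambient SpeechRecognition type is in scope (e.g. via DOM speech-recognition typings):

// With the widened Window interface, the prefixed fallback compiles without `as any`:
const SpeechRecognitionCtor = window.SpeechRecognition || window.webkitSpeechRecognition;
const recognition = new SpeechRecognitionCtor(); // typed as a SpeechRecognition instance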