Merge branch 'main' into perplexity-models

Files changed:
- README.md +1 -0
- app/commit.json +1 -1
- app/components/chat/BaseChat.tsx +16 -0
- app/components/chat/ScreenshotStateManager.tsx +33 -0
- app/components/settings/connections/ConnectionsTab.tsx +1 -0
- app/components/workbench/Preview.tsx +22 -2
- app/components/workbench/ScreenshotSelector.tsx +293 -0
- app/lib/stores/workbench.ts +7 -2
- app/utils/constants.ts +2 -1
- app/utils/sampler.ts +49 -0

README.md (CHANGED)

@@ -44,6 +44,7 @@ https://thinktank.ottomator.ai
 - ✅ Better prompt enhancing (@SujalXplores)
 - ✅ Attach images to prompts (@atrokhym)
 - ✅ Detect package.json and commands to auto install and run preview for folder and git import (@wonderwhy-er)
+- ✅ Selection tool to target changes visually (@emcconnell)
 - ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs)
 - ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
 - ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call

app/commit.json (CHANGED)

@@ -1 +1 @@
-{ "commit": "
+{ "commit": "ece0213500a94a6b29e29512c5040baf57884014" }

app/components/chat/BaseChat.tsx (CHANGED)

@@ -26,6 +26,7 @@ import FilePreview from './FilePreview';
 import { ModelSelector } from '~/components/chat/ModelSelector';
 import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
 import type { IProviderSetting, ProviderInfo } from '~/types/model';
+import { ScreenshotStateManager } from './ScreenshotStateManager';
 
 const TEXTAREA_MIN_HEIGHT = 76;
 
@@ -376,6 +377,16 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
     setImageDataList?.(imageDataList.filter((_, i) => i !== index));
   }}
 />
+<ClientOnly>
+  {() => (
+    <ScreenshotStateManager
+      setUploadedFiles={setUploadedFiles}
+      setImageDataList={setImageDataList}
+      uploadedFiles={uploadedFiles}
+      imageDataList={imageDataList}
+    />
+  )}
+</ClientOnly>
 <div
   className={classNames(
     'relative shadow-xs border border-bolt-elements-borderColor backdrop-blur rounded-lg',
@@ -431,6 +442,11 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
     return;
   }
 
+  // ignore if using input method engine
+  if (event.nativeEvent.isComposing) {
+    return;
+  }
+
   handleSendMessage?.(event);
 }
 }}

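Two notes on this file. The ScreenshotStateManager mount is wrapped in ClientOnly because, as the new file below shows, it writes to window and so can only run in the browser. The second hunk's isComposing guard keeps Enter from sending a message while an input method engine (e.g. Japanese or Chinese input) is still composing text. A minimal sketch of that check in isolation, assuming a plain React keydown handler; the handler name and the Enter/Shift-Enter convention are illustrative, not taken from this diff:

import type { KeyboardEvent } from 'react';

// Hypothetical handler shape; only the isComposing check mirrors the diff above.
function handleKeyDown(event: KeyboardEvent<HTMLTextAreaElement>, send: () => void) {
  if (event.key === 'Enter' && !event.shiftKey) {
    // While the IME is composing, Enter confirms the composition,
    // so it must not also submit the chat message.
    if (event.nativeEvent.isComposing) {
      return;
    }

    event.preventDefault();
    send();
  }
}
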
app/components/chat/ScreenshotStateManager.tsx (ADDED)

@@ -0,0 +1,33 @@
+import { useEffect } from 'react';
+
+interface ScreenshotStateManagerProps {
+  setUploadedFiles?: (files: File[]) => void;
+  setImageDataList?: (dataList: string[]) => void;
+  uploadedFiles: File[];
+  imageDataList: string[];
+}
+
+export const ScreenshotStateManager = ({
+  setUploadedFiles,
+  setImageDataList,
+  uploadedFiles,
+  imageDataList,
+}: ScreenshotStateManagerProps) => {
+  useEffect(() => {
+    if (setUploadedFiles && setImageDataList) {
+      (window as any).__BOLT_SET_UPLOADED_FILES__ = setUploadedFiles;
+      (window as any).__BOLT_SET_IMAGE_DATA_LIST__ = setImageDataList;
+      (window as any).__BOLT_UPLOADED_FILES__ = uploadedFiles;
+      (window as any).__BOLT_IMAGE_DATA_LIST__ = imageDataList;
+    }
+
+    return () => {
+      delete (window as any).__BOLT_SET_UPLOADED_FILES__;
+      delete (window as any).__BOLT_SET_IMAGE_DATA_LIST__;
+      delete (window as any).__BOLT_UPLOADED_FILES__;
+      delete (window as any).__BOLT_IMAGE_DATA_LIST__;
+    };
+  }, [setUploadedFiles, setImageDataList, uploadedFiles, imageDataList]);
+
+  return null;
+};

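This component renders nothing; it only mirrors BaseChat's upload state and setters onto window so code rendered in a different part of the tree (here, the ScreenshotSelector in the preview pane) can append a captured image to the chat. A minimal sketch of a consumer, assuming BaseChat is mounted; the addImageToChat helper and its pngBlob/dataUrl arguments are illustrative, not part of this diff:

// Reads the globals registered by ScreenshotStateManager and appends one image to the chat.
function addImageToChat(pngBlob: Blob, dataUrl: string) {
  const setUploadedFiles = (window as any).__BOLT_SET_UPLOADED_FILES__;
  const setImageDataList = (window as any).__BOLT_SET_IMAGE_DATA_LIST__;
  const uploadedFiles: File[] = (window as any).__BOLT_UPLOADED_FILES__ ?? [];
  const imageDataList: string[] = (window as any).__BOLT_IMAGE_DATA_LIST__ ?? [];

  if (!setUploadedFiles || !setImageDataList) {
    return; // BaseChat is not mounted, or the setters were not passed
  }

  const file = new File([pngBlob], 'screenshot.png', { type: 'image/png' });
  setUploadedFiles([...uploadedFiles, file]);
  setImageDataList([...imageDataList, dataUrl]);
}

This is the same pattern ScreenshotSelector.tsx (further down) uses after cropping a capture.
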
app/components/settings/connections/ConnectionsTab.tsx (CHANGED)

@@ -15,6 +15,7 @@ export default function ConnectionsTab() {
       hasToken: !!githubToken,
     });
     toast.success('GitHub credentials saved successfully!');
+    Cookies.set('git:github.com', JSON.stringify({ username: githubToken, password: 'x-oauth-basic' }));
   };
 
   return (

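The added line persists the GitHub token under a git:github.com cookie as a { username, password } pair, with the token as the username and 'x-oauth-basic' as the password. A minimal sketch of reading it back with js-cookie; the getGitHubAuth helper is illustrative and not part of this diff:

import Cookies from 'js-cookie';

interface GitAuth {
  username: string;
  password: string;
}

// Returns the credentials saved by ConnectionsTab, or undefined if none are set.
function getGitHubAuth(): GitAuth | undefined {
  const raw = Cookies.get('git:github.com');

  if (!raw) {
    return undefined;
  }

  try {
    return JSON.parse(raw) as GitAuth;
  } catch {
    return undefined;
  }
}
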
app/components/workbench/Preview.tsx (CHANGED)

@@ -3,6 +3,7 @@ import { memo, useCallback, useEffect, useRef, useState } from 'react';
 import { IconButton } from '~/components/ui/IconButton';
 import { workbenchStore } from '~/lib/stores/workbench';
 import { PortDropdown } from './PortDropdown';
+import { ScreenshotSelector } from './ScreenshotSelector';
 
 type ResizeSide = 'left' | 'right' | null;
 
@@ -20,6 +21,7 @@ export const Preview = memo(() => {
 
   const [url, setUrl] = useState('');
   const [iframeUrl, setIframeUrl] = useState<string | undefined>();
+  const [isSelectionMode, setIsSelectionMode] = useState(false);
 
   // Toggle between responsive mode and device mode
   const [isDeviceModeOn, setIsDeviceModeOn] = useState(false);
@@ -218,12 +220,17 @@
 )}
 <div className="bg-bolt-elements-background-depth-2 p-2 flex items-center gap-1.5">
   <IconButton icon="i-ph:arrow-clockwise" onClick={reloadPreview} />
-
+  <IconButton
+    icon="i-ph:selection"
+    onClick={() => setIsSelectionMode(!isSelectionMode)}
+    className={isSelectionMode ? 'bg-bolt-elements-background-depth-3' : ''}
+  />
   <div
     className="flex items-center gap-1 flex-grow bg-bolt-elements-preview-addressBar-background border border-bolt-elements-borderColor text-bolt-elements-preview-addressBar-text rounded-full px-3 py-1 text-sm hover:bg-bolt-elements-preview-addressBar-backgroundHover hover:focus-within:bg-bolt-elements-preview-addressBar-backgroundActive focus-within:bg-bolt-elements-preview-addressBar-backgroundActive
   focus-within-border-bolt-elements-borderColorActive focus-within:text-bolt-elements-preview-addressBar-textActive"
   >
     <input
+      title="URL"
       ref={inputRef}
       className="w-full bg-transparent outline-none"
       type="text"
@@ -281,7 +288,20 @@
 }}
 >
 {activePreview ? (
-
+  <>
+    <iframe
+      ref={iframeRef}
+      title="preview"
+      className="border-none w-full h-full bg-white"
+      src={iframeUrl}
+      allowFullScreen
+    />
+    <ScreenshotSelector
+      isSelectionMode={isSelectionMode}
+      setIsSelectionMode={setIsSelectionMode}
+      containerRef={iframeRef}
+    />
+  </>
 ) : (
   <div className="flex w-full h-full justify-center items-center bg-white">No preview available</div>
 )}

app/components/workbench/ScreenshotSelector.tsx (ADDED)

@@ -0,0 +1,293 @@
+import { memo, useCallback, useEffect, useRef, useState } from 'react';
+import { toast } from 'react-toastify';
+
+interface ScreenshotSelectorProps {
+  isSelectionMode: boolean;
+  setIsSelectionMode: (mode: boolean) => void;
+  containerRef: React.RefObject<HTMLElement>;
+}
+
+export const ScreenshotSelector = memo(
+  ({ isSelectionMode, setIsSelectionMode, containerRef }: ScreenshotSelectorProps) => {
+    const [isCapturing, setIsCapturing] = useState(false);
+    const [selectionStart, setSelectionStart] = useState<{ x: number; y: number } | null>(null);
+    const [selectionEnd, setSelectionEnd] = useState<{ x: number; y: number } | null>(null);
+    const mediaStreamRef = useRef<MediaStream | null>(null);
+    const videoRef = useRef<HTMLVideoElement | null>(null);
+
+    useEffect(() => {
+      // Cleanup function to stop all tracks when component unmounts
+      return () => {
+        if (videoRef.current) {
+          videoRef.current.pause();
+          videoRef.current.srcObject = null;
+          videoRef.current.remove();
+          videoRef.current = null;
+        }
+
+        if (mediaStreamRef.current) {
+          mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+          mediaStreamRef.current = null;
+        }
+      };
+    }, []);
+
+    const initializeStream = async () => {
+      if (!mediaStreamRef.current) {
+        try {
+          const stream = await navigator.mediaDevices.getDisplayMedia({
+            audio: false,
+            video: {
+              displaySurface: 'window',
+              preferCurrentTab: true,
+              surfaceSwitching: 'include',
+              systemAudio: 'exclude',
+            },
+          } as MediaStreamConstraints);
+
+          // Add handler for when sharing stops
+          stream.addEventListener('inactive', () => {
+            if (videoRef.current) {
+              videoRef.current.pause();
+              videoRef.current.srcObject = null;
+              videoRef.current.remove();
+              videoRef.current = null;
+            }
+
+            if (mediaStreamRef.current) {
+              mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+              mediaStreamRef.current = null;
+            }
+
+            setIsSelectionMode(false);
+            setSelectionStart(null);
+            setSelectionEnd(null);
+            setIsCapturing(false);
+          });
+
+          mediaStreamRef.current = stream;
+
+          // Initialize video element if needed
+          if (!videoRef.current) {
+            const video = document.createElement('video');
+            video.style.opacity = '0';
+            video.style.position = 'fixed';
+            video.style.pointerEvents = 'none';
+            video.style.zIndex = '-1';
+            document.body.appendChild(video);
+            videoRef.current = video;
+          }
+
+          // Set up video with the stream
+          videoRef.current.srcObject = stream;
+          await videoRef.current.play();
+        } catch (error) {
+          console.error('Failed to initialize stream:', error);
+          setIsSelectionMode(false);
+          toast.error('Failed to initialize screen capture');
+        }
+      }
+
+      return mediaStreamRef.current;
+    };
+
+    const handleCopySelection = useCallback(async () => {
+      if (!isSelectionMode || !selectionStart || !selectionEnd || !containerRef.current) {
+        return;
+      }
+
+      setIsCapturing(true);
+
+      try {
+        const stream = await initializeStream();
+
+        if (!stream || !videoRef.current) {
+          return;
+        }
+
+        // Wait for video to be ready
+        await new Promise((resolve) => setTimeout(resolve, 300));
+
+        // Create temporary canvas for full screenshot
+        const tempCanvas = document.createElement('canvas');
+        tempCanvas.width = videoRef.current.videoWidth;
+        tempCanvas.height = videoRef.current.videoHeight;
+
+        const tempCtx = tempCanvas.getContext('2d');
+
+        if (!tempCtx) {
+          throw new Error('Failed to get temporary canvas context');
+        }
+
+        // Draw the full video frame
+        tempCtx.drawImage(videoRef.current, 0, 0);
+
+        // Calculate scale factor between video and screen
+        const scaleX = videoRef.current.videoWidth / window.innerWidth;
+        const scaleY = videoRef.current.videoHeight / window.innerHeight;
+
+        // Get window scroll position
+        const scrollX = window.scrollX;
+        const scrollY = window.scrollY + 40;
+
+        // Get the container's position in the page
+        const containerRect = containerRef.current.getBoundingClientRect();
+
+        // Offset adjustments for more accurate clipping
+        const leftOffset = -9; // Adjust left position
+        const bottomOffset = -14; // Adjust bottom position
+
+        // Calculate the scaled coordinates with scroll offset and adjustments
+        const scaledX = Math.round(
+          (containerRect.left + Math.min(selectionStart.x, selectionEnd.x) + scrollX + leftOffset) * scaleX,
+        );
+        const scaledY = Math.round(
+          (containerRect.top + Math.min(selectionStart.y, selectionEnd.y) + scrollY + bottomOffset) * scaleY,
+        );
+        const scaledWidth = Math.round(Math.abs(selectionEnd.x - selectionStart.x) * scaleX);
+        const scaledHeight = Math.round(Math.abs(selectionEnd.y - selectionStart.y) * scaleY);
+
+        // Create final canvas for the cropped area
+        const canvas = document.createElement('canvas');
+        canvas.width = Math.round(Math.abs(selectionEnd.x - selectionStart.x));
+        canvas.height = Math.round(Math.abs(selectionEnd.y - selectionStart.y));
+
+        const ctx = canvas.getContext('2d');
+
+        if (!ctx) {
+          throw new Error('Failed to get canvas context');
+        }
+
+        // Draw the cropped area
+        ctx.drawImage(tempCanvas, scaledX, scaledY, scaledWidth, scaledHeight, 0, 0, canvas.width, canvas.height);
+
+        // Convert to blob
+        const blob = await new Promise<Blob>((resolve, reject) => {
+          canvas.toBlob((blob) => {
+            if (blob) {
+              resolve(blob);
+            } else {
+              reject(new Error('Failed to create blob'));
+            }
+          }, 'image/png');
+        });
+
+        // Create a FileReader to convert blob to base64
+        const reader = new FileReader();
+
+        reader.onload = (e) => {
+          const base64Image = e.target?.result as string;
+
+          // Find the textarea element
+          const textarea = document.querySelector('textarea');
+
+          if (textarea) {
+            // Get the setters from the BaseChat component
+            const setUploadedFiles = (window as any).__BOLT_SET_UPLOADED_FILES__;
+            const setImageDataList = (window as any).__BOLT_SET_IMAGE_DATA_LIST__;
+            const uploadedFiles = (window as any).__BOLT_UPLOADED_FILES__ || [];
+            const imageDataList = (window as any).__BOLT_IMAGE_DATA_LIST__ || [];
+
+            if (setUploadedFiles && setImageDataList) {
+              // Update the files and image data
+              const file = new File([blob], 'screenshot.png', { type: 'image/png' });
+              setUploadedFiles([...uploadedFiles, file]);
+              setImageDataList([...imageDataList, base64Image]);
+              toast.success('Screenshot captured and added to chat');
+            } else {
+              toast.error('Could not add screenshot to chat');
+            }
+          }
+        };
+        reader.readAsDataURL(blob);
+      } catch (error) {
+        console.error('Failed to capture screenshot:', error);
+        toast.error('Failed to capture screenshot');
+
+        if (mediaStreamRef.current) {
+          mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+          mediaStreamRef.current = null;
+        }
+      } finally {
+        setIsCapturing(false);
+        setSelectionStart(null);
+        setSelectionEnd(null);
+        setIsSelectionMode(false); // Turn off selection mode after capture
+      }
+    }, [isSelectionMode, selectionStart, selectionEnd, containerRef, setIsSelectionMode]);
+
+    const handleSelectionStart = useCallback(
+      (e: React.MouseEvent) => {
+        e.preventDefault();
+        e.stopPropagation();
+
+        if (!isSelectionMode) {
+          return;
+        }
+
+        const rect = e.currentTarget.getBoundingClientRect();
+        const x = e.clientX - rect.left;
+        const y = e.clientY - rect.top;
+        setSelectionStart({ x, y });
+        setSelectionEnd({ x, y });
+      },
+      [isSelectionMode],
+    );
+
+    const handleSelectionMove = useCallback(
+      (e: React.MouseEvent) => {
+        e.preventDefault();
+        e.stopPropagation();
+
+        if (!isSelectionMode || !selectionStart) {
+          return;
+        }
+
+        const rect = e.currentTarget.getBoundingClientRect();
+        const x = e.clientX - rect.left;
+        const y = e.clientY - rect.top;
+        setSelectionEnd({ x, y });
+      },
+      [isSelectionMode, selectionStart],
+    );
+
+    if (!isSelectionMode) {
+      return null;
+    }
+
+    return (
+      <div
+        className="absolute inset-0 cursor-crosshair"
+        onMouseDown={handleSelectionStart}
+        onMouseMove={handleSelectionMove}
+        onMouseUp={handleCopySelection}
+        onMouseLeave={() => {
+          if (selectionStart) {
+            setSelectionStart(null);
+          }
+        }}
+        style={{
+          backgroundColor: isCapturing ? 'transparent' : 'rgba(0, 0, 0, 0.1)',
+          userSelect: 'none',
+          WebkitUserSelect: 'none',
+          pointerEvents: 'all',
+          opacity: isCapturing ? 0 : 1,
+          zIndex: 50,
+          transition: 'opacity 0.1s ease-in-out',
+        }}
+      >
+        {selectionStart && selectionEnd && !isCapturing && (
+          <div
+            className="absolute border-2 border-blue-500 bg-blue-200 bg-opacity-20"
+            style={{
+              left: Math.min(selectionStart.x, selectionEnd.x),
+              top: Math.min(selectionStart.y, selectionEnd.y),
+              width: Math.abs(selectionEnd.x - selectionStart.x),
+              height: Math.abs(selectionEnd.y - selectionStart.y),
+            }}
+          />
+        )}
+      </div>
+    );
+  },
+);

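For readers skimming the file above, the capture path boils down to: play one frame of a display capture in a hidden video element, draw it to a canvas, crop the canvas to the dragged rectangle, then hand the PNG to the chat through the window globals shown earlier. A condensed sketch of just the frame-to-File step, assuming a getDisplayMedia stream is already playing in the video element; the cropFrameToFile helper is illustrative, and the coordinate scaling and error handling from the component are omitted:

// Crops one frame of a playing <video> element to a rectangle and returns it as a PNG File.
async function cropFrameToFile(
  video: HTMLVideoElement,
  rect: { x: number; y: number; width: number; height: number },
): Promise<File> {
  const canvas = document.createElement('canvas');
  canvas.width = rect.width;
  canvas.height = rect.height;

  const ctx = canvas.getContext('2d');

  if (!ctx) {
    throw new Error('Failed to get canvas context');
  }

  // Copy only the selected region of the current frame.
  ctx.drawImage(video, rect.x, rect.y, rect.width, rect.height, 0, 0, rect.width, rect.height);

  const blob = await new Promise<Blob>((resolve, reject) => {
    canvas.toBlob((b) => (b ? resolve(b) : reject(new Error('Failed to create blob'))), 'image/png');
  });

  return new File([blob], 'screenshot.png', { type: 'image/png' });
}
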
app/lib/stores/workbench.ts (CHANGED)

@@ -16,6 +16,7 @@ import * as nodePath from 'node:path';
 import { extractRelativePath } from '~/utils/diff';
 import { description } from '~/lib/persistence';
 import Cookies from 'js-cookie';
+import { createSampler } from '~/utils/sampler';
 
 export interface ArtifactState {
   id: string;
@@ -280,7 +281,7 @@ export class WorkbenchStore {
 
   runAction(data: ActionCallbackData, isStreaming: boolean = false) {
     if (isStreaming) {
-      this.
+      this.actionStreamSampler(data, isStreaming);
     } else {
       this.addToExecutionQueue(() => this._runAction(data, isStreaming));
     }
@@ -296,7 +297,7 @@
 
     const action = artifact.runner.actions.get()[data.actionId];
 
-    if (action.executed) {
+    if (!action || action.executed) {
       return;
     }
 
@@ -329,6 +330,10 @@
     }
   }
 
+  actionStreamSampler = createSampler(async (data: ActionCallbackData, isStreaming: boolean = false) => {
+    return await this._runAction(data, isStreaming);
+  }, 100); // TODO: remove this magic number to have it configurable
+
   #getArtifact(id: string) {
     const artifacts = this.artifacts.get();
     return artifacts[id];

app/utils/constants.ts (CHANGED)

@@ -139,11 +139,12 @@ const PROVIDER_LIST: ProviderInfo[] = [
   {
     name: 'Groq',
     staticModels: [
-      { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
       { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
       { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
+      { name: 'llama-3.2-90b-vision-preview', label: 'Llama 3.2 90b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
       { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
       { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
+      { name: 'llama-3.3-70b-versatile', label: 'Llama 3.3 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
     ],
     getApiKeyLink: 'https://console.groq.com/keys',
   },

app/utils/sampler.ts (ADDED)

@@ -0,0 +1,49 @@
+/**
+ * Creates a function that samples calls at regular intervals and captures trailing calls.
+ * - Drops calls that occur between sampling intervals
+ * - Takes one call per sampling interval if available
+ * - Captures the last call if no call was made during the interval
+ *
+ * @param fn The function to sample
+ * @param sampleInterval How often to sample calls (in ms)
+ * @returns The sampled function
+ */
+export function createSampler<T extends (...args: any[]) => any>(fn: T, sampleInterval: number): T {
+  let lastArgs: Parameters<T> | null = null;
+  let lastTime = 0;
+  let timeout: NodeJS.Timeout | null = null;
+
+  // Create a function with the same type as the input function
+  const sampled = function (this: any, ...args: Parameters<T>) {
+    const now = Date.now();
+    lastArgs = args;
+
+    // If we're within the sample interval, just store the args
+    if (now - lastTime < sampleInterval) {
+      // Set up trailing call if not already set
+      if (!timeout) {
+        timeout = setTimeout(
+          () => {
+            timeout = null;
+            lastTime = Date.now();
+
+            if (lastArgs) {
+              fn.apply(this, lastArgs);
+              lastArgs = null;
+            }
+          },
+          sampleInterval - (now - lastTime),
+        );
+      }
+
+      return;
+    }
+
+    // If we're outside the interval, execute immediately
+    lastTime = now;
+    fn.apply(this, args);
+    lastArgs = null;
+  } as T;
+
+  return sampled;
+}

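workbench.ts above wraps _runAction with createSampler(..., 100) so that while an action is streaming at most one run per 100 ms reaches the runner, and the final call always lands via the trailing timeout. A minimal standalone sketch of the same behaviour; the progress logger is purely illustrative, and the import path follows the repo's '~' alias:

import { createSampler } from '~/utils/sampler';

// At most one log per second; intermediate calls are dropped,
// but a trailing call fires with the most recent arguments.
const logProgress = createSampler((percent: number) => {
  console.log(`progress: ${percent}%`);
}, 1000);

for (let i = 0; i <= 100; i += 1) {
  logProgress(i);
}
// Logs "progress: 0%" immediately, then "progress: 100%" about a second later.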