Commit 0e86bf7
Parent: 7fdab0a
More selection tool changes
app/components/workbench/ScreenshotSelector.tsx
CHANGED
@@ -1,4 +1,4 @@
-   1 | import { memo, useCallback, useState } from 'react';
+   1 | import { memo, useCallback, useEffect, useRef, useState } from 'react';
    2 | import { toast } from 'react-toastify';
    3 |
    4 | interface ScreenshotSelectorProps {
@@ -12,124 +12,189 @@ export const ScreenshotSelector = memo(
   12 |     const [isCapturing, setIsCapturing] = useState(false);
   13 |     const [selectionStart, setSelectionStart] = useState<{ x: number; y: number } | null>(null);
   14 |     const [selectionEnd, setSelectionEnd] = useState<{ x: number; y: number } | null>(null);
-  30 |         // Capture the entire screen
-  49 |         const tempCtx = tempCanvas.getContext('2d');
-  53 |         }
-  63 |         const scaleY = video.videoHeight / window.innerHeight;
-  68 |         const scaledWidth = width * scaleX;
-  69 |         const scaledHeight = height * scaleY;
-  74 |         canvas.height = height;
-  75 |         const ctx = canvas.getContext('2d');
-  79 |         }
- 100 |           // Get the setters from the BaseChat component
- 101 |           const setUploadedFiles = (window as any).__BOLT_SET_UPLOADED_FILES__;
- 102 |           const setImageDataList = (window as any).__BOLT_SET_IMAGE_DATA_LIST__;
- 103 |           const uploadedFiles = (window as any).__BOLT_UPLOADED_FILES__ || [];
- 104 |           const imageDataList = (window as any).__BOLT_IMAGE_DATA_LIST__ || [];
- 105 |
- 106 |           if (setUploadedFiles && setImageDataList) {
- 107 |             // Update the files and image data
- 108 |             const file = new File([blob], 'screenshot.png', { type: 'image/png' });
- 109 |             setUploadedFiles([...uploadedFiles, file]);
- 110 |             setImageDataList([...imageDataList, base64Image]);
- 111 |             toast.success('Screenshot captured and added to chat');
- 112 |           } else {
- 113 |             toast.error('Could not add screenshot to chat');
- 114 |           }
- 115 |         }
- 116 |       };
- 117 |       reader.readAsDataURL(blob);
- 118 |
- 119 |       // Stop all tracks
- 120 |       stream.getTracks().forEach((track) => track.stop());
- 121 |     } finally {
- 122 |       // Clean up video element
- 123 |       document.body.removeChild(video);
- 132 |       setIsSelectionMode(false);
+  15 |     const mediaStreamRef = useRef<MediaStream | null>(null);
+  16 |     const videoRef = useRef<HTMLVideoElement | null>(null);
   17 |
+  18 |     useEffect(() => {
+  19 |       // Cleanup function to stop all tracks when component unmounts
+  20 |       return () => {
+  21 |         if (videoRef.current) {
+  22 |           videoRef.current.pause();
+  23 |           videoRef.current.srcObject = null;
+  24 |           videoRef.current.remove();
+  25 |           videoRef.current = null;
+  26 |         }
+  27 |         if (mediaStreamRef.current) {
+  28 |           mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+  29 |           mediaStreamRef.current = null;
+  30 |         }
+  31 |       };
+  32 |     }, []);
   33 |
+  34 |     const initializeStream = async () => {
+  35 |       if (!mediaStreamRef.current) {
   36 |         try {
   37 |           const stream = await navigator.mediaDevices.getDisplayMedia({
   38 |             audio: false,
   39 |             video: {
   40 |               displaySurface: 'window',
+  41 |               preferCurrentTab: true,
+  42 |               surfaceSwitching: 'include',
+  43 |               systemAudio: 'exclude',
   44 |             },
   45 |           } as MediaStreamConstraints);
   46 |
+  47 |           // Add handler for when sharing stops
+  48 |           stream.addEventListener('inactive', () => {
+  49 |             if (videoRef.current) {
+  50 |               videoRef.current.pause();
+  51 |               videoRef.current.srcObject = null;
+  52 |               videoRef.current.remove();
+  53 |               videoRef.current = null;
+  54 |             }
+  55 |             if (mediaStreamRef.current) {
+  56 |               mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+  57 |               mediaStreamRef.current = null;
+  58 |             }
+  59 |             setIsSelectionMode(false);
+  60 |             setSelectionStart(null);
+  61 |             setSelectionEnd(null);
+  62 |             setIsCapturing(false);
+  63 |           });
+  64 |
+  65 |           mediaStreamRef.current = stream;
+  66 |
+  67 |           // Initialize video element if needed
+  68 |           if (!videoRef.current) {
+  69 |             const video = document.createElement('video');
+  70 |             video.style.opacity = '0';
+  71 |             video.style.position = 'fixed';
+  72 |             video.style.pointerEvents = 'none';
+  73 |             video.style.zIndex = '-1';
+  74 |             document.body.appendChild(video);
+  75 |             videoRef.current = video;
+  76 |           }
+  77 |
   78 |           // Set up video with the stream
+  79 |           videoRef.current.srcObject = stream;
+  80 |           await videoRef.current.play();
+  81 |         } catch (error) {
+  82 |           console.error('Failed to initialize stream:', error);
+  83 |           setIsSelectionMode(false);
+  84 |           toast.error('Failed to initialize screen capture');
+  85 |         }
+  86 |       }
+  87 |       return mediaStreamRef.current;
+  88 |     };
   89 |
+  90 |     const handleCopySelection = useCallback(async () => {
+  91 |       if (!isSelectionMode || !selectionStart || !selectionEnd || !containerRef.current) return;
   92 |
+  93 |       setIsCapturing(true);
+  94 |       try {
+  95 |         const stream = await initializeStream();
+  96 |         if (!stream || !videoRef.current) return;
   97 |
+  98 |         // Wait for video to be ready
+  99 |         await new Promise((resolve) => setTimeout(resolve, 300));
  100 |
+ 101 |         // Create temporary canvas for full screenshot
+ 102 |         const tempCanvas = document.createElement('canvas');
+ 103 |         tempCanvas.width = videoRef.current.videoWidth;
+ 104 |         tempCanvas.height = videoRef.current.videoHeight;
  105 |         const tempCtx = tempCanvas.getContext('2d');
  106 |
+ 107 |         if (!tempCtx) {
+ 108 |           throw new Error('Failed to get temporary canvas context');
+ 109 |         }
  110 |
+ 111 |         // Draw the full video frame
+ 112 |         tempCtx.drawImage(videoRef.current, 0, 0);
  113 |
+ 114 |         // Calculate scale factor between video and screen
+ 115 |         const scaleX = videoRef.current.videoWidth / window.innerWidth;
+ 116 |         const scaleY = videoRef.current.videoHeight / window.innerHeight;
  117 |
+ 118 |         // Get window scroll position
+ 119 |         const scrollX = window.scrollX;
+ 120 |         const scrollY = window.scrollY;
  121 |
+ 122 |         // Get the container's position in the page
+ 123 |         const containerRect = containerRef.current.getBoundingClientRect();
  124 |
+ 125 |         // Offset adjustments for more accurate clipping
+ 126 |         const leftOffset = -9; // Adjust left position
+ 127 |         const bottomOffset = -14; // Adjust bottom position
  128 |
+ 129 |         // Calculate the scaled coordinates with scroll offset and adjustments
+ 130 |         const scaledX = Math.round(
+ 131 |           (containerRect.left + Math.min(selectionStart.x, selectionEnd.x) + scrollX + leftOffset) * scaleX,
+ 132 |         );
+ 133 |         const scaledY = Math.round(
+ 134 |           (containerRect.top + Math.min(selectionStart.y, selectionEnd.y) + scrollY + bottomOffset) * scaleY,
+ 135 |         );
+ 136 |         const scaledWidth = Math.round(Math.abs(selectionEnd.x - selectionStart.x) * scaleX);
+ 137 |         const scaledHeight = Math.round(Math.abs(selectionEnd.y - selectionStart.y) * scaleY);
  138 |
+ 139 |         // Create final canvas for the cropped area
+ 140 |         const canvas = document.createElement('canvas');
+ 141 |         canvas.width = Math.round(Math.abs(selectionEnd.x - selectionStart.x));
+ 142 |         canvas.height = Math.round(Math.abs(selectionEnd.y - selectionStart.y));
+ 143 |         const ctx = canvas.getContext('2d');
+ 144 |
+ 145 |         if (!ctx) {
+ 146 |           throw new Error('Failed to get canvas context');
  147 |         }
+ 148 |
+ 149 |         // Draw the cropped area
+ 150 |         ctx.drawImage(tempCanvas, scaledX, scaledY, scaledWidth, scaledHeight, 0, 0, canvas.width, canvas.height);
+ 151 |
+ 152 |         // Convert to blob
+ 153 |         const blob = await new Promise<Blob>((resolve, reject) => {
+ 154 |           canvas.toBlob((blob) => {
+ 155 |             if (blob) resolve(blob);
+ 156 |             else reject(new Error('Failed to create blob'));
+ 157 |           }, 'image/png');
+ 158 |         });
+ 159 |
+ 160 |         // Create a FileReader to convert blob to base64
+ 161 |         const reader = new FileReader();
+ 162 |         reader.onload = (e) => {
+ 163 |           const base64Image = e.target?.result as string;
+ 164 |
+ 165 |           // Find the textarea element
+ 166 |           const textarea = document.querySelector('textarea');
+ 167 |           if (textarea) {
+ 168 |             // Get the setters from the BaseChat component
+ 169 |             const setUploadedFiles = (window as any).__BOLT_SET_UPLOADED_FILES__;
+ 170 |             const setImageDataList = (window as any).__BOLT_SET_IMAGE_DATA_LIST__;
+ 171 |             const uploadedFiles = (window as any).__BOLT_UPLOADED_FILES__ || [];
+ 172 |             const imageDataList = (window as any).__BOLT_IMAGE_DATA_LIST__ || [];
+ 173 |
+ 174 |             if (setUploadedFiles && setImageDataList) {
+ 175 |               // Update the files and image data
+ 176 |               const file = new File([blob], 'screenshot.png', { type: 'image/png' });
+ 177 |               setUploadedFiles([...uploadedFiles, file]);
+ 178 |               setImageDataList([...imageDataList, base64Image]);
+ 179 |               toast.success('Screenshot captured and added to chat');
+ 180 |             } else {
+ 181 |               toast.error('Could not add screenshot to chat');
+ 182 |             }
+ 183 |           }
+ 184 |         };
+ 185 |         reader.readAsDataURL(blob);
  186 |       } catch (error) {
  187 |         console.error('Failed to capture screenshot:', error);
  188 |         toast.error('Failed to capture screenshot');
+ 189 |         if (mediaStreamRef.current) {
+ 190 |           mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+ 191 |           mediaStreamRef.current = null;
+ 192 |         }
  193 |       } finally {
  194 |         setIsCapturing(false);
  195 |         setSelectionStart(null);
  196 |         setSelectionEnd(null);
+ 197 |         setIsSelectionMode(false); // Turn off selection mode after capture
  198 |       }
  199 |     }, [isSelectionMode, selectionStart, selectionEnd, containerRef, setIsSelectionMode]);
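
The capture path above hands the screenshot to the chat through globals on window (__BOLT_SET_UPLOADED_FILES__, __BOLT_SET_IMAGE_DATA_LIST__, __BOLT_UPLOADED_FILES__, __BOLT_IMAGE_DATA_LIST__). How those globals are populated is not shown in this commit; a minimal sketch of the kind of wiring the selector appears to assume, with the hook name and state shapes being guesses rather than anything taken from the diff, could look like this:

// Hypothetical chat-side wiring (not part of this diff): expose the upload
// state and its setters on window so ScreenshotSelector can append to them.
import { useEffect, useState } from 'react';

export function useScreenshotBridge() {
  const [uploadedFiles, setUploadedFiles] = useState<File[]>([]);
  const [imageDataList, setImageDataList] = useState<string[]>([]);

  useEffect(() => {
    const w = window as any;
    w.__BOLT_SET_UPLOADED_FILES__ = setUploadedFiles;
    w.__BOLT_SET_IMAGE_DATA_LIST__ = setImageDataList;
    w.__BOLT_UPLOADED_FILES__ = uploadedFiles;
    w.__BOLT_IMAGE_DATA_LIST__ = imageDataList;
  }, [uploadedFiles, imageDataList]);

  return { uploadedFiles, imageDataList };
}

The selector reads the current arrays and calls the setters with new arrays, so the chat re-renders with the appended screenshot; any equivalent wiring that keeps the four globals in sync would serve the same purpose.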
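
The least obvious part of the new handleCopySelection is mapping the on-page selection rectangle into pixel coordinates of the captured frame. Factored out as a pure helper (the function name and signature here are illustrative; the arithmetic is the same as in the diff, including the hard-coded -9 and -14 nudges), the conversion is roughly:

interface Point {
  x: number;
  y: number;
}

// Convert a selection made in viewport coordinates into a crop rectangle on
// the captured frame. scaleX/scaleY account for the frame having a different
// resolution than the window; the scroll offsets and container rect move the
// selection from container-local to page coordinates before scaling.
function selectionToFrameRect(
  start: Point,
  end: Point,
  containerRect: DOMRect,
  frameWidth: number,
  frameHeight: number,
) {
  const scaleX = frameWidth / window.innerWidth;
  const scaleY = frameHeight / window.innerHeight;

  // Empirical nudges carried over from the diff (note that bottomOffset is
  // applied to the top coordinate there as well).
  const leftOffset = -9;
  const bottomOffset = -14;

  return {
    x: Math.round((containerRect.left + Math.min(start.x, end.x) + window.scrollX + leftOffset) * scaleX),
    y: Math.round((containerRect.top + Math.min(start.y, end.y) + window.scrollY + bottomOffset) * scaleY),
    width: Math.round(Math.abs(end.x - start.x) * scaleX),
    height: Math.round(Math.abs(end.y - start.y) * scaleY),
  };
}

For example, if the captured frame is exactly twice the window size (scaleX = scaleY = 2), a 100×50 selection maps to a 200×100 source region, which the diff then draws onto a final canvas sized to the unscaled 100×50 selection.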