Upload 29 files
- .gitignore +41 -0
- README.md +36 -11
- app/components/ASRResultDisplay.tsx +31 -0
- app/components/AudioInput.tsx +173 -0
- app/components/ClassificationResultDisplay.tsx +126 -0
- app/components/ImageInput.tsx +88 -0
- app/components/ModelInput.tsx +36 -0
- app/components/Progress.tsx +57 -0
- app/components/TextInput.tsx +33 -0
- app/components/audioUtils.ts +70 -0
- app/components/modelConfig.ts +29 -0
- app/favicon.ico +0 -0
- app/globals.css +110 -0
- app/layout.tsx +34 -0
- app/page.tsx +187 -0
- app/worker.js +131 -0
- eslint.config.mjs +16 -0
- next-env.d.ts +5 -0
- next.config.ts +7 -0
- package-lock.json +0 -0
- package.json +29 -0
- postcss.config.mjs +5 -0
- public/file.svg +1 -0
- public/globe.svg +1 -0
- public/next.svg +1 -0
- public/vercel.svg +1 -0
- public/window.svg +1 -0
- tailwind.config.ts +26 -0
- tsconfig.json +27 -0
.gitignore
ADDED
@@ -0,0 +1,41 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# env files (can opt-in for committing if needed)
.env*

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
README.md
CHANGED
@@ -1,11 +1,36 @@
This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app).

## Getting Started

First, run the development server:

```bash
npm run dev
# or
yarn dev
# or
pnpm dev
# or
bun dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.

This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel.

## Learn More

To learn more about Next.js, take a look at the following resources:

- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.

You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!

## Deploy on Vercel

The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.

Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.
app/components/ASRResultDisplay.tsx
ADDED
@@ -0,0 +1,31 @@
'use client';

import React from "react";

interface ASRResultDisplayProps {
  result: any;
  ready: boolean | null;
  task: string;
}

export const ASRResultDisplay = ({ result, ready }: ASRResultDisplayProps) => {
  if (ready === false) {
    return <div className="text-gray-400">Loading model...</div>;
  }
  if (!result) {
    return <div className="text-gray-400">No transcription yet.</div>;
  }
  if (result.error) {
    return <div className="text-red-500">Error: {result.error}</div>;
  }
  return (
    <div className="w-full text-lg text-gray-800 break-words">
      <span className="font-semibold">Transcript:</span>
      <div className="mt-2 bg-gray-100 p-3 rounded-lg">
        {result.text || "No text found."}
      </div>
    </div>
  );
};
app/components/AudioInput.tsx
ADDED
@@ -0,0 +1,173 @@
'use client';

// AudioInput.tsx
import React, { useRef, useState } from "react";
import { readAudio } from './audioUtils'; // Import the updated utility

interface AudioInputProps {
  input: Blob | null;
  setInput: (v: Blob | null) => void;
  classify: (input: Float32Array) => void; // Still needs Float32Array
  ready: boolean | null;
}

export const AudioInput = ({ input, setInput, classify, ready }: AudioInputProps) => {
  const [recording, setRecording] = useState(false);
  const [audioUrl, setAudioUrl] = useState<string | null>(null);
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const chunks = useRef<Blob[]>([]);
  const fileInputRef = useRef<HTMLInputElement>(null);

  const handleDrop = async (e: React.DragEvent<HTMLDivElement>) => {
    e.preventDefault();
    if (e.dataTransfer.files.length > 0) {
      const file = e.dataTransfer.files[0];
      if (file.type.startsWith("audio/")) {
        setInput(file);
        // Revoke previous URL to free memory
        if (audioUrl) URL.revokeObjectURL(audioUrl);
        setAudioUrl(URL.createObjectURL(file));
        try {
          const audioData = await readAudio(file); // Decodes AND resamples to Float32Array PCM
          classify(audioData);
        } catch (error) {
          console.error("Error reading or processing audio file:", error);
          // Handle error, e.g., show a message to the user
        }
      }
    }
  };

  const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
    if (e.target.files && e.target.files.length > 0) {
      const file = e.target.files[0];
      if (file.type.startsWith("audio/")) {
        setInput(file);
        // Revoke previous URL to free memory
        if (audioUrl) URL.revokeObjectURL(audioUrl);
        setAudioUrl(URL.createObjectURL(file));
        try {
          const audioData = await readAudio(file); // Decodes AND resamples to Float32Array PCM
          classify(audioData);
        } catch (error) {
          console.error("Error reading or processing audio file:", error);
          // Handle error
        }
      }
    }
  };

  const startRecording = async () => {
    try {
      setRecording(true);
      chunks.current = [];
      // Ensure audioUrl is cleared for a new recording
      if (audioUrl) URL.revokeObjectURL(audioUrl);
      setAudioUrl(null);
      setInput(null); // Clear previous input blob

      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      const mediaRecorder = new window.MediaRecorder(stream);
      mediaRecorderRef.current = mediaRecorder;

      mediaRecorder.ondataavailable = (e) => {
        if (e.data.size > 0) {
          chunks.current.push(e.data);
        }
      };

      mediaRecorder.onstop = async () => {
        const blob = new Blob(chunks.current, { type: "audio/webm" }); // MediaRecorder outputs a Blob
        setInput(blob); // Set the Blob input if needed elsewhere
        // Revoke previous URL
        if (audioUrl) URL.revokeObjectURL(audioUrl);
        setAudioUrl(URL.createObjectURL(blob)); // Create URL for playback

        try {
          const audioData = await readAudio(blob); // Decode AND resample Blob to Float32Array PCM
          classify(audioData); // Pass the Float32Array PCM data
        } catch (error) {
          console.error("Error reading or processing recorded audio:", error);
          // Handle error
        } finally {
          // Always stop tracks after recording stops
          stream.getTracks().forEach(track => track.stop());
        }
      };

      mediaRecorder.start();
    } catch (error) {
      console.error("Error starting recording:", error);
      setRecording(false); // Ensure recording state is reset on error
      // Handle error, e.g., show a message to the user that mic access failed
    }
  };

  const stopRecording = async () => {
    if (!mediaRecorderRef.current) return;
    // The actual classification and setting of input/audioUrl happens in mediaRecorder.onstop
    mediaRecorderRef.current.stop();
    setRecording(false); // Set recording state to false immediately
  };

  // Error handling and URL cleanup: revoke object URLs when the component
  // unmounts or audioUrl changes.
  React.useEffect(() => {
    return () => {
      if (audioUrl) URL.revokeObjectURL(audioUrl);
    };
  }, [audioUrl]);

  return (
    <div className="flex flex-col gap-4 h-full">
      <label className="block text-gray-600 mb-2 text-sm font-medium">Upload or record audio</label>
      <div
        className={`flex-1 flex flex-col items-center justify-center border-2 border-dashed rounded-lg p-6 bg-gray-50 transition
          ${ready === false ? 'border-gray-200 text-gray-400 cursor-not-allowed' : 'border-gray-300 cursor-pointer hover:border-blue-400'}
        `}
        onDrop={handleDrop}
        onDragOver={e => e.preventDefault()}
        onClick={() => ready !== false && fileInputRef.current?.click()} // Prevent click if not ready
        style={{ minHeight: 120 }}
      >
        <input
          ref={fileInputRef}
          type="file"
          accept="audio/*"
          style={{ display: "none" }}
          onChange={handleFileChange}
          disabled={ready === false}
        />
        <span className="text-gray-500 text-center">
          { ready === false ? "Loading models..." : "Drag & drop audio file here or click to select" }
        </span>
      </div>
      <div className="flex items-center gap-4">
        {!recording ? (
          <button
            className="px-4 py-2 bg-green-600 text-white rounded hover:bg-green-700 transition disabled:opacity-50 disabled:cursor-not-allowed"
            onClick={startRecording}
            disabled={ready === false} // Disable record button if not ready
          >
            Record
          </button>
        ) : (
          <button
            className="px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700 transition"
            onClick={stopRecording}
          >
            Stop
          </button>
        )}
        {/* Only show audio player if not recording and audioUrl exists */}
        {!recording && audioUrl && (
          <audio controls src={audioUrl} className="ml-4 flex-1">
            Your browser does not support the audio element.
          </audio>
        )}
        {ready === false && <span className="text-gray-600 ml-auto">Loading...</span>} {/* Optional loading indicator */}
      </div>
    </div>
  );
};
app/components/ClassificationResultDisplay.tsx
ADDED
@@ -0,0 +1,126 @@
'use client';

// Color palette for labels
const LABEL_COLORS = [
  ['bg-green-100 text-green-800', 'bg-green-400'],
  ['bg-blue-100 text-blue-800', 'bg-blue-400'],
  ['bg-purple-100 text-purple-800', 'bg-purple-400'],
  ['bg-yellow-100 text-yellow-800', 'bg-yellow-400'],
  ['bg-pink-100 text-pink-800', 'bg-pink-400'],
  ['bg-indigo-100 text-indigo-800', 'bg-indigo-400'],
  ['bg-red-100 text-red-800', 'bg-red-400'],
  ['bg-teal-100 text-teal-800', 'bg-teal-400'],
  ['bg-orange-100 text-orange-800', 'bg-orange-400'],
];

// Deterministically assign a color to each label
const labelColorMap: Record<string, number> = {};
let colorIndex = 0;
function getColorForLabel(label: string): string[] {
  if (!(label in labelColorMap)) {
    labelColorMap[label] = colorIndex % LABEL_COLORS.length;
    colorIndex++;
  }
  return LABEL_COLORS[labelColorMap[label]];
}

export function ClassificationResultDisplay({
  result,
  ready,
  task,
}: {
  result: any;
  ready: boolean | null;
  task: string;
}) {
  if (ready === null) {
    return null;
  }
  if (!ready || !result) {
    return (
      <div className="text-center text-gray-400 animate-pulse">
        Results will appear here
      </div>
    );
  }

  if (task === 'image-classification') {
    return (
      <div className="space-y-6 w-full">
        <div className="text-center">
          <h2 className="text-2xl font-bold text-gray-800 mb-2">
            {result[0].label}
          </h2>
          <div className="w-full bg-gray-200 rounded-full h-3 mb-2">
            <div
              className={`h-3 rounded-full transition-all duration-1000 ease-out ${getColorForLabel(result[0].label)[1]}`}
              style={{ width: `${result[0].score * 100}%` }}
            />
          </div>
          <div className="text-sm text-gray-500">
            Confidence: {(result[0].score * 100).toFixed(2)}%
          </div>
        </div>
        <div className="border-t border-gray-200 my-4"></div>
        <div className="space-y-3">
          {result.slice(1).map((item: any) => (
            <div key={item.label} className="space-y-1">
              <div className="flex justify-between items-center">
                <span className="text-gray-700 text-sm">{item.label}</span>
                <span className={`px-2 py-0.5 rounded-full text-xs font-medium ${getColorForLabel(item.label)[0]}`}>
                  {(item.score * 100).toFixed(2)}%
                </span>
              </div>
              <div className="w-full bg-gray-200 rounded-full h-2">
                <div
                  className={`h-2 rounded-full transition-all duration-1000 ease-out ${getColorForLabel(item.label)[1]}`}
                  style={{ width: `${item.score * 100}%` }}
                />
              </div>
            </div>
          ))}
        </div>
      </div>
    );
  }

  // Default: text-classification
  return (
    <div className="space-y-6 w-full">
      <div className="text-center">
        <h2 className="text-2xl font-bold text-gray-800 mb-2">
          {result[0].label}
        </h2>
        <div className="w-full bg-gray-200 rounded-full h-3 mb-2">
          <div
            className={`h-3 rounded-full transition-all duration-1000 ease-out ${getColorForLabel(result[0].label)[1]}`}
            style={{ width: `${result[0].score * 100}%` }}
          />
        </div>
        <div className="text-sm text-gray-500">
          Confidence: {(result[0].score * 100).toFixed(2)}%
        </div>
      </div>
      <div className="border-t border-gray-200 my-4"></div>
      <div className="space-y-3">
        {result.slice(1).map((item: any) => (
          <div key={item.label} className="space-y-1">
            <div className="flex justify-between items-center">
              <span className="text-gray-700 text-sm">{item.label}</span>
              <span className={`px-2 py-0.5 rounded-full text-xs font-medium ${getColorForLabel(item.label)[0]}`}>
                {(item.score * 100).toFixed(2)}%
              </span>
            </div>
            <div className="w-full bg-gray-200 rounded-full h-2">
              <div
                className={`h-2 rounded-full transition-all duration-1000 ease-out ${getColorForLabel(item.label)[1]}`}
                style={{ width: `${item.score * 100}%` }}
              />
            </div>
          </div>
        ))}
      </div>
    </div>
  );
}
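For reference, this component assumes `result` is a confidence-sorted array of `{ label, score }` objects — the shape a Transformers.js classification pipeline returns when called with `top_k`. A minimal illustrative value (the labels are hypothetical and depend on the loaded model):

```ts
// Illustrative only — actual labels come from the model's config.
const result = [
  { label: "positive", score: 0.93 },
  { label: "neutral", score: 0.05 },
  { label: "negative", score: 0.02 },
];
// result[0] drives the headline bar; result.slice(1) renders the smaller rows.
```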
app/components/ImageInput.tsx
ADDED
@@ -0,0 +1,88 @@
'use client';

import React, { useRef, useState } from "react";
import { FiTrash2 } from "react-icons/fi"; // Use trash bin icon

interface ImageInputProps {
  image: File | null;
  setImage: (f: File | null) => void;
  classify: (input: string | Blob) => void;
  ready: boolean | null;
}

export const ImageInput = ({ image, setImage, classify, ready }: ImageInputProps) => {
  const inputRef = useRef<HTMLInputElement>(null);
  const [dragOver, setDragOver] = useState(false); // Track drag-over state

  const handleFile = (file: File) => {
    setImage(file);
    const reader = new FileReader();
    reader.onload = (ev) => {
      classify(ev.target?.result as string);
    };
    reader.readAsDataURL(file);
  };

  // Handle deleting the selected image
  const handleDelete = (e: React.MouseEvent) => {
    e.stopPropagation();
    setImage(null);
  };

  return (
    <div
      className={`flex flex-col items-center justify-center border-2 border-dashed rounded-lg p-8 min-h-[220px] transition-all duration-300 cursor-pointer bg-gray-50 relative
        ${ready === false ? 'opacity-50 pointer-events-none' : ''}
        ${dragOver ? 'border-blue-600 bg-blue-50 scale-105 shadow-lg' : 'hover:border-blue-400'}`}
      tabIndex={0}
      onClick={() => inputRef.current?.click()}
      onDrop={e => {
        e.preventDefault();
        setDragOver(false);
        if (e.dataTransfer.files.length > 0 && e.dataTransfer.files[0].type.startsWith('image/')) {
          handleFile(e.dataTransfer.files[0]);
        }
      }}
      onDragOver={e => {
        e.preventDefault();
        setDragOver(true);
      }}
      onDragLeave={() => setDragOver(false)}
    >
      {image ? (
        <div className="relative w-full flex flex-col items-center">
          {/* Delete button at top-right of image container */}
          <button
            onClick={handleDelete}
            className="absolute -top-3 -right-3 z-10 bg-white border border-gray-200 hover:bg-red-500 hover:text-white text-gray-700 rounded-full p-1 shadow transition-colors w-7 h-7 flex items-center justify-center"
            aria-label="Remove image"
            tabIndex={0}
            type="button"
          >
            <FiTrash2 size={16} className="text-red-500 overflow-visible" />
          </button>
          <img
            src={URL.createObjectURL(image)}
            alt="Uploaded"
            className="mx-auto max-h-48 rounded-lg shadow-md mb-4 animate-fade-in"
          />
        </div>
      ) : (
        <span className={`text-lg ${dragOver ? 'text-blue-600 animate-pulse' : 'text-gray-400 animate-pulse'}`}>
          Drop image here, or click to select
        </span>
      )}
      <input
        ref={inputRef}
        type="file"
        accept="image/*"
        style={{ display: 'none' }}
        onChange={e => {
          if (e.target.files && e.target.files[0]) {
            handleFile(e.target.files[0]);
          }
        }}
      />
    </div>
  );
};
app/components/ModelInput.tsx
ADDED
@@ -0,0 +1,36 @@
'use client';

interface ModelInputProps {
  currentModel: string;
  onModelChange: (modelName: string) => void;
  onLoadModel: () => void;
  ready: boolean | null;
  defaultModel: string;
}

export const ModelInput = ({
  currentModel,
  onModelChange,
  onLoadModel,
  ready,
  defaultModel,
}: ModelInputProps) => {
  return (
    <div className="mb-8 flex flex-col md:flex-row items-center gap-4">
      <input
        type="text"
        className="flex-1 p-3 rounded-lg border border-gray-300"
        value={currentModel}
        onChange={(e) => onModelChange(e.target.value)}
        placeholder={`Enter model name (e.g. ${defaultModel})`}
      />
      <button
        className="px-6 py-3 bg-blue-600 text-white rounded-lg font-semibold hover:bg-blue-700 transition"
        onClick={onLoadModel}
        disabled={ready === false}
      >
        Load Model
      </button>
    </div>
  );
};
app/components/Progress.tsx
ADDED
@@ -0,0 +1,57 @@
'use client';

interface ProgressProps {
  text?: string;
  percentage?: number;
  loaded?: number;
  total?: number;
  done?: boolean;
}

// Helper to format bytes to KB/MB/GB
function formatBytes(bytes?: number): string {
  if (bytes === undefined || bytes === null) return '';
  if (bytes < 1024) return `${bytes} B`;
  if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
  if (bytes < 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(2)} MB`;
  return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`;
}

export default function Progress({ text, percentage, loaded, total, done }: ProgressProps) {
  // Ensure percentage is always a number between 0 and 100
  // percentage = Math.min(100, Math.max(0,
  //   percentage ?? (loaded && total ? (loaded / total) * 100 : 0)
  // ));

  return (
    <div className="w-full max-w-4xl mx-auto">
      {text && (
        <div className="flex justify-between text-sm text-gray-600 mb-1 items-center">
          <span className="truncate max-w-[70%]">{text}</span>
          {done ? (
            <span className="flex items-center text-green-600 font-semibold whitespace-nowrap">
              <svg className="w-5 h-5 mr-1 text-green-500" fill="none" stroke="currentColor" strokeWidth="3" viewBox="0 0 24 24">
                <path strokeLinecap="round" strokeLinejoin="round" d="M5 13l4 4L19 7" />
              </svg>
              Download completed
            </span>
          ) : (
            <span className="whitespace-nowrap">
              {loaded !== undefined && total !== undefined && percentage !== undefined && total > 0
                ? `${formatBytes(loaded)} / ${formatBytes(total)} (${percentage.toFixed(1)}%)`
                : `${percentage}%`}
            </span>
          )}
        </div>
      )}
      <div className="w-full bg-gray-200 rounded-full h-3">
        <div
          className={`${done
            ? 'bg-green-500'
            : 'bg-gradient-to-r from-blue-400 to-blue-600'} h-3 rounded-full transition-all duration-300`}
          style={{ width: `${percentage}%` }}
        ></div>
      </div>
    </div>
  );
}
app/components/TextInput.tsx
ADDED
@@ -0,0 +1,33 @@
'use client';

import React from "react";

interface TextInputProps {
  input: string;
  setInput: (v: string) => void;
  classify: (input: string) => void;
  ready: boolean | null;
}

export const TextInput = ({ input, setInput, classify, ready }: TextInputProps) => {
  return (
    <div className="h-full flex flex-col">
      <label htmlFor="input-text" className="block text-gray-600 mb-2 text-sm font-medium">
        Enter your text
      </label>
      <textarea
        id="input-text"
        className="flex-1 w-full p-4 rounded-lg bg-gray-50 border border-gray-200 text-gray-800 placeholder-gray-400 focus:outline-none focus:ring-2 focus:ring-blue-200 focus:border-blue-300 transition-all duration-300 resize-none"
        placeholder="Type something to analyze sentiment..."
        value={input}
        disabled={ready === false}
        onChange={e => {
          setInput(e.target.value);
          if (e.target.value.trim() !== '') {
            classify(e.target.value);
          }
        }}
      />
    </div>
  );
};
app/components/audioUtils.ts
ADDED
@@ -0,0 +1,70 @@
// audioUtils.ts
const SAMPLE_RATE = 16000; // ASR models typically expect 16kHz audio

/**
 * Reads an audio Blob (or File) and converts it to a Float32Array of PCM audio data
 * at a specified sample rate.
 * @param file The audio Blob or File.
 * @returns A Promise resolving with the Float32Array of resampled audio data.
 */
export async function readAudio(file: Blob): Promise<Float32Array> {
  const audioContext = new AudioContext(); // Use a standard AudioContext to decode initially
  const arrayBuffer = await file.arrayBuffer();

  // Decode the audio data from the ArrayBuffer. This handles various formats (mp3, wav, webm, etc.)
  // and gives you an AudioBuffer with raw PCM data at the original sample rate.
  const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);

  // Check if resampling is needed
  if (audioBuffer.sampleRate === SAMPLE_RATE) {
    // If the sample rate matches, return the first channel's data directly.
    // Ensure it's mono; if stereo, you might need to mix or pick a channel.
    if (audioBuffer.numberOfChannels > 1) {
      // For ASR, mono is usually sufficient and expected by models,
      // so just take the first channel for simplicity.
      const monoData = audioBuffer.getChannelData(0);
      // If needed, mix channels instead:
      // const channelData1 = audioBuffer.getChannelData(0);
      // const channelData2 = audioBuffer.getChannelData(1);
      // const monoData = new Float32Array(channelData1.length);
      // for (let i = 0; i < monoData.length; i++) {
      //   monoData[i] = (channelData1[i] + channelData2[i]) / 2;
      // }
      return monoData;
    } else {
      return audioBuffer.getChannelData(0); // Already mono
    }
  } else {
    // Resampling is needed
    const targetSampleRate = SAMPLE_RATE;
    const numberOfChannels = 1; // ASR models typically expect mono input

    // Calculate the length of the resampled buffer
    const duration = audioBuffer.duration;
    const resampledLength = Math.ceil(duration * targetSampleRate);

    // Create an OfflineAudioContext for resampling.
    // This context renders audio offline and allows changing the sample rate.
    const offlineAudioContext = new OfflineAudioContext(
      numberOfChannels,
      resampledLength,
      targetSampleRate
    );

    // Create a buffer source node from the original AudioBuffer
    const source = offlineAudioContext.createBufferSource();
    source.buffer = audioBuffer;

    // Connect the source to the offline context's destination
    source.connect(offlineAudioContext.destination);

    // Start the source (playback in the offline context)
    source.start(0);

    // Render the audio. This performs the resampling.
    const resampledBuffer = await offlineAudioContext.startRendering();

    // Return the resampled audio data from the first channel
    return resampledBuffer.getChannelData(0);
  }
}
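As a usage sketch (the helper name is illustrative; the real call sites are in `AudioInput.tsx`), the resampled PCM can be posted straight to the inference worker:

```ts
import { readAudio } from "./audioUtils";

// Hypothetical helper: decode/resample a user-supplied file, then hand the
// mono 16 kHz Float32Array to the worker for speech recognition.
async function transcribeFile(file: File, worker: Worker) {
  const pcm = await readAudio(file); // mono Float32Array at 16 kHz
  worker.postMessage({ input: pcm, task: "automatic-speech-recognition" });
}
```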
app/components/modelConfig.ts
ADDED
@@ -0,0 +1,29 @@
import { TextInput } from "@/app/components/TextInput";
import { ImageInput } from "@/app/components/ImageInput";
import { ClassificationResultDisplay } from "@/app/components/ClassificationResultDisplay";
import { AudioInput } from "@/app/components/AudioInput";
import { ASRResultDisplay } from "@/app/components/ASRResultDisplay";

export interface ModelConfig {
  inputComponent: React.ComponentType<any>;
  outputComponent: React.ComponentType<any>;
  defaultModel: string;
}

export const modelConfigMap: Record<string, ModelConfig> = {
  "text-classification": {
    inputComponent: TextInput,
    outputComponent: ClassificationResultDisplay,
    defaultModel: "onnx-community/rubert-tiny-sentiment-balanced-ONNX"
  },
  "image-classification": {
    inputComponent: ImageInput,
    outputComponent: ClassificationResultDisplay,
    defaultModel: "onnx-community/vit-tiny-patch16-224-ONNX"
  },
  "automatic-speech-recognition": {
    inputComponent: AudioInput,
    outputComponent: ASRResultDisplay,
    defaultModel: "onnx-community/moonshine-tiny-ONNX"
  }
};
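This map acts as a small task registry: `page.tsx` looks up `inputComponent` and `outputComponent` by task key, so supporting another task is mostly a matter of adding an entry. A hypothetical sketch (the model id is illustrative, and the new task would also need an `<option>` in `page.tsx`'s select):

```ts
// Hypothetical fourth entry — reuses existing components for brevity; a real
// translation task would likely get its own result display component.
modelConfigMap["translation"] = {
  inputComponent: TextInput,
  outputComponent: ASRResultDisplay, // placeholder: renders `result.text`-style output
  defaultModel: "Xenova/opus-mt-en-fr", // illustrative model id
};
```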
app/favicon.ico
ADDED
app/globals.css
ADDED
@@ -0,0 +1,110 @@
@import "tailwindcss";

:root {
  --background: #ffffff;
  --foreground: #171717;

  /* Styles from to-do.md (index.css) */
  font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif;
  line-height: 1.5;
  font-weight: 400;
  color: #213547; /* Base text color for light mode */
  background-color: #ffffff; /* Base background for light mode */

  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
  -webkit-text-size-adjust: 100%;
}

.dark {
  --background: #0a0a0a;
  --foreground: #ededed;
  color: #f9f9f9; /* Base text color for dark mode */
  background-color: #242424; /* Base background for dark mode */
}

body {
  background: var(--background);
  color: var(--foreground);
  /* Styles from to-do.md (index.css) */
  margin: 0;
  display: flex;
  place-items: center;
  min-width: 320px;
  min-height: 100vh;
}

/* Styles from to-do.md (index.css & App.css) */
h1 {
  font-size: 3.2em;
  line-height: 1;
}

h1,
h2 {
  margin: 8px;
}

select {
  padding: 0.3em;
  cursor: pointer;
}

textarea {
  padding: 0.6em;
}

button {
  padding: 0.6em 1.2em;
  cursor: pointer;
  font-weight: 500;
}

button[disabled] {
  cursor: not-allowed;
}

select,
textarea,
button {
  border-radius: 8px;
  border: 1px solid transparent;
  font-size: 1em;
  font-family: inherit; /* Use inherit to respect the :root font-family */
  background-color: #f9f9f9; /* Light mode background for elements */
  color: #213547; /* Light mode text color for elements */
  transition: border-color 0.25s;
}

.dark select,
.dark textarea,
.dark button {
  background-color: #1a1a1a; /* Dark mode background for elements */
  color: #f9f9f9; /* Dark mode text color for elements */
}

select:hover,
textarea:hover,
button:not([disabled]):hover {
  border-color: #646cff;
}

select:focus,
select:focus-visible,
textarea:focus,
textarea:focus-visible,
button:focus,
button:focus-visible {
  outline: 4px auto -webkit-focus-ring-color;
}

/* Keep Tailwind theme inline if needed, though base styles might override */
@theme inline {
  --color-background: var(--background);
  --color-foreground: var(--foreground);
  --font-sans: var(--font-geist-sans);
  --font-mono: var(--font-geist-mono);
}
app/layout.tsx
ADDED
@@ -0,0 +1,34 @@
import type { Metadata } from "next";
import { Geist, Geist_Mono } from "next/font/google";
import "./globals.css";

const geistSans = Geist({
  variable: "--font-geist-sans",
  subsets: ["latin"],
});

const geistMono = Geist_Mono({
  variable: "--font-geist-mono",
  subsets: ["latin"],
});

export const metadata: Metadata = {
  title: "Create Next App",
  description: "Generated by create next app",
};

export default function RootLayout({
  children,
}: Readonly<{
  children: React.ReactNode;
}>) {
  return (
    <html lang="en">
      <body
        className={`${geistSans.variable} ${geistMono.variable} antialiased flex min-h-screen justify-center items-center`}
      >
        {children}
      </body>
    </html>
  );
}
app/page.tsx
ADDED
@@ -0,0 +1,187 @@
'use client';

import { useEffect, useRef, useState, useCallback } from 'react';
import Progress from './components/Progress';
import { modelConfigMap } from './components/modelConfig';
import { ModelInput } from './components/ModelInput';

export default function Home() {
  const [result, setResult] = useState<any | null>(null);
  const [ready, setReady] = useState<boolean | null>(null);
  const [progressItems, setProgressItems] = useState<any[]>([]);
  const [input, setInput] = useState('');
  const [task, setTask] = useState('text-classification');
  const [modelName, setModelName] = useState(() => modelConfigMap['text-classification'].defaultModel);
  const [currentModel, setCurrentModel] = useState(modelName);
  const worker = useRef<Worker | null>(null);
  const [image, setImage] = useState<File | null>(null);

  // Update modelName and currentModel when task changes
  useEffect(() => {
    const defaultModel = modelConfigMap[task].defaultModel;
    setModelName(defaultModel);
    setCurrentModel(defaultModel);
  }, [task]);

  useEffect(() => {
    if (!worker.current) {
      worker.current = new Worker(new URL('./worker.js', import.meta.url), {
        type: 'module'
      });
    }
    const onMessageReceived = (e: MessageEvent) => {
      switch (e.data.status) {
        case 'initiate':
          setReady(false);
          setProgressItems(prev => [...prev, { ...e.data, progress: 0 }]);
          break;
        case 'progress':
          setProgressItems(prev => prev.map(item => {
            if (item.file === e.data.file) {
              return {
                ...item,
                progress: e.data.progress,
                loaded: e.data.loaded,
                total: e.data.total,
                name: e.data.name
              };
            }
            return item;
          }));
          break;
        case 'done':
          setProgressItems(prev => {
            const updated = prev.map(item =>
              item.file === e.data.file ? { ...item, done: true, progress: 100 } : item
            );
            setTimeout(() => {
              setProgressItems(current =>
                current.filter(item => item.file !== e.data.file)
              );
            }, 1000);
            return updated;
          });
          break;
        case 'ready':
          setReady(true);
          setCurrentModel(e.data.file || modelName);
          setProgressItems(prev => prev.filter(item => item.file !== e.data.file));
          break;
        case 'complete':
          setResult(e.data.output);
          break;
        case 'error':
          setResult({ label: 'Error', score: 0, error: e.data.error });
          break;
      }
    };

    worker.current.addEventListener('message', onMessageReceived);
    return () => worker.current?.removeEventListener('message', onMessageReceived);
  }, [modelName]);

  const classify = useCallback((inputValue: string | Blob) => {
    if (worker.current) {
      worker.current.postMessage({
        input: inputValue,
        modelName: currentModel,
        task,
      });
    }
  }, [currentModel, task]);

  const handleLoadModel = () => {
    setReady(false);
    setResult(null);
    setProgressItems([]);
    setCurrentModel(modelName);
    if (worker.current) {
      worker.current.postMessage({ action: 'load-model', modelName, task });
    }
  };

  useEffect(() => {
    setResult(null);
    setInput('');
    setImage(null);
    setModelName(modelConfigMap[task].defaultModel);
  }, [task]);

  const InputComponent = modelConfigMap[task].inputComponent;
  const OutputComponent = modelConfigMap[task].outputComponent;

  return (
    <main className="min-h-screen w-full bg-transparent backdrop-blur-sm">
      <div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-12">
        <div className="text-center mb-12 transform transition-all duration-500 ease-in-out">
          <h1 className="text-4xl md:text-5xl font-bold text-gray-800 mb-2 hover:text-blue-600 transition-colors">
            Transformers.js Playground
          </h1>
          <p className="text-gray-500 text-lg">
            Powered by Transformers.js & Next.js (Local browser inference)
          </p>
        </div>
        <div className="mb-6 flex justify-center">
          <select
            value={task}
            onChange={e => setTask(e.target.value)}
            className="p-2 rounded border border-gray-300 text-lg font-medium shadow-sm transition"
          >
            <option value="text-classification">Text Classification</option>
            <option value="image-classification">Image Classification</option>
            <option value="automatic-speech-recognition">Automatic Speech Recognition</option>
          </select>
        </div>
        <ModelInput
          currentModel={modelName}
          onModelChange={setModelName}
          onLoadModel={handleLoadModel}
          ready={ready}
          defaultModel={modelConfigMap[task].defaultModel} // Pass the task's default model here
        />
        <div className="grid grid-cols-1 lg:grid-cols-2 gap-8">
          <div className="bg-white rounded-xl shadow-sm border border-gray-200 p-6 transition-all duration-300 hover:shadow-md">
            <InputComponent
              input={input}
              setInput={setInput}
              classify={classify}
              ready={ready}
              image={image}
              setImage={setImage}
            />
          </div>
          <div className="bg-white rounded-xl shadow-sm border border-gray-200 p-6 transition-all duration-300 hover:shadow-md flex flex-col">
            <h3 className="text-gray-600 mb-4 text-sm font-medium">Result</h3>
            <div className="flex-1 bg-gray-50 rounded-lg p-4 flex items-center justify-center">
              <OutputComponent
                result={result}
                ready={ready}
                task={task}
              />
            </div>
          </div>
        </div>

        {ready === false && (
          <div className="mt-12 bg-white rounded-xl shadow-sm border border-gray-200 p-6 max-w-7xl mx-auto">
            <h3 className="text-gray-600 mb-6 text-xl font-medium">Loading Model</h3>
            <div className="space-y-6">
              {progressItems.map((data, i) => (
                <div key={i} className="transform transition-all duration-300">
                  <Progress
                    text={`${data.name || ''} - ${data.file}`}
                    percentage={data.progress}
                    loaded={data.loaded}
                    total={data.total}
                    done={data.done}
                  />
                </div>
              ))}
            </div>
          </div>
        )}
      </div>
    </main>
  );
}
app/worker.js
ADDED
@@ -0,0 +1,131 @@
import { pipeline, env } from "@huggingface/transformers";

// Skip local model check
env.allowLocalModels = false;

async function supportsWebGPU() {
  try {
    if (!navigator.gpu) return false;
    await navigator.gpu.requestAdapter();
    return true;
  } catch (e) {
    return false;
  }
}

const device = (await supportsWebGPU()) ? "webgpu" : "wasm";

class PipelineManager {
  static defaultConfigs = {
    "text-classification": {
      model: "onnx-community/rubert-tiny-sentiment-balanced-ONNX",
    },
    "image-classification": {
      model: "onnx-community/mobilenet_v2_1.0_224",
    },
  };
  static instances = {}; // key: `${task}:${modelName}` -> pipeline instance
  static currentTask = "text-classification";
  static currentModel = PipelineManager.defaultConfigs["text-classification"].model;
  static queue = [];
  static isProcessing = false;

  static async getInstance(task, modelName, progress_callback = null) {
    const key = `${task}:${modelName}`;
    if (!this.instances[key]) {
      self.postMessage({ status: "initiate", file: modelName, task });
      this.instances[key] = await pipeline(task, modelName, { progress_callback, device });
      self.postMessage({ status: "ready", file: modelName, task });
    }
    return this.instances[key];
  }

  static async processQueue() {
    if (this.isProcessing || this.queue.length === 0) return;

    // Latest-wins scheduling: only the most recent request is processed;
    // any older queued requests are discarded as stale.
    this.isProcessing = true;
    const { input, task, modelName } = this.queue[this.queue.length - 1];
    this.queue = [];

    try {
      const classifier = await this.getInstance(task, modelName, (x) => {
        self.postMessage({
          ...x,
          status: x.status || "progress",
          file: x.file || modelName,
          name: modelName,
          task,
          loaded: x.loaded,
          total: x.total,
          progress: x.loaded && x.total ? (x.loaded / x.total) * 100 : 0,
        });
      });

      let output;
      if (task === "image-classification") {
        // input is a data URL or Blob
        output = await classifier(input, { top_k: 5 });
      } else if (task === "automatic-speech-recognition") {
        output = await classifier(input);
      } else {
        output = await classifier(input, { top_k: 5 });
      }

      self.postMessage({
        status: "complete",
        output,
        file: modelName,
        task,
      });
    } catch (error) {
      self.postMessage({
        status: "error",
        error: error.message,
        file: modelName,
        task,
      });
    }

    this.isProcessing = false;
    if (this.queue.length > 0) {
      this.processQueue();
    }
  }
}

// Listen for messages from the main thread
self.addEventListener("message", async (event) => {
  const { input, modelName, task, action } = event.data;

  // console.log("Worker received message:", event.data); // Uncomment to log incoming messages

  if (action === "load-model") {
    PipelineManager.currentTask = task || "text-classification";
    PipelineManager.currentModel =
      modelName ||
      PipelineManager.defaultConfigs[PipelineManager.currentTask].model;

    await PipelineManager.getInstance(
      PipelineManager.currentTask,
      PipelineManager.currentModel,
      (x) => {
        self.postMessage({
          ...x,
          file: PipelineManager.currentModel,
          status: x.status || "progress",
          loaded: x.loaded,
          total: x.total,
          task: PipelineManager.currentTask,
        });
      }
    );
    return;
  }

  PipelineManager.queue.push({
    input,
    task: task || PipelineManager.currentTask,
    modelName: modelName || PipelineManager.currentModel,
  });
  PipelineManager.processQueue();
});
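For orientation, the main-thread side of this message protocol (mirroring what `app/page.tsx` does) looks roughly like this sketch:

```ts
const worker = new Worker(new URL("./worker.js", import.meta.url), { type: "module" });

worker.addEventListener("message", (e) => {
  // status is one of: initiate / progress / done / ready / complete / error
  if (e.data.status === "complete") console.log(e.data.output);
});

// Pre-download and cache a pipeline, then run inference against it.
worker.postMessage({
  action: "load-model",
  task: "text-classification",
  modelName: "onnx-community/rubert-tiny-sentiment-balanced-ONNX",
});
worker.postMessage({ input: "I love this!", task: "text-classification" });
```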
eslint.config.mjs
ADDED
@@ -0,0 +1,16 @@
import { dirname } from "path";
import { fileURLToPath } from "url";
import { FlatCompat } from "@eslint/eslintrc";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const compat = new FlatCompat({
  baseDirectory: __dirname,
});

const eslintConfig = [
  ...compat.extends("next/core-web-vitals", "next/typescript"),
];

export default eslintConfig;
next-env.d.ts
ADDED
@@ -0,0 +1,5 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />

// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
next.config.ts
ADDED
@@ -0,0 +1,7 @@
import type { NextConfig } from "next";

const nextConfig: NextConfig = {
  /* config options here */
};

export default nextConfig;
package-lock.json
ADDED
The diff for this file is too large to render.
package.json
ADDED
@@ -0,0 +1,29 @@
{
  "name": "nextjs-translator",
  "version": "0.1.0",
  "private": true,
  "scripts": {
    "dev": "next dev --turbopack",
    "build": "next build",
    "start": "next start",
    "lint": "next lint"
  },
  "dependencies": {
    "@huggingface/transformers": "^3.4.2",
    "next": "15.3.0",
    "react": "^19.0.0",
    "react-dom": "^19.0.0",
    "react-icons": "^5.5.0"
  },
  "devDependencies": {
    "@eslint/eslintrc": "^3",
    "@tailwindcss/postcss": "^4",
    "@types/node": "^20",
    "@types/react": "^19",
    "@types/react-dom": "^19",
    "eslint": "^9",
    "eslint-config-next": "15.3.0",
    "tailwindcss": "^4",
    "typescript": "^5"
  }
}
postcss.config.mjs
ADDED
@@ -0,0 +1,5 @@
const config = {
  plugins: ["@tailwindcss/postcss"],
};

export default config;
public/file.svg
ADDED
public/globe.svg
ADDED
public/next.svg
ADDED
public/vercel.svg
ADDED
public/window.svg
ADDED
tailwind.config.ts
ADDED
@@ -0,0 +1,26 @@
import type { Config } from 'tailwindcss'

const config: Config = {
  content: [
    './pages/**/*.{js,ts,jsx,tsx,mdx}',
    './components/**/*.{js,ts,jsx,tsx,mdx}',
    './app/**/*.{js,ts,jsx,tsx,mdx}',
  ],
  darkMode: 'class', // Enable class-based dark mode
  theme: {
    extend: {
      backgroundImage: {
        'gradient-radial': 'radial-gradient(var(--tw-gradient-stops))',
        'gradient-conic':
          'conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))',
      },
      // Define colors for light and dark modes if needed, or rely on CSS variables
      colors: {
        // Example: Define custom colors accessible via `text-primary`, `bg-primary`, etc.
        // primary: '...',
      },
    },
  },
  plugins: [],
}
export default config
tsconfig.json
ADDED
@@ -0,0 +1,27 @@
{
  "compilerOptions": {
    "target": "ES2017",
    "lib": ["dom", "dom.iterable", "esnext"],
    "allowJs": true,
    "skipLibCheck": true,
    "strict": true,
    "noEmit": true,
    "esModuleInterop": true,
    "module": "esnext",
    "moduleResolution": "bundler",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "jsx": "preserve",
    "incremental": true,
    "plugins": [
      {
        "name": "next"
      }
    ],
    "paths": {
      "@/*": ["./*"]
    }
  },
  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
  "exclude": ["node_modules"]
}