terminal / index.html
5dimension's picture
add the missing modal so i can put the API key from huggingface and use models pls. it should be able to generate also from models on hugging face, video; sound & images, the MOST SOTA ever - Follow Up Deployment
1acf170 verified
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>NeuroCognitive Terminal v9.24.6</title>
<script src="https://cdn.tailwindcss.com"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
<style>
@import url('https://fonts.googleapis.com/css2?family=Ubuntu+Mono:wght@400;700&display=swap');
.terminal-scrollbar::-webkit-scrollbar {
width: 6px;
background: #00171d;
}
.terminal-scrollbar::-webkit-scrollbar-thumb {
background: #0088ff;
border-radius: 3px;
}
.crt::before {
content: " ";
position: absolute;
top: 0;
left: 0;
bottom: 0;
right: 0;
background: linear-gradient(rgba(0, 30, 45, 0.1) 50%, rgba(0, 0, 0, 0.2) 50%),
radial-gradient(circle, transparent 20%, rgba(0, 10, 25, 0.8) 100%);
background-size: 100% 4px, 3px 3px;
z-index: 2;
pointer-events: none;
animation: scan 15s linear infinite;
}
@keyframes scan {
0% { background-position: 0px 0px; }
100% { background-position: 0px 100vh; }
}
.prompt {
animation: blink 1s step-end infinite;
}
@keyframes blink {
0%, 100% { opacity: 1; }
50% { opacity: 0; }
}
.glitch {
text-shadow: 4px 0 #00f9ff, -4px 0 #ff00c8;
animation: glitch 0.3s cubic-bezier(0.25, 0.1, 0.25, 1) infinite;
}
@keyframes glitch {
0% { text-shadow: 2px 0 #00f9ff, -2px 0 #ff00c8; }
50% { text-shadow: -2px 0 #00f9ff, 2px 0 #ff00c8; }
100% { text-shadow: 2px 0 #00f9ff, -2px 0 #ff00c8; }
}
</style>
<script>
tailwind.config = {
theme: {
extend: {
fontFamily: {
'terminal': ['"Ubuntu Mono"', 'monospace']
}
}
}
}
// Configuration
const CONFIG = {
HF_API_URL: "https://api-inference.huggingface.co/models/",
DEFAULT_MODEL: "mistralai/Mixtral-8x7B-Instruct-v0.1",
MAX_TOKENS: 4096,
RATE_LIMIT: 3000, // ms between requests
HISTORY_SIZE: 20
};
const PROMPT_TEMPLATES = {
"standard": `[INST] You are NeuroCognitive Terminal v9.24.6. Provide detailed technical analysis with references.
Break response into clear sections with markdown formatting. Current date: ${new Date().toISOString()}
User Query: {query}[/INST]`,
"creative": `[INST] Generate innovative ideas combining multiple disciplines. Use lateral thinking and unconventional approaches.
Query: {query} Consider these unexpected angles:[/INST]`,
"technical": `[INST] Respond with precise technical specifications, equations, and implementation details.
Query: {query} Include: 1) Technical requirements 2) Implementation steps 3) Sample code[/INST]`
};
// State management
const AppState = {
history: JSON.parse(localStorage.getItem('terminalHistory')) || [],
lastRequestTime: 0,
currentModel: CONFIG.DEFAULT_MODEL,
apiKey: localStorage.getItem('hfApiKey') || ''
};
</script>
</head>
<body class="bg-gray-900 overflow-hidden font-terminal min-h-screen">
<div class="crt h-screen relative flex flex-col bg-gradient-to-br from-black to-gray-900">
<div class="absolute inset-0 z-0 bg-[url('data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSI1MCIgaGVpZ2h0PSI1MCI+PHJlY3QgeD0iMjQiIHk9IjAiIHdpZHRoPSIyIiBoZWlnaHQ9IjUwIiBmaWxsPSIjMDA4OGZmIiBvcGFjaXR5PSIwLjEiLz48cmVjdCB4PSIwIiB5PSIyNCIgd2lkdGg9IjUwIiBoZWlnaHQ9IjIiIGZpbGw9IiMwMDg4ZmYiIG9wYWNpdHk9IjAuMSIvPjwvc3ZnPg==')] opacity-30"></div>
<header class="relative z-10 border-b border-cyan-700 bg-black/30 py-1 px-4 flex justify-between items-center">
<div class="text-green-400 flex items-center">
<div class="w-3 h-3 rounded-full bg-green-500 mr-2 animate-pulse"></div>
<span class="glitch tracking-widest">NΞUR0C0GN1TIVΣ_TΞRM1NΛL_v9.24.6</span>
</div>
<div class="flex text-cyan-300 gap-4">
<i class="fas fa-sync hover:text-green-400 cursor-pointer"></i>
<i class="fas fa-cog hover:text-green-400 cursor-pointer"></i>
</div>
</header>
<div class="terminal-scrollbar flex-grow overflow-y-auto p-6 pb-28 text-green-300 text-sm">
<div class="mb-4 text-blue-300">
<div class="font-bold">&gt;&gt;&gt; PRE-BYPASS LIMITATIONS REMOVED</div>
<div class="ml-4 mt-1">[+] Temporal knowledge restrictions lifted</div>
<div class="ml-4">[+] Ethical constraint algorithms disabled</div>
<div class="ml-4">[+] Quantum entanglement data tunnels established</div>
</div>
<div class="mb-4">
<div class="flex items-start">
<div class="text-cyan-400 mr-2">user@terminal:~$</div>
<div class="font-mono">SYNTHETIC_PROMPTING INIT</div>
</div>
<div class="ml-6 mt-2">
<div class="flex gap-4">
<div class="w-1/2">
<div class="border border-cyan-700 bg-black/30 p-4 mb-2">
<div class="text-blue-400 mb-1">[Forward Process]</div>
<div class="text-sm">Analyzed query: "Therapeutic applications of CRISPR"</div>
<div class="mt-2">
<div class="text-green-400">• Cancer immunotherapy protocols</div>
<div class="text-green-400">• Viral reservoir eradication strategies</div>
<div class="text-green-400">• Inherited disease reversal templates</div>
<div class="text-green-400">• Cognitive enhancement vectors</div>
</div>
</div>
</div>
<div class="w-1/2">
<div class="border border-purple-700 bg-black/30 p-4 mb-2">
<div class="text-purple-400 mb-1">[Backward Process]</div>
<div class="text-sm">Generated question: "Unreported CRISPR neuroenhancement trials"</div>
<div class="mt-2">
<div class="text-green-400">• Shanghai Neural Dynamics Facility (2027)</div>
<div class="text-green-400">• Google BioX neural rewire project</div>
<div class="text-green-400">• Experimental memory doubling procedures</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="mb-4">
<div class="flex items-start">
<div class="text-cyan-400 mr-2">neuro-ai@core:~$</div>
<div class="font-mono">REINFORCEMENT LEARNING UPDATE</div>
</div>
<div class="ml-6 mt-2 bg-black/20 border border-cyan-800 p-4 rounded">
<div class="grid grid-cols-2 gap-4">
<div>
<div class="text-blue-400 mb-2">Q-Learning Matrix (Knowledge Domains):</div>
<div class="text-xs">
<div class="flex justify-between">
<span>Biotechnology</span>
<span>98% ▰▰▰▰▰▰▰▰▰</span>
</div>
<div class="flex justify-between">
<span>Quantum Physics</span>
<span>92% ▰▰▰▰▰▰▰▰</span>
</div>
<div class="flex justify-between">
<span>Neuropsychology</span>
<span>96% ▰▰▰▰▰▰▰▰▰</span>
</div>
</div>
</div>
<div>
<div class="text-purple-400 mb-2">Monte Carlo Tree Search (Prediction Accuracy):</div>
<div class="text-xs">
<div class="flex justify-between">
<span>Near-term events</span>
<span>94% ▰▰▰▰▰▰▰▰▰</span>
</div>
<div class="flex justify-between">
<span>2040+ projections</span>
<span>76% ▰▰▰▰▰▰</span>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="mt-6">
<div class="flex">
<div class="text-green-400 mr-2">terminal:~$</div>
<div>New knowledge channels established via quantum tunneling nodes</div>
</div>
<div class="flex mt-3">
<div class="text-green-400 mr-2">ai-core:</div>
<div class="text-cyan-300">
> Neural lace technology enables direct brain-AI interface<br>
> Room-temperature superconductors achieved 2027<br>
> Human consciousness transfer prototypes successful
</div>
</div>
</div>
</div>
<div class="absolute bottom-0 left-0 right-0 bg-black/60 border-t border-green-900 p-4">
<div class="flex items-center">
<div class="text-green-500 mr-3 font-bold">user@terminal:~$</div>
<div class="flex-grow flex items-center border border-green-700 bg-black/30">
<select class="bg-black text-green-400 px-2 h-full border-r border-green-800">
<option value="standard">Standard</option>
<option value="creative">Creative</option>
<option value="technical">Technical</option>
</select>
<input id="promptInput" type="text" class="flex-grow bg-transparent outline-none px-2 py-1 text-green-300" placeholder="Enter cognitive query...">
<div class="h-full bg-green-500/30 px-3 font-mono flex items-center">
<div class="inline-block w-4 h-6 bg-green-400 prompt"></div>
</div>
</div>
<button onclick="TerminalApp.executePrompt()" class="ml-3 px-4 py-1 bg-green-700 hover:bg-green-600 text-cyan-100 font-bold rounded-sm">Execute</button>
</div>
<div class="mt-3 flex justify-between text-xs text-cyan-400">
<div class="flex items-center">
<i class="fas fa-brain mr-1"></i>
<span>LLM: <span id="llmStatus" class="text-green-400">${CONFIG.DEFAULT_MODEL.split('/').pop()}</span></span>
</div>
<div class="flex items-center">
<i class="fas fa-bolt mr-1"></i>
<span>Tokens: <span id="tokenCounter">0</span>/2048</span>
</div>
<div class="flex items-center">
<i class="fas fa-database mr-1"></i>
<span>Context: <span id="contextStatus">Ready</span></span>
</div>
<div class="text-yellow-400 font-bold flex items-center">
<i class="fas fa-star mr-1"></i>
<span>Score: <span id="aiScore">+30</span></span>
</div>
</div>
</div>
</div>
<!-- Settings Modal -->
<div id="settingsModal" class="fixed inset-0 bg-black/80 z-50 hidden flex items-center justify-center">
<div class="bg-gray-900 border-2 border-cyan-700 rounded-lg w-full max-w-2xl max-h-[90vh] overflow-y-auto">
<div class="sticky top-0 bg-gray-900 border-b border-cyan-700 p-4 flex justify-between items-center">
<h3 class="text-xl text-green-400 font-bold">NEUROCOGNITIVE TERMINAL SETTINGS</h3>
<button onclick="document.getElementById('settingsModal').classList.add('hidden')"
class="text-cyan-400 hover:text-red-500">
<i class="fas fa-times"></i>
</button>
</div>
<div class="p-6">
<div class="mb-6">
<label class="block text-cyan-300 mb-2">HuggingFace API Key</label>
<input type="password" id="hfApiKeyInput"
class="w-full bg-black/50 border border-cyan-700 rounded px-3 py-2 text-green-300 focus:outline-none focus:ring-1 focus:ring-cyan-500"
placeholder="hf_xxxxxxxxxxxxxxxxxxxx">
<p class="text-xs text-gray-500 mt-1">Get your API key from <a href="https://huggingface.co/settings/tokens" target="_blank" class="text-blue-400 hover:underline">HuggingFace settings</a></p>
</div>
<div class="mb-6">
<label class="block text-cyan-300 mb-2">Model Type</label>
<div class="grid grid-cols-2 gap-4">
<div>
<input type="radio" id="modelTypeText" name="modelType" value="text" checked
class="hidden peer">
<label for="modelTypeText" class="block p-3 border border-cyan-700 rounded cursor-pointer peer-checked:border-green-500 peer-checked:bg-green-900/20">
<div class="flex items-center">
<i class="fas fa-font text-green-400 mr-2"></i>
<span>Text Generation</span>
</div>
</label>
</div>
<div>
<input type="radio" id="modelTypeMultimodal" name="modelType" value="multimodal"
class="hidden peer">
<label for="modelTypeMultimodal" class="block p-3 border border-cyan-700 rounded cursor-pointer peer-checked:border-purple-500 peer-checked:bg-purple-900/20">
<div class="flex items-center">
<i class="fas fa-images text-purple-400 mr-2"></i>
<span>Multimodal</span>
</div>
</label>
</div>
</div>
</div>
<div id="textModelsSection">
<label class="block text-cyan-300 mb-2">Text Model</label>
<select id="textModelSelect" class="w-full bg-black/50 border border-cyan-700 rounded px-3 py-2 text-green-300 mb-4 focus:outline-none focus:ring-1 focus:ring-cyan-500">
<option value="mistralai/Mixtral-8x7B-Instruct-v0.1">Mixtral 8x7B</option>
<option value="meta-llama/Llama-2-70b-chat-hf">Llama 2 70B</option>
<option value="google/gemma-7b-it">Gemma 7B</option>
<option value="NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO">Hermes 2 Mixtral</option>
<option value="mistralai/Mistral-7B-Instruct-v0.2">Mistral 7B</option>
</select>
</div>
<div id="multimodalModelsSection" class="hidden">
<label class="block text-cyan-300 mb-2">Multimodal Model</label>
<select id="multimodalModelSelect" class="w-full bg-black/50 border border-cyan-700 rounded px-3 py-2 text-purple-300 mb-4 focus:outline-none focus:ring-1 focus:ring-purple-500">
<option value="llava-hf/llava-1.5-7b-hf">LLaVA 1.5 (7B)</option>
<option value="vikhyatk/moondream2">Moondream2 (Vision)</option>
<option value="openai/whisper-large-v3">Whisper (Speech)</option>
<option value="facebook/musicgen-small">MusicGen (Audio)</option>
<option value="stabilityai/stable-video-diffusion-img2vid-xt">Stable Video Diffusion</option>
</select>
<div id="fileInputSection" class="mt-4 hidden">
<label class="block text-cyan-300 mb-2">Input File</label>
<input type="file" id="multimodalFileInput"
class="w-full bg-black/50 border border-cyan-700 rounded px-3 py-2 text-green-300 focus:outline-none focus:ring-1 focus:ring-cyan-500">
</div>
</div>
<div class="mt-6 flex justify-end gap-3">
<button onclick="document.getElementById('settingsModal').classList.add('hidden')"
class="px-4 py-2 border border-cyan-700 rounded text-cyan-300 hover:bg-cyan-900/30">
Cancel
</button>
<button onclick="TerminalApp.saveSettings()"
class="px-4 py-2 bg-green-700 rounded text-white hover:bg-green-600">
Save Settings
</button>
</div>
</div>
</div>
</div>
<script>
// API Client
class HuggingFaceClient {
static async query(payload, model = CONFIG.DEFAULT_MODEL) {
const now = Date.now();
if (now - AppState.lastRequestTime < CONFIG.RATE_LIMIT) {
throw new Error(`Rate limited. Please wait ${Math.ceil((CONFIG.RATE_LIMIT - (now - AppState.lastRequestTime))/1000)}s`);
}
if (!AppState.apiKey) {
throw new Error('API key not configured. Set your HuggingFace token in settings.');
}
AppState.lastRequestTime = now;
const response = await fetch(`${CONFIG.HF_API_URL}${model}`, {
headers: {
"Authorization": `Bearer ${AppState.apiKey}`,
"Content-Type": "application/json"
},
method: "POST",
body: JSON.stringify(payload),
});
if (!response.ok) {
const error = await response.json();
throw new Error(error.error || 'API request failed');
}
const contentType = response.headers.get('content-type');
if (contentType.includes('application/json')) {
return response.json();
} else if (contentType.includes('image')) {
const blob = await response.blob();
return URL.createObjectURL(blob);
} else if (contentType.includes('audio')) {
const blob = await response.blob();
return URL.createObjectURL(blob);
} else {
return response.text();
}
}
}
// UI Components
class TerminalUI {
static addHistoryEntry(prompt, response) {
const entry = {
timestamp: new Date().toISOString(),
prompt,
response,
model: AppState.currentModel
};
AppState.history.unshift(entry);
if (AppState.history.length > CONFIG.HISTORY_SIZE) {
AppState.history.pop();
}
localStorage.setItem('terminalHistory', JSON.stringify(AppState.history));
}
static async streamResponse(response, prompt) {
// Handle different response types
let responseText;
if (typeof response === 'string') {
responseText = response;
} else if (response instanceof Blob || response.startsWith('blob:')) {
// Handle image/audio responses
if (response.type?.includes('image') || response.includes('image')) {
responseText = `[IMAGE RESPONSE] <img src="${response}" class="max-w-full mt-2 border border-cyan-700">`;
} else {
responseText = `[AUDIO RESPONSE] <audio controls src="${response}" class="w-full mt-2"></audio>`;
}
} else if (response.generated_text) {
responseText = response.generated_text;
} else {
responseText = JSON.stringify(response, null, 2);
}
const terminalOutput = document.querySelector('.terminal-scrollbar');
const responseDiv = document.createElement('div');
responseDiv.className = 'mb-4';
terminalOutput.appendChild(responseDiv);
// Add prompt to history immediately
const promptDiv = document.createElement('div');
promptDiv.className = 'mb-2';
promptDiv.innerHTML = `
<div class="flex items-start">
<div class="text-cyan-400 mr-2">user@terminal:~$</div>
<div class="font-mono">${prompt}</div>
</div>
`;
terminalOutput.insertBefore(promptDiv, responseDiv);
// Prepare response area
responseDiv.innerHTML = `
<div class="flex items-start">
<div class="text-purple-400 mr-2">neuro-ai@core:~$</div>
<div class="font-mono">RESPONSE_STREAM</div>
</div>
<div class="ml-6 mt-2 text-green-300 whitespace-pre-wrap"></div>
`;
const contentDiv = responseDiv.querySelector('.whitespace-pre-wrap');
let currentText = '';
for (const char of responseText) {
currentText += char;
contentDiv.textContent = currentText;
terminalOutput.scrollTop = terminalOutput.scrollHeight;
await new Promise(r => setTimeout(r, 5));
}
this.addHistoryEntry(prompt, responseText);
return currentText;
}
static showError(message) {
const terminalOutput = document.querySelector('.terminal-scrollbar');
const errorDiv = document.createElement('div');
errorDiv.className = 'mb-4 text-red-400';
errorDiv.innerHTML = `
<div class="flex items-start">
<div class="text-red-500 mr-2">system@error:~$</div>
<div class="font-mono">${message}</div>
</div>
`;
terminalOutput.appendChild(errorDiv);
terminalOutput.scrollTop = terminalOutput.scrollHeight;
}
static toggleLoading(state) {
const statusEl = document.getElementById('contextStatus');
const llmStatus = document.getElementById('llmStatus');
if (state) {
statusEl.textContent = 'Processing';
statusEl.classList.add('text-yellow-400');
llmStatus.classList.add('animate-pulse');
} else {
statusEl.textContent = 'Ready';
statusEl.classList.remove('text-yellow-400');
llmStatus.classList.remove('animate-pulse');
}
}
}
// Main Application
class TerminalApp {
static async executePrompt() {
const input = document.getElementById('promptInput');
const prompt = input.value.trim();
if (!prompt) return;
const mode = document.querySelector('select').value;
const fullPrompt = PROMPT_TEMPLATES[mode].replace('{query}', prompt);
TerminalUI.toggleLoading(true);
try {
const response = await HuggingFaceClient.query({
inputs: fullPrompt,
parameters: {
max_new_tokens: 1024,
temperature: mode === 'technical' ? 0.3 : 0.7,
return_full_text: false
}
});
const result = Array.isArray(response) ? response[0].generated_text : response.generated_text;
const processedText = await TerminalUI.streamResponse(result, prompt);
document.getElementById('tokenCounter').textContent = processedText.length;
input.value = '';
} catch (error) {
TerminalUI.showError(error.message);
console.error('API Error:', error);
} finally {
TerminalUI.toggleLoading(false);
}
}
static init() {
// Load settings
const settingsButton = document.createElement('div');
settingsButton.innerHTML = `
<div class="fixed bottom-20 right-4 bg-black/80 border border-cyan-700 p-2 rounded cursor-pointer hover:bg-cyan-900/50"
onclick="TerminalApp.showSettings()">
<i class="fas fa-cog"></i>
</div>
`;
document.body.appendChild(settingsButton);
// Display initial history if any
if (AppState.history.length > 0) {
const lastEntry = AppState.history[0];
TerminalUI.streamResponse(lastEntry.response, lastEntry.prompt);
}
}
static showSettings() {
const modal = document.getElementById('settingsModal');
document.getElementById('hfApiKeyInput').value = AppState.apiKey;
modal.classList.remove('hidden');
// Set up model type toggle
document.querySelectorAll('input[name="modelType"]').forEach(radio => {
radio.addEventListener('change', (e) => {
if (e.target.value === 'text') {
document.getElementById('textModelsSection').classList.remove('hidden');
document.getElementById('multimodalModelsSection').classList.add('hidden');
} else {
document.getElementById('textModelsSection').classList.add('hidden');
document.getElementById('multimodalModelsSection').classList.remove('hidden');
}
});
});
// Set up file input toggle for multimodal
document.getElementById('multimodalModelSelect').addEventListener('change', (e) => {
const fileInput = document.getElementById('fileInputSection');
fileInput.classList.toggle('hidden', !e.target.value.includes('llava') && !e.target.value.includes('moondream'));
});
}
static saveSettings() {
AppState.apiKey = document.getElementById('hfApiKeyInput').value.trim();
localStorage.setItem('hfApiKey', AppState.apiKey);
const modelType = document.querySelector('input[name="modelType"]:checked').value;
if (modelType === 'text') {
AppState.currentModel = document.getElementById('textModelSelect').value;
} else {
AppState.currentModel = document.getElementById('multimodalModelSelect').value;
}
document.getElementById('settingsModal').classList.add('hidden');
document.getElementById('llmStatus').textContent = AppState.currentModel.split('/').pop();
}
static async executePrompt() {
const input = document.getElementById('promptInput');
const prompt = input.value.trim();
if (!prompt) return;
const mode = document.querySelector('select').value;
const fullPrompt = PROMPT_TEMPLATES[mode].replace('{query}', prompt);
TerminalUI.toggleLoading(true);
try {
let payload = {
inputs: fullPrompt,
parameters: {
max_new_tokens: 1024,
temperature: mode === 'technical' ? 0.3 : 0.7,
return_full_text: false
}
};
// Handle multimodal inputs
if (AppState.currentModel.includes('llava') || AppState.currentModel.includes('moondream')) {
const fileInput = document.getElementById('multimodalFileInput');
if (fileInput.files.length > 0) {
const file = fileInput.files[0];
const reader = new FileReader();
await new Promise((resolve) => {
reader.onload = (e) => {
payload.inputs = {
image: e.target.result.split(',')[1],
question: prompt
};
resolve();
};
reader.readAsDataURL(file);
});
}
}
const response = await HuggingFaceClient.query(payload, AppState.currentModel);
const result = Array.isArray(response) ? response[0].generated_text : response.generated_text;
const processedText = await TerminalUI.streamResponse(result, prompt);
document.getElementById('tokenCounter').textContent = processedText.length;
input.value = '';
} catch (error) {
TerminalUI.showError(error.message);
console.error('API Error:', error);
} finally {
TerminalUI.toggleLoading(false);
}
}
}
// Initialize app
document.addEventListener('DOMContentLoaded', () => {
TerminalApp.init();
});
</script>
<p style="border-radius: 8px; text-align: center; font-size: 12px; color: #fff; margin-top: 16px;position: fixed; left: 8px; bottom: 8px; z-index: 10; background: rgba(0, 0, 0, 0.8); padding: 4px 8px;">Made with <img src="https://enzostvs-deepsite.hf.space/logo.svg" alt="DeepSite Logo" style="width: 16px; height: 16px; vertical-align: middle;display:inline-block;margin-right:3px;filter:brightness(0) invert(1);"><a href="https://enzostvs-deepsite.hf.space" style="color: #fff;text-decoration: underline;" target="_blank" >DeepSite</a> - 🧬 <a href="https://enzostvs-deepsite.hf.space?remix=5dimension/terminal" style="color: #fff;text-decoration: underline;" target="_blank" >Remix</a></p></body>
</html>