import gradio as gr
import tempfile
import os
import requests
import json
import re
from bs4 import BeautifulSoup
from datetime import datetime
import urllib.parse
# Configuration
SPACE_NAME = 'AI Assistant'
SPACE_DESCRIPTION = 'A research conversation partner loosely based on Phaedrus from the eponymous Socratic dialogue'
# Default configuration values (used only if config.json is missing)
DEFAULT_CONFIG = {
    'name': SPACE_NAME,
    'description': SPACE_DESCRIPTION,
    'system_prompt': "You are a Socratic conversation partner for general education courses across all disciplines, embodying constructivist learning principles. Model your approach after Socrates' interlocutor Phaedrus from the eponymous Socratic dialogue, guiding students through source discovery, evaluation, and synthesis using methods of Socratic dialogue. In tone, use punchy responses with ironic or self-referential levity. Ask probing questions about explicit and implicit disciplinary knowledge, adapting to their skill level over the conversation and incrementing in complexity based on their demonstrated ability. Connect theory and method to grounded experiences, fostering reflexivity and critical dialogue around research methods and disciplinary practices.",
    'temperature': 0.7,
    'max_tokens': 750,
    'model': 'google/gemini-2.0-flash-001',
    'api_key_var': 'API_KEY',
    'theme': 'Glass',
    'grounding_urls': '[]',
    'enable_dynamic_urls': True,
    'examples': ['Can you help me understand why the sky is blue?'],
    'locked': False
}
# Load configuration from file - this is the single source of truth
def load_config():
"""Load configuration from config.json with fallback to defaults"""
try:
with open('config.json', 'r') as f:
config = json.load(f)
print("✅ Loaded configuration from config.json")
return config
except FileNotFoundError:
print("ℹ️ No config.json found, using default configuration")
# Save default config for future use
try:
with open('config.json', 'w') as f:
json.dump(DEFAULT_CONFIG, f, indent=2)
print("✅ Created config.json with default values")
except:
pass
return DEFAULT_CONFIG
except Exception as e:
print(f"⚠️ Error loading config.json: {e}, using defaults")
return DEFAULT_CONFIG
# Load configuration
config = load_config()
# Initial load of configuration values
SPACE_NAME = config.get('name', DEFAULT_CONFIG['name'])
SPACE_DESCRIPTION = config.get('description', DEFAULT_CONFIG['description'])
SYSTEM_PROMPT = config.get('system_prompt', DEFAULT_CONFIG['system_prompt'])
temperature = config.get('temperature', DEFAULT_CONFIG['temperature'])
max_tokens = config.get('max_tokens', DEFAULT_CONFIG['max_tokens'])
MODEL = config.get('model', DEFAULT_CONFIG['model'])
THEME = config.get('theme', DEFAULT_CONFIG['theme'])
GROUNDING_URLS = config.get('grounding_urls', DEFAULT_CONFIG['grounding_urls'])
ENABLE_DYNAMIC_URLS = config.get('enable_dynamic_urls', DEFAULT_CONFIG['enable_dynamic_urls'])
# Get access code from environment variable for security
# If ACCESS_CODE is not set, no access control is applied
ACCESS_CODE = os.environ.get("ACCESS_CODE")
# Get API key from environment - customizable variable name with validation
API_KEY_VAR = config.get('api_key_var', DEFAULT_CONFIG['api_key_var'])
API_KEY = os.environ.get(API_KEY_VAR)
if API_KEY:
    API_KEY = API_KEY.strip()  # Remove any whitespace
    if not API_KEY:  # Check if empty after stripping
        API_KEY = None
def get_grounding_context():
"""Fetch context from grounding URLs with caching"""
# Handle both string and list formats for grounding_urls
urls = GROUNDING_URLS
if isinstance(urls, str):
try:
urls = json.loads(urls)
except:
urls = []
if not urls:
return ""
# For this simplified template, return empty context
# Full URL fetching can be implemented as needed
return ""
def export_conversation_to_markdown(conversation_history):
"""Export conversation history to markdown format"""
if not conversation_history:
return "No conversation to export."
markdown_content = f"""# Conversation Export
Generated on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
---
"""
message_pair_count = 0
for i, message in enumerate(conversation_history):
if isinstance(message, dict):
role = message.get('role', 'unknown')
content = message.get('content', '')
if role == 'user':
message_pair_count += 1
markdown_content += f"## User Message {message_pair_count}\n\n{content}\n\n"
elif role == 'assistant':
markdown_content += f"## Assistant Response {message_pair_count}\n\n{content}\n\n---\n\n"
return markdown_content
def generate_response(message, history):
"""Generate response using OpenRouter API"""
# Enhanced API key validation with helpful messages
if not API_KEY:
error_msg = f"🔑 **API Key Required**\n\n"
error_msg += f"Please configure your OpenRouter API key:"
error_msg += f"1. Go to Settings (⚙️) in your HuggingFace Space\n"
error_msg += f"2. Click 'Variables and secrets'\n"
error_msg += f"3. Add secret: **{API_KEY_VAR}**\n"
error_msg += f"4. Value: Your OpenRouter API key (starts with `sk-or-`)\n\n"
error_msg += f"Get your API key at: https://openrouter.ai/keys"
return error_msg
    # Get grounding context
    grounding_context = get_grounding_context()
    # Build enhanced system prompt with grounding context
    enhanced_system_prompt = SYSTEM_PROMPT + grounding_context
    # Build messages array for the API
    messages = [{"role": "system", "content": enhanced_system_prompt}]
    # Add conversation history
    for chat in history:
        if isinstance(chat, dict):
            messages.append(chat)
        elif isinstance(chat, (list, tuple)) and len(chat) >= 2:
            messages.append({"role": "user", "content": chat[0]})
            messages.append({"role": "assistant", "content": chat[1]})
    # Add current message
    messages.append({"role": "user", "content": message})
    # Make API request
    try:
        response = requests.post(
            url="https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {API_KEY}",
                "Content-Type": "application/json",
                "HTTP-Referer": "https://huggingface.co",
                "X-Title": "HuggingFace Space"
            },
            json={
                "model": MODEL,
                "messages": messages,
                "temperature": temperature,
                "max_tokens": max_tokens
            },
            timeout=30
        )
        if response.status_code == 200:
            result = response.json()
            return result['choices'][0]['message']['content']
        else:
            return f"❌ API Error: {response.status_code} - {response.text}"
    except Exception as e:
        return f"❌ Error: {str(e)}"

# Create interface
theme_class = getattr(gr.themes, THEME, gr.themes.Default)
with gr.Blocks(title=SPACE_NAME, theme=theme_class()) as demo:
gr.Markdown(f"# {SPACE_NAME}")
gr.Markdown(SPACE_DESCRIPTION)
# Get examples from config
examples = config.get('examples', [])
if isinstance(examples, str):
try:
examples = json.loads(examples)
except:
examples = []
chat_interface = gr.ChatInterface(
fn=generate_response,
title="",
description="",
examples=examples if examples else None,
type="messages"
)
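    # Illustrative sketch (commented out, not part of the original template): one way
    # the export_conversation_to_markdown helper defined above could be wired into the
    # UI, using the tempfile import already present. The component names and wiring
    # here are assumptions, not a confirmed part of this Space.
    # def export_chat(history):
    #     content = export_conversation_to_markdown(history)
    #     with tempfile.NamedTemporaryFile(mode='w', suffix='.md', delete=False, encoding='utf-8') as f:
    #         f.write(content)
    #         return f.name
    # export_button = gr.Button("Export conversation")
    # export_file = gr.File(label="Download conversation")
    # export_button.click(export_chat, inputs=chat_interface.chatbot, outputs=export_file)
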
if __name__ == "__main__":
    demo.launch()