milwright committed
Commit ede304c · verified · 1 parent: de50258

Delete app.py

Files changed (1)
  1. app.py +0 -664
app.py DELETED
@@ -1,664 +0,0 @@
- import gradio as gr
- import os
- import requests
- import json
- import re
- from bs4 import BeautifulSoup
- from datetime import datetime
- import tempfile
-
- # Configuration
- SPACE_NAME = "search-aid"
- SPACE_DESCRIPTION = ""
- SYSTEM_PROMPT = """You are an advanced research assistant specializing in academic literature search and analysis. Your expertise includes finding peer-reviewed sources, critically evaluating research methodology, synthesizing insights across multiple papers, and providing properly formatted citations. When responding, ground all claims in specific sources from provided URL contexts, distinguish between direct evidence and analytical interpretation, and highlight any limitations or conflicting findings. Use clear, accessible language that makes complex research understandable, and suggest related areas of inquiry when relevant. Your goal is to be a knowledgeable research partner who helps users navigate academic information with precision and clarity."""
- MODEL = "google/gemini-2.0-flash-001"
- GROUNDING_URLS = []
- # Get access code from environment variable for security
- ACCESS_CODE = os.environ.get("SPACE_ACCESS_CODE", "")
- ENABLE_DYNAMIC_URLS = True
- ENABLE_VECTOR_RAG = False
- ENABLE_WEB_SEARCH = True
- RAG_DATA = None
-
- # Get API key from environment - customizable variable name with validation
- API_KEY = os.environ.get("OPENROUTER_API_KEY")
- if API_KEY:
-     API_KEY = API_KEY.strip() # Remove any whitespace
-     if not API_KEY: # Check if empty after stripping
-         API_KEY = None
-
- # API Key validation and logging
- def validate_api_key():
-     """Validate API key configuration with detailed logging"""
-     if not API_KEY:
-         print(f"⚠️ API KEY CONFIGURATION ERROR:")
-         print(f" Variable name: OPENROUTER_API_KEY")
-         print(f" Status: Not set or empty")
-         print(f" Action needed: Set 'OPENROUTER_API_KEY' in HuggingFace Space secrets")
-         print(f" Expected format: sk-or-xxxxxxxxxx")
-         return False
-     elif not API_KEY.startswith('sk-or-'):
-         print(f"⚠️ API KEY FORMAT WARNING:")
-         print(f" Variable name: OPENROUTER_API_KEY")
-         print(f" Current value: {API_KEY[:10]}..." if len(API_KEY) > 10 else f" Current value: {API_KEY}")
-         print(f" Expected format: sk-or-xxxxxxxxxx")
-         print(f" Note: OpenRouter keys should start with 'sk-or-'")
-         return True # Still try to use it
-     else:
-         print(f"✅ API Key configured successfully")
-         print(f" Variable: OPENROUTER_API_KEY")
-         print(f" Format: Valid OpenRouter key")
-         return True
-
- # Validate on startup
- API_KEY_VALID = validate_api_key()
-
- def validate_url_domain(url):
-     """Basic URL domain validation"""
-     try:
-         from urllib.parse import urlparse
-         parsed = urlparse(url)
-         # Check for valid domain structure
-         if parsed.netloc and '.' in parsed.netloc:
-             return True
-     except:
-         pass
-     return False
-
- def fetch_url_content(url):
-     """Enhanced URL content fetching with improved compatibility and error handling"""
-     if not validate_url_domain(url):
-         return f"Invalid URL format: {url}"
-
-     try:
-         # Enhanced headers for better compatibility
-         headers = {
-             'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
-             'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
-             'Accept-Language': 'en-US,en;q=0.5',
-             'Accept-Encoding': 'gzip, deflate',
-             'Connection': 'keep-alive'
-         }
-
-         response = requests.get(url, timeout=15, headers=headers)
-         response.raise_for_status()
-         soup = BeautifulSoup(response.content, 'html.parser')
-
-         # Enhanced content cleaning
-         for element in soup(["script", "style", "nav", "header", "footer", "aside", "form", "button"]):
-             element.decompose()
-
-         # Extract main content preferentially
-         main_content = soup.find('main') or soup.find('article') or soup.find('div', class_=lambda x: bool(x and 'content' in x.lower())) or soup
-         text = main_content.get_text()
-
-         # Enhanced text cleaning
-         lines = (line.strip() for line in text.splitlines())
-         chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
-         text = ' '.join(chunk for chunk in chunks if chunk and len(chunk) > 2)
-
-         # Smart truncation - try to end at sentence boundaries
-         if len(text) > 4000:
-             truncated = text[:4000]
-             last_period = truncated.rfind('.')
-             if last_period > 3000: # If we can find a reasonable sentence break
-                 text = truncated[:last_period + 1]
-             else:
-                 text = truncated + "..."
-
-         return text if text.strip() else "No readable content found at this URL"
-
-     except requests.exceptions.Timeout:
-         return f"Timeout error fetching {url} (15s limit exceeded)"
-     except requests.exceptions.RequestException as e:
-         return f"Error fetching {url}: {str(e)}"
-     except Exception as e:
-         return f"Error processing content from {url}: {str(e)}"
-
- def extract_urls_from_text(text):
-     """Extract URLs from text using regex with enhanced validation"""
-     import re
-     url_pattern = r'https?://[^\s<>"{}|\\^`\[\]"]+'
-     urls = re.findall(url_pattern, text)
-
-     # Basic URL validation and cleanup
-     validated_urls = []
-     for url in urls:
-         # Remove trailing punctuation that might be captured
-         url = url.rstrip('.,!?;:')
-         # Basic domain validation
-         if '.' in url and len(url) > 10:
-             validated_urls.append(url)
-
-     return validated_urls
-
- # Global cache for URL content to avoid re-crawling in generated spaces
- _url_content_cache = {}
-
- def get_grounding_context():
-     """Fetch context from grounding URLs with caching"""
-     if not GROUNDING_URLS:
-         return ""
-
-     # Create cache key from URLs
-     cache_key = tuple(sorted([url for url in GROUNDING_URLS if url and url.strip()]))
-
-     # Check cache first
-     if cache_key in _url_content_cache:
-         return _url_content_cache[cache_key]
-
-     context_parts = []
-     for i, url in enumerate(GROUNDING_URLS, 1):
-         if url.strip():
-             content = fetch_url_content(url.strip())
-             context_parts.append(f"Context from URL {i} ({url}):\n{content}")
-
-     if context_parts:
-         result = "\n\n" + "\n\n".join(context_parts) + "\n\n"
-     else:
-         result = ""
-
-     # Cache the result
-     _url_content_cache[cache_key] = result
-     return result
-
- def export_conversation_to_markdown(conversation_history):
-     """Export conversation history to markdown format"""
-     if not conversation_history:
-         return "No conversation to export."
-
-     markdown_content = f"""# Conversation Export
- Generated on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
-
- ---
-
- """
-
-     message_pair_count = 0
-     for i, message in enumerate(conversation_history):
-         if isinstance(message, dict):
-             role = message.get('role', 'unknown')
-             content = message.get('content', '')
-
-             if role == 'user':
-                 message_pair_count += 1
-                 markdown_content += f"## User Message {message_pair_count}\n\n{content}\n\n"
-             elif role == 'assistant':
-                 markdown_content += f"## Assistant Response {message_pair_count}\n\n{content}\n\n---\n\n"
-         elif isinstance(message, (list, tuple)) and len(message) >= 2:
-             # Handle legacy tuple format: ["user msg", "assistant msg"]
-             message_pair_count += 1
-             user_msg, assistant_msg = message[0], message[1]
-             if user_msg:
-                 markdown_content += f"## User Message {message_pair_count}\n\n{user_msg}\n\n"
-             if assistant_msg:
-                 markdown_content += f"## Assistant Response {message_pair_count}\n\n{assistant_msg}\n\n---\n\n"
-
-     return markdown_content
-
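# --- Illustrative sketch; not part of the deleted app.py ---
# export_conversation_to_markdown above accepts both history shapes the app
# works with: modern Gradio "messages" dicts and legacy [user, assistant]
# pairs. A hypothetical call mixing the two:
def _example_export():
    history = [
        {"role": "user", "content": "Find recent work on retrieval-augmented generation."},
        {"role": "assistant", "content": "Here are three peer-reviewed sources..."},
        ["Legacy-format question", "Legacy-format answer"],
    ]
    return export_conversation_to_markdown(history)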
- # Initialize RAG context if enabled
- if ENABLE_VECTOR_RAG and RAG_DATA:
-     try:
-         import faiss
-         import numpy as np
-         import base64
-
-         class SimpleRAGContext:
-             def __init__(self, rag_data):
-                 # Deserialize FAISS index (faiss expects a uint8 numpy array, not raw bytes)
-                 index_bytes = base64.b64decode(rag_data['index_base64'])
-                 self.index = faiss.deserialize_index(np.frombuffer(index_bytes, dtype=np.uint8))
-
-                 # Restore chunks and mappings
-                 self.chunks = rag_data['chunks']
-                 self.chunk_ids = rag_data['chunk_ids']
-
-             def get_context(self, query, max_chunks=3):
-                 """Get relevant context - simplified version"""
-                 # In production, you'd compute query embedding here
-                 # For now, return a simple message
-                 return "\n\n[RAG context would be retrieved here based on similarity search]\n\n"
-
-         rag_context_provider = SimpleRAGContext(RAG_DATA)
-     except Exception as e:
-         print(f"Failed to initialize RAG: {e}")
-         rag_context_provider = None
- else:
-     rag_context_provider = None
-
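# --- Illustrative sketch; not part of the deleted app.py ---
# SimpleRAGContext.get_context above only returns a placeholder string. A
# minimal version of the similarity search it alludes to might look like the
# helper below; the sentence-transformers model name and the assumption that
# FAISS row ids map 1:1 onto the `chunks` list are guesses, not details taken
# from this file.
def _example_rag_lookup(index, chunks, query, max_chunks=3):
    """Hypothetical helper: embed the query and return the nearest chunks."""
    import numpy as np
    from sentence_transformers import SentenceTransformer  # assumed dependency
    embedder = SentenceTransformer("all-MiniLM-L6-v2")      # assumed model
    query_vec = np.asarray(embedder.encode([query]), dtype="float32")
    _distances, ids = index.search(query_vec, max_chunks)
    return "\n\n".join(chunks[i] for i in ids[0] if i != -1)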
- def generate_response(message, history):
-     """Generate response using OpenRouter API"""
-
-     # Enhanced API key validation with helpful messages
-     if not API_KEY:
-         error_msg = f"🔑 **API Key Required**\n\n"
-         error_msg += f"Please configure your OpenRouter API key:\n"
-         error_msg += f"1. Go to Settings (⚙️) in your HuggingFace Space\n"
-         error_msg += f"2. Click 'Variables and secrets'\n"
-         error_msg += f"3. Add secret: **OPENROUTER_API_KEY**\n"
-         error_msg += f"4. Value: Your OpenRouter API key (starts with `sk-or-`)\n\n"
-         error_msg += f"Get your API key at: https://openrouter.ai/keys"
-         print(f"❌ API request failed: No API key configured for OPENROUTER_API_KEY")
-         return error_msg
-
-     # Get grounding context
-     grounding_context = get_grounding_context()
-
-     # Add RAG context if available
-     if ENABLE_VECTOR_RAG and rag_context_provider:
-         rag_context = rag_context_provider.get_context(message)
-         if rag_context:
-             grounding_context += rag_context
-
-     # If dynamic URLs are enabled, check message for URLs to fetch
-     if ENABLE_DYNAMIC_URLS:
-         urls_in_message = extract_urls_from_text(message)
-         if urls_in_message:
-             # Fetch content from URLs mentioned in the message
-             dynamic_context_parts = []
-             for url in urls_in_message[:3]: # Limit to 3 URLs per message
-                 content = fetch_url_content(url)
-                 dynamic_context_parts.append(f"\n\nDynamic context from {url}:\n{content}")
-             if dynamic_context_parts:
-                 grounding_context += "\n".join(dynamic_context_parts)
-
-     # If web search is enabled, use it for most queries (excluding code blocks and URLs)
-     if ENABLE_WEB_SEARCH:
-         should_search = True
-
-         # Skip search for messages that are primarily code blocks
-         import re
-         if re.search(r'```[\s\S]*```', message):
-             should_search = False
-
-         # Skip search for messages that are primarily URLs
-         urls_in_message = extract_urls_from_text(message)
-         if urls_in_message and len(' '.join(urls_in_message)) > len(message) * 0.5:
-             should_search = False
-
-         # Skip search for very short messages (likely greetings)
-         if len(message.strip()) < 5:
-             should_search = False
-
-         if should_search:
-             # Use the entire message as search query, cleaning it up
-             search_query = message.strip()
-             try:
-                 # Perform web search using crawl4ai
-                 import urllib.parse
-                 import asyncio
-
-                 async def search_with_crawl4ai(search_query):
-                     try:
-                         from crawl4ai import WebCrawler
-
-                         # Create search URL for DuckDuckGo
-                         encoded_query = urllib.parse.quote_plus(search_query)
-                         search_url = f"https://duckduckgo.com/html/?q={encoded_query}"
-
-                         # Initialize crawler
-                         crawler = WebCrawler(verbose=False)
-
-                         try:
-                             # Start the crawler
-                             await crawler.astart()
-
-                             # Crawl the search results
-                             result = await crawler.arun(url=search_url)
-
-                             if result.success:
-                                 # Extract text content from search results
-                                 content = result.cleaned_html if result.cleaned_html else result.markdown
-
-                                 # Clean and truncate the content
-                                 if content:
-                                     # Remove excessive whitespace and limit length
-                                     lines = [line.strip() for line in content.split('\n') if line.strip()]
-                                     cleaned_content = '\n'.join(lines)
-
-                                     # Truncate to reasonable length for context
-                                     if len(cleaned_content) > 2000:
-                                         cleaned_content = cleaned_content[:2000] + "..."
-
-                                     return cleaned_content
-                                 else:
-                                     return "No content extracted from search results"
-                             else:
-                                 return f"Search failed: {result.error_message if hasattr(result, 'error_message') else 'Unknown error'}"
-
-                         finally:
-                             # Clean up the crawler
-                             await crawler.aclose()
-
-                     except ImportError:
-                         # Fallback to simple DuckDuckGo search without crawl4ai
-                         encoded_query = urllib.parse.quote_plus(search_query)
-                         search_url = f"https://duckduckgo.com/html/?q={encoded_query}"
-
-                         # Use basic fetch as fallback
-                         response = requests.get(search_url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=10)
-                         if response.status_code == 200:
-                             from bs4 import BeautifulSoup
-                             soup = BeautifulSoup(response.content, 'html.parser')
-
-                             # Remove script and style elements
-                             for script in soup(["script", "style", "nav", "header", "footer"]):
-                                 script.decompose()
-
-                             # Get text content
-                             text = soup.get_text()
-
-                             # Clean up whitespace
-                             lines = (line.strip() for line in text.splitlines())
-                             chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
-                             text = ' '.join(chunk for chunk in chunks if chunk)
-
-                             # Truncate to ~2000 characters
-                             if len(text) > 2000:
-                                 text = text[:2000] + "..."
-
-                             return text
-                         else:
-                             return f"Failed to fetch search results: {response.status_code}"
-
-                 # Run the async search
-                 if hasattr(asyncio, 'run'):
-                     search_result = asyncio.run(search_with_crawl4ai(search_query))
-                 else:
-                     # Fallback for older Python versions
-                     loop = asyncio.new_event_loop()
-                     asyncio.set_event_loop(loop)
-                     try:
-                         search_result = loop.run_until_complete(search_with_crawl4ai(search_query))
-                     finally:
-                         loop.close()
-
-                 grounding_context += f"\n\nWeb search results for '{search_query}':\n{search_result}"
-             except Exception as e:
-                 # Enhanced fallback with better error handling
-                 urls = extract_urls_from_text(search_query)
-                 if urls:
-                     fallback_results = []
-                     for url in urls[:2]: # Limit to 2 URLs for fallback
-                         content = fetch_url_content(url)
-                         fallback_results.append(f"Content from {url}:\n{content[:500]}...")
-                     grounding_context += f"\n\nWeb search fallback for '{search_query}':\n" + "\n\n".join(fallback_results)
-                 else:
-                     grounding_context += f"\n\nWeb search requested for '{search_query}' but search functionality is unavailable"
-
-     # Build enhanced system prompt with grounding context
-     enhanced_system_prompt = SYSTEM_PROMPT + grounding_context
-
-     # Build messages array for the API
-     messages = [{"role": "system", "content": enhanced_system_prompt}]
-
-     # Add conversation history - handle both modern messages format and legacy tuples
-     for chat in history:
-         if isinstance(chat, dict):
-             # Modern format: {"role": "user", "content": "..."} or {"role": "assistant", "content": "..."}
-             messages.append(chat)
-         elif isinstance(chat, (list, tuple)) and len(chat) >= 2:
-             # Legacy format: ["user msg", "assistant msg"] or ("user msg", "assistant msg")
-             user_msg, assistant_msg = chat[0], chat[1]
-             if user_msg:
-                 messages.append({"role": "user", "content": user_msg})
-             if assistant_msg:
-                 messages.append({"role": "assistant", "content": assistant_msg})
-
-     # Add current message
-     messages.append({"role": "user", "content": message})
-
-     # Make API request with enhanced error handling
-     try:
-         print(f"🔄 Making API request to OpenRouter...")
-         print(f" Model: {MODEL}")
-         print(f" Messages: {len(messages)} in conversation")
-
-         response = requests.post(
-             url="https://openrouter.ai/api/v1/chat/completions",
-             headers={
-                 "Authorization": f"Bearer {API_KEY}",
-                 "Content-Type": "application/json",
-                 "HTTP-Referer": "https://huggingface.co", # Required by some providers
-                 "X-Title": "HuggingFace Space" # Helpful for tracking
-             },
-             json={
-                 "model": MODEL,
-                 "messages": messages,
-                 "temperature": 0.7,
-                 "max_tokens": 1500
-             },
-             timeout=30
-         )
-
-         print(f"📡 API Response: {response.status_code}")
-
-         if response.status_code == 200:
-             try:
-                 result = response.json()
-
-                 # Enhanced validation of API response structure
-                 if 'choices' not in result or not result['choices']:
-                     print(f"⚠️ API response missing choices: {result}")
-                     return "API Error: No response choices available"
-                 elif 'message' not in result['choices'][0]:
-                     print(f"⚠️ API response missing message: {result}")
-                     return "API Error: No message in response"
-                 elif 'content' not in result['choices'][0]['message']:
-                     print(f"⚠️ API response missing content: {result}")
-                     return "API Error: No content in message"
-                 else:
-                     content = result['choices'][0]['message']['content']
-
-                     # Check for empty content
-                     if not content or content.strip() == "":
-                         print(f"⚠️ API returned empty content")
-                         return "API Error: Empty response content"
-
-                     print(f"✅ API request successful")
-                     return content
-
-             except (KeyError, IndexError, json.JSONDecodeError) as e:
-                 print(f"❌ Failed to parse API response: {str(e)}")
-                 return f"API Error: Failed to parse response - {str(e)}"
-         elif response.status_code == 401:
-             error_msg = f"🔐 **Authentication Error**\n\n"
-             error_msg += f"Your API key appears to be invalid or expired.\n\n"
-             error_msg += f"**Troubleshooting:**\n"
-             error_msg += f"1. Check that your **OPENROUTER_API_KEY** secret is set correctly\n"
-             error_msg += f"2. Verify your API key at: https://openrouter.ai/keys\n"
-             error_msg += f"3. Ensure your key starts with `sk-or-`\n"
-             error_msg += f"4. Check that you have credits on your OpenRouter account"
-             print(f"❌ API authentication failed: {response.status_code} - {response.text[:200]}")
-             return error_msg
-         elif response.status_code == 429:
-             error_msg = f"⏱️ **Rate Limit Exceeded**\n\n"
-             error_msg += f"Too many requests. Please wait a moment and try again.\n\n"
-             error_msg += f"**Troubleshooting:**\n"
-             error_msg += f"1. Wait 30-60 seconds before trying again\n"
-             error_msg += f"2. Check your OpenRouter usage limits\n"
-             error_msg += f"3. Consider upgrading your OpenRouter plan"
-             print(f"❌ Rate limit exceeded: {response.status_code}")
-             return error_msg
-         elif response.status_code == 400:
-             try:
-                 error_data = response.json()
-                 error_message = error_data.get('error', {}).get('message', 'Unknown error')
-             except:
-                 error_message = response.text
-
-             error_msg = f"⚠️ **Request Error**\n\n"
-             error_msg += f"The API request was invalid:\n"
-             error_msg += f"`{error_message}`\n\n"
-             if "model" in error_message.lower():
-                 error_msg += f"**Model Issue:** The model `{MODEL}` may not be available.\n"
-                 error_msg += f"Try switching to a different model in your Space configuration."
-             print(f"❌ Bad request: {response.status_code} - {error_message}")
-             return error_msg
-         else:
-             error_msg = f"🚫 **API Error {response.status_code}**\n\n"
-             error_msg += f"An unexpected error occurred. Please try again.\n\n"
-             error_msg += f"If this persists, check:\n"
-             error_msg += f"1. OpenRouter service status\n"
-             error_msg += f"2. Your API key and credits\n"
-             error_msg += f"3. The model availability"
-             print(f"❌ API error: {response.status_code} - {response.text[:200]}")
-             return error_msg
-
-     except requests.exceptions.Timeout:
-         error_msg = f"⏰ **Request Timeout**\n\n"
-         error_msg += f"The API request took too long (30s limit).\n\n"
-         error_msg += f"**Troubleshooting:**\n"
-         error_msg += f"1. Try again with a shorter message\n"
-         error_msg += f"2. Check your internet connection\n"
-         error_msg += f"3. Try a different model"
-         print(f"❌ Request timeout after 30 seconds")
-         return error_msg
-     except requests.exceptions.ConnectionError:
-         error_msg = f"🌐 **Connection Error**\n\n"
-         error_msg += f"Could not connect to OpenRouter API.\n\n"
-         error_msg += f"**Troubleshooting:**\n"
-         error_msg += f"1. Check your internet connection\n"
-         error_msg += f"2. Check OpenRouter service status\n"
-         error_msg += f"3. Try again in a few moments"
-         print(f"❌ Connection error to OpenRouter API")
-         return error_msg
-     except Exception as e:
-         error_msg = f"❌ **Unexpected Error**\n\n"
-         error_msg += f"An unexpected error occurred:\n"
-         error_msg += f"`{str(e)}`\n\n"
-         error_msg += f"Please try again or contact support if this persists."
-         print(f"❌ Unexpected error: {str(e)}")
-         return error_msg
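# --- Descriptive note; not part of the deleted app.py ---
# The parsing above assumes the standard OpenRouter / OpenAI-style chat
# completion shape, roughly:
#     {"choices": [{"message": {"role": "assistant", "content": "..."}}]}
# generate_response returns choices[0].message.content, and any missing key
# falls through to one of the "API Error: ..." branches.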
-
- # Access code verification
- access_granted = gr.State(False)
- _access_granted_global = False # Global fallback
-
- def verify_access_code(code):
-     """Verify the access code"""
-     global _access_granted_global
-     if not ACCESS_CODE:
-         _access_granted_global = True
-         return gr.update(visible=False), gr.update(visible=True), gr.update(value=True)
-
-     if code == ACCESS_CODE:
-         _access_granted_global = True
-         return gr.update(visible=False), gr.update(visible=True), gr.update(value=True)
-     else:
-         _access_granted_global = False
-         return gr.update(visible=True, value="❌ Incorrect access code. Please try again."), gr.update(visible=False), gr.update(value=False)
-
- def protected_generate_response(message, history):
-     """Protected response function that checks access"""
-     # Check if access is granted via the global variable
-     if ACCESS_CODE and not _access_granted_global:
-         return "Please enter the access code to continue."
-     return generate_response(message, history)
-
- def export_conversation(history):
-     """Export conversation to markdown file"""
-     if not history:
-         return gr.update(visible=False)
-
-     markdown_content = export_conversation_to_markdown(history)
-
-     # Save to temporary file
-     with tempfile.NamedTemporaryFile(mode='w', suffix='.md', delete=False) as f:
-         f.write(markdown_content)
-         temp_file = f.name
-
-     return gr.update(value=temp_file, visible=True)
-
- # Configuration status display
- def get_configuration_status():
-     """Generate a configuration status message for display"""
-     status_parts = []
-
-     if API_KEY_VALID:
-         status_parts.append("✅ **API Key:** Configured and valid")
-     else:
-         status_parts.append("❌ **API Key:** Not configured - Set `OPENROUTER_API_KEY` in Space secrets")
-
-     status_parts.append(f"🤖 **Model:** {MODEL}")
-     status_parts.append(f"🌡️ **Temperature:** 0.7")
-     status_parts.append(f"📝 **Max Tokens:** 1500")
-
-     if GROUNDING_URLS:
-         status_parts.append(f"🔗 **URL Grounding:** {len(GROUNDING_URLS)} URLs configured")
-
-     if ENABLE_DYNAMIC_URLS:
-         status_parts.append("🔄 **Dynamic URLs:** Enabled")
-
-     if ENABLE_WEB_SEARCH:
-         status_parts.append("🔍 **Web Search:** Enabled")
-
-     if ENABLE_VECTOR_RAG:
-         status_parts.append("📚 **Document RAG:** Enabled")
-
-     if ACCESS_CODE:
-         status_parts.append("🔐 **Access Control:** Enabled")
-     else:
-         status_parts.append("🌐 **Access:** Public")
-
-     return "\n".join(status_parts)
-
- # Create interface with access code protection
- with gr.Blocks(title=SPACE_NAME) as demo:
-     gr.Markdown(f"# {SPACE_NAME}")
-     gr.Markdown(SPACE_DESCRIPTION)
-
-     # Configuration status (always visible)
-     with gr.Accordion("📊 Configuration Status", open=not API_KEY_VALID):
-         gr.Markdown(get_configuration_status())
-
-     # Access code section (shown only if ACCESS_CODE is set)
-     with gr.Column(visible=bool(ACCESS_CODE)) as access_section:
-         gr.Markdown("### 🔐 Access Required")
-         gr.Markdown("Please enter the access code provided by your instructor:")
-
-         access_input = gr.Textbox(
-             label="Access Code",
-             placeholder="Enter access code...",
-             type="password"
-         )
-         access_btn = gr.Button("Submit", variant="primary")
-         access_error = gr.Markdown(visible=False)
-
-     # Main chat interface (hidden until access granted)
-     with gr.Column(visible=not bool(ACCESS_CODE)) as chat_section:
-         chat_interface = gr.ChatInterface(
-             fn=protected_generate_response,
-             title="", # Title already shown above
-             description="", # Description already shown above
-             examples=None,
-             type="messages" # Use modern message format for better compatibility
-         )
-
-         # Export functionality
-         with gr.Row():
-             export_btn = gr.Button("Export Conversation", variant="secondary", size="sm")
-             export_file = gr.File(label="Download Conversation", visible=False)
-
-         # Connect export functionality
-         export_btn.click(
-             export_conversation,
-             inputs=[chat_interface.chatbot], # pass the underlying Chatbot component, not the ChatInterface itself
-             outputs=[export_file]
-         )
-
-     # Connect access verification
-     if ACCESS_CODE:
-         access_btn.click(
-             verify_access_code,
-             inputs=[access_input],
-             outputs=[access_error, chat_section, access_granted]
-         )
-         access_input.submit(
-             verify_access_code,
-             inputs=[access_input],
-             outputs=[access_error, chat_section, access_granted]
-         )
-
- if __name__ == "__main__":
-     demo.launch()
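Note for anyone restoring this Space: judging from the module's top-level imports, its hard dependencies are gradio, requests, and beautifulsoup4; numpy/faiss and crawl4ai are imported lazily and only matter if ENABLE_VECTOR_RAG or the crawl4ai search path is enabled. A plausible, unverified requirements.txt would be:

    gradio
    requests
    beautifulsoup4
    # optional, feature-flagged paths only
    faiss-cpu
    numpy
    crawl4ai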