# NOTE(review): the three lines "Spaces:" / "Running" / "Running" appear to be
# Hugging Face Space status text captured during extraction, not source code.
from flask import Flask, render_template, request, jsonify
import os, re, json, sqlite3, logging

app = Flask(__name__)

# ========================== 1. CONFIGURATION ==========================
# Anchor all data files at this module's directory so they land in the
# persistent storage area when deployed on Hugging Face Spaces.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DB_FILE = os.path.join(BASE_DIR, "favorite_sites.json")  # legacy JSON store (backward compatibility)
SQLITE_DB = os.path.join(BASE_DIR, "favorite_sites.db")  # primary SQLite store

# Module-wide logger.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Domains that commonly refuse iframe embedding (X-Frame-Options / CSP),
# so they are shown as screenshots instead.
BLOCKED_DOMAINS = [
    "naver.com", "daum.net", "google.com",
    "facebook.com", "instagram.com", "kakao.com",
    "ycombinator.com",
]
# ========================== 2. CURATED CATEGORIES ==========================
# Hand-picked Hugging Face Space URLs, grouped by topic. Insertion order of
# the keys is the display order.
CATEGORIES = {
    "Free AI: Productivity": [
        "https://huggingface.co/spaces/ginigen/perflexity-clone",
        "https://huggingface.co/spaces/ginipick/IDEA-DESIGN",
        "https://huggingface.co/spaces/VIDraft/mouse-webgen",
        "https://huggingface.co/spaces/openfree/Vibe-Game",
        "https://huggingface.co/spaces/openfree/Game-Gallery",
        "https://huggingface.co/spaces/aiqtech/Contributors-Leaderboard",
        "https://huggingface.co/spaces/fantaxy/Model-Leaderboard",
        "https://huggingface.co/spaces/fantaxy/Space-Leaderboard",
        "https://huggingface.co/spaces/openfree/Korean-Leaderboard",
    ],
    "Free AI: Multimodal": [
        "https://huggingface.co/spaces/openfree/DreamO-video",
        "https://huggingface.co/spaces/Heartsync/NSFW-Uncensored-photo",
        "https://huggingface.co/spaces/Heartsync/NSFW-Uncensored",
        "https://huggingface.co/spaces/fantaxy/Sound-AI-SFX",
        "https://huggingface.co/spaces/ginigen/SFX-Sound-magic",
        "https://huggingface.co/spaces/ginigen/VoiceClone-TTS",
        "https://huggingface.co/spaces/aiqcamp/MCP-kokoro",
        "https://huggingface.co/spaces/aiqcamp/ENGLISH-Speaking-Scoring",
    ],
    "Free AI: Professional": [
        "https://huggingface.co/spaces/ginigen/blogger",
        "https://huggingface.co/spaces/VIDraft/money-radar",
        "https://huggingface.co/spaces/immunobiotech/drug-discovery",
        "https://huggingface.co/spaces/immunobiotech/Gemini-MICHELIN",
        "https://huggingface.co/spaces/Heartsync/Papers-Leaderboard",
        "https://huggingface.co/spaces/VIDraft/PapersImpact",
        "https://huggingface.co/spaces/ginipick/AgentX-Papers",
        "https://huggingface.co/spaces/openfree/Cycle-Navigator",
    ],
    "Free AI: Image": [
        "https://huggingface.co/spaces/ginigen/interior-design",
        "https://huggingface.co/spaces/ginigen/Workflow-Canvas",
        "https://huggingface.co/spaces/ginigen/Multi-LoRAgen",
        "https://huggingface.co/spaces/ginigen/Every-Text",
        "https://huggingface.co/spaces/ginigen/text3d-r1",
        "https://huggingface.co/spaces/ginipick/FLUXllama",
        "https://huggingface.co/spaces/Heartsync/FLUX-Vision",
        "https://huggingface.co/spaces/ginigen/VisualCloze",
        "https://huggingface.co/spaces/seawolf2357/Ghibli-Multilingual-Text-rendering",
        "https://huggingface.co/spaces/ginigen/Ghibli-Meme-Studio",
        "https://huggingface.co/spaces/VIDraft/Open-Meme-Studio",
        "https://huggingface.co/spaces/ginigen/3D-LLAMA",
    ],
    "Free AI: LLM / VLM": [
        "https://huggingface.co/spaces/VIDraft/Gemma-3-R1984-4B",
        "https://huggingface.co/spaces/VIDraft/Gemma-3-R1984-12B",
        "https://huggingface.co/spaces/ginigen/Mistral-Perflexity",
        "https://huggingface.co/spaces/aiqcamp/gemini-2.5-flash-preview",
        "https://huggingface.co/spaces/openfree/qwen3-30b-a3b-research",
        "https://huggingface.co/spaces/openfree/qwen3-235b-a22b-research",
        "https://huggingface.co/spaces/openfree/Llama-4-Maverick-17B-Research",
    ],
}
# ========================== 3. DATABASE FUNCTIONS ==========================
def init_db():
    """Initialize both JSON and SQLite databases.

    Creates the JSON file and the SQLite ``urls`` table if missing, migrates
    any URLs found only in the JSON file into SQLite, and seeds a few default
    URLs when both stores are empty.
    """
    # Log database paths for debugging
    logger.info(f"JSON DB path: {DB_FILE}")
    logger.info(f"SQLite DB path: {SQLITE_DB}")

    # Initialize JSON file if it doesn't exist
    if not os.path.exists(DB_FILE):
        try:
            with open(DB_FILE, "w", encoding="utf-8") as f:
                json.dump([], f, ensure_ascii=False)
            logger.info("Created new JSON database file")
        except Exception as e:
            logger.error(f"Error creating JSON file: {e}")

    # Initialize SQLite database.
    # BUGFIX: the connection is now closed in a `finally` block; previously an
    # exception anywhere before conn.close() leaked the connection.
    conn = None
    try:
        conn = sqlite3.connect(SQLITE_DB)
        cursor = conn.cursor()
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS urls (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                url TEXT UNIQUE NOT NULL,
                date_added TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
        conn.commit()
        logger.info("SQLite database initialized successfully")

        # If we have data in JSON but not in SQLite (first run with a new
        # SQLite DB), migrate the data from JSON to SQLite.
        json_urls = load_json()
        if json_urls:
            logger.info(f"Found {len(json_urls)} URLs in JSON file")
            db_urls = load_db_sqlite()
            new_urls = 0
            for url in json_urls:
                if url not in db_urls:
                    add_url_to_sqlite(url)
                    new_urls += 1
            logger.info(f"Migrated {new_urls} new URLs from JSON to SQLite")
    except Exception as e:
        logger.error(f"Error initializing SQLite database: {e}")
    finally:
        if conn is not None:
            conn.close()

    # Seed default URLs when both stores are still empty.
    if not load_db():
        default_urls = [
            "https://huggingface.co/spaces/ginigen/perflexity-clone",
            "https://huggingface.co/spaces/openfree/Game-Gallery",
            "https://www.google.com"
        ]
        for url in default_urls:
            add_url_to_sqlite(url)
        save_json(default_urls)
        logger.info("Added default URLs to empty database")
def load_json():
    """Load URLs from JSON file (for backward compatibility)"""
    try:
        if not os.path.exists(DB_FILE):
            return []
        with open(DB_FILE, "r", encoding="utf-8") as handle:
            payload = json.load(handle)
    except Exception as e:
        logger.error(f"Error loading JSON file: {e}")
        return []
    # Guard against a corrupted file holding a non-list value.
    return payload if isinstance(payload, list) else []
def save_json(lst):
    """Save URLs to JSON file (for backward compatibility)"""
    try:
        with open(DB_FILE, "w", encoding="utf-8") as handle:
            json.dump(lst, handle, ensure_ascii=False, indent=2)
    except Exception as e:
        logger.error(f"Error saving to JSON file: {e}")
        return False
    logger.info(f"Saved {len(lst)} URLs to JSON file")
    return True
def load_db_sqlite():
    """Load all URLs from the SQLite database, newest first.

    Returns:
        list[str]: stored URLs ordered by ``date_added DESC``; empty list on
        any database error.
    """
    # BUGFIX: close the connection in `finally`; previously an exception
    # between connect() and close() leaked the connection.
    conn = None
    try:
        conn = sqlite3.connect(SQLITE_DB)
        cursor = conn.cursor()
        cursor.execute("SELECT url FROM urls ORDER BY date_added DESC")
        urls = [row[0] for row in cursor.fetchall()]
        logger.info(f"Loaded {len(urls)} URLs from SQLite database")
        return urls
    except Exception as e:
        logger.error(f"Error loading from SQLite database: {e}")
        return []
    finally:
        if conn is not None:
            conn.close()
def add_url_to_sqlite(url):
    """Add a URL to the SQLite database.

    Returns:
        bool: True on insert; False when the URL already exists (UNIQUE
        constraint) or on any other database error.
    """
    # BUGFIX: the IntegrityError path previously returned without closing the
    # already-open connection, leaking it on every duplicate insert. The
    # connection is now always released in `finally`.
    conn = None
    try:
        conn = sqlite3.connect(SQLITE_DB)
        cursor = conn.cursor()
        cursor.execute("INSERT INTO urls (url) VALUES (?)", (url,))
        conn.commit()
        logger.info(f"Added URL to SQLite: {url}")
        return True
    except sqlite3.IntegrityError:
        # URL already exists (url column is UNIQUE)
        logger.info(f"URL already exists in SQLite: {url}")
        return False
    except Exception as e:
        logger.error(f"Error adding URL to SQLite: {e}")
        return False
    finally:
        if conn is not None:
            conn.close()
def update_url_in_sqlite(old_url, new_url):
    """Update a URL in the SQLite database.

    Returns:
        bool: True when exactly old_url was found and replaced; False when
        old_url is absent, new_url already exists (UNIQUE), or on error.
    """
    # BUGFIX: the IntegrityError/Exception paths previously returned without
    # closing the open connection; release it in `finally` instead.
    conn = None
    try:
        conn = sqlite3.connect(SQLITE_DB)
        cursor = conn.cursor()
        cursor.execute("UPDATE urls SET url = ? WHERE url = ?", (new_url, old_url))
        if cursor.rowcount > 0:
            conn.commit()
            logger.info(f"Updated URL in SQLite: {old_url} -> {new_url}")
            return True
        logger.info(f"URL not found for update in SQLite: {old_url}")
        return False
    except sqlite3.IntegrityError:
        # New URL already exists (url column is UNIQUE)
        logger.error(f"New URL already exists in SQLite: {new_url}")
        return False
    except Exception as e:
        logger.error(f"Error updating URL in SQLite: {e}")
        return False
    finally:
        if conn is not None:
            conn.close()
def delete_url_from_sqlite(url):
    """Delete a URL from the SQLite database.

    Returns:
        bool: True when a row was deleted; False when the URL was not found
        or on any database error.
    """
    # BUGFIX: close the connection in `finally`; the exception path previously
    # leaked it.
    conn = None
    try:
        conn = sqlite3.connect(SQLITE_DB)
        cursor = conn.cursor()
        cursor.execute("DELETE FROM urls WHERE url = ?", (url,))
        if cursor.rowcount > 0:
            conn.commit()
            logger.info(f"Deleted URL from SQLite: {url}")
            return True
        logger.info(f"URL not found for deletion in SQLite: {url}")
        return False
    except Exception as e:
        logger.error(f"Error deleting URL from SQLite: {e}")
        return False
    finally:
        if conn is not None:
            conn.close()
def load_db():
    """Primary function to load URLs - prioritizes SQLite DB but falls back to JSON"""
    urls = load_db_sqlite()
    if urls:
        return urls

    # SQLite is empty: fall back to the legacy JSON file.
    logger.info("SQLite database empty, trying JSON file")
    json_urls = load_json()
    if json_urls:
        # Migrate what we found so SQLite is authoritative next time.
        logger.info(f"Migrating {len(json_urls)} URLs from JSON to SQLite")
        for url in json_urls:
            add_url_to_sqlite(url)
    return json_urls
def save_db(lst):
    """Replace the full URL list in both SQLite and JSON stores.

    Args:
        lst: complete list of URL strings to persist.

    Returns:
        bool: result of the JSON save (kept for backward compatibility);
        SQLite errors are logged but do not affect the return value.
    """
    logger.info(f"Saving {len(lst)} URLs to database")
    # Clear all URLs from SQLite and insert the new list atomically-ish.
    # BUGFIX: connection released in `finally`; previously leaked on error.
    conn = None
    try:
        conn = sqlite3.connect(SQLITE_DB)
        cursor = conn.cursor()
        cursor.execute("DELETE FROM urls")
        cursor.executemany("INSERT INTO urls (url) VALUES (?)", [(url,) for url in lst])
        conn.commit()
        logger.info("Successfully saved to SQLite database")
    except Exception as e:
        logger.error(f"Error saving to SQLite database: {e}")
    finally:
        if conn is not None:
            conn.close()
    # Also save to JSON for backward compatibility
    return save_json(lst)
# ========================== 4. URL HELPERS ==========================
def direct_url(hf_url):
    """Map a huggingface.co/spaces URL to its direct *.hf.space host.

    Non-Space URLs are returned unchanged.
    """
    match = re.match(r"https?://huggingface\.co/spaces/([^/]+)/([^/?#]+)", hf_url)
    if match is None:
        return hf_url
    owner, name = match.group(1), match.group(2)
    # Space subdomains lowercase everything and use '-' for '.' and '_'.
    slug = name.replace('.', '-').replace('_', '-')
    return "https://{}-{}.hf.space".format(owner.lower(), slug.lower())
def screenshot_url(url):
    """Build a full-page screenshot URL for *url* via the thum.io service."""
    return "https://image.thum.io/get/fullpage/" + url
def process_url_for_preview(url):
    """Returns (preview_url, mode) where mode is "iframe" or "snapshot"."""
    # Domains that refuse embedding get a screenshot straight away.
    for blocked in BLOCKED_DOMAINS:
        if blocked in url:
            return screenshot_url(url), "snapshot"

    # Specific Spaces known to misbehave inside an iframe.
    problematic = ("vibe-coding-tetris", "World-of-Tank-GAME", "Minesweeper-Game")
    if any(token in url for token in problematic):
        return screenshot_url(url), "snapshot"

    # Hugging Face Spaces get their dedicated /embed page.
    try:
        if "huggingface.co/spaces" in url:
            segments = url.rstrip("/").split("/")
            if len(segments) >= 5:
                owner, name = segments[-2], segments[-1]
                return f"https://huggingface.co/spaces/{owner}/{name}/embed", "iframe"
    except Exception:
        return screenshot_url(url), "snapshot"

    # Everything else is embedded directly.
    return url, "iframe"
# ========================== 5. API ROUTES ==========================
# NOTE(review): no @app.route decorator is visible on this handler in the
# extracted source — confirm route registration against the original file.
def api_category():
    """Return JSON metadata for every Space in the category given by ?name=."""
    cat = request.args.get('name', '')
    items = []
    for url in CATEGORIES.get(cat, []):
        parts = url.split('/')
        items.append({
            "title": parts[-1],
            "owner": parts[-2] if '/spaces/' in url else '',
            "iframe": direct_url(url),
            "shot": screenshot_url(url),
            "hf": url,
        })
    return jsonify(items)
# NOTE(review): no @app.route decorator is visible on this handler in the
# extracted source — confirm route registration against the original file.
def api_favorites():
    """Return one page of saved favorite URLs with preview information."""
    urls = load_db()
    page = int(request.args.get('page', 1))
    per_page = int(request.args.get('per_page', 9))
    total_pages = max(1, (len(urls) + per_page - 1) // per_page)

    # Slicing clamps the end index automatically.
    start = (page - 1) * per_page
    items = []
    for url in urls[start:start + per_page]:
        title = url.split('/')[-1]
        try:
            preview_url, mode = process_url_for_preview(url)
        except Exception:
            # Fall back to a static screenshot when preview resolution fails.
            preview_url, mode = screenshot_url(url), "snapshot"
        items.append({
            "title": title,
            "url": url,
            "preview_url": preview_url,
            "mode": mode,
        })

    return jsonify({
        "items": items,
        "page": page,
        "total_pages": total_pages,
    })
# NOTE(review): no @app.route decorator is visible on this handler in the
# extracted source — confirm route registration against the original file.
def add_url():
    """Add a favorite URL (form field 'url') to both stores."""
    url = request.form.get('url', '').strip()
    if not url:
        return jsonify({"success": False, "message": "URL is required"})

    # SQLite is authoritative; a False return means the URL already exists.
    if not add_url_to_sqlite(url):
        return jsonify({"success": False, "message": "URL already exists"})

    # Mirror into the legacy JSON file, newest first.
    data = load_json()
    if url not in data:
        data.insert(0, url)
        save_json(data)
    return jsonify({"success": True, "message": "URL added successfully"})
# NOTE(review): no @app.route decorator is visible on this handler in the
# extracted source — confirm route registration against the original file.
def update_url():
    """Replace form field 'old' with 'new' in both stores."""
    old = request.form.get('old', '')
    new = request.form.get('new', '').strip()
    if not new:
        return jsonify({"success": False, "message": "New URL is required"})

    if not update_url_in_sqlite(old, new):
        return jsonify({"success": False, "message": "URL not found or new URL already exists"})

    # Mirror the change into the legacy JSON file.
    data = load_json()
    if old in data:
        data[data.index(old)] = new
    else:
        # Old URL missing from JSON: just make sure the new one is present.
        data.append(new)
    save_json(data)
    return jsonify({"success": True, "message": "URL updated successfully"})
# NOTE(review): no @app.route decorator is visible on this handler in the
# extracted source — confirm route registration against the original file.
def delete_url():
    """Remove a favorite URL (form field 'url') from both stores."""
    url = request.form.get('url', '')

    if not delete_url_from_sqlite(url):
        return jsonify({"success": False, "message": "URL not found"})

    # Mirror the deletion into the legacy JSON file if present there.
    data = load_json()
    if url in data:
        data.remove(url)
        save_json(data)
    return jsonify({"success": True, "message": "URL deleted successfully"})
# ========================== 6. MAIN ROUTES ==========================
# NOTE(review): no @app.route('/') decorator is visible on this handler in the
# extracted source — confirm route registration against the original file.
def home():
    """Write a minimal debug template to disk on every request, then render it."""
    template_dir = os.path.join(BASE_DIR, 'templates')
    os.makedirs(template_dir, exist_ok=True)

    # Template body is runtime data; kept verbatim (the "๐" glyph looks like a
    # mis-encoded emoji from extraction, but the original bytes are unknown).
    index_html = '''<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>AI Favorite Sites</title>
<style>
body { font-family: Arial, sans-serif; margin: 0; padding: 20px; }
h1 { text-align: center; }
</style>
</head>
<body>
<h1>๐ AI Favorite Sites</h1>
<p style="text-align: center;">
<a href="https://discord.gg/openfreeai" target="_blank">
<img src="https://img.shields.io/static/v1?label=Discord&message=Openfree%20AI&color=%230000ff&labelColor=%23800080&logo=discord&logoColor=white&style=for-the-badge" alt="Discord">
</a>
</p>
<div id="content">Loading...</div>
<script>
// Simple alert to check if JavaScript is working
window.onload = function() {
document.getElementById('content').innerHTML = 'JavaScript is working! Loading content...';
// List available categories
const cats = {{cats|tojson}};
let catList = '<ul>';
cats.forEach(cat => {
catList += '<li>' + cat + '</li>';
});
catList += '</ul>';
document.getElementById('content').innerHTML += '<p>Available categories:</p>' + catList;
};
</script>
</body>
</html>'''

    index_path = os.path.join(template_dir, 'index.html')
    with open(index_path, 'w', encoding='utf-8') as handle:
        handle.write(index_html)

    # Log for debugging
    logger.info(f"Template written to: {index_path}")
    logger.info(f"Categories: {list(CATEGORIES.keys())}")
    return render_template('index.html', cats=list(CATEGORIES.keys()))
# Initialize database on startup (module import time).
init_db()

def ensure_db_consistency():
    """Make sure both databases are in sync"""
    try:
        # Union of both stores, deduplicated (order is unspecified).
        sqlite_urls = load_db_sqlite()
        json_urls = load_json()
        all_urls = list(set(sqlite_urls + json_urls))

        # Equal lengths on both sides means nothing to do.
        if len(all_urls) == len(sqlite_urls) and len(all_urls) == len(json_urls):
            return

        logger.info("Database inconsistency detected, synchronizing...")
        save_db(all_urls)

        # Verify the write actually took on both sides.
        sqlite_check = load_db_sqlite()
        json_check = load_json()
        if len(sqlite_check) != len(all_urls) or len(json_check) != len(all_urls):
            logger.error(f"Database synchronization failed! SQLite: {len(sqlite_check)}, JSON: {len(json_check)}, Expected: {len(all_urls)}")
        else:
            logger.info("Database synchronization successful")
    except Exception as e:
        logger.error(f"Error during database consistency check: {e}")
# For Flask 2.0+ compatibility (before_first_request was removed).
# NOTE(review): not visibly registered via @app.before_request in the extracted
# source — confirm how this hook is wired up in the original file. Indentation
# was lost in extraction; the status log is assumed to sit in the run-once
# branch alongside the consistency check.
def before_request_func():
    # A one-shot guard flag on the app object makes this run only once.
    if getattr(app, '_got_first_request', False):
        return
    ensure_db_consistency()
    app._got_first_request = True
    # Log database status
    logger.info(f"Database status - SQLite: {len(load_db_sqlite())} URLs, JSON: {len(load_json())} URLs")

if __name__ == '__main__':
    # Port 7860 is the conventional Hugging Face Spaces port.
    app.run(host='0.0.0.0', port=7860)