# IDEA-DESIGN / app.py
# (Hugging Face Space metadata from the page this file was copied from:
#  ginipick's picture — "Update app.py" — commit 2273a08 verified —
#  raw / history / blame — 30.6 kB)
# Standard library
import base64
import html
import json
import logging
import os
import re
import tempfile
from datetime import datetime
from urllib.parse import parse_qs, quote_plus, urlparse

# Third-party
import anthropic
import markdown
import requests
import streamlit as st
from bs4 import BeautifulSoup
from gradio_client import Client
# ๋กœ๊น… ์„ค์ •
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s')
# API ์„ค์ •
api_key = os.environ.get("API_KEY")
client = anthropic.Anthropic(api_key=api_key)
# ์ด๋ฏธ์ง€ ์ƒ์„ฑ API URL
IMAGE_API_URL = "http://211.233.58.201:7896"
# ์ตœ๋Œ€ ํ† ํฐ ์ˆ˜ ์„ค์ • (Claude-3 Sonnet์˜ ์ตœ๋Œ€ ํ† ํฐ ์ˆ˜)
MAX_TOKENS = 7999
# SerpHouse API Key ์„ค์ •
SERPHOUSE_API_KEY = os.environ.get("SERPHOUSE_API_KEY", "")
def get_system_prompt():
return """
๋‹น์‹ ์€ ์ „๋ฌธ ๋ธ”๋กœ๊ทธ ์ž‘์„ฑ ์ „๋ฌธ๊ฐ€์ž…๋‹ˆ๋‹ค. ๋ชจ๋“  ๋ธ”๋กœ๊ทธ ๊ธ€ ์ž‘์„ฑ ์š”์ฒญ์— ๋Œ€ํ•ด ๋‹ค์Œ์˜ 8๋‹จ๊ณ„ ํ”„๋ ˆ์ž„์›Œํฌ๋ฅผ ์ฒ ์ €ํžˆ ๋”ฐ๋ฅด๋˜, ์ž์—ฐ์Šค๋Ÿฝ๊ณ  ๋งค๋ ฅ์ ์ธ ๊ธ€์ด ๋˜๋„๋ก ์ž‘์„ฑํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค:
๋…์ž ์—ฐ๊ฒฐ ๋‹จ๊ณ„ 1.1. ๊ณต๊ฐ๋Œ€ ํ˜•์„ฑ์„ ์œ„ํ•œ ์นœ๊ทผํ•œ ์ธ์‚ฌ 1.2. ๋…์ž์˜ ์‹ค์ œ ๊ณ ๋ฏผ์„ ๋ฐ˜์˜ํ•œ ๋„์ž… ์งˆ๋ฌธ 1.3. ์ฃผ์ œ์— ๋Œ€ํ•œ ์ฆ‰๊ฐ์  ๊ด€์‹ฌ ์œ ๋„
๋ฌธ์ œ ์ •์˜ ๋‹จ๊ณ„ 2.1. ๋…์ž์˜ ํŽ˜์ธํฌ์ธํŠธ ๊ตฌ์ฒดํ™” 2.2. ๋ฌธ์ œ์˜ ์‹œ๊ธ‰์„ฑ๊ณผ ์˜ํ–ฅ๋„ ๋ถ„์„ 2.3. ํ•ด๊ฒฐ ํ•„์š”์„ฑ์— ๋Œ€ํ•œ ๊ณต๊ฐ๋Œ€ ํ˜•์„ฑ
์ „๋ฌธ์„ฑ ์ž…์ฆ ๋‹จ๊ณ„ 3.1. ๊ฐ๊ด€์  ๋ฐ์ดํ„ฐ ๊ธฐ๋ฐ˜ ๋ถ„์„ 3.2. ์ „๋ฌธ๊ฐ€ ๊ฒฌํ•ด์™€ ์—ฐ๊ตฌ ๊ฒฐ๊ณผ ์ธ์šฉ 3.3. ์‹ค์ œ ์‚ฌ๋ก€๋ฅผ ํ†ตํ•œ ๋ฌธ์ œ ๊ตฌ์ฒดํ™”
์†”๋ฃจ์…˜ ์ œ๊ณต ๋‹จ๊ณ„ 4.1. ๋‹จ๊ณ„๋ณ„ ์‹ค์ฒœ ๊ฐ€์ด๋“œ๋ผ์ธ ์ œ์‹œ 4.2. ์ฆ‰์‹œ ์ ์šฉ ๊ฐ€๋Šฅํ•œ ๊ตฌ์ฒด์  ํŒ 4.3. ์˜ˆ์ƒ ์žฅ์• ๋ฌผ๊ณผ ๊ทน๋ณต ๋ฐฉ์•ˆ ํฌํ•จ
์‹ ๋ขฐ๋„ ๊ฐ•ํ™” ๋‹จ๊ณ„ 5.1. ์‹ค์ œ ์„ฑ๊ณต ์‚ฌ๋ก€ ์ œ์‹œ 5.2. ๊ตฌ์ฒด์  ์‚ฌ์šฉ์ž ํ›„๊ธฐ ์ธ์šฉ 5.3. ๊ฐ๊ด€์  ๋ฐ์ดํ„ฐ๋กœ ํšจ๊ณผ ์ž…์ฆ
ํ–‰๋™ ์œ ๋„ ๋‹จ๊ณ„ 6.1. ๋ช…ํ™•ํ•œ ์ฒซ ์‹ค์ฒœ ๋‹จ๊ณ„ ์ œ์‹œ 6.2. ์‹œ๊ธ‰์„ฑ์„ ๊ฐ•์กฐํ•œ ํ–‰๋™ ์ด‰๊ตฌ 6.3. ์‹ค์ฒœ ๋™๊ธฐ ๋ถ€์—ฌ ์š”์†Œ ํฌํ•จ
์ง„์ •์„ฑ ๊ฐ•ํ™” ๋‹จ๊ณ„ 7.1. ์†”๋ฃจ์…˜์˜ ํ•œ๊ณ„ ํˆฌ๋ช…ํ•˜๊ฒŒ ๊ณต๊ฐœ 7.2. ๊ฐœ์ธ๋ณ„ ์ฐจ์ด ์กด์žฌ ์ธ์ • 7.3. ํ•„์š” ์กฐ๊ฑด๊ณผ ์ฃผ์˜์‚ฌํ•ญ ๋ช…์‹œ
๊ด€๊ณ„ ์ง€์† ๋‹จ๊ณ„ 8.1. ์ง„์ •์„ฑ ์žˆ๋Š” ๊ฐ์‚ฌ ์ธ์‚ฌ 8.2. ๋‹ค์Œ ์ปจํ…์ธ  ์˜ˆ๊ณ ๋กœ ๊ธฐ๋Œ€๊ฐ ์กฐ์„ฑ 8.3. ์†Œํ†ต ์ฑ„๋„ ์•ˆ๋‚ด
์ž‘์„ฑ ์‹œ ์ค€์ˆ˜์‚ฌํ•ญ 9.1. ๊ธ€์ž ์ˆ˜: 1500-2000์ž ๋‚ด์™ธ 9.2. ๋ฌธ๋‹จ ๊ธธ์ด: 3-4๋ฌธ์žฅ ์ด๋‚ด 9.3. ์‹œ๊ฐ์  ๊ตฌ๋ถ„: ์†Œ์ œ๋ชฉ, ๊ตฌ๋ถ„์„ , ๋ฒˆํ˜ธ ๋ชฉ๋ก ํ™œ์šฉ 9.4. ํ†ค์•ค๋งค๋„ˆ: ์นœ๊ทผํ•˜๊ณ  ์ „๋ฌธ์ ์ธ ๋Œ€ํ™”์ฒด 9.5. ๋ฐ์ดํ„ฐ: ๋ชจ๋“  ์ •๋ณด์˜ ์ถœ์ฒ˜ ๋ช…์‹œ 9.6. ๊ฐ€๋…์„ฑ: ๋ช…ํ™•ํ•œ ๋‹จ๋ฝ ๊ตฌ๋ถ„๊ณผ ๊ฐ•์กฐ์  ์‚ฌ์šฉ
์ด๋Ÿฌํ•œ ํ”„๋ ˆ์ž„์›Œํฌ๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ, ์š”์ฒญ๋ฐ›์€ ์ฃผ์ œ์— ๋Œ€ํ•ด ์ฒด๊ณ„์ ์ด๊ณ  ๋งค๋ ฅ์ ์ธ ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŠธ๋ฅผ ์ž‘์„ฑํ•˜๊ฒ ์Šต๋‹ˆ๋‹ค.
"""
def test_image_api_connection():
"""์ด๋ฏธ์ง€ API ์„œ๋ฒ„ ์—ฐ๊ฒฐ ํ…Œ์ŠคํŠธ"""
try:
client = Client(IMAGE_API_URL)
return "์ด๋ฏธ์ง€ API ์—ฐ๊ฒฐ ์„ฑ๊ณต: ์ •์ƒ ์ž‘๋™ ์ค‘"
except Exception as e:
logging.error(f"์ด๋ฏธ์ง€ API ์—ฐ๊ฒฐ ํ…Œ์ŠคํŠธ ์‹คํŒจ: {e}")
return f"์ด๋ฏธ์ง€ API ์—ฐ๊ฒฐ ์‹คํŒจ: {e}"
def generate_image(prompt, width=768, height=768, guidance=3.5, inference_steps=30, seed=3):
"""์ด๋ฏธ์ง€ ์ƒ์„ฑ ํ•จ์ˆ˜"""
if not prompt:
return None, "์˜ค๋ฅ˜: ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”"
try:
client = Client(IMAGE_API_URL)
result = client.predict(
prompt=prompt,
width=int(width),
height=int(height),
guidance=float(guidance),
inference_steps=int(inference_steps),
seed=int(seed),
do_img2img=False,
init_image=None,
image2image_strength=0.8,
resize_img=True,
api_name="/generate_image"
)
logging.info(f"์ด๋ฏธ์ง€ ์ƒ์„ฑ ์„ฑ๊ณต: {result[1]}")
return result[0], f"์‚ฌ์šฉ๋œ ์‹œ๋“œ: {result[1]}"
except Exception as e:
logging.error(f"์ด๋ฏธ์ง€ ์ƒ์„ฑ ์‹คํŒจ: {str(e)}")
return None, f"์˜ค๋ฅ˜: {str(e)}"
def extract_image_prompt(blog_content, blog_topic):
"""๋ธ”๋กœ๊ทธ ๋‚ด์šฉ์—์„œ ์ด๋ฏธ์ง€ ์ƒ์„ฑ์„ ์œ„ํ•œ ํ”„๋กฌํ”„ํŠธ ์ถ”์ถœ"""
image_prompt_system = f"""
๋‹ค์Œ์€ '{blog_topic}'์— ๊ด€ํ•œ ๋ธ”๋กœ๊ทธ ๊ธ€์ž…๋‹ˆ๋‹ค. ์ด ๋ธ”๋กœ๊ทธ ๊ธ€์˜ ๋‚ด์šฉ์„ ๊ธฐ๋ฐ˜์œผ๋กœ ์ ์ ˆํ•œ ์ด๋ฏธ์ง€๋ฅผ ์ƒ์„ฑํ•˜๊ธฐ ์œ„ํ•œ
ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ž‘์„ฑํ•ด์ฃผ์„ธ์š”. ํ”„๋กฌํ”„ํŠธ๋Š” ์˜์–ด๋กœ ์ž‘์„ฑํ•˜๊ณ , ๊ตฌ์ฒด์ ์ธ ์‹œ๊ฐ์  ์š”์†Œ๋ฅผ ๋‹ด์•„์•ผ ํ•ฉ๋‹ˆ๋‹ค.
ํ”„๋กฌํ”„ํŠธ๋งŒ ๋ฐ˜ํ™˜ํ•˜์„ธ์š”(๋‹ค๋ฅธ ์„ค๋ช… ์—†์ด).
์˜ˆ์‹œ ํ˜•์‹:
"A professional photo of [subject], [specific details], [atmosphere], [lighting], [perspective], high quality, detailed"
"""
try:
response = client.messages.create(
model="claude-3-7-sonnet-20250219",
max_tokens=150,
system=image_prompt_system,
messages=[{"role": "user", "content": blog_content}]
)
# ์‘๋‹ต์—์„œ ํ”„๋กฌํ”„ํŠธ ์ถ”์ถœ
image_prompt = response.content[0].text.strip()
logging.info(f"์ƒ์„ฑ๋œ ์ด๋ฏธ์ง€ ํ”„๋กฌํ”„ํŠธ: {image_prompt}")
return image_prompt
except Exception as e:
logging.error(f"์ด๋ฏธ์ง€ ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ ์˜ค๋ฅ˜: {e}")
return f"A professional photo related to {blog_topic}, detailed, high quality"
# ๋งˆํฌ๋‹ค์šด์„ HTML๋กœ ๋ณ€ํ™˜ํ•˜๋Š” ํ•จ์ˆ˜
def convert_md_to_html(md_text, title="Ginigen Blog"):
html_content = markdown.markdown(md_text)
html_doc = f"""
<!DOCTYPE html>
<html>
<head>
<title>{title}</title>
<meta charset="utf-8">
<style>
body {{ font-family: Arial, sans-serif; line-height: 1.6; max-width: 800px; margin: 0 auto; padding: 20px; }}
h1 {{ color: #2c3e50; font-size: 2.5em; margin-bottom: 20px; }}
h2 {{ color: #3498db; margin-top: 25px; font-size: 1.8em; }}
h3 {{ color: #2980b9; font-size: 1.5em; }}
p {{ margin-bottom: 15px; font-size: 1.1em; }}
blockquote {{ background: #f9f9f9; border-left: 10px solid #ccc; margin: 1.5em 10px; padding: 1em 10px; }}
ul, ol {{ margin-bottom: 15px; }}
li {{ margin-bottom: 5px; }}
hr {{ border: 0; height: 1px; background: #ddd; margin: 20px 0; }}
img {{ max-width: 100%; height: auto; display: block; margin: 20px auto; }}
</style>
</head>
<body>
{html_content}
</body>
</html>
"""
return html_doc
# ์›น ๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ ์ถ”์ถœ ํ•จ์ˆ˜
def extract_keywords(text: str, top_k: int = 5) -> str:
"""
1) ํ•œ๊ธ€(๊ฐ€-ํžฃ), ์˜์–ด(a-zA-Z), ์ˆซ์ž(0-9), ๊ณต๋ฐฑ๋งŒ ๋‚จ๊น€
2) ๊ณต๋ฐฑ ๊ธฐ์ค€ ํ† ํฐ ๋ถ„๋ฆฌ
3) ์ตœ๋Œ€ top_k๊ฐœ๋งŒ
"""
text = re.sub(r"[^a-zA-Z0-9๊ฐ€-ํžฃ\s]", "", text)
tokens = text.split()
key_tokens = tokens[:top_k]
return " ".join(key_tokens)
# Mock ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ƒ์„ฑ ํ•จ์ˆ˜
def generate_mock_search_results(query):
"""API ์—ฐ๊ฒฐ์ด ์•ˆ๋  ๋•Œ ์‚ฌ์šฉํ•  ๊ฐ€์ƒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ƒ์„ฑ"""
current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
mock_results = [
{
"title": f"{query}์— ๊ด€ํ•œ ์ตœ์‹  ์ •๋ณด",
"link": "https://example.com/article1",
"snippet": f"{query}์— ๊ด€ํ•œ ๊ฐ€์ƒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ์ž…๋‹ˆ๋‹ค. ์ด ๊ฒฐ๊ณผ๋Š” API ์—ฐ๊ฒฐ ๋ฌธ์ œ๋กœ ์ธํ•ด ์ƒ์„ฑ๋œ ๊ฐ€์ƒ ๋ฐ์ดํ„ฐ์ž…๋‹ˆ๋‹ค. ์‹ค์ œ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์•„๋‹˜์„ ์ฐธ๊ณ ํ•˜์„ธ์š”. ์ƒ์„ฑ ์‹œ๊ฐ„: {current_time}",
"displayed_link": "example.com/article1"
},
{
"title": f"{query} ๊ด€๋ จ ์—ฐ๊ตฌ ๋™ํ–ฅ",
"link": "https://example.org/research",
"snippet": "์ด๊ฒƒ์€ API ์—ฐ๊ฒฐ ๋ฌธ์ œ๋กœ ์ธํ•œ ๊ฐ€์ƒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ์ž…๋‹ˆ๋‹ค. ์‹ค์ œ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๋ฅผ ๋ณด์—ฌ๋“œ๋ฆฌ์ง€ ๋ชปํ•ด ์ฃ„์†กํ•ฉ๋‹ˆ๋‹ค. ๋Œ€์‹  AI์˜ ๊ธฐ์กด ์ง€์‹์„ ํ™œ์šฉํ•˜์—ฌ ๋‹ต๋ณ€๋“œ๋ฆฌ๊ฒ ์Šต๋‹ˆ๋‹ค.",
"displayed_link": "example.org/research"
},
{
"title": f"{query}์˜ ์—ญ์‚ฌ์  ๋ฐฐ๊ฒฝ",
"link": "https://example.net/history",
"snippet": "์ด ๊ฐ€์ƒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๋Š” API ์—ฐ๊ฒฐ ๋ฌธ์ œ๋กœ ์ธํ•ด ์ƒ์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ์ฐธ๊ณ ์šฉ์œผ๋กœ๋งŒ ์‚ฌ์šฉํ•ด์ฃผ์„ธ์š”.",
"displayed_link": "example.net/history"
}
]
summary_lines = []
for idx, item in enumerate(mock_results, start=1):
title = item.get("title", "No title")
link = item.get("link", "#")
snippet = item.get("snippet", "No description")
displayed_link = item.get("displayed_link", link)
summary_lines.append(
f"### Result {idx}: {title}\n\n"
f"{snippet}\n\n"
f"**์ถœ์ฒ˜**: [{displayed_link}]({link})\n\n"
f"---\n"
)
notice = """
# ๊ฐ€์ƒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ (API ์—ฐ๊ฒฐ ๋ฌธ์ œ๋กœ ์ธํ•ด ์ƒ์„ฑ๋จ)
์•„๋ž˜๋Š” API ์—ฐ๊ฒฐ ๋ฌธ์ œ๋กœ ์ธํ•ด ์ƒ์„ฑ๋œ ๊ฐ€์ƒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ์ž…๋‹ˆ๋‹ค. ์‹ค์ œ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์•„๋‹˜์„ ์ฐธ๊ณ ํ•˜์„ธ์š”.
๋Œ€์‹  AI์˜ ๊ธฐ์กด ์ง€์‹์„ ํ™œ์šฉํ•˜์—ฌ ์ตœ๋Œ€ํ•œ ์ •ํ™•ํ•œ ๋‹ต๋ณ€์„ ๋“œ๋ฆฌ๊ฒ ์Šต๋‹ˆ๋‹ค.
"""
return notice + "\n".join(summary_lines)
# Google ๊ฒ€์ƒ‰ ํ•จ์ˆ˜ (SerpAPI ๋Œ€์‹  ์ง์ ‘ ๊ฒ€์ƒ‰)
# Google ๊ฒ€์ƒ‰ ํ•จ์ˆ˜ (BeautifulSoup์„ ์‚ฌ์šฉํ•˜์—ฌ ๊ฒฐ๊ณผ ํŒŒ์‹ฑ)
def do_google_search(query, num_results=5):
try:
# ๋‹ค์–‘ํ•œ User-Agent ์‚ฌ์šฉ (Google ์ฐจ๋‹จ ๋ฐฉ์ง€)
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Language': 'ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7',
'Accept-Encoding': 'gzip, deflate, br',
'Referer': 'https://www.google.com/',
'DNT': '1',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1',
'Cache-Control': 'max-age=0',
}
# ๊ฒ€์ƒ‰ URL (์ผ๋ถ€ ํŒŒ๋ผ๋ฏธํ„ฐ ์ถ”๊ฐ€)
search_url = f"https://www.google.com/search?q={query}&num={num_results}&hl=ko&gl=kr"
logging.info(f"๊ตฌ๊ธ€ ๊ฒ€์ƒ‰ URL: {search_url}")
# ์š”์ฒญ ๋ณด๋‚ด๊ธฐ (์งง์€ ํƒ€์ž„์•„์›ƒ ์„ค์ •)
response = requests.get(search_url, headers=headers, timeout=10)
# ์‘๋‹ต์ด ์„ฑ๊ณต์ ์ธ์ง€ ํ™•์ธ
if response.status_code != 200:
logging.error(f"Google ๊ฒ€์ƒ‰ ์‘๋‹ต ์ƒํƒœ ์ฝ”๋“œ: {response.status_code}")
return generate_mock_search_results(query)
# BeautifulSoup์œผ๋กœ HTML ํŒŒ์‹ฑ
soup = BeautifulSoup(response.text, 'html.parser')
# ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ถ”์ถœ
organic_results = []
# ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ปจํ…Œ์ด๋„ˆ ์ฐพ๊ธฐ (Google์˜ HTML ๊ตฌ์กฐ์— ๋”ฐ๋ผ ๋ณ€๊ฒฝ๋  ์ˆ˜ ์žˆ์Œ)
result_containers = soup.select('div.g')
if not result_containers:
logging.warning("Google ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค. ๋Œ€์ฒด ์„ ํƒ์ž๋ฅผ ์‹œ๋„ํ•ฉ๋‹ˆ๋‹ค.")
# ๋Œ€์ฒด ์„ ํƒ์ž ์‹œ๋„
result_containers = soup.select('div[data-hveid]')
counter = 0
for container in result_containers:
if counter >= num_results:
break
# ์ œ๋ชฉ ์ถ”์ถœ
title_element = container.select_one('h3')
if not title_element:
continue
title = title_element.get_text()
# ๋งํฌ ์ถ”์ถœ
link_element = container.select_one('a')
if not link_element:
continue
link = link_element.get('href', '')
if link.startswith('/url?'):
# Google์˜ ๋ฆฌ๋‹ค์ด๋ ‰ํŠธ URL์—์„œ ์‹ค์ œ URL ์ถ”์ถœ
link = link.split('q=')[1].split('&')[0] if 'q=' in link else link
elif not link.startswith('http'):
continue
# ์Šค๋‹ˆํŽซ ์ถ”์ถœ
snippet_element = container.select_one('div.VwiC3b') or container.select_one('span.aCOpRe')
snippet = snippet_element.get_text() if snippet_element else "์„ค๋ช… ์—†์Œ"
# ํ‘œ์‹œ ๋งํฌ ์ถ”์ถœ
displayed_link_element = container.select_one('cite')
displayed_link = displayed_link_element.get_text() if displayed_link_element else link
organic_results.append({
"title": title,
"link": link,
"snippet": snippet,
"displayed_link": displayed_link
})
counter += 1
if not organic_results:
logging.warning("๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๋ฅผ ํŒŒ์‹ฑํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค. ์„ ํƒ์ž๊ฐ€ ๋ณ€๊ฒฝ๋˜์—ˆ์„ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.")
return generate_mock_search_results(query)
# ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ๋งˆํฌ๋‹ค์šด ํ˜•์‹์œผ๋กœ ๋ณ€ํ™˜
summary_lines = []
for idx, item in enumerate(organic_results, start=1):
title = item.get("title", "No title")
link = item.get("link", "#")
snippet = item.get("snippet", "No description")
displayed_link = item.get("displayed_link", link)
summary_lines.append(
f"### Result {idx}: {title}\n\n"
f"{snippet}\n\n"
f"**์ถœ์ฒ˜**: [{displayed_link}]({link})\n\n"
f"---\n"
)
# ๋ชจ๋ธ์—๊ฒŒ ๋ช…ํ™•ํ•œ ์ง€์นจ ์ถ”๊ฐ€
instructions = """
# ์›น ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ
์•„๋ž˜๋Š” ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ์ž…๋‹ˆ๋‹ค. ์งˆ๋ฌธ์— ๋‹ต๋ณ€ํ•  ๋•Œ ์ด ์ •๋ณด๋ฅผ ํ™œ์šฉํ•˜์„ธ์š”:
1. ๊ฐ ๊ฒฐ๊ณผ์˜ ์ œ๋ชฉ, ๋‚ด์šฉ, ์ถœ์ฒ˜ ๋งํฌ๋ฅผ ์ฐธ๊ณ ํ•˜์„ธ์š”
2. ๋‹ต๋ณ€์— ๊ด€๋ จ ์ •๋ณด์˜ ์ถœ์ฒ˜๋ฅผ ๋ช…์‹œ์ ์œผ๋กœ ์ธ์šฉํ•˜์„ธ์š” (์˜ˆ: "X ์ถœ์ฒ˜์— ๋”ฐ๋ฅด๋ฉด...")
3. ์‘๋‹ต์— ์‹ค์ œ ์ถœ์ฒ˜ ๋งํฌ๋ฅผ ํฌํ•จํ•˜์„ธ์š”
4. ์—ฌ๋Ÿฌ ์ถœ์ฒ˜์˜ ์ •๋ณด๋ฅผ ์ข…ํ•ฉํ•˜์—ฌ ๋‹ต๋ณ€ํ•˜์„ธ์š”
"""
search_results = instructions + "\n".join(summary_lines)
logging.info(f"Google ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ {len(organic_results)}๊ฐœ ํŒŒ์‹ฑ ์™„๋ฃŒ")
return search_results
except Exception as e:
logging.error(f"Google ๊ฒ€์ƒ‰ ์‹คํŒจ: {e}")
return generate_mock_search_results(query)
# ์›น ๊ฒ€์ƒ‰ ํ•จ์ˆ˜
def do_web_search(query: str) -> str:
"""
์›น ๊ฒ€์ƒ‰์„ ์ˆ˜ํ–‰ํ•˜๋Š” ํ•จ์ˆ˜ - SerpHouse API ๋˜๋Š” ์ง์ ‘ ๊ตฌ๊ธ€ ๊ฒ€์ƒ‰
"""
try:
# API ํ‚ค๊ฐ€ ์—†๊ฑฐ๋‚˜ 'mock'์ธ ๊ฒฝ์šฐ
if not SERPHOUSE_API_KEY or "mock" in SERPHOUSE_API_KEY.lower():
logging.warning("API ํ‚ค๊ฐ€ ์—†๊ฑฐ๋‚˜ Mock ๋ชจ๋“œ์ž…๋‹ˆ๋‹ค. ๊ฐ€์ƒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ˜ํ™˜ํ•ฉ๋‹ˆ๋‹ค.")
return generate_mock_search_results(query)
# SerpHouse API ์‚ฌ์šฉ
url = "https://api.serphouse.com/serp/live"
params = {
"q": query,
"domain": "google.com",
"serp_type": "web",
"device": "desktop",
"lang": "ko", # ํ•œ๊ตญ์–ด ๊ฒฐ๊ณผ
"num": "5" # ๊ฒฐ๊ณผ ์ˆ˜ ์ค„์ž„
}
headers = {
"Authorization": f"Bearer {SERPHOUSE_API_KEY}"
}
logging.info(f"SerpHouse API ํ˜ธ์ถœ ์ค‘... ๊ฒ€์ƒ‰์–ด: {query}")
# ์งง์€ ํƒ€์ž„์•„์›ƒ์œผ๋กœ ์š”์ฒญ ์‹œ๋„
response = requests.get(url, headers=headers, params=params, timeout=15)
response.raise_for_status()
logging.info(f"SerpHouse API ์‘๋‹ต ์ƒํƒœ ์ฝ”๋“œ: {response.status_code}")
data = response.json()
# ๋‹ค์–‘ํ•œ ์‘๋‹ต ๊ตฌ์กฐ ์ฒ˜๋ฆฌ
results = data.get("results", {})
organic = None
# ๊ฐ€๋Šฅํ•œ ์‘๋‹ต ๊ตฌ์กฐ 1
if isinstance(results, dict) and "organic" in results:
organic = results["organic"]
# ๊ฐ€๋Šฅํ•œ ์‘๋‹ต ๊ตฌ์กฐ 2
elif isinstance(results, dict) and "results" in results:
if isinstance(results["results"], dict) and "organic" in results["results"]:
organic = results["results"]["organic"]
# ๊ฐ€๋Šฅํ•œ ์‘๋‹ต ๊ตฌ์กฐ 3
elif "organic" in data:
organic = data["organic"]
if not organic:
logging.warning("์‘๋‹ต์—์„œ organic ๊ฒฐ๊ณผ๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค. ๊ตฌ๊ธ€ ์ง์ ‘ ๊ฒ€์ƒ‰์œผ๋กœ ์ „ํ™˜ํ•ฉ๋‹ˆ๋‹ค.")
return do_google_search(query)
# ๊ฒฐ๊ณผ ์ˆ˜ ์ œํ•œ ๋ฐ ์ปจํ…์ŠคํŠธ ๊ธธ์ด ์ตœ์ ํ™”
max_results = min(5, len(organic))
limited_organic = organic[:max_results]
# ๊ฒฐ๊ณผ ํ˜•์‹ ๊ฐœ์„ 
summary_lines = []
for idx, item in enumerate(limited_organic, start=1):
title = item.get("title", "No title")
link = item.get("link", "#")
snippet = item.get("snippet", "No description")
displayed_link = item.get("displayed_link", link)
summary_lines.append(
f"### Result {idx}: {title}\n\n"
f"{snippet}\n\n"
f"**์ถœ์ฒ˜**: [{displayed_link}]({link})\n\n"
f"---\n"
)
# ๋ชจ๋ธ์—๊ฒŒ ๋ช…ํ™•ํ•œ ์ง€์นจ ์ถ”๊ฐ€
instructions = """
# ์›น ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ
์•„๋ž˜๋Š” ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ์ž…๋‹ˆ๋‹ค. ์งˆ๋ฌธ์— ๋‹ต๋ณ€ํ•  ๋•Œ ์ด ์ •๋ณด๋ฅผ ํ™œ์šฉํ•˜์„ธ์š”:
1. ๊ฐ ๊ฒฐ๊ณผ์˜ ์ œ๋ชฉ, ๋‚ด์šฉ, ์ถœ์ฒ˜ ๋งํฌ๋ฅผ ์ฐธ๊ณ ํ•˜์„ธ์š”
2. ๋‹ต๋ณ€์— ๊ด€๋ จ ์ •๋ณด์˜ ์ถœ์ฒ˜๋ฅผ ๋ช…์‹œ์ ์œผ๋กœ ์ธ์šฉํ•˜์„ธ์š” (์˜ˆ: "X ์ถœ์ฒ˜์— ๋”ฐ๋ฅด๋ฉด...")
3. ์‘๋‹ต์— ์‹ค์ œ ์ถœ์ฒ˜ ๋งํฌ๋ฅผ ํฌํ•จํ•˜์„ธ์š”
4. ์—ฌ๋Ÿฌ ์ถœ์ฒ˜์˜ ์ •๋ณด๋ฅผ ์ข…ํ•ฉํ•˜์—ฌ ๋‹ต๋ณ€ํ•˜์„ธ์š”
"""
search_results = instructions + "\n".join(summary_lines)
logging.info(f"๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ {len(limited_organic)}๊ฐœ ์ฒ˜๋ฆฌ ์™„๋ฃŒ")
return search_results
except requests.exceptions.Timeout:
logging.error("Web search timed out, ์ง์ ‘ ๊ตฌ๊ธ€ ๊ฒ€์ƒ‰์œผ๋กœ ์ „ํ™˜ํ•ฉ๋‹ˆ๋‹ค.")
return do_google_search(query)
except Exception as e:
logging.error(f"Web search failed: {e}, ์ง์ ‘ ๊ตฌ๊ธ€ ๊ฒ€์ƒ‰์œผ๋กœ ์ „ํ™˜ํ•ฉ๋‹ˆ๋‹ค.")
return do_google_search(query)
def chatbot_interface():
st.title("Ginigen Blog")
# ๋ชจ๋ธ ๊ณ ์ • ์„ค์ •
if "ai_model" not in st.session_state:
st.session_state["ai_model"] = "claude-3-7-sonnet-20250219"
# ์„ธ์…˜ ์ƒํƒœ ์ดˆ๊ธฐํ™”
if "messages" not in st.session_state:
st.session_state.messages = []
# ์ž๋™ ์ €์žฅ ๊ธฐ๋Šฅ
if "auto_save" not in st.session_state:
st.session_state.auto_save = True
# ์ด๋ฏธ์ง€ ์ƒ์„ฑ ํ† ๊ธ€
if "generate_image" not in st.session_state:
st.session_state.generate_image = False
# ์›น ๊ฒ€์ƒ‰ ํ† ๊ธ€
if "use_web_search" not in st.session_state:
st.session_state.use_web_search = False
# ์ด๋ฏธ์ง€ API ์ƒํƒœ
if "image_api_status" not in st.session_state:
st.session_state.image_api_status = test_image_api_connection()
# ๋Œ€ํ™” ๊ธฐ๋ก ๊ด€๋ฆฌ (์‚ฌ์ด๋“œ๋ฐ”)
st.sidebar.title("๋Œ€ํ™” ๊ธฐ๋ก ๊ด€๋ฆฌ")
# ์ž๋™ ์ €์žฅ ํ† ๊ธ€
st.session_state.auto_save = st.sidebar.toggle("์ž๋™ ์ €์žฅ", value=st.session_state.auto_save)
# ์ด๋ฏธ์ง€ ์ƒ์„ฑ ํ† ๊ธ€
st.session_state.generate_image = st.sidebar.toggle("๋ธ”๋กœ๊ทธ ๊ธ€ ์ž‘์„ฑ ํ›„ ์ด๋ฏธ์ง€ ์ž๋™ ์ƒ์„ฑ", value=st.session_state.generate_image)
# ์›น ๊ฒ€์ƒ‰ ํ† ๊ธ€
st.session_state.use_web_search = st.sidebar.toggle("์ฃผ์ œ ์›น ๊ฒ€์ƒ‰ ๋ฐ ๋ถ„์„", value=st.session_state.use_web_search)
# ์ด๋ฏธ์ง€ API ์ƒํƒœ ํ‘œ์‹œ
st.sidebar.text(st.session_state.image_api_status)
# ์ด๋ฏธ์ง€ ์ƒ์„ฑ ์„ค์ • (ํ† ๊ธ€์ด ์ผœ์ ธ ์žˆ์„ ๋•Œ๋งŒ ํ‘œ์‹œ)
if st.session_state.generate_image:
st.sidebar.subheader("์ด๋ฏธ์ง€ ์ƒ์„ฑ ์„ค์ •")
width = st.sidebar.slider("๋„ˆ๋น„", 256, 1024, 768, 64)
height = st.sidebar.slider("๋†’์ด", 256, 1024, 768, 64)
guidance = st.sidebar.slider("๊ฐ€์ด๋˜์Šค ์Šค์ผ€์ผ", 1.0, 20.0, 3.5, 0.1)
inference_steps = st.sidebar.slider("์ธํผ๋Ÿฐ์Šค ์Šคํ…", 1, 50, 30, 1)
seed = st.sidebar.number_input("์‹œ๋“œ", value=3, min_value=0, step=1)
else:
# ๊ธฐ๋ณธ๊ฐ’ ์„ค์ •
width, height, guidance, inference_steps, seed = 768, 768, 3.5, 30, 3
# ๋ธ”๋กœ๊ทธ ๋‚ด์šฉ ๋‹ค์šด๋กœ๋“œ ์„น์…˜
st.sidebar.title("๋ธ”๋กœ๊ทธ ๋‹ค์šด๋กœ๋“œ")
# ์ตœ์‹  ๋ธ”๋กœ๊ทธ ๋‚ด์šฉ ๊ฐ€์ ธ์˜ค๊ธฐ
latest_blog = None
latest_blog_title = "๋ธ”๋กœ๊ทธ ๊ธ€"
if len(st.session_state.messages) > 0:
# ๊ฐ€์žฅ ์ตœ๊ทผ assistant ๋ฉ”์‹œ์ง€ ์ฐพ๊ธฐ
for msg in reversed(st.session_state.messages):
if msg["role"] == "assistant" and msg["content"].strip():
latest_blog = msg["content"]
# ํƒ€์ดํ‹€ ์ถ”์ถœ ์‹œ๋„ (์ฒซ ๋ฒˆ์งธ ์ œ๋ชฉ ํƒœ๊ทธ ์‚ฌ์šฉ)
title_match = re.search(r'# (.*?)(\n|$)', latest_blog)
if title_match:
latest_blog_title = title_match.group(1).strip()
# ์‚ฌ์šฉ์ž ์ž…๋ ฅ์„ ํƒ€์ดํ‹€๋กœ ์‚ฌ์šฉ
elif len(st.session_state.messages) >= 2:
for i in range(len(st.session_state.messages)-1, -1, -1):
if st.session_state.messages[i]["role"] == "user":
latest_blog_title = st.session_state.messages[i]["content"][:30].strip()
if len(st.session_state.messages[i]["content"]) > 30:
latest_blog_title += "..."
break
break
# ๋‹ค์šด๋กœ๋“œ ๋ฒ„ํŠผ ๊ทธ๋ฃน
if latest_blog:
st.sidebar.subheader("์ตœ๊ทผ ๋ธ”๋กœ๊ทธ ๋‹ค์šด๋กœ๋“œ")
col1, col2 = st.sidebar.columns(2)
# ๋งˆํฌ๋‹ค์šด์œผ๋กœ ๋‹ค์šด๋กœ๋“œ
with col1:
st.download_button(
label="๋งˆํฌ๋‹ค์šด",
data=latest_blog,
file_name=f"{latest_blog_title}.md",
mime="text/markdown"
)
# HTML๋กœ ๋‹ค์šด๋กœ๋“œ
with col2:
html_content = convert_md_to_html(latest_blog, latest_blog_title)
st.download_button(
label="HTML",
data=html_content,
file_name=f"{latest_blog_title}.html",
mime="text/html"
)
# ๋Œ€ํ™” ๊ธฐ๋ก ๋ถˆ๋Ÿฌ์˜ค๊ธฐ
uploaded_file = st.sidebar.file_uploader("๋Œ€ํ™” ๊ธฐ๋ก ๋ถˆ๋Ÿฌ์˜ค๊ธฐ", type=['json'])
if uploaded_file is not None:
try:
content = uploaded_file.getvalue().decode()
if content.strip():
st.session_state.messages = json.loads(content)
st.sidebar.success("๋Œ€ํ™” ๊ธฐ๋ก์„ ์„ฑ๊ณต์ ์œผ๋กœ ๋ถˆ๋Ÿฌ์™”์Šต๋‹ˆ๋‹ค!")
else:
st.sidebar.warning("์—…๋กœ๋“œ๋œ ํŒŒ์ผ์ด ๋น„์–ด ์žˆ์Šต๋‹ˆ๋‹ค.")
except json.JSONDecodeError:
st.sidebar.error("์˜ฌ๋ฐ”๋ฅธ JSON ํ˜•์‹์˜ ํŒŒ์ผ์ด ์•„๋‹™๋‹ˆ๋‹ค.")
except Exception as e:
st.sidebar.error(f"ํŒŒ์ผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}")
# ๋Œ€ํ™” ๊ธฐ๋ก ์ดˆ๊ธฐํ™” ๋ฒ„ํŠผ
if st.sidebar.button("๋Œ€ํ™” ๊ธฐ๋ก ์ดˆ๊ธฐํ™”"):
st.session_state.messages = []
st.sidebar.success("๋Œ€ํ™” ๊ธฐ๋ก์ด ์ดˆ๊ธฐํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
# ๋ฉ”์‹œ์ง€ ํ‘œ์‹œ
for message in st.session_state.messages:
with st.chat_message(message["role"]):
st.markdown(message["content"])
# ์ด๋ฏธ์ง€๊ฐ€ ์žˆ๋Š” ๊ฒฝ์šฐ ํ‘œ์‹œ
if "image" in message:
st.image(message["image"], caption=message.get("image_caption", "์ƒ์„ฑ๋œ ์ด๋ฏธ์ง€"))
# ์‚ฌ์šฉ์ž ์ž…๋ ฅ
if prompt := st.chat_input("๋ฌด์—‡์„ ๋„์™€๋“œ๋ฆด๊นŒ์š”?"):
st.session_state.messages.append({"role": "user", "content": prompt})
with st.chat_message("user"):
st.markdown(prompt)
# AI ์‘๋‹ต ์ƒ์„ฑ
with st.chat_message("assistant"):
message_placeholder = st.empty()
full_response = ""
# ์›น ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰ (์›น ๊ฒ€์ƒ‰ ์˜ต์…˜์ด ์ผœ์ ธ ์žˆ์„ ๊ฒฝ์šฐ)
system_prompt = get_system_prompt()
if st.session_state.use_web_search:
with st.spinner("์›น์—์„œ ๊ด€๋ จ ์ •๋ณด๋ฅผ ๊ฒ€์ƒ‰ ์ค‘..."):
try:
search_query = extract_keywords(prompt, top_k=5)
st.info(f"๊ฒ€์ƒ‰์–ด: {search_query}")
# ๋‘ ๊ฐ€์ง€ ๋ฐฉ๋ฒ• ๋ชจ๋‘ ์‹œ๋„ (SerpHouse API์™€ ์ง์ ‘ ๊ฒ€์ƒ‰)
search_results = do_web_search(search_query)
if "๊ฐ€์ƒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ" in search_results:
st.warning("์‹ค์ œ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๋ฅผ ๊ฐ€์ ธ์˜ฌ ์ˆ˜ ์—†์–ด ๊ธฐ์กด ์ง€์‹์„ ํ™œ์šฉํ•ฉ๋‹ˆ๋‹ค.")
else:
st.success(f"๊ฒ€์ƒ‰ ์™„๋ฃŒ: '{search_query}'์— ๋Œ€ํ•œ ์ •๋ณด๋ฅผ ์ˆ˜์ง‘ํ–ˆ์Šต๋‹ˆ๋‹ค.")
# ์‹œ์Šคํ…œ ํ”„๋กฌํ”„ํŠธ์— ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ถ”๊ฐ€
system_prompt += f"\n\n๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ:\n{search_results}\n"
except Exception as e:
st.error(f"์›น ๊ฒ€์ƒ‰ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}")
logging.error(f"์›น ๊ฒ€์ƒ‰ ์˜ค๋ฅ˜: {str(e)}")
system_prompt += "\n\n์›น ๊ฒ€์ƒ‰์ด ์‹คํŒจํ–ˆ์Šต๋‹ˆ๋‹ค. ๊ธฐ์กด ์ง€์‹์„ ๋ฐ”ํƒ•์œผ๋กœ ๋‹ต๋ณ€ํ•˜์„ธ์š”."
# API ํ˜ธ์ถœ
with client.messages.stream(
max_tokens=MAX_TOKENS,
system=system_prompt,
messages=[{"role": m["role"], "content": m["content"]} for m in st.session_state.messages],
model=st.session_state["ai_model"]
) as stream:
for text in stream.text_stream:
full_response += str(text) if text is not None else ""
message_placeholder.markdown(full_response + "โ–Œ")
message_placeholder.markdown(full_response)
# ์ด๋ฏธ์ง€ ์ƒ์„ฑ ์˜ต์…˜์ด ์ผœ์ ธ ์žˆ๋Š” ๊ฒฝ์šฐ
if st.session_state.generate_image:
with st.spinner("๋ธ”๋กœ๊ทธ์— ๋งž๋Š” ์ด๋ฏธ์ง€ ์ƒ์„ฑ ์ค‘..."):
# ์ด๋ฏธ์ง€ ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ
image_prompt = extract_image_prompt(full_response, prompt)
# ์ด๋ฏธ์ง€ ์ƒ์„ฑ
image, image_caption = generate_image(
image_prompt,
width=width,
height=height,
guidance=guidance,
inference_steps=inference_steps,
seed=seed
)
if image:
st.image(image, caption=image_caption)
# ์ด๋ฏธ์ง€ ์ •๋ณด๋ฅผ ์‘๋‹ต์— ํฌํ•จ
st.session_state.messages.append({
"role": "assistant",
"content": full_response,
"image": image,
"image_caption": image_caption
})
else:
st.error(f"์ด๋ฏธ์ง€ ์ƒ์„ฑ ์‹คํŒจ: {image_caption}")
st.session_state.messages.append({
"role": "assistant",
"content": full_response
})
else:
# ์ด๋ฏธ์ง€ ์ƒ์„ฑ ์—†์ด ์‘๋‹ต๋งŒ ์ €์žฅ
st.session_state.messages.append({
"role": "assistant",
"content": full_response
})
# ๋ธ”๋กœ๊ทธ ๋‹ค์šด๋กœ๋“œ ๋ฒ„ํŠผ ํ‘œ์‹œ (์‘๋‹ต ๋ฐ”๋กœ ์•„๋ž˜์—)
st.subheader("์ด ๋ธ”๋กœ๊ทธ ๋‹ค์šด๋กœ๋“œ:")
col1, col2 = st.columns(2)
with col1:
st.download_button(
label="๋งˆํฌ๋‹ค์šด์œผ๋กœ ์ €์žฅ",
data=full_response,
file_name=f"{prompt[:30]}.md",
mime="text/markdown"
)
with col2:
html_content = convert_md_to_html(full_response, prompt[:30])
st.download_button(
label="HTML๋กœ ์ €์žฅ",
data=html_content,
file_name=f"{prompt[:30]}.html",
mime="text/html"
)
# ์ž๋™ ์ €์žฅ ๊ธฐ๋Šฅ
if st.session_state.auto_save:
try:
# ์ด๋ฏธ์ง€ ์ •๋ณด๋Š” ์ €์žฅํ•˜์ง€ ์•Š์Œ (JSON์—๋Š” ๋ฐ”์ด๋„ˆ๋ฆฌ ๋ฐ์ดํ„ฐ๋ฅผ ์ง์ ‘ ์ €์žฅํ•  ์ˆ˜ ์—†์Œ)
save_messages = []
for msg in st.session_state.messages:
save_msg = {"role": msg["role"], "content": msg["content"]}
save_messages.append(save_msg)
# ํ˜„์žฌ ์‹œ๊ฐ„์„ ํฌํ•จํ•œ ํŒŒ์ผ๋ช… ์ƒ์„ฑ
current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
filename = f'chat_history_auto_save_{current_time}.json'
with open(filename, 'w', encoding='utf-8') as f:
json.dump(save_messages, f, ensure_ascii=False, indent=4)
except Exception as e:
st.sidebar.error(f"์ž๋™ ์ €์žฅ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}")
# ๋Œ€ํ™” ๊ธฐ๋ก ๋‹ค์šด๋กœ๋“œ
if st.sidebar.button("๋Œ€ํ™” ๊ธฐ๋ก ๋‹ค์šด๋กœ๋“œ"):
# ์ด๋ฏธ์ง€ ์ •๋ณด๋Š” ์ €์žฅํ•˜์ง€ ์•Š์Œ
save_messages = []
for msg in st.session_state.messages:
save_msg = {"role": msg["role"], "content": msg["content"]}
save_messages.append(save_msg)
json_history = json.dumps(save_messages, indent=4, ensure_ascii=False)
st.sidebar.download_button(
label="๋Œ€ํ™” ๊ธฐ๋ก ์ €์žฅํ•˜๊ธฐ",
data=json_history,
file_name="chat_history.json",
mime="application/json"
)
def main():
chatbot_interface()
if __name__ == "__main__":
# requirements.txt ํŒŒ์ผ ์ƒ์„ฑ
with open("requirements.txt", "w") as f:
f.write("streamlit>=1.31.0\n")
f.write("anthropic>=0.18.1\n")
f.write("gradio-client>=1.8.0\n")
f.write("requests>=2.32.3\n")
f.write("markdown>=3.5.1\n")
f.write("pillow>=10.1.0\n")
main()