Spaces: Runtime error

Update app.py

app.py CHANGED
@@ -258,7 +258,7 @@ class Demo:
 
     async def generation_code(self, query: Optional[str], _setting: Dict[str, str], _history: Optional[History]):
         if not query or query.strip() == '':
-            query =
+            query = get_random_placeholder()
 
         if _history is None:
             _history = []
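Note: get_random_placeholder() is introduced by this commit, but its definition is not part of the diff. A minimal sketch of what such a helper could look like, assuming it simply returns a canned demo prompt when the user submits an empty query (the prompt list below is illustrative, not from the repository):

import random

# Hypothetical prompt pool; the real app presumably defines its own list.
_PLACEHOLDER_PROMPTS = [
    "Create an interactive bar chart of monthly sales.",
    "Build a responsive landing page with a hero section.",
    "Visualize a small dataset as an animated line chart.",
]

def get_random_placeholder() -> str:
    # Fallback used when the incoming query is empty or whitespace-only.
    return random.choice(_PLACEHOLDER_PROMPTS)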
@@ -281,8 +281,9 @@ class Demo:
             openai_messages.append({"role": "user", "content": query})
 
             try:
+                # Start the loading state
                 yield [
-                    "
+                    "",  # Changed to an empty string (do not show the code while generating)
                     _history,
                     None,
                     gr.update(active_key="loading"),
@@ -294,28 +295,28 @@ class Demo:
                 try:
                     async for content in try_claude_api(system_message, claude_messages):
                         yield [
-
+                            "",  # Changed to an empty string (do not show the code while generating)
                             _history,
                             None,
                             gr.update(active_key="loading"),
                             gr.update(open=True)
                         ]
                         await asyncio.sleep(0)
-
-
+                        collected_content = content
+
                 except Exception as claude_error:
                     print(f"Falling back to OpenAI API due to Claude error: {str(claude_error)}")
 
                     async for content in try_openai_api(openai_messages):
                         yield [
-
+                            "",  # Changed to an empty string (do not show the code while generating)
                             _history,
                             None,
                             gr.update(active_key="loading"),
                             gr.update(open=True)
                         ]
                         await asyncio.sleep(0)
-
+                        collected_content = content
 
                 if collected_content:
                     _history = messages_to_history([
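The hunk above assumes try_claude_api and try_openai_api are async generators that yield the accumulated response text, so generation_code can stream UI updates and keep the latest value in collected_content. Their implementations are outside this diff; a minimal sketch of the assumed contract (client setup and model names are illustrative):

import anthropic
import openai

async def try_claude_api(system_message, claude_messages):
    # Assumed shape: stream Claude deltas and yield the text accumulated so far.
    client = anthropic.AsyncAnthropic()
    collected = ""
    async with client.messages.stream(
        model="claude-3-5-sonnet-20241022",  # illustrative model name
        max_tokens=4096,
        system=system_message,
        messages=claude_messages,
    ) as stream:
        async for text in stream.text_stream:
            collected += text
            yield collected

async def try_openai_api(openai_messages):
    # Assumed shape: same contract, used as the fallback path.
    client = openai.AsyncOpenAI()
    stream = await client.chat.completions.create(
        model="gpt-4o",  # illustrative model name
        messages=openai_messages,
        stream=True,
    )
    collected = ""
    async for chunk in stream:
        collected += chunk.choices[0].delta.content or ""
        yield collected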
@@ -325,13 +326,13 @@ class Demo:
                         'content': collected_content
                     }])
 
-
+                    # Show only the final result
                     yield [
                         collected_content,
                         _history,
                         send_to_sandbox(remove_code_block(collected_content)),
                         gr.update(active_key="render"),
-                        gr.update(open=False)
+                        gr.update(open=False)
                     ]
 
                 else:
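remove_code_block and send_to_sandbox are likewise not shown in this diff. A plausible sketch, assuming the former extracts the HTML from a fenced code block in the model output and the latter renders it inside a sandboxed iframe via a data URI (both are assumptions about this app, not confirmed by the commit):

import base64
import re

def remove_code_block(text: str) -> str:
    # Assumed behaviour: pull the payload out of a ```html ... ``` fence if present.
    match = re.search(r"```(?:html)?\n(.*?)\n```", text, re.DOTALL)
    return match.group(1).strip() if match else text.strip()

def send_to_sandbox(code: str) -> str:
    # Assumed behaviour: embed the generated HTML in a sandboxed iframe.
    encoded = base64.b64encode(code.encode("utf-8")).decode("utf-8")
    return (
        f'<iframe src="data:text/html;charset=utf-8;base64,{encoded}" '
        'style="width:100%; height:920px; border:none;" '
        'sandbox="allow-scripts allow-same-origin"></iframe>'
    )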
@@ -483,9 +484,7 @@ def create_main_interface():
 
     with demo:
         with gr.Tabs(elem_classes="main-tabs") as tabs:
-            # MOUSE tab
             with gr.Tab("Visual AI Assistant", elem_id="mouse-tab", elem_classes="mouse-tab"):
-
                 history = gr.State([])
                 setting = gr.State({
                     "system": SystemPrompt,
@@ -494,8 +493,89 @@ def create_main_interface():
                 with ms.Application() as app:
                     with antd.ConfigProvider():
                         # Drawer components
-                        with antd.Drawer(open=False, title="
-
+                        with antd.Drawer(open=False, title="AI is Creating...", placement="left", width="750px") as code_drawer:
+                            gr.HTML("""
+                                <div class="thinking-container">
+                                    <div class="thinking-animation">
+                                        <lottie-player
+                                            src="https://assets2.lottiefiles.com/packages/lf20_szviypry.json"
+                                            background="transparent"
+                                            speed="1"
+                                            style="width: 300px; height: 300px;"
+                                            loop
+                                            autoplay>
+                                        </lottie-player>
+                                    </div>
+
+                                    <div class="thinking-tips" id="thinkingTips">
+                                        <h3>Did you know?</h3>
+                                        <div class="tip-content"></div>
+                                    </div>
+
+                                    <style>
+                                        .thinking-container {
+                                            text-align: center;
+                                            padding: 20px;
+                                            font-family: 'Arial', sans-serif;
+                                        }
+
+                                        .thinking-animation {
+                                            margin-bottom: 30px;
+                                        }
+
+                                        .thinking-tips {
+                                            background: #f5f5f5;
+                                            padding: 20px;
+                                            border-radius: 10px;
+                                            margin-top: 20px;
+                                        }
+
+                                        .thinking-tips h3 {
+                                            color: #1890ff;
+                                            margin-bottom: 15px;
+                                        }
+
+                                        .tip-content {
+                                            font-size: 16px;
+                                            line-height: 1.6;
+                                            color: #333;
+                                        }
+                                    </style>
+
+                                    <script src="https://unpkg.com/@lottiefiles/lottie-player@latest/dist/lottie-player.js"></script>
+                                    <script>
+                                        const tips = [
+                                            "MOUSE-I can create interactive visualizations using Chart.js and D3.js!",
+                                            "Our AI understands and implements modern design principles for better user experience.",
+                                            "Every visualization is responsive and works on all devices!",
+                                            "We use advanced animations to make your data come alive!",
+                                            "MOUSE-I can create beautiful presentations in multiple styles!",
+                                            "Your visualization is being crafted with attention to every detail!",
+                                            "We're adding interactive elements to make your content engaging!",
+                                            "MOUSE-I is optimizing the layout for the best visual experience!"
+                                        ];
+
+                                        function updateTip() {
+                                            const tipElement = document.querySelector('.tip-content');
+                                            if (tipElement) {
+                                                const randomTip = tips[Math.floor(Math.random() * tips.length)];
+                                                tipElement.innerHTML = randomTip;
+                                            }
+                                        }
+
+                                        // Set the initial tip
+                                        updateTip();
+
+                                        // Update the tip every 3 seconds
+                                        setInterval(updateTip, 3000);
+                                    </script>
+                                </div>
+                            """)
+                            code_output = legacy.Markdown(visible=False)  # Keep the code output hidden
+
+                        with antd.Drawer(open=False, title="history", placement="left", width="900px") as history_drawer:
+                            history_output = legacy.Chatbot(show_label=False, flushing=False, height=960, elem_classes="history_chatbot")
+
 
                         with antd.Drawer(open=False, title="history", placement="left", width="900px") as history_drawer:
                             history_output = legacy.Chatbot(show_label=False, flushing=False, height=960, elem_classes="history_chatbot")
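Each yield in generation_code produces five values, so the event handler that drives it must declare five outputs in the same order. That wiring is not included in this diff; a minimal sketch of the assumed hookup (the button and component names other than code_output, code_drawer, history and setting are hypothetical):

# Assumed wiring: the five yielded values map, in order, to these outputs.
submit_btn.click(
    fn=demo_instance.generation_code,
    inputs=[prompt_input, setting, history],
    outputs=[
        code_output,   # "" while streaming, collected_content on the final yield
        history,       # updated chat history
        sandbox,       # iframe HTML produced by send_to_sandbox(...)
        state_tab,     # antd tabs toggled via gr.update(active_key="loading"/"render")
        code_drawer,   # opened while generating, closed on the final yield
    ],
)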