# NOTE(review): lines below were scraped from a Hugging Face Spaces page
# (the page reported status "Runtime error"); scrape artifacts removed.
# Stdlib first, then third-party, per convention.
import threading

import gradio as gr
from transformers import pipeline

# Shared state between the background loader thread and the UI callbacks.
# NOTE(review): the status strings appear to be mojibake of Korean text
# (UTF-8 misdecoded); preserved byte-for-byte to avoid changing runtime output.
chat_model = None      # set by load_model_bg() once the pipeline is ready
loading_done = False   # flipped to True by load_model_bg() on success
status_text = "β³ λͺ¨λΈ λ‘λ© μ€μ λλ€..."
def load_model_bg():
    """Load the text-generation pipeline in a background thread.

    Mutates module globals: sets ``chat_model`` to the loaded pipeline,
    flips ``loading_done`` and updates ``status_text`` so the UI banner
    reflects progress. On failure the error is surfaced in the banner
    instead of leaving the app stuck on "loading" forever.
    """
    global chat_model, loading_done, status_text
    try:
        chat_model = pipeline(
            "text-generation", model="beomi/KoAlpaca-Polyglot-5.8B"
        )
    except Exception as exc:
        # Without this, a load failure (OOM, download error) left the UI
        # permanently showing the loading message with no diagnostics.
        status_text = f"Model load failed: {exc}"
        return
    loading_done = True
    # NOTE(review): mojibake of a Korean "loading complete" message; kept verbatim.
    status_text = "β λͺ¨λΈ λ‘λ© μλ£! λ¬Έμ₯μ μ λ ₯ν΄λ³΄μΈμ."
# λλ΅ ν¨μ | |
def chat_with_model(message, history): | |
global chat_model, loading_done | |
if not loading_done: | |
return history + [[message, "β οΈ λͺ¨λΈμ΄ μμ§ λ‘λ© μ€μ λλ€. μ μλ§ κΈ°λ€λ € μ£ΌμΈμ."]] | |
prompt = f""" | |
### Instruction: λ€μ λ¬Έμ₯μ λΆμν΄μ 무λ‘νκ±°λ 곡격μ μΈ ννμ΄ μλμ§ νλ¨νκ³ , μλ€λ©΄ λ μμ μκ² κ³ μ³μ€. | |
### Input: | |
{message} | |
### Response: | |
""" | |
response = chat_model(prompt, max_new_tokens=200)[0]['generated_text'] | |
response_only = response[len(prompt):].strip() | |
return history + [[message, response_only]] | |
def get_status():
    """Return the current status banner text (re-read from the global each call)."""
    return status_text
# Kick off the (slow) model load in the background so the UI comes up at once.
# daemon=True so a stuck download cannot keep the interpreter alive after the
# Gradio server exits.
threading.Thread(target=load_model_bg, daemon=True).start()

# Gradio app
# NOTE(review): widget labels/placeholders are mojibake of Korean text;
# preserved verbatim as they are user-facing runtime strings.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="λ¬Έμ₯μ μ λ ₯νμΈμ", placeholder="μ: λ μ λ§ μ κ·Έλ κ² λ§ν΄?")
    # Passing a callable lets Gradio evaluate the status fresh on page load.
    status = gr.Markdown(get_status)

    def respond_and_clear(user_input, history):
        """Submit handler: answer, clear the textbox, refresh the status banner."""
        updated_history = chat_with_model(user_input, history)
        return "", updated_history, get_status()

    msg.submit(respond_and_clear, [msg, chatbot], [msg, chatbot, status])

demo.launch()