# NOTE(review): Hugging Face Spaces file-viewer chrome (commit hashes and the
# line-number gutter) was captured with this file; removed so the module parses.
import gradio as gr
from transformers import pipeline
import threading
chat_model = None
loading_done = False
status_text = "β³ λͺ¨λΈ λ‘λ© μ€μ
λλ€..."
# λ°±κ·ΈλΌμ΄λμμ λͺ¨λΈ λ‘λ©
def load_model_bg():
global chat_model, loading_done, status_text
chat_model = pipeline("text-generation", model="beomi/KoAlpaca-Polyglot-5.8B")
loading_done = True
status_text = "β
λͺ¨λΈ λ‘λ© μλ£! λ¬Έμ₯μ μ
λ ₯ν΄λ³΄μΈμ."
# λλ΅ ν¨μ
def chat_with_model(message, history):
global chat_model, loading_done
if not loading_done:
return history + [[message, "β οΈ λͺ¨λΈμ΄ μμ§ λ‘λ© μ€μ
λλ€. μ μλ§ κΈ°λ€λ € μ£ΌμΈμ."]]
prompt = f"""
### Instruction: λ€μ λ¬Έμ₯μ λΆμν΄μ 무λ‘νκ±°λ 곡격μ μΈ ννμ΄ μλμ§ νλ¨νκ³ , μλ€λ©΄ λ μμ μκ² κ³ μ³μ€.
### Input:
{message}
### Response:
"""
response = chat_model(prompt, max_new_tokens=200)[0]['generated_text']
response_only = response[len(prompt):].strip()
return history + [[message, response_only]]
# μν ν
μ€νΈ λ°ν ν¨μ (λ§€λ² μλ‘ μ½μ΄μ΄)
def get_status():
return status_text
# λ°±κ·ΈλΌμ΄λμμ λͺ¨λΈ λ‘λ© μμ
threading.Thread(target=load_model_bg).start()
# Gradio μ±
with gr.Blocks() as demo:
chatbot = gr.Chatbot()
msg = gr.Textbox(label="λ¬Έμ₯μ μ
λ ₯νμΈμ", placeholder="μ: λ μ λ§ μ κ·Έλ κ² λ§ν΄?")
status = gr.Markdown(get_status)
def respond_and_clear(user_input, history):
updated_history = chat_with_model(user_input, history)
return "", updated_history, get_status()
msg.submit(respond_and_clear, [msg, chatbot], [msg, chatbot, status])
demo.launch()