import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# SKT Korean-language GPT model
model_id = "skt/ko-gpt-trinity-1.2B-v0.5"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
# Text-generation pipeline with sampling enabled
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=100,
    do_sample=True,
    temperature=0.7,
)
def chat(prompt):
    # Generate a completion; "generated_text" includes the prompt plus the model's continuation
    response = pipe(prompt)[0]["generated_text"]
    return response
# Simple Gradio text-in / text-out interface
gr.Interface(
    fn=chat,
    inputs="text",
    outputs="text",
    title="Korean GPT Chatbot (SKT Trinity 1.2B)"
).launch()