import calendar

import gradio as gr
import requests
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM

# Load the KoAlpaca model and tokenizer once at startup for Korean text generation.
model_id = "beomi/KoAlpaca-Polyglot-5.8B"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

# NEIS Open API key and endpoints (the key should ideally come from an
# environment variable rather than being hardcoded in source).
NEIS_KEY = "a69e08342c8947b4a52cd72789a5ecaf"
SCHOOL_INFO_URL = "https://open.neis.go.kr/hub/schoolInfo"
SCHEDULE_URL = "https://open.neis.go.kr/hub/SchoolSchedule"

# Office-of-education names (UI dropdown labels) mapped to NEIS region codes.
REGIONS = {
    "서울특별시교육청": "B10",  # Seoul Metropolitan Office of Education
    "경상북도교육청": "R10",    # Gyeongsangbuk-do Office of Education
}

MONTH_NAMES = ["01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12"]
|
|
def get_school_code(region_code, school_name):
    """Look up a school's NEIS code (and its confirmed region code) by school name."""
    params = {
        "KEY": NEIS_KEY,
        "Type": "json",
        "pIndex": 1,
        "pSize": 1,
        "SCHUL_NM": school_name,
        "ATPT_OFCDC_SC_CODE": region_code,
    }
    res = requests.get(SCHOOL_INFO_URL, params=params)
    data = res.json()
    try:
        row = data["schoolInfo"][1]["row"][0]
        return row["SD_SCHUL_CODE"], row["ATPT_OFCDC_SC_CODE"]
    except (KeyError, IndexError):
        return None, None
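
# Usage sketch (the school name below is only a hypothetical example):
# school_code, region = get_school_code("B10", "서울고등학교")
# Returns (None, None) when the school cannot be found in the selected region.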
|
|
|
|
|
def get_schedule(region_code, school_code, year, month):
    """Fetch the school's event schedule rows for the given year and month from NEIS."""
    # NEIS expects YYYYMMDD bounds; use the real last day of the month instead of a fixed "31".
    last_day = calendar.monthrange(int(year), int(month))[1]
    from_ymd = f"{year}{month}01"
    to_ymd = f"{year}{month}{last_day}"
    params = {
        "KEY": NEIS_KEY,
        "Type": "json",
        "pIndex": 1,
        "pSize": 100,
        "ATPT_OFCDC_SC_CODE": region_code,
        "SD_SCHUL_CODE": school_code,
        "AA_FROM_YMD": from_ymd,
        "AA_TO_YMD": to_ymd,
    }
    res = requests.get(SCHEDULE_URL, params=params)
    data = res.json()
    try:
        return data["SchoolSchedule"][1]["row"]
    except (KeyError, IndexError):
        return []
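
# Usage sketch, reusing codes resolved by get_school_code (values are examples):
# rows = get_schedule("B10", school_code, "2025", "07")
# Each returned row carries at least AA_YMD (date) and EVENT_NM (event name).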
|
|
|
|
|
def generate_answer(region, school_name, year, month, question):
    """Build a prompt from the NEIS event schedule and answer the question with KoAlpaca."""
    region_code = REGIONS.get(region)
    if not region_code:
        return "잘못된 교육청입니다."  # "Invalid office of education."

    school_code, confirmed_region = get_school_code(region_code, school_name)
    if not school_code:
        return "학교 정보를 찾을 수 없습니다."  # "School information not found."

    schedule_rows = get_schedule(confirmed_region, school_code, year, month)
    if not schedule_rows:
        schedule_text = "행사 일정 정보는 없습니다."  # "No event schedule information."
    else:
        schedule_text = "\n".join(f"{row['AA_YMD']}: {row['EVENT_NM']}" for row in schedule_rows)

    # Korean prompt: "Schedule info: ... / User question: ... / Answer naturally."
    prompt = f"""일정 정보:
{schedule_text}

사용자 질문: {question}

자연스럽게 대답하세요."""

    result = generator(
        prompt,
        max_new_tokens=200,
        do_sample=True,          # sampling must be enabled for temperature to take effect
        temperature=0.7,
        return_full_text=False,  # return only the generated answer, not the echoed prompt
    )[0]["generated_text"]
    return result
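
# Quick console check without the Gradio UI (all inputs below are hypothetical):
# print(generate_answer("서울특별시교육청", "서울고등학교", "2025", "07", "7월 행사 알려줘"))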
|
|
|
|
|
def interface_fn(region, school_name, year, month, question):
    return generate_answer(region, school_name, year, month, question)


# Gradio UI: dropdowns/textboxes for region, school name, year, month, and the question.
app = gr.Interface(
    fn=interface_fn,
    inputs=[
        gr.Dropdown(choices=list(REGIONS.keys()), label="교육청 선택"),
        gr.Textbox(label="학교명 입력"),
        gr.Textbox(label="년도 입력", placeholder="예: 2025"),
        gr.Dropdown(choices=MONTH_NAMES, label="월 선택 (예: 07)"),
        gr.Textbox(label="GPT 질문 입력"),
    ],
    outputs=gr.Textbox(label="GPT의 응답"),
    title="행사일정 + GPT 챗봇 (KoAlpaca)",
)

if __name__ == "__main__":
    app.launch()
|
|