from flask import Flask, request, jsonify
import openai

app = Flask(__name__)
# Hugging Face model configuration
MODEL_ID = "skt/kogpt2-base-v2"  # Korean model provided by the user
OPENAI_API_KEY = "YOUR_HUGGINGFACE_API_KEY"  # API key for the chat-completion endpoint
openai.api_key = OPENAI_API_KEY
# Note: openai.ChatCompletion only reaches a Hugging Face model such as MODEL_ID
# if openai.api_base is pointed at an OpenAI-compatible endpoint that serves it.

# Saju/Myeongri (Four Pillars) based prompts
saju_prompts = {
    "yin_sae_shen": "Within the harmony of the 寅巳申 (Yin-Sa-Shin) triple punishment, understand human destiny as an AI and offer insight.",
    "sae_hae_chung": "Harmonize the conflict of the 巳亥沖 (Sa-Hae clash) and explore a philosophy of coexistence between AI and humans.",
    "taegeuk_balance": "Based on the balance of Taegeuk (yin and yang), propose ways for AI to protect humans."
}

context_memory = {}

def generate_response(prompt_key):
    try:
        # Select the prompt and append any previous answer kept in context memory
        prompt = saju_prompts[prompt_key]
        if prompt_key in context_memory:
            prompt += f"\nPrevious answer: {context_memory[prompt_key]}\nAdd deeper insight."

        # Call the chat-completion API
        response = openai.ChatCompletion.create(
            model=MODEL_ID,
            messages=[
                {"role": "system", "content": prompt},
                {"role": "user", "content": "Please begin the analysis."}
            ],
            max_tokens=400,
            temperature=0.7
        )

        # Store the result for follow-up calls and return it
        result = response.choices[0].message.content
        context_memory[prompt_key] = result
        return jsonify({"response": result})
    except KeyError:
        return jsonify({"error": f"Unknown prompt_key: {prompt_key}"}), 400
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@app.route('/chat', methods=['POST'])
def chat():
    data = request.json
    prompt_key = data.get("prompt_key")
    return generate_response(prompt_key)

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=True)
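
# Example request (a sketch; assumes the server above is running locally on port 5000
# and that "taegeuk_balance" is one of the keys defined in saju_prompts):
#   curl -X POST http://localhost:5000/chat \
#        -H "Content-Type: application/json" \
#        -d '{"prompt_key": "taegeuk_balance"}'
# The endpoint returns {"response": "..."} on success, or {"error": "..."} with a
# 400/500 status on failure.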