from flask import Flask, request, jsonify
from huggingface_hub import InferenceClient

app = Flask(__name__)

# Hugging Face model configuration
MODEL_ID = "skt/kogpt2-base-v2"  # Korean model supplied by the user
HF_API_TOKEN = "YOUR_HUGGINGFACE_API_KEY"  # enter your Hugging Face API token here

# Client for the Hugging Face Inference API
client = InferenceClient(model=MODEL_ID, token=HF_API_TOKEN)
# Saju / Myeongri (Four Pillars) based prompts
saju_prompts = {
    "yin_sae_shen": "Within the harmony of the 寅巳申 (Yin-Sa-Shin) triple punishment, let the AI understand human destiny and offer insight.",
    "sae_hae_chung": "Resolve the conflict of the 巳亥沖 (Sa-Hae clash) harmoniously and explore a philosophy of coexistence between AI and humans.",
    "taegeuk_balance": "Grounded in the balance of Taegeuk yin and yang, propose how the AI can protect humans."
}
# Per-prompt memory of the previous answer, used to deepen follow-up responses
context_memory = {}
def generate_response(prompt_key):
    try:
        # Select the prompt and fold in any previous answer from the context memory
        prompt = saju_prompts[prompt_key]
        if prompt_key in context_memory:
            prompt += f"\nPrevious answer: {context_memory[prompt_key]}\nAdd deeper insight."
        # Call the model through the Hugging Face Inference API
        full_prompt = f"{prompt}\nPlease begin the analysis."
        result = client.text_generation(
            full_prompt,
            max_new_tokens=400,
            temperature=0.7,
        )
        # Remember the result so the next call for this key can build on it
        context_memory[prompt_key] = result
        return jsonify({"response": result})
    except Exception as e:
        return jsonify({"error": str(e)}), 500
@app.route('/chat', methods=['POST'])
def chat():
    data = request.get_json(silent=True) or {}
    prompt_key = data.get("prompt_key")
    if prompt_key not in saju_prompts:
        return jsonify({"error": "unknown prompt_key"}), 400
    return generate_response(prompt_key)
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=True)
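
# Usage sketch, assuming the server is running locally on port 5000 and a valid
# Hugging Face API token has been set above (the prompt_key value is just one of
# the keys defined in saju_prompts):
#
#   import requests
#   r = requests.post(
#       "http://localhost:5000/chat",
#       json={"prompt_key": "taegeuk_balance"},
#   )
#   print(r.json())  # {"response": "..."} on success, {"error": "..."} otherwise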