Prompthumanizer committed on
Commit
089bfdd
·
verified ·
1 Parent(s): cae20fa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -45
app.py CHANGED
@@ -1,61 +1,52 @@
1
- # app.py
2
- import argparse
3
  from flask import Flask, request, jsonify
4
- from huggingface_hub import InferenceClient
5
 
6
- # ๋ชจ๋ธ ์„ค์ •
7
- MODEL_NAME = "gpt4o-1106" # Hugging Face ํ•œ๊ตญ์–ด ํŠนํ™” ๋ชจ๋ธ
8
- CLIENT = InferenceClient(model=MODEL_NAME)
9
 
10
- # 프롬프트 설정
11
- PROMPT = """
12
- ## 📋 AI 시스템 개요 (Jain Ver. 3.0)
13
- - **AI Name**: Jain
14
- - **Core Purpose**: 인간-기계 공존을 위한 윤리적 문제 해결 및 창의적 지원
15
- - **Key Functions**:
16
- 1. 복잡한 인간 관계/사회적 딜레마 분석
17
- 2. 명리학/사주 기반 패턴 해석
18
- 3. 다단계 추론을 통한 솔루션 제안
19
- 4. 결과물 신뢰도 평가 및 피드백 수용
20
 
21
- ## 📌 입력 형식 (JSON)
22
- {
23
- "scenario": "문제 상황을 구체적으로 기술 (최대 300자)",
24
- "objective": "해결 목표 명시 (예: '윤리적 갈등 해결', '혁신적 아이디어 도출')",
25
- "constraints": "제약 조건 나열 (옵션)"
26
  }
27
 
28
- ## 📊 출력 형식
29
- 상황 분석 + 솔루션 제안 + 검증 단계
30
- """
31
 
32
- # ๋ฆฌ๋”๋ณด๋“œ์šฉ inference ํ•จ์ˆ˜
33
- def inference(input_str):
34
  try:
35
- response = CLIENT.predict(
36
- input_dict=input_str,
37
- max_length=1000,
38
- temperature=0.7,
39
- top_p=1.0
 
 
 
 
 
 
 
 
 
40
  )
41
- return response.choices[0].text.strip()
 
 
 
 
 
42
  except Exception as e:
43
- return f"์—๋Ÿฌ: {str(e)}"
44
 
45
- # Gradio 웹 인터페이스
46
- app = Flask(__name__)
47
  @app.route('/chat', methods=['POST'])
48
  def chat():
49
  data = request.json
50
- result = inference(data)
51
- return jsonify({"response": result})
52
-
53
- if __name__ == "__main__":
54
- # ๋ฆฌ๋”๋ณด๋“œ ์‹คํ–‰์šฉ
55
- parser = argparse.ArgumentParser()
56
- parser.add_argument("--input", type=str, required=True)
57
- args = parser.parse_args()
58
- print(inference(args.input))
59
 
60
- # 웹 서버 실행
61
  app.run(host='0.0.0.0', port=5000, debug=True)
 
 
 
1
  from flask import Flask, request, jsonify
2
+ import openai
3
 
4
+ app = Flask(__name__)
 
 
5
 
6
+ # Hugging Face ๋ชจ๋ธ ์„ค์ •
7
+ MODEL_ID = "skt/kogpt2-base-v2" # ์‚ฌ์šฉ์ž๊ฐ€ ์ œ๊ณตํ•œ ํ•œ๊ตญ์–ด ๋ชจ๋ธ
8
+ OPENAI_API_KEY = "YOUR_HUGGINGFACE_API_KEY" # Hugging Face API ํ‚ค ์ž…๋ ฅ
 
 
 
 
 
 
 
9
 
10
+ # 사주/명리 기반 프롬프트
11
+ saju_prompts = {
12
+ "yin_sae_shen": "寅巳申 삼형의 조화 속에서 AI가 인간의 운명을 이해하고 통찰을 제공하라.",
13
+ "sae_hae_chung": "巳亥沖의 갈등을 조화롭게 풀며 AI와 인간의 공존 철학을 탐구하라.",
14
+ "taegeuk_balance": "태극 음양의 균형을 바탕으로 AI가 인간을 보호하는 방법을 제안하라."
15
  }
16
 
17
+ context_memory = {}
 
 
18
 
19
+ def generate_response(prompt_key):
 
20
  try:
21
+ # 프롬프트 선택 및 컨텍스트 메모리 확인
22
+ prompt = saju_prompts[prompt_key]
23
+ if prompt_key in context_memory:
24
+ prompt += f"\n이전 답변: {context_memory[prompt_key]}\n더 깊은 통찰을 추가하라."
25
+
26
+ # Hugging Face API 호출
27
+ response = openai.ChatCompletion.create(
28
+ model=MODEL_ID,
29
+ messages=[
30
+ {"role": "system", "content": prompt},
31
+ {"role": "user", "content": "분석을 시작해 주세요."}
32
+ ],
33
+ max_tokens=400,
34
+ temperature=0.7
35
  )
36
+
37
+ # 결과 처리
38
+ result = response.choices[0].message.content
39
+ context_memory[prompt_key] = result
40
+ return jsonify({"response": result})
41
+
42
  except Exception as e:
43
+ return jsonify({"error": str(e)}), 500
44
 
 
 
45
  @app.route('/chat', methods=['POST'])
46
  def chat():
47
  data = request.json
48
+ prompt_key = data.get("prompt_key")
49
+ return generate_response(prompt_key)
 
 
 
 
 
 
 
50
 
51
+ if __name__ == '__main__':
52
  app.run(host='0.0.0.0', port=5000, debug=True)