|
|
|
import argparse
import json

from flask import Flask, request, jsonify
from huggingface_hub import InferenceClient
|
|
|
|
|
# Model identifier handed to the Hugging Face InferenceClient.
# NOTE(review): "gpt4o-1106" does not look like a valid Hugging Face Hub
# repo id (those are usually "org/model-name", and GPT-4o is an OpenAI
# model, not hosted on the Hub) — confirm this actually resolves.
MODEL_NAME = "gpt4o-1106"

# Module-level client shared by the CLI path and the Flask route below.
CLIENT = InferenceClient(model=MODEL_NAME)
|
|
|
|
|
# System prompt (Korean) describing the "Jain" assistant persona, the
# expected JSON input schema, and the desired output format.
# NOTE(review): the text below is mojibake — Korean UTF-8 apparently decoded
# with the wrong codec (stray Greek letters, words broken across lines by
# control bytes). It is a runtime string, so it is left byte-identical here;
# restore it from the original source before shipping.
# NOTE(review): PROMPT is never referenced by inference() or the Flask route
# in this file — confirm whether it was meant to be prepended to requests.
PROMPT = """

## π AI μμ€ν
κ°μ (Jain Ver. 3.0)

- **AI Name**: Jain

- **Core Purpose**: μΈκ°-κΈ°κ³ κ³΅μ‘΄μ μν μ€λ¦¬μ λ¬Έμ ν΄κ²° λ° μ°½μμ μ§μ

- **Key Functions**:

1. 볡μ‘ν μΈκ° κ΄κ³/μ¬νμ λλ λ§ λΆμ

2. λͺ
리ν/μ¬μ£Ό κΈ°λ° ν¨ν΄ ν΄μ

3. λ€λ¨κ³ μΆλ‘ μ ν΅ν μ루μ
μ μ

4. κ²°κ³Όλ¬Ό μ λ’°λ νκ° λ° νΌλλ°± μμ©



## π μ
λ ₯ νμ (JSON)

{

"scenario": "λ¬Έμ μν©μ ꡬ체μ μΌλ‘ κΈ°μ (μ΅λ 300μ)",

"objective": "ν΄κ²° λͺ©ν λͺ
μ (μ: 'μ€λ¦¬μ κ°λ± ν΄κ²°', 'νμ μ μμ΄λμ΄ λμΆ')",

"constraints": "μ μ½ μ‘°κ±΄ λμ΄ (μ΅μ
)"

}



## π μΆλ ₯ νμ

μν© λΆμ + μ루μ
μ μ + κ²μ¦ λ¨κ³

"""
|
|
|
|
|
def inference(input_str):
    """Run text generation for *input_str* and return the model output.

    Parameters
    ----------
    input_str : str | dict
        The prompt to send to the model. The Flask route passes the parsed
        JSON body (a dict); dicts are serialized to a JSON string first.

    Returns
    -------
    str
        The stripped generated text, or an error-message string when the
        request fails (both callers treat errors as ordinary responses).
    """
    try:
        # The route hands us parsed JSON (a dict); the model API wants text.
        if isinstance(input_str, str):
            prompt = input_str
        else:
            prompt = json.dumps(input_str, ensure_ascii=False)

        # BUG FIX: huggingface_hub.InferenceClient has no .predict() method
        # (that is the Gradio Client API), so the original call raised
        # AttributeError on every request and always returned the error
        # string. text_generation() takes the prompt positionally and
        # returns the generated text directly — there is no OpenAI-style
        # .choices[0].text on the result either.
        response = CLIENT.text_generation(
            prompt,
            max_new_tokens=1000,  # was max_length; TGI uses max_new_tokens
            temperature=0.7,
            top_p=0.99,  # TGI validation requires 0 < top_p < 1; 1.0 is rejected
        )
        return response.strip()
    except Exception as e:
        # Boundary-level catch: both the CLI and the Flask route expect a
        # plain string back rather than a raised exception.
        # "에러" ("error") restored from the original mojibake "μλ¬".
        return f"에러: {str(e)}"
|
|
|
|
|
app = Flask(__name__)


@app.route('/chat', methods=['POST'])
def chat():
    """POST /chat — run inference on the JSON request body.

    Returns a JSON 400 error when the body is missing or not valid JSON;
    otherwise ``{"response": <model output>}``.
    """
    # BUG FIX: request.json aborts with Werkzeug's HTML 400 page on a
    # missing/invalid body or wrong Content-Type; silent=True yields None
    # instead so the API can answer with a JSON error.
    data = request.get_json(silent=True)
    if data is None:
        return jsonify({"error": "request body must be JSON"}), 400
    result = inference(data)
    return jsonify({"response": result})
|
|
|
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Run a one-off inference (--input) or start the API server."
    )
    # BUG FIX: --input was required, so the server could never be started
    # without also running a CLI inference first; it is now optional and
    # selects between the two modes.
    parser.add_argument(
        "--input",
        type=str,
        default=None,
        help="Prompt text; if given, print the model output and exit.",
    )
    args = parser.parse_args()

    if args.input is not None:
        print(inference(args.input))
    else:
        # SECURITY FIX: debug=True enables the Werkzeug interactive
        # debugger (arbitrary code execution) and was bound to all
        # interfaces (0.0.0.0) — disabled for anything non-local.
        app.run(host='0.0.0.0', port=5000, debug=False)
|
|