LLM_Ariphes / api.py
from flask import Flask, request, jsonify
from app import generate_chat_completion
import time

app = Flask(__name__)


@app.route('/v1/chat/completions', methods=['POST'])
def chat_completions():
    # Parse the JSON body; tolerate a missing or non-JSON body instead of raising.
    data = request.get_json(silent=True) or {}
    messages = data.get('messages')  # list of {"role": ..., "content": ...}
    if not messages or not isinstance(messages, list):
        return jsonify({"error": "A valid 'messages' list is required."}), 400

    try:
        start = time.time()
        # Pass the latest user message to the model, with all prior turns as history.
        new_history = generate_chat_completion(
            message=messages[-1]['content'],
            history=messages[:-1]  # feed all except the last user message
        )
        assistant_msg = new_history[-1]['content']
        elapsed = time.time() - start
        return jsonify({
            "model": "tiiuae/falcon-rw-1b",
            "choices": [{
                "message": {"role": "assistant", "content": assistant_msg}
            }],
            "usage": {"generation_time": round(elapsed, 2)},
            "history": new_history
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500


@app.route('/')
def health_check():
    # Simple liveness probe.
    return "LLM API is running", 200


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8081)
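A quick way to exercise the endpoint once the server is running: the sketch below posts an OpenAI-style 'messages' list to /v1/chat/completions and reads back the assistant reply and generation time. It assumes the API above is reachable at http://localhost:8081 and that the third-party requests package is installed; adjust host and port to your deployment.

# Minimal client sketch (assumption: server running locally on port 8081,
# `requests` installed).
import requests

resp = requests.post(
    "http://localhost:8081/v1/chat/completions",
    json={
        "messages": [
            {"role": "user", "content": "Hello! What can you do?"}
        ]
    },
    timeout=120,
)
resp.raise_for_status()
body = resp.json()

# The fields mirror what chat_completions() returns above.
print(body["choices"][0]["message"]["content"])
print("generation_time:", body["usage"]["generation_time"], "s")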