File size: 1,453 Bytes
98651d2
 
 
894e5af
98651d2
 
 
 
 
ef5f149
98651d2
894e5af
98651d2
 
 
 
 
ef5f149
 
98651d2
ef5f149
98651d2
ef5f149
 
98651d2
ef5f149
98651d2
ef5f149
 
 
 
 
 
 
 
 
 
 
 
 
 
 
894e5af
98651d2
ef5f149
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
from flask import Flask, request, jsonify
from duckai import DuckAI
from flask_cors import CORS

# Flask application with a permissive CORS policy so browser clients on
# any origin can call every endpoint.
# NOTE(review): browsers reject `Access-Control-Allow-Origin: *` when
# credentials are enabled — confirm whether supports_credentials is needed.
app = Flask(__name__)

_cors_options = {
    "origins": "*",
    "allow_headers": "*",
    "methods": ["GET", "POST", "PUT", "DELETE", "OPTIONS"],
    "supports_credentials": True,
}
CORS(app, resources={r"/*": _cors_options})

@app.route("/chat/", methods=["GET"])
def chat():
    """Handle ``GET /chat/?query=...`` by relaying the query to DuckAI.

    Tries gpt-4o-mini first and falls back to claude-3-haiku.  Returns
    ``{"results": ...}`` on the first success, a 400 when ``query`` is
    missing, and a 500 carrying both per-model errors when every
    attempt fails.
    """
    query = request.args.get("query")
    if not query:
        return jsonify({"error": "Query parameter is required"}), 400

    duck = DuckAI()  # single client shared by both attempts
    failures = []
    attempts = (("Primary model", "gpt-4o-mini"),
                ("Fallback model", "claude-3-haiku"))
    for label, model in attempts:
        try:
            return jsonify({"results": duck.chat(query, model=model)})
        except Exception as exc:  # boundary handler: log, then try next model
            suffix = "also failed" if failures else "failed"
            print(f"{label} ({model}) {suffix}: {exc}")
            failures.append(str(exc))

    return jsonify({
        "error": "Both models failed",
        "primary_error": failures[0],
        "fallback_error": failures[1],
    }), 500

def chat_with_model(query: str, model: str):
    """Run *query* against a single DuckAI *model*.

    Returns a JSON response ``{"results": ...}`` on success, or an
    ``({"error": ...}, 500)`` pair when the chat call raises.
    """
    try:
        answer = DuckAI().chat(query, model=model)
        return jsonify({"results": answer})
    except Exception as exc:
        return jsonify({"error": str(exc)}), 500

if __name__ == "__main__":
    # Development entry point: Flask's built-in server on localhost:3000.
    # debug=True enables the reloader and interactive debugger — not for
    # production use.
    app.run(host="localhost", port=3000, debug=True)