kevalfst committed
Commit 4def57d · verified · 1 Parent(s): bad0bdc

Create app.py

Files changed (1)
  1. app.py +35 -0
app.py ADDED
@@ -0,0 +1,35 @@
+ import json
+ from flask import Flask, request, jsonify
+ from transformers import pipeline, AutoModelForSeq2SeqLM, AutoTokenizer
+
+ app = Flask(__name__)
+
+ # Load a lightweight instruction-tuned model for structured output
+ model_name = "google/flan-t5-small"  # You can replace this with another small instruct model
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
+ generator = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
+
+ @app.route('/generate-json', methods=['POST'])
+ def generate_json():
+     data = request.get_json(silent=True) or {}
+     prompt = data.get("prompt")
+
+     if not prompt:
+         return jsonify({"error": "Prompt is required"}), 400
+
+     # Wrap the prompt in an explicit instruction for the model
+     instruction = f"Generate a JSON object from the following description:\n{prompt}"
+
+     response = generator(instruction, max_length=256, do_sample=False)
+     generated_text = response[0]["generated_text"]
+
+     try:
+         # Parse the generated text as JSON; json.loads avoids the security risk of eval
+         generated_json = json.loads(generated_text)
+         return jsonify({"json": generated_json})
+     except json.JSONDecodeError as e:
+         return jsonify({"raw_output": generated_text, "error": str(e)}), 200
+
+ if __name__ == '__main__':
+     app.run(debug=True)
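
For a quick local check of the new endpoint, a minimal client sketch is shown below. It assumes the app is running locally on Flask's default port 5000 and that the requests package is installed; the prompt text is only an illustration, not part of the commit.

# Minimal client sketch for the /generate-json endpoint (assumes the Flask
# app above is running on the default port 5000 and `requests` is installed).
import requests

resp = requests.post(
    "http://127.0.0.1:5000/generate-json",
    json={"prompt": "A user named Alice, age 30, with email alice@example.com"},
)
print(resp.status_code)
# Response is either {"json": {...}} on a successful parse
# or {"raw_output": ..., "error": ...} when the model output is not valid JSON.
print(resp.json())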