Akbartus committed
Commit 6647c1d · verified · 1 Parent(s): f6c922d

Update main.py

Files changed (1)
  1. main.py +27 -26
main.py CHANGED
@@ -1,27 +1,28 @@
- from model import model_pipeline
-
- from typing import Union
-
- from fastapi import FastAPI, UploadFile
- import io
- from PIL import Image
- import os
- os.environ['TRANSFORMERS_CACHE'] = '/blabla/cache/'
-
+ from fastapi import FastAPI
+ from transformers import pipeline
+
+ # Create a new FastAPI app instance
  app = FastAPI()
-
-
- @app.get("/")
- def read_root():
-     return {"Hello": "World"}
-
-
- @app.post("/ask")
- def ask(text: str, image: UploadFile):
-     content = image.file.read()
-
-     image = Image.open(io.BytesIO(content))
-     # image = Image.open(image.file)
-
-     result = model_pipeline(text, image)
-     return {"answer": result}
+
+ # Initialize the text generation pipeline
+ # This function will be able to generate text
+ # given an input.
+ pipe = pipeline("text2text-generation",
+                 model="google/flan-t5-small")
+
+ # Define a function to handle the GET request at `/generate`
+ # The generate() function is defined as a FastAPI route that takes a
+ # string parameter called text. The function generates text based on the input using the pipeline() object, and returns a JSON response
+ # containing the generated text under the key "output"
+ @app.get("/generate")
+ def generate(text: str):
+     """
+     Using the text2text-generation pipeline from `transformers`, generate text
+     from the given input text. The model used is `google/flan-t5-small`, which
+     can be found [here](<https://huggingface.co/google/flan-t5-small>).
+     """
+     # Use the pipeline to generate text from the given input text
+     output = pipe(text)
+
+     # Return the generated text in a JSON response
+     return {"output": output[0]["generated_text"]}