# app.py
import torch
import gradio as gr
from PIL import Image
from transformers import Blip2Processor, Blip2ForConditionalGeneration


class MiniGPT4Lite:
    """Lightweight image question-answering wrapper around BLIP-2 (Flan-T5-XL)."""

    def __init__(self):
        self.device = torch.device("cpu")  # Force CPU for the Hugging Face Space
        self.processor = Blip2Processor.from_pretrained("Salesforce/blip2-flan-t5-xl")
        self.model = Blip2ForConditionalGeneration.from_pretrained(
            "Salesforce/blip2-flan-t5-xl"
        ).to(self.device)

    def chat(self, image, prompt="What is in this image?"):
        # Preprocess the image/prompt pair, generate on CPU, and decode the answer.
        inputs = self.processor(images=image, text=prompt, return_tensors="pt").to(self.device)
        out = self.model.generate(**inputs, max_new_tokens=100)
        return self.processor.tokenizer.decode(out[0], skip_special_tokens=True)

# Initialize the model once at startup so every request reuses the same weights
minigpt = MiniGPT4Lite()


def respond(image, prompt):
    # Guard against an empty upload so the Space returns a message instead of a traceback
    if image is None:
        return "Please upload an image first."
    return minigpt.chat(image, prompt or "What is in this image?")
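
# A minimal sketch for local testing (not part of the original Space): assuming a
# local image file named "test.jpg" exists, this runs one generation without the
# Gradio UI. The filename and prompt are illustrative only. Uncomment to use.
#
# if __name__ == "__main__":
#     test_image = Image.open("test.jpg").convert("RGB")
#     print(respond(test_image, "Describe this image."))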

demo = gr.Interface(
    fn=respond,
    inputs=[
        gr.Image(type="pil", label="Upload an Image"),
        gr.Textbox(lines=2, placeholder="Ask a question about the image...", label="Prompt"),
    ],
    outputs=gr.Textbox(label="MiniGPT-4 Lite Response"),
    title="MiniGPT-4 Lite (CPU)",
    description="Upload an image and ask a question. Powered by BLIP-2 + Flan-T5. Works on CPU.",
)
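
# Optional tweak (an assumption, not in the original app): on a CPU Space a single
# generation can take tens of seconds, so enabling Gradio's request queue helps
# avoid client timeouts. Uncomment to enable.
#
# demo = demo.queue()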

if __name__ == "__main__":
    demo.launch()