# Hugging Face Space: CLIP image-feature extractor demo.
# Space status at capture time: Runtime error. File size: 587 bytes.
import gradio as gr
from PIL import Image
import requests
from transformers import CLIPProcessor, CLIPModel
# Checkpoint shared by the model and its paired processor — keeping it in one
# place guarantees the two always load matching weights/preprocessing.
_CLIP_CHECKPOINT = "openai/clip-vit-base-patch32"

# Loaded once at module import so every request reuses the same weights.
model = CLIPModel.from_pretrained(_CLIP_CHECKPOINT)
processor = CLIPProcessor.from_pretrained(_CLIP_CHECKPOINT)
def image_feature(url):
    """Download an image from *url* and return its CLIP embedding.

    Args:
        url: HTTP(S) URL of an image readable by PIL.

    Returns:
        A flat list of floats — the CLIP image-feature vector for the image
        (512 dims for this checkpoint — TODO confirm against the model config).

    Raises:
        requests.HTTPError: if the server responds with a non-2xx status.
        requests.Timeout: if the download exceeds the timeout.
    """
    # timeout prevents a dead server from hanging the Space forever;
    # raise_for_status surfaces a clear HTTP error instead of letting
    # PIL choke on an HTML error page.
    response = requests.get(url, stream=True, timeout=30)
    response.raise_for_status()
    image = Image.open(response.raw)
    inputs = processor(images=image, return_tensors="pt")
    image_features = model.get_image_features(**inputs)
    # detach() drops the autograd graph so the tensor can be exported.
    return image_features.detach().numpy().tolist()[0]
# Text-in/text-out demo: paste an image URL, get back the embedding as text.
iface = gr.Interface(
    fn=image_feature,
    inputs="text",
    outputs="text",
)
iface.launch()