Kaveh-Workstation committed
Commit 20dc8e8 · 1 Parent(s): 72b6d2f
renamed pkl file and rename in app.py
app.py
CHANGED
@@ -9,8 +9,12 @@ from sklearn.metrics.pairwise import cosine_similarity
 import csv
 from PIL import Image
 
-
-
+model_path_rclip = "kaveh/rclip"
+embeddings_file_rclip = './image_embeddings_rclip.pkl'
+
+model_path_pubmedclip = "flaviagiammarino/pubmed-clip-vit-base-patch32"
+embeddings_file_pubmedclip = './image_embeddings_pubmedclip.pkl'
+
 csv_path = "./captions.txt"
 
 def load_image_ids(csv_file):
@@ -36,13 +40,19 @@ def find_similar_images(query_embedding, image_embeddings, k=2):
     return closest_indices, scores
 
 
-def main(query, k=2):
-
-
-
+def main(query, model_id="rclip", k=2):
+    if model_id=="rclip":
+        # Load RCLIP model
+        model = VisionTextDualEncoderModel.from_pretrained(model_path_rclip)
+        processor = VisionTextDualEncoderProcessor.from_pretrained(model_path_rclip)
+        # Load image embeddings
+        image_embeddings = load_embeddings(embeddings_file_rclip)
+    elif model_id=="pubmedclip":
+        model = CLIPModel.from_pretrained(model_path_pubmedclip)
+        processor = CLIPProcessor.from_pretrained(model_path_pubmedclip)
+        # Load image embeddings
+        image_embeddings = load_embeddings(embeddings_file_pubmedclip)
 
-    # Load image embeddings
-    image_embeddings = load_embeddings(embeddings_file)
 
     # Embed the query
     inputs = processor(text=query, images=None, return_tensors="pt", padding=True)
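For context on what happens after the last context line of the diff: inputs = processor(text=query, ...) only tokenizes the query; the hunk header shows that app.py also imports cosine_similarity from scikit-learn and defines a find_similar_images(query_embedding, image_embeddings, k=2) helper whose body is not part of this commit. The snippet below is a minimal sketch of how such a top-k cosine-similarity lookup could be written; the array shapes, the argsort ranking, and the toy data are assumptions, not the Space's actual code.

import numpy as np
from sklearn.metrics.pairwise import cosine_similarity

def find_similar_images(query_embedding, image_embeddings, k=2):
    # query_embedding: shape (1, d), e.g. the output of model.get_text_features(**inputs)
    # image_embeddings: shape (n_images, d), loaded from the .pkl file
    scores = cosine_similarity(query_embedding, image_embeddings)[0]  # (n_images,)
    closest_indices = np.argsort(scores)[::-1][:k]                    # best match first
    return closest_indices, scores

# Toy usage with random vectors standing in for the pickled image embeddings
rng = np.random.default_rng(0)
image_embeddings = rng.normal(size=(10, 512))
query_embedding = image_embeddings[:1] + 0.01 * rng.normal(size=(1, 512))
indices, scores = find_similar_images(query_embedding, image_embeddings, k=2)
print(indices, scores[indices])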
image_embeddings_8_clip14_cxrbert.pkl → image_embeddings_rclip.pkl RENAMED
File without changes
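The renamed file presumably stores the pre-computed image embeddings that load_embeddings(embeddings_file_rclip) reads in app.py. The loader's body is not shown in this diff, so the following is only a sketch of a plausible pickle-based round trip; the array shape and the save_embeddings helper are hypothetical.

import pickle
import numpy as np

def save_embeddings(embeddings, path):
    # embeddings could be an (n_images, d) array or a dict of id -> vector (format assumed)
    with open(path, "wb") as f:
        pickle.dump(embeddings, f)

def load_embeddings(path):
    with open(path, "rb") as f:
        return pickle.load(f)

# Hypothetical round trip using the new file name from this commit
embeddings = np.zeros((3, 512), dtype=np.float32)
save_embeddings(embeddings, "./image_embeddings_rclip.pkl")
print(load_embeddings("./image_embeddings_rclip.pkl").shape)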