Spaces:
Running
Running
traopia
committed on
Commit
·
08d32ab
1
Parent(s):
a8bd039
search similar images with image search
Browse files
app.py
CHANGED
|
@@ -216,6 +216,46 @@ with gr.Blocks() as demo:
|
|
| 216 |
outputs=[uploaded_selected_idx, uploaded_metadata_output, uploaded_reference_image]
|
| 217 |
)
|
| 218 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 219 |
back_button = gr.Button("Back to Home")
|
| 220 |
|
| 221 |
def back_to_home():
|
|
|
|
| 216 |
outputs=[uploaded_selected_idx, uploaded_metadata_output, uploaded_reference_image]
|
| 217 |
)
|
| 218 |
|
| 219 |
+
# SIMILAR IMAGE SEARCH FOR IMAGE TAB
|
| 220 |
+
uploaded_similar_gallery = gr.Gallery(label="Similar Images", columns=5, height="auto")
|
| 221 |
+
uploaded_similar_metadata_state = gr.State([])
|
| 222 |
+
uploaded_similar_metadata_output = gr.Markdown()
|
| 223 |
+
|
| 224 |
+
uploaded_show_similar_button = gr.Button("Show Similar Images")
|
| 225 |
+
|
| 226 |
+
def show_similar_uploaded(idx, metadata):
|
| 227 |
+
if idx is None or not str(idx).isdigit():
|
| 228 |
+
return [], []
|
| 229 |
+
return find_similar(int(idx), metadata)
|
| 230 |
+
|
| 231 |
+
uploaded_show_similar_button.click(
|
| 232 |
+
show_similar_uploaded,
|
| 233 |
+
inputs=[uploaded_selected_idx, uploaded_metadata_state],
|
| 234 |
+
outputs=[uploaded_similar_gallery, uploaded_similar_metadata_state]
|
| 235 |
+
)
|
| 236 |
+
|
| 237 |
+
def handle_uploaded_similar_click(evt: gr.SelectData, metadata):
|
| 238 |
+
idx = evt.index
|
| 239 |
+
md = show_metadata(idx, metadata)
|
| 240 |
+
img_path = metadata[idx]["url"]
|
| 241 |
+
return idx, md, img_path
|
| 242 |
+
|
| 243 |
+
uploaded_similar_gallery.select(
|
| 244 |
+
handle_uploaded_similar_click,
|
| 245 |
+
inputs=[uploaded_similar_metadata_state],
|
| 246 |
+
outputs=[uploaded_selected_idx, uploaded_similar_metadata_output, uploaded_reference_image]
|
| 247 |
+
)
|
| 248 |
+
|
| 249 |
+
uploaded_back_button = gr.Button("Back to Initial Uploaded Search")
|
| 250 |
+
|
| 251 |
+
def back_to_uploaded_home():
|
| 252 |
+
return [], "", None
|
| 253 |
+
|
| 254 |
+
uploaded_back_button.click(
|
| 255 |
+
back_to_uploaded_home,
|
| 256 |
+
outputs=[uploaded_similar_gallery, uploaded_similar_metadata_output, uploaded_reference_image]
|
| 257 |
+
)
|
| 258 |
+
|
| 259 |
back_button = gr.Button("Back to Home")
|
| 260 |
|
| 261 |
def back_to_home():
|
search.py
CHANGED
|
@@ -5,6 +5,8 @@ import numpy as np
|
|
| 5 |
# Use a compatible CLIP model
|
| 6 |
model = SentenceTransformer("clip-ViT-B-32")
|
| 7 |
|
|
|
|
|
|
|
| 8 |
def search_images_by_text(text, df, embeddings, top_k=30):
|
| 9 |
text_emb = model.encode([text])
|
| 10 |
filtered_embeddings = embeddings[df.index]
|
|
|
|
| 5 |
# Use a compatible CLIP model
|
| 6 |
model = SentenceTransformer("clip-ViT-B-32")
|
| 7 |
|
| 8 |
+
|
| 9 |
+
|
| 10 |
def search_images_by_text(text, df, embeddings, top_k=30):
|
| 11 |
text_emb = model.encode([text])
|
| 12 |
filtered_embeddings = embeddings[df.index]
|