traopia committed · Commit 05cb438 · Parent(s): 7411cee

double trouble search

Files changed:
- app.py +29 -24
- app_twotabs.py +229 -0
- search.py +32 -0
app.py CHANGED

@@ -1,7 +1,7 @@
 import gradio as gr
 import pandas as pd
 import numpy as np
-from search import search_images_by_text, get_similar_images
+from search import search_images_by_text, get_similar_images, search_images_by_image
 import requests
 from io import BytesIO

@@ -115,7 +115,7 @@ with gr.Blocks() as demo:
     fashion_house = gr.Dropdown(label="Fashion House", choices=sorted(df["designer"].dropna().unique()), multiselect=True)
     category = gr.Dropdown(label="Category", choices=sorted(df["category"].dropna().unique()), multiselect=True)
     season = gr.Dropdown(label="Season", choices=sorted(df["season"].dropna().unique()), multiselect=True)
-
+

     min_year = int(df['year'].min())
     max_year = int(df['year'].max())
@@ -123,8 +123,30 @@ with gr.Blocks() as demo:
     start_year = gr.Slider(label="Start Year", minimum=min_year, maximum=max_year, value=2000, step=1)
     end_year = gr.Slider(label="End Year", minimum=min_year, maximum=max_year, value=2024, step=1)

-    query = gr.Textbox(label="Search", placeholder="e.g., pink dress")
-    search_button = gr.Button("Search")
+    query = gr.Textbox(label="Search by text", placeholder="(optional): e.g., pink dress ")
+    search_button = gr.Button("Search by text")
+
+    uploaded_image = gr.Image(label="Upload an image", type="pil")  # or type="pil" if you prefer PIL Image object
+    search_by_image_button = gr.Button("Search by Image")
+
+    def handle_search_by_image(uploaded_image):
+        if uploaded_image is None:
+            return [], "Please upload an image first."
+        results_df = search_images_by_image(uploaded_image, df, embeddings)
+        # Convert results DataFrame to image URLs (or paths) for gallery display
+        images = results_df['url'].tolist()
+        metadata = results_df.to_dict(orient='records')
+        return images, metadata, ""
+
+    uploaded_metadata_state = gr.State([])
+    uploaded_metadata_output = gr.Markdown()
+    uploaded_result_gallery = gr.Gallery(label="Search Results by Image", columns=5, height="auto")
+
+    search_by_image_button.click(
+        fn=handle_search_by_image,
+        inputs=[uploaded_image],
+        outputs=[uploaded_result_gallery, uploaded_metadata_state, uploaded_metadata_output]
+    )

     result_gallery = gr.Gallery(label="Search Results", columns=5, height="auto")
     metadata_output = gr.Markdown()
@@ -144,21 +166,14 @@ with gr.Blocks() as demo:
         outputs=[result_gallery, metadata_state, metadata_output, similar_gallery]
     )

-
-    # idx = evt.index
-    # md = show_metadata(idx, metadata)
-    # return idx, md
+
     def handle_click(evt: gr.SelectData, metadata):
         idx = evt.index
         md = show_metadata(idx, metadata)
         img_path = metadata[idx]["url"]
         return idx, md, img_path

-
-    # handle_click,
-    # inputs=[metadata_state],
-    # outputs=[selected_idx, metadata_output]
-    # )
+

     result_gallery.select(
         handle_click,
@@ -166,8 +181,6 @@ with gr.Blocks() as demo:
         outputs=[selected_idx, metadata_output, reference_image]
     )

-    # def show_similar(idx, metadata):
-    #     return find_similar(int(idx), metadata)
     def show_similar(idx, metadata):
         if idx is None or not str(idx).isdigit():
             return [],[]  # safe fallback
@@ -183,10 +196,6 @@ with gr.Blocks() as demo:
         outputs=[similar_gallery, similar_metadata_state]
     )

-    # def handle_similar_click(evt: gr.SelectData, metadata):
-    #     idx = evt.index
-    #     md = show_metadata(idx, metadata)
-    #     return idx, md

     def handle_similar_click(evt: gr.SelectData, metadata):
         idx = evt.index
@@ -194,11 +203,7 @@ with gr.Blocks() as demo:
         img_path = metadata[idx]["url"]
         return idx, md, img_path

-
-    # handle_similar_click,
-    # inputs=[similar_metadata_state],
-    # outputs=[selected_idx, similar_metadata_output]
-    # )
+
    similar_gallery.select(
         handle_similar_click,
         inputs=[similar_metadata_state],
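
Note on the new handler in app.py: the `search_by_image_button.click` wiring declares three outputs (gallery, state, markdown), but the early-return branch of `handle_search_by_image` yields only two values. A minimal sketch of a shape-consistent variant, assuming the same component wiring as in the commit:

    def handle_search_by_image(uploaded_image):
        # Keep the empty-input branch aligned with the three wired outputs:
        # [uploaded_result_gallery, uploaded_metadata_state, uploaded_metadata_output].
        if uploaded_image is None:
            return [], [], "Please upload an image first."
        results_df = search_images_by_image(uploaded_image, df, embeddings)
        images = results_df["url"].tolist()               # gallery items
        metadata = results_df.to_dict(orient="records")   # kept in gr.State for click handlers
        return images, metadata, ""                       # clear the status message
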
app_twotabs.py ADDED

@@ -0,0 +1,229 @@
+import gradio as gr
+import pandas as pd
+import numpy as np
+from search import search_images_by_text, get_similar_images, search_images_by_image
+import requests
+from io import BytesIO
+
+def create_collection_url(row):
+    base_url = "https://www.vogue.com/fashion-shows/"
+    season = str(row["season"]).lower()
+    year = str(row["year"])
+    category = str(row["category"]).lower() if pd.notna(row["category"]) and row["category"] and str(row["category"]).lower() != "nan" else None
+    designer = str(row["designer"]).lower().replace(" ", "-")
+
+    # Add city if available
+    city = str(row["city"]).lower().replace(" ", "-") if pd.notna(row["city"]) and row["city"] and str(row["city"]).lower() != "nan" else None
+
+    if pd.isna(category) or category is None or category == "nan":
+        if city:
+            return f"{base_url}{city}-{season}-{year}/{designer}"
+        else:
+            return f"{base_url}{season}-{year}/{designer}"
+    else:
+        if city:
+            return f"{base_url}{city}-{season}-{year}-{category}/{designer}"
+        else:
+            return f"{base_url}{season}-{year}-{category}/{designer}"
+
+import requests
+from io import BytesIO
+#@st.cache_data(show_spinner="Loading FashionDB...")
+def load_data_hf():
+    # Load the Parquet file directly from Hugging Face
+    df_url = "https://huggingface.co/datasets/traopia/vogue_runway_small/resolve/main/VogueRunway.parquet"
+    df = pd.read_parquet(df_url)
+
+    # Load the .npy file using requests
+    npy_url = "https://huggingface.co/datasets/traopia/vogue_runway_small/resolve/main/VogueRunway_image.npy"
+    response = requests.get(npy_url)
+    response.raise_for_status()  # Raise error if download fails
+    embeddings = np.load(BytesIO(response.content))
+    df['collection'] = df.apply(create_collection_url, axis=1)
+    return df, embeddings
+
+
+
+
+df, embeddings = load_data_hf()
+
+# Filter and search
+def filter_and_search(fashion_house, category, season, start_year, end_year, query):
+    filtered = df.copy()
+
+    if fashion_house:
+        filtered = filtered[filtered['designer'].isin(fashion_house)]
+    if category:
+        filtered = filtered[filtered['category'].isin(category)]
+    if season:
+        filtered = filtered[filtered['season'].isin(season)]
+    filtered = filtered[(filtered['year'] >= start_year) & (filtered['year'] <= end_year)]
+
+    if query:
+        results = search_images_by_text(query, filtered, embeddings)
+    else:
+        results = filtered.head(30)
+
+    image_urls = results["url"].tolist()
+    metadata = results.to_dict(orient="records")
+    return image_urls, metadata
+
+# Display metadata and similar
+def show_metadata(idx, metadata):
+    item = metadata[idx]
+    out = ""
+    for field in ["designer", "season", "year", "category"]:
+        if field in item and pd.notna(item[field]):
+            out += f"**{field.title()}**: {item[field]}\n"
+    if 'collection' in item and pd.notna(item['collection']):
+        out += f"\n[View Collection]({item['collection']})"
+    return out
+
+def find_similar(idx, metadata):
+    if not isinstance(idx, int) or idx >= len(metadata) or idx < 0:
+        return []  # or gr.update(visible=False)
+    key = metadata[idx]["key"]
+    similar_df = get_similar_images(df, key, embeddings, top_k=5)
+    return similar_df["url"].tolist(), similar_df.to_dict(orient="records")
+
+
+
+
+with gr.Blocks() as demo:
+    gr.Markdown("# 👗 FashionDB Explorer")
+
+    with gr.Tabs():
+        # TEXT SEARCH TAB
+        with gr.Tab("Search by Text"):
+            with gr.Row():
+                fashion_house = gr.Dropdown(label="Fashion House", choices=sorted(df["designer"].dropna().unique()), multiselect=True)
+                category = gr.Dropdown(label="Category", choices=sorted(df["category"].dropna().unique()), multiselect=True)
+                season = gr.Dropdown(label="Season", choices=sorted(df["season"].dropna().unique()), multiselect=True)
+            min_year = int(df['year'].min())
+            max_year = int(df['year'].max())
+            start_year = gr.Slider(label="Start Year", minimum=min_year, maximum=max_year, value=2000, step=1)
+            end_year = gr.Slider(label="End Year", minimum=min_year, maximum=max_year, value=2024, step=1)
+
+            query = gr.Textbox(label="Search by text", placeholder="e.g., pink dress")
+            search_button = gr.Button("Search")
+
+            result_gallery = gr.Gallery(label="Search Results", columns=5, height="auto")
+            metadata_output = gr.Markdown()
+            reference_image = gr.Image(label="Reference Image", interactive=False)
+            similar_gallery = gr.Gallery(label="Similar Images", columns=5, height="auto")
+
+            metadata_state = gr.State([])
+            selected_idx = gr.Number(value=0, visible=False)
+
+            def handle_search(fh, cat, sea, sy, ey, q):
+                imgs, meta = filter_and_search(fh, cat, sea, sy, ey, q)
+                return imgs, meta, "", [], None
+
+            search_button.click(
+                handle_search,
+                inputs=[fashion_house, category, season, start_year, end_year, query],
+                outputs=[result_gallery, metadata_state, metadata_output, similar_gallery, reference_image]
+            )
+
+            def handle_click(evt: gr.SelectData, metadata):
+                idx = evt.index
+                md = show_metadata(idx, metadata)
+                img_path = metadata[idx]["url"]
+                return idx, md, img_path
+
+            result_gallery.select(
+                handle_click,
+                inputs=[metadata_state],
+                outputs=[selected_idx, metadata_output, reference_image]
+            )
+
+            def show_similar(idx, metadata):
+                if idx is None or not str(idx).isdigit():
+                    return [], []
+                return find_similar(int(idx), metadata)
+
+            similar_metadata_state = gr.State()
+            similar_metadata_output = gr.Markdown()
+
+            show_similar_button = gr.Button("Show Similar Images")
+            show_similar_button.click(
+                show_similar,
+                inputs=[selected_idx, metadata_state],
+                outputs=[similar_gallery, similar_metadata_state]
+            )
+
+            def handle_similar_click(evt: gr.SelectData, metadata):
+                idx = evt.index
+                md = show_metadata(idx, metadata)
+                img_path = metadata[idx]["url"]
+                return idx, md, img_path
+
+            similar_gallery.select(
+                handle_similar_click,
+                inputs=[similar_metadata_state],
+                outputs=[selected_idx, similar_metadata_output, reference_image]
+            )
+
+        # IMAGE SEARCH TAB
+        with gr.Tab("Search by Image"):
+            with gr.Row():
+                fashion_house_img = gr.Dropdown(label="Fashion House", choices=sorted(df["designer"].dropna().unique()), multiselect=True)
+                category_img = gr.Dropdown(label="Category", choices=sorted(df["category"].dropna().unique()), multiselect=True)
+                season_img = gr.Dropdown(label="Season", choices=sorted(df["season"].dropna().unique()), multiselect=True)
+            start_year_img = gr.Slider(label="Start Year", minimum=min_year, maximum=max_year, value=2000, step=1)
+            end_year_img = gr.Slider(label="End Year", minimum=min_year, maximum=max_year, value=2024, step=1)
+
+            uploaded_image = gr.Image(label="Upload an image", type="pil")
+            search_by_image_button = gr.Button("Search by Image")
+
+            uploaded_result_gallery = gr.Gallery(label="Search Results by Image", columns=5, height="auto")
+            uploaded_metadata_state = gr.State([])
+            uploaded_metadata_output = gr.Markdown()
+            uploaded_reference_image = gr.Image(label="Reference Image", interactive=False)
+
+            def handle_search_by_image(image, fh, cat, sea, sy, ey):
+                if image is None:
+                    return [], "Please upload an image first.", None
+                # Apply filters
+                filtered_df = df.copy()
+                if fh: filtered_df = filtered_df[filtered_df["designer"].isin(fh)]
+                if cat: filtered_df = filtered_df[filtered_df["category"].isin(cat)]
+                if sea: filtered_df = filtered_df[filtered_df["season"].isin(sea)]
+                filtered_df = filtered_df[(filtered_df["year"] >= sy) & (filtered_df["year"] <= ey)]
+
+                results_df = search_images_by_image(image, filtered_df, embeddings)
+                images = results_df['url'].tolist()
+                metadata = results_df.to_dict(orient="records")
+                return images, metadata, ""
+
+            search_by_image_button.click(
+                handle_search_by_image,
+                inputs=[uploaded_image, fashion_house_img, category_img, season_img, start_year_img, end_year_img],
+                outputs=[uploaded_result_gallery, uploaded_metadata_state, uploaded_metadata_output]
+            )
+
+            uploaded_selected_idx = gr.Number(visible=False)
+
+            def handle_uploaded_click(evt: gr.SelectData, metadata):
+                idx = evt.index
+                md = show_metadata(idx, metadata)
+                img_path = metadata[idx]["url"]
+                return idx, md, img_path
+
+            uploaded_result_gallery.select(
+                handle_uploaded_click,
+                inputs=[uploaded_metadata_state],
+                outputs=[uploaded_selected_idx, uploaded_metadata_output, uploaded_reference_image]
+            )
+
+            back_button = gr.Button("Back to Home")
+
+            def back_to_home():
+                return [], "", None  # clear similar_gallery, metadata_output, reference image
+
+            back_button.click(
+                back_to_home,
+                outputs=[similar_gallery, similar_metadata_output, reference_image]
+            )
+
+demo.launch()
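
For illustration, a small sketch of the URL shape `create_collection_url` in app_twotabs.py produces; the row below is hypothetical, not taken from the dataset, and assumes the function is in scope:

    import pandas as pd

    # Hypothetical row, only for illustrating the URL pattern.
    row = pd.Series({
        "season": "Spring",
        "year": 2020,
        "category": "Ready-to-Wear",
        "designer": "Some Designer",
        "city": None,
    })
    # No city and a non-null category, so the season-year-category branch applies:
    # https://www.vogue.com/fashion-shows/spring-2020-ready-to-wear/some-designer
    print(create_collection_url(row))
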
search.py CHANGED

@@ -20,3 +20,35 @@ def get_similar_images(df, image_id, embeddings, top_k=5):
     return df.iloc[top_indices]


+from PIL import Image
+import torchvision.transforms as T
+
+
+
+# Make sure to use the same embedding method for your dataset images
+def preprocess_image(img):
+    # You may want to adapt this if you used a specific transform for your dataset
+    transform = T.Compose([
+        T.Resize((224, 224)),
+        T.ToTensor(),
+        T.Normalize(mean=(0.48145466, 0.4578275, 0.40821073),
+                    std=(0.26862954, 0.26130258, 0.27577711))
+    ])
+    return transform(img).unsqueeze(0)
+
+def search_images_by_image(uploaded_image, df, embeddings, top_k=30):
+    # Convert to PIL Image if necessary
+    if isinstance(uploaded_image, str):  # if path
+        uploaded_image = Image.open(uploaded_image).convert("RGB")
+    elif isinstance(uploaded_image, np.ndarray):
+        uploaded_image = Image.fromarray(uploaded_image).convert("RGB")
+
+    # Encode image using CLIP model
+    image_emb = model.encode(uploaded_image)
+
+    # Compute cosine similarity to dataset embeddings
+    filtered_embeddings = embeddings[df.index]
+    sims = cosine_similarity([image_emb], filtered_embeddings)[0]
+    top_indices = np.argsort(sims)[::-1][:top_k]
+
+    return df.iloc[top_indices]
|