File size: 5,060 Bytes
b659a7a
 
3b3b8a5
ddfcb23
 
b659a7a
 
 
 
 
 
613e2fc
b659a7a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2122a17
 
 
 
 
 
b659a7a
2122a17
3d5fbd4
2122a17
 
3d5fbd4
b659a7a
 
 
 
ddfcb23
 
b659a7a
 
 
613e2fc
b659a7a
 
 
 
ddfcb23
 
 
 
 
 
 
 
 
 
3d5fbd4
b659a7a
 
 
3d5fbd4
 
b659a7a
3d5fbd4
 
 
 
 
b659a7a
 
3d5fbd4
 
 
 
 
b659a7a
 
3d5fbd4
 
 
 
 
b659a7a
 
 
3d5fbd4
 
 
ddfcb23
 
 
 
3d5fbd4
ddfcb23
 
 
b659a7a
2122a17
b659a7a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6902a03
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
import os
import pickle
import numpy as np
import streamlit as st
from PIL import Image
from tensorflow.keras.preprocessing import image
from tensorflow.keras.layers import GlobalMaxPooling2D
from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input
from sklearn.neighbors import NearestNeighbors
from numpy.linalg import norm
from chatbot import Chatbot  # Assuming you have a chatbot module
import tensorflow as tf  # Make sure this import is included

# Define function for feature extraction
def feature_extraction(img_path, model):
    """Extract an L2-normalized feature vector from the image at *img_path*.

    Args:
        img_path: Path to an image file readable by Keras' image loader.
        model: Keras model whose ``predict`` yields a feature map that is
            flattened into a 1-D vector (e.g. the frozen ResNet50 +
            GlobalMaxPooling2D stack built in ``show_dashboard``).

    Returns:
        A 1-D numpy array of unit Euclidean length, or the raw vector
        unchanged when it is all zeros (avoids a divide-by-zero NaN).
    """
    # 224x224 is ResNet50's expected input resolution.
    img = image.load_img(img_path, target_size=(224, 224))
    img_array = image.img_to_array(img)
    # Add a batch dimension, then apply ResNet50's channel preprocessing.
    batch = np.expand_dims(img_array, axis=0)
    preprocessed = preprocess_input(batch)
    result = model.predict(preprocessed).flatten()
    # Guard against a zero vector: dividing by norm 0 would yield NaNs.
    magnitude = norm(result)
    return result / magnitude if magnitude else result

# Define function for recommendation
def recommend(features, feature_list, n_neighbors=6):
    """Return the indices of the stored vectors nearest to *features*.

    Brute-force Euclidean nearest-neighbour search — numerically the same
    configuration as the previous
    ``NearestNeighbors(algorithm='brute', metric='euclidean')``, but
    without refitting an sklearn model on every call, and without
    raising when the dataset holds fewer than ``n_neighbors`` rows.

    Args:
        features: 1-D query feature vector.
        feature_list: 2-D array-like of stored feature vectors (rows).
        n_neighbors: Maximum neighbours to return (default 6 preserves
            the original behavior); clamped to the dataset size.

    Returns:
        A ``(1, k)`` numpy integer array of row indices into
        *feature_list*, nearest first (same shape ``kneighbors`` gave).
    """
    stored = np.asarray(feature_list)
    query = np.asarray(features)
    # Clamp k: asking sklearn for more neighbours than samples raised.
    k = min(n_neighbors, len(stored))
    distances = np.linalg.norm(stored - query, axis=1)
    nearest = np.argsort(distances)[:k]
    return nearest[np.newaxis, :]

# Function to save uploaded file
def save_uploaded_file(uploaded_file):
    """Persist a Streamlit UploadedFile under ./uploads.

    Args:
        uploaded_file: Object from ``st.file_uploader`` exposing ``name``
            and ``getbuffer()``.

    Returns:
        The saved file path on success, or ``None`` on failure (the
        error is reported in the Streamlit UI rather than raised).
    """
    try:
        # exist_ok avoids the check-then-create race of the previous
        # os.path.exists() / os.makedirs() pair.
        os.makedirs('uploads', exist_ok=True)
        file_path = os.path.join('uploads', uploaded_file.name)
        with open(file_path, 'wb') as f:
            f.write(uploaded_file.getbuffer())
        st.success(f"File saved to {file_path}")
        return file_path
    except Exception as e:
        st.error(f"Error saving file: {e}")
        return None

# Function to show dashboard content
def show_dashboard():
    """Render the main dashboard: image-based recommendations plus chatbot.

    Builds a frozen ResNet50 feature extractor, loads precomputed
    embeddings/filenames from pickle files, lets the user upload an image
    to see the closest products, and exposes a free-text chatbot that
    also surfaces product recommendations.
    """
    st.header("Fashion Recommender System")
    chatbot = Chatbot()

    # Load ResNet model for image feature extraction: frozen backbone plus
    # global max pooling, so predict() yields one feature vector per image.
    model = ResNet50(weights='imagenet', include_top=False, input_shape=(224, 224, 3))
    model.trainable = False
    model = tf.keras.Sequential([
        model,
        GlobalMaxPooling2D()
    ])

    # NOTE(review): pickle.load must only be used on trusted local
    # artifacts — these files are assumed to be produced by this project.
    try:
        # Context managers close the files even when loading fails; the
        # previous bare open(...) calls leaked the file handles.
        with open('embeddings.pkl', 'rb') as f:
            feature_list = np.array(pickle.load(f))
        with open('filenames.pkl', 'rb') as f:
            filenames = pickle.load(f)
    except Exception as e:
        st.error(f"Error loading pickle files: {e}")
        return

    # Print the filenames to verify
    st.write("List of filenames loaded:")
    st.write(filenames)

    # File upload section
    uploaded_file = st.file_uploader("Choose an image")
    if uploaded_file is not None:
        file_path = save_uploaded_file(uploaded_file)
        if file_path:
            # Display the uploaded image
            try:
                display_image = Image.open(file_path)
                st.image(display_image)
            except Exception as e:
                st.error(f"Error displaying uploaded image: {e}")

            # Feature extraction
            try:
                features = feature_extraction(file_path, model)
            except Exception as e:
                st.error(f"Error extracting features: {e}")
                return

            # Recommendation
            try:
                indices = recommend(features, feature_list)
            except Exception as e:
                st.error(f"Error in recommendation: {e}")
                return

            # Display recommended products, one per column.
            col1, col2, col3, col4, col5 = st.columns(5)
            columns = [col1, col2, col3, col4, col5]

            for col, idx in zip(columns, indices[0]):
                # Directly access images from the dataset instead of file paths
                image_data = chatbot.images[idx]
                if image_data is not None:
                    try:
                        with col:
                            st.image(image_data)
                    except Exception as e:
                        st.error(f"Error opening image index {idx}: {e}")
        else:
            st.error("Some error occurred in file upload")

    # Chatbot section
    user_question = st.text_input("Ask a question:")
    if user_question:
        bot_response, recommended_products = chatbot.generate_response(user_question)
        st.write("Chatbot:", bot_response)

        # Display recommended products
        for result in recommended_products:
            # presumably corpus_id indexes chatbot.product_data — verify
            # against the chatbot module.
            pid = result['corpus_id']
            product_info = chatbot.product_data[pid]
            st.write("Product Name:", product_info['productDisplayName'])
            st.write("Category:", product_info['masterCategory'])
            st.write("Article Type:", product_info['articleType'])
            st.write("Usage:", product_info['usage'])
            st.write("Season:", product_info['season'])
            st.write("Gender:", product_info['gender'])
            st.image(chatbot.images[pid])

# Streamlit entry point
def main():
    """Set the page title and render the dashboard content."""
    st.title("Fashion Recommender System")
    show_dashboard()

# Run the main app
# Guard so importing this module (e.g. for testing) does not launch the UI.
if __name__ == "__main__":
    main()