import streamlit as st
import google.generativeai as genai
import os
from dotenv import load_dotenv
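
# Launch with `streamlit run app.py` (the script name here is an assumption;
# adjust it to this file's actual name).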

load_dotenv()

# Configure the API key
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

# Create the temp directory before any upload is written to it
os.makedirs("temp", exist_ok=True)

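# Disable all of Gemini's built-in safety filters for this demo. For production
# use you would likely keep stricter thresholds such as "BLOCK_MEDIUM_AND_ABOVE".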
safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]

def create_model_with_search(use_search):
    """
    Crée un modèle Gemini avec ou sans recherche Google
    
    Args:
        use_search (bool): Active ou désactive la recherche Google
    
    Returns:
        genai.GenerativeModel: Modèle Gemini configuré
    """
    tools = "google_search_retrieval" if use_search else None
    
    return genai.GenerativeModel(
        'gemini-1.5-flash', 
        safety_settings=safety_settings,
        system_instruction="Tu es un assistant intelligent. Ton but est d'assister au mieux que tu peux. Tu as été créé par Aenir et tu t'appelles Mariam.",
        tools=tools
    )

def role_to_streamlit(role):
    """Map Gemini's 'model' role to Streamlit's 'assistant' role name."""
    return "assistant" if role == "model" else role

# Add a radio button to toggle Google Search
st.sidebar.header("Paramètres de recherche")
use_google_search = st.sidebar.radio(
    "Recherche Google",
    options=["Désactivée", "Activée"],
    index=0  # Search disabled by default
)

# Convert the radio selection to a boolean
search_enabled = use_google_search == "Activée"

# Create the initial model with the current radio-button setting
model = create_model_with_search(search_enabled)

# Add a Gemini Chat history object to Streamlit session state
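# st.session_state persists across Streamlit reruns, so the chat object
# (and its accumulated history) survives each user interaction.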
if "chat" not in st.session_state:
    st.session_state.chat = model.start_chat(history=[])

# Display the app title
st.title("Mariam AI!")

# File upload section
uploaded_file = st.file_uploader("Télécharger un fichier (image/document)", type=['jpg', 'jpeg', 'png', 'mp4', 'mp3', 'pdf', 'txt'])

# Display chat messages from history above current input box
for message in st.session_state.chat.history:
    with st.chat_message(role_to_streamlit(message.role)):
        st.markdown(message.parts[0].text)

# Function to handle file upload with Gemini
def process_uploaded_file(file):
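    """Save the uploaded file locally, then push it to the Gemini File API."""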
    if file is not None:
        # Save the uploaded file temporarily
        with open(os.path.join("temp", file.name), "wb") as f:
            f.write(file.getbuffer())
        
        # Upload the file to Gemini
        try:
            gemini_file = genai.upload_file(os.path.join("temp", file.name))
            return gemini_file
        except Exception as e:
            st.error(f"Erreur lors du téléchargement du fichier : {e}")
            return None

# Accept user's next message, add to context, resubmit context to Gemini
if prompt := st.chat_input("Hey?"):
    # Recreate the model with the current search settings
    model = create_model_with_search(search_enabled)
    
    # Reset the conversation on the new model, carrying over the existing history
    st.session_state.chat = model.start_chat(history=st.session_state.chat.history)
    
    # Process any uploaded file
    uploaded_gemini_file = None
    if uploaded_file:
        uploaded_gemini_file = process_uploaded_file(uploaded_file)
    
    # Display user's last message
    st.chat_message("user").markdown(prompt)

    # Send user entry to Gemini with optional file
    try:
        if uploaded_gemini_file:
            # If a file is uploaded, include it in the context
            response = st.session_state.chat.send_message([uploaded_gemini_file, "\n\n", prompt])
        else:
            # Normal text-only conversation
            response = st.session_state.chat.send_message(prompt)

        # Display last response
        with st.chat_message("assistant"):
            st.markdown(response.text)
    
    except Exception as e:
        st.error(f"Erreur lors de l'envoi du message : {e}")
