File size: 3,895 Bytes
d5a6a33
 
 
 
6788534
 
d5a6a33
 
 
 
 
331c814
 
 
 
 
 
 
 
 
 
 
 
 
d5a6a33
 
 
 
 
 
 
 
 
 
 
 
 
 
 
331c814
d5a6a33
 
 
 
6788534
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d5a6a33
 
331c814
d5a6a33
 
 
6788534
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d5a6a33
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
import io
import mimetypes
import os

import google.generativeai as genai
import streamlit as st
from dotenv import load_dotenv
from PIL import Image

# Load environment variables (expects GOOGLE_API_KEY in a .env file).
load_dotenv()

# Configure the Gemini client once. (The original called configure() twice,
# at import time and again after the safety settings — the duplicate is removed.)
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

# Disable all built-in content filters for this assistant.
safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]

# Single shared model instance with a French persona ("Mariam", created by Aenir).
model = genai.GenerativeModel(
    'gemini-1.5-flash',
    safety_settings=safety_settings,
    system_instruction="Tu es un assistant intelligent. ton but est d'assister au mieux que tu peux. tu as été créé par Aenir et tu t'appelles Mariam",
)



# Function to get response from the model
# Gemini uses 'model' for assistant; Streamlit uses 'assistant'

def role_to_streamlit(role):
    """Map a Gemini history role onto the role name Streamlit expects.

    Gemini stores assistant turns under the role "model", while
    st.chat_message wants "assistant"; every other role passes through
    unchanged.
    """
    return "assistant" if role == "model" else role

# Create the Gemini chat session once per browser session; keeping it in
# Streamlit's session state lets the conversation survive script reruns.
if st.session_state.get("chat") is None:
    st.session_state.chat = model.start_chat(history=[])

# Page heading.
st.title("Mariam AI!")

# Replay the saved conversation above the current input box.
# NOTE: this block uses io.BytesIO; the original file never imported `io`,
# which raised NameError the first time an image part was replayed —
# the import is now added at the top of the file.
for message in st.session_state.chat.history:
    with st.chat_message(role_to_streamlit(message.role)):
        # A history entry can mix text parts and file parts.
        for part in message.parts:
            if part.HasField("text"):
                st.markdown(part.text)
            elif part.HasField("file_data"):
                try:
                    # Prefer the stored MIME type; fall back to guessing it
                    # from the file name when the part does not carry one.
                    mime_type = part.file_data.mime_type or mimetypes.guess_type(part.file_data.file_name)[0]
                    if mime_type and mime_type.startswith("image/"):
                        # Rebuild the image from the raw bytes kept in history.
                        image = Image.open(io.BytesIO(part.file_data.data))
                        st.image(image)
                    else:
                        # Non-image attachments get a plain-text summary.
                        st.write(f"File: {part.file_data.file_name} (MIME type: {part.file_data.mime_type})")
                except Exception as e:
                    # Best-effort display: a broken attachment must not kill the page.
                    st.error(f"Error displaying file: {e}")

# The uploader must live OUTSIDE the chat-input branch: in the original it
# was created inside `if prompt := ...`, so it only rendered on the single
# rerun right after a message was submitted, and interacting with it caused
# another rerun in which `prompt` was falsy — the widget (and any chosen
# file) vanished before it could ever be sent.
uploaded_file = st.file_uploader("Choose a file", type=["jpg", "jpeg", "png", "pdf"]) # Add more type if needed

# Accept the user's next message and forward it (plus any attachment) to Gemini.
if prompt := st.chat_input("Hey?"):
    # Echo the user's message immediately.
    st.chat_message("user").markdown(prompt)

    if uploaded_file is not None:
        # Raw bytes of the attachment.
        bytes_data = uploaded_file.getvalue()

        # Preview the attachment inline when it is an image.
        if uploaded_file.type.startswith("image/"):
            image = Image.open(uploaded_file)
            st.image(image, caption=f"Uploaded Image: {uploaded_file.name}")

        # Build the multi-part message: the text prompt plus the file payload.
        parts = [
            prompt,
            {
                "file_data": {
                    "mime_type": uploaded_file.type,
                    "file_name": uploaded_file.name,
                    "data": bytes_data,
                }
            },
        ]
        response = st.session_state.chat.send_message(parts)
    else:
        # Text-only turn.
        response = st.session_state.chat.send_message(prompt)

    # Render the assistant's reply.
    with st.chat_message("assistant"):
        st.markdown(response.text)