import streamlit as st
import google.generativeai as genai
import os
from dotenv import load_dotenv
from PIL import Image
import io
import mimetypes

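# Mariam AI: a Streamlit chat front-end for the Gemini API.
# Assumes a .env file (loaded below) that defines GOOGLE_API_KEY; run the app with
# `streamlit run <this script>`.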
load_dotenv()

# Configure the API key
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]
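# Note: BLOCK_NONE disables the API's content filtering for all four harm categories.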

model = genai.GenerativeModel(
    'gemini-1.5-flash',
    safety_settings=safety_settings,
    system_instruction="You are an intelligent assistant. Your goal is to assist as well as you can. You were created by Aenir and your name is Mariam"
)

def role_to_streamlit(role):
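    # Map Gemini's "model" role to Streamlit's "assistant" role for st.chat_message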
    if role == "model":
        return "assistant"
    else:
        return role

# Add a Gemini Chat history object to Streamlit session state
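# (st.session_state persists across Streamlit reruns, so the conversation survives each interaction)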
if "chat" not in st.session_state:
    st.session_state.chat = model.start_chat(history=[])

# Display Form Title
st.title("Mariam AI!")

# Display chat messages from history above current input box
for message in st.session_state.chat.history:
    with st.chat_message(role_to_streamlit(message.role)):
        for part in message.parts:
            if part.text:  # Check for text content
                st.markdown(part.text)
            elif part.inline_data.data:  # Check for inline file data (uploads below are sent as inline blobs)
                try:
                    mime_type = part.inline_data.mime_type
                    if mime_type and mime_type.startswith("image/"):
                        image = Image.open(io.BytesIO(part.inline_data.data))
                        st.image(image)
                    else:
                        st.write(f"Attached file (MIME type: {mime_type})")
                except Exception as e:
                    st.error(f"Error displaying file: {e}")

# The file uploader lives outside the chat_input block: a widget created inside the
# `if prompt := ...` branch only exists during the rerun triggered by submitting a
# message, so a nested uploader would never hold a file.
uploaded_file = st.file_uploader("Choose a file", type=["jpg", "jpeg", "png", "pdf"])

# Accept user's next message (and any file attached above)
if prompt := st.chat_input("Hey?"):
    parts = [prompt]
    if uploaded_file:
        bytes_data = uploaded_file.getvalue()
        # Fall back to guessing the MIME type from the file name if Streamlit did not set it
        mime_type = uploaded_file.type or mimetypes.guess_type(uploaded_file.name)[0]
        # Attach the file as inline data ({"mime_type", "data"} is the blob form the SDK accepts)
        parts.append({"mime_type": mime_type, "data": bytes_data})

        # Display the uploaded image
        if mime_type and mime_type.startswith("image/"):
            image = Image.open(io.BytesIO(bytes_data))
            with st.chat_message("user"):
                st.image(image, caption=f"Uploaded Image: {uploaded_file.name}")

    # Display user's message
    with st.chat_message("user"):
        st.markdown(prompt)

    # Send message to Gemini
    response = st.session_state.chat.send_message(parts)

    # Display Gemini's response
    with st.chat_message("assistant"):
        st.markdown(response.text)