Spaces:
Paused
Paused
| import os | |
| import google.generativeai as genai | |
| import gradio as gr | |
| from dotenv import load_dotenv | |
| load_dotenv() | |
| GEMINI_API_KEY = os.getenv("GEMINI_API_KEY") | |
# Pinned experimental Gemini model used for the chat session below.
model_name = "gemini-1.5-flash-exp-0827"

# UI copy rendered by the Gradio interface.
# NOTE(review): the emoji in these strings appear mojibake'd (UTF-8 emoji
# bytes decoded as a Thai codepage during extraction) — presumably they
# were 🔮/🔥-style emoji originally; re-save the file as UTF-8 to confirm.
TITLE = """<h1 align="center">๐ฎChat with Gemini 1.5๐ฅ</h1>"""
NOTICE = """
**Notices** ๐:
- This app is still in development
- Some features may not work as expected
"""
ABOUT = """
**Updates (2024-8-28)** ๐: Upgrade model to SOTA Gemini 1.5 Flash Experimental 0827
**Info** ๐:
- Model: Gemini 1.5 Flash Experimental 0827
- Chat with Gemini 1.5 Flash model with images and documents
"""
ERRORS = """
Known errors โ ๏ธ:
"""
FUTURE_IMPLEMENTATIONS = """
Future features ๐:
- Select other Gemini / Gemma models
- More tools such as web search
"""
# Authenticate the SDK with the key read from the environment above.
genai.configure(api_key=GEMINI_API_KEY)

# Model handle with every safety category explicitly set to BLOCK_NONE
# (no SDK-side content filtering) and a fixed sampling configuration.
model = genai.GenerativeModel(
    model_name,
    safety_settings=[
        {
            "category": "HARM_CATEGORY_HARASSMENT",
            "threshold": "BLOCK_NONE"
        },
        {
            "category": "HARM_CATEGORY_HATE_SPEECH",
            "threshold": "BLOCK_NONE"
        },
        {
            "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
            "threshold": "BLOCK_NONE"
        },
        {
            "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
            "threshold": "BLOCK_NONE"
        }
    ],
    generation_config={
        "temperature": 1,
        "top_p": 0.95,
        "top_k": 64,
        "max_output_tokens": 8192,  # SDK maximum for this model family
        "response_mime_type": "text/plain",
    }
)

# Module-level chat session shared by every handler below — the app is
# effectively a single conversation for all visitors of the Space.
chat = model.start_chat(history=[])
def clear_chat_history():
    """Reset the shared module-level Gemini chat session to an empty history."""
    # Rebinds the session's history list; affects all users, since `chat`
    # is a single module-level object.
    chat.history = []
def undo_chat():
    """Rewind the shared Gemini chat session by one exchange.

    Drops the most recent (user message, model reply) pair from the
    module-level `chat` history via the SDK's ``ChatSession.rewind()``.
    """
    # The original unpacked rewind()'s return into `last_send, last_received`
    # and never used them — the pair is discarded either way.
    chat.rewind()
def transform_history(history):
    """Convert Gradio ``(user, model)`` tuple history to Gemini format.

    Each pair expands into two entries — one ``role: user`` and one
    ``role: model`` — each carrying its message as a one-element ``parts``
    list, preserving conversation order.
    """
    return [
        {"role": role, "parts": [text]}
        for user_msg, model_msg in history
        for role, text in (("user", user_msg), ("model", model_msg))
    ]
def chatbot_stable(message, history):
    """Handle one turn of the multimodal Gradio chat.

    Args:
        message: Gradio multimodal payload — a dict with ``"text"`` (str)
            and ``"files"`` (list of attached files).
        history: Gradio-format history; unused here because the
            module-level ``chat`` session keeps the Gemini-side history.

    Returns:
        The model's full reply text for this turn.
    """
    message_content = [message["text"]]

    if message["files"]:
        # Gradio has shipped file entries both as {"path": ...} dicts and
        # as plain path strings depending on version; accept either form.
        for entry in message["files"]:
            path = entry["path"] if isinstance(entry, dict) else entry
            message_content.append(genai.upload_file(path=path))

    # The original passed stream=True and then immediately resolve()d the
    # stream, blocking until completion anyway — a plain non-streaming
    # call returns the same full text without the extra buffering.
    response = chat.send_message(message_content)
    return response.text
# Chat display component handed to ChatInterface below.
gemini_chatbot_interface = gr.Chatbot(
    height=400,
    likeable=True,
    avatar_images=(
        None,  # user avatar: Gradio default
        "https://media.roboflow.com/spaces/gemini-icon.png"  # bot avatar
    ),
    show_copy_button=True,
    show_share_button=True,
    render_markdown=True
)
# NOTE(review): this button only clears the Chatbot component on screen.
# clear_chat_history() defined above is never wired to it, so the shared
# Gemini-side `chat` history presumably survives a UI clear — confirm and
# attach the callback if that is unintended.
clear_chat_button = gr.ClearButton(
    components=[gemini_chatbot_interface],
    value="๐๏ธ Clear"
)
# NOTE(review): likewise, undo_chat() is never attached to this button;
# ChatInterface's built-in undo affects only the UI transcript.
undo_chat_button = gr.Button(
    value="โฉ๏ธ Undo"
)
# Multimodal chat UI; chatbot_stable receives {"text": ..., "files": [...]}.
gemini_chatbot = gr.ChatInterface(
    fn=chatbot_stable,
    chatbot=gemini_chatbot_interface,
    multimodal=True,
    clear_btn=clear_chat_button,
    undo_btn=undo_chat_button
)