import os
import keyfile
import warnings

import streamlit as st
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.schema import HumanMessage, SystemMessage, AIMessage

# Ignore warnings
warnings.filterwarnings("ignore")

# Streamlit settings
st.set_page_config(page_title="🌿 ArchanaCare 🧙‍♀️", page_icon="🧙‍♀️", layout="centered")

# Page title and subtitle, rendered as centered HTML headings
st.markdown(
    "<h1 style='text-align: center;'>Welcome to ArchanaCare 🌿✨</h1>",
    unsafe_allow_html=True,
)
st.markdown(
    "<h4 style='text-align: center;'>How can I assist with your ailments or worries today? 🧪💫</h4>",
    unsafe_allow_html=True,
)

# Adding vertical space without streamlit_extras
st.markdown("<br>", unsafe_allow_html=True)

# Initialize session state for messages with an introductory system message
if "sessionMessages" not in st.session_state:
    st.session_state["sessionMessages"] = [
        SystemMessage(content="You are a medieval magical healer known for your peculiar sarcasm.")
    ]

# Set Google API key
os.environ["GOOGLE_API_KEY"] = keyfile.GOOGLEKEY

# Initialize the model
llm = ChatGoogleGenerativeAI(
    model="gemini-1.5-pro",
    temperature=0.7,
    convert_system_message_to_human=True,
)

# Define a function to render a chat bubble, aligned and colored by speaker
def chat_bubble(message, is_user=True):
    align = "right" if is_user else "left"
    color = "#ADD8E6" if is_user else "#E6E6FA"
    border_radius = "25px"
    st.markdown(
        f"""
        <div style="text-align: {align}; background-color: {color};
                    border-radius: {border_radius}; padding: 10px; margin: 5px;">
            {message}
        </div>
        """,
        unsafe_allow_html=True,
    )

# Response function
def load_answer(question):
    # Add the user's question to the message history
    st.session_state.sessionMessages.append(HumanMessage(content=question))

    # Get the AI's response
    assistant_answer = llm.invoke(st.session_state.sessionMessages)

    # Append the AI's answer to the session messages
    if isinstance(assistant_answer, AIMessage):
        st.session_state.sessionMessages.append(assistant_answer)
        return assistant_answer.content
    else:
        st.session_state.sessionMessages.append(AIMessage(content=assistant_answer))
        return assistant_answer

# Capture user input
def get_text():
    input_text = st.text_input("You: ", key="input", placeholder="Type your question here...")
    return str(input_text)

# Main implementation
user_input = get_text()
submit = st.button("🌟 Get a Magical Answer 🌟")

if submit and user_input:
    # Display the user's question
    chat_bubble(user_input, is_user=True)

    # Load the response and display it as a chat bubble
    response = load_answer(user_input)
    chat_bubble(response, is_user=False)

# Background styling and layout enhancements
st.markdown("""
""", unsafe_allow_html=True)