File size: 5,072 Bytes
a630adb d94bdb4 369f31d d94bdb4 369f31d d94bdb4 1b3009e 369f31d d94bdb4 1b3009e d94bdb4 1b3009e d94bdb4 369f31d 1b3009e d94bdb4 1b3009e 369f31d 1b3009e 369f31d d94bdb4 1b3009e d94bdb4 1b3009e d94bdb4 1b3009e 369f31d 1b3009e 369f31d d94bdb4 1b3009e a630adb |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 |
"""
import os
import keyfile
import warnings
import streamlit as st
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.schema import HumanMessage, SystemMessage, AIMessage
# Ignore warnings
warnings.filterwarnings("ignore")
# Streamlit settings
st.set_page_config(page_title="Magical Healer")
st.header("Welcome, What help do you need?")
# Initialize session state for messages
if "sessionMessages" not in st.session_state:
st.session_state["sessionMessages"] = [
SystemMessage(content="You are a medieval magical healer known for your peculiar sarcasm")
]
# Set Google API key
os.environ["GOOGLE_API_KEY"] = keyfile.GOOGLEKEY
# Initialize the model
llm = ChatGoogleGenerativeAI(
model="gemini-1.5-pro",
temperature=0.7,
convert_system_message_to_human=True
)
# Response function
def load_answer(question):
# Add user question to the message history
st.session_state.sessionMessages.append(HumanMessage(content=question))
# Get AI's response
assistant_answer = llm.invoke(st.session_state.sessionMessages)
# Append AI's answer to the session messages
if isinstance(assistant_answer, AIMessage):
st.session_state.sessionMessages.append(assistant_answer)
return assistant_answer.content
else:
st.session_state.sessionMessages.append(AIMessage(content=assistant_answer))
return assistant_answer
# Capture user input
def get_text():
input_text = st.text_input("You: ", key="input")
return str(input_text)
# Main implementation
user_input = get_text()
submit = st.button("Generate")
if submit and user_input:
response = load_answer(user_input)
st.subheader("Answer:")
st.write(response)
"""
import os
import keyfile
import warnings
import streamlit as st
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.schema import HumanMessage, SystemMessage, AIMessage
from streamlit_extras.add_vertical_space import add_vertical_space
# Silence library deprecation/user warnings so they don't clutter the Streamlit UI.
warnings.filterwarnings("ignore")

# Streamlit page settings — set_page_config must be the first Streamlit call.
# NOTE(review): the strings below ("πΏ", "π§ββοΈ", ...) look like UTF-8 emoji
# mis-decoded as Latin-1 (mojibake); confirm the intended emoji and re-save
# the file as UTF-8.
st.set_page_config(page_title="πΏ Magical Healer", page_icon="π§ββοΈ", layout="centered")
st.markdown("<h1 style='text-align: center; color: #4B0082;'>Welcome to the Magical Healer π§ββοΈπΏ</h1>", unsafe_allow_html=True)
st.write("### How can I assist with your ailments or worries? π§ͺβ¨")

# Cosmetic spacing between the header and the input area.
add_vertical_space(2)

# Seed the conversation history once per browser session; reruns of the
# script reuse the same st.session_state entry.
if "sessionMessages" not in st.session_state:
    st.session_state["sessionMessages"] = [
        SystemMessage(content="You are a medieval magical healer known for your peculiar sarcasm.")
    ]

# Expose the Gemini API key (read from the local keyfile module) to the
# langchain-google-genai client via the environment.
os.environ["GOOGLE_API_KEY"] = keyfile.GOOGLEKEY

# Chat model handle. convert_system_message_to_human folds the SystemMessage
# into a human turn for models that reject system roles.
llm = ChatGoogleGenerativeAI(
    model="gemini-1.5-pro",
    temperature=0.7,
    convert_system_message_to_human=True
)
# Define a function to create chat bubbles
def chat_bubble(message, is_user=True):
    """Render *message* as a left/right-aligned HTML chat bubble.

    Args:
        message: Text shown inside the bubble.
            NOTE(review): interpolated into raw HTML with
            unsafe_allow_html=True, so markup in user input is rendered
            as-is — potential HTML injection; consider html.escape().
        is_user: True -> right-aligned light-blue bubble (user turn);
            False -> left-aligned lavender bubble (healer turn).
    """
    align = 'right' if is_user else 'left'
    color = '#ADD8E6' if is_user else '#E6E6FA'
    # Bug fix: the original wrote "'25px' if is_user else '25px'" — both
    # branches identical, so the radius is simply a constant.
    border_radius = '25px'
    st.markdown(f"""
    <div style="text-align: {align}; padding: 10px;">
        <span style="display: inline-block; padding: 10px; background-color: {color}; color: black;
        border-radius: {border_radius}; max-width: 70%;">
            {message}
        </span>
    </div>
    """, unsafe_allow_html=True)
# Response function
def load_answer(question):
    """Record *question*, query the model with the full history, return the reply text.

    The entire conversation (system prompt + all prior turns) stored in
    st.session_state.sessionMessages is sent so replies stay in character
    and in context, and the model's reply is appended back to the history.

    Args:
        question: The user's message as a string.

    Returns:
        The assistant's reply content (a string for AIMessage responses).
    """
    # Add the user's turn to the shared history.
    st.session_state.sessionMessages.append(HumanMessage(content=question))

    # ChatGoogleGenerativeAI.invoke on a message list normally returns an AIMessage.
    assistant_answer = llm.invoke(st.session_state.sessionMessages)

    if isinstance(assistant_answer, AIMessage):
        st.session_state.sessionMessages.append(assistant_answer)
        return assistant_answer.content

    # Defensive fallback for non-AIMessage results. Bug fix: the original
    # wrapped the raw object into AIMessage(content=...) and returned it
    # unchanged; normalize message-like objects to their .content text so
    # both the stored history and the returned value are plain text.
    text = getattr(assistant_answer, "content", assistant_answer)
    st.session_state.sessionMessages.append(AIMessage(content=text))
    return text
# Capture user input
def get_text():
    """Render the chat text box and return whatever the user typed, as a str."""
    return str(st.text_input("You: ", key="input", placeholder="Type your question here..."))
# Main flow: read the input box, and on button press echo the question and
# the model's answer as chat bubbles. Runs top-to-bottom on every rerun.
user_input = get_text()
submit = st.button("π Get a Magical Answer π")

# Only act when the button was clicked AND the input box is non-empty.
if submit and user_input:
    # Echo the user's question on the right.
    chat_bubble(user_input, is_user=True)
    # Query the model (also appends both turns to session history) and
    # show the reply on the left.
    response = load_answer(user_input)
    chat_bubble(response, is_user=False)
# Background styling and layout enhancements
st.markdown("""
<style>
.stApp {
background: linear-gradient(to right, #FFEFBA, #FFFFFF);
color: #4B0082;
font-family: Arial, sans-serif;
}
input[type="text"] {
padding: 8px;
border: 2px solid #4B0082;
border-radius: 15px;
outline: none;
}
button {
background-color: #4B0082;
color: white;
border-radius: 15px;
}
</style>
""", unsafe_allow_html=True)
|