"""Streamlit UI helpers for the Physio Assist physical therapy chat app."""

import streamlit as st

from streamlit_app import model, embedding_model
from utils import logger


def page_config():
    """Configure the page title and wide layout."""
    st.set_page_config(page_title="Physio Assist", layout="wide")
    logger.log("info", "Configuring the page layout and title")


def page_title():
    """Render the application title."""
    st.title("🏋️‍♀️🧘‍♂️ Welcome to Physio Assist: Your Physical Therapy Companion")
    logger.log("info", "Setting the title to 'Physio Assist: Your Physical Therapy Companion'")


def get_or_greet_user_name():
    """Ask for the user's name once, then greet them on subsequent reruns."""
    if "user_name" not in st.session_state:
        st.session_state.user_name = None
        logger.log("info", "user_name not found in session_state, setting to None.")

    if st.session_state.user_name is None:
        logger.log("info", "user_name is None, requesting user input.")
        user_name = st.text_input("Please let me know your name:",
                                  placeholder="Enter your name buddy")
        if user_name:
            st.session_state.user_name = user_name
            logger.log("info", f"User entered name: {user_name}. Setting session_state.user_name.")
            st.rerun()
    else:
        logger.log("info", f"User already entered a name: {st.session_state.user_name}. Displaying greeting.")
        # Note: st._bottom is a private Streamlit container (the area used by
        # st.chat_input) and may change between Streamlit versions.
        return st._bottom.subheader(f"Hello {st.session_state.user_name}! How can I assist you today?")


def display_chat():
    """Replay the stored chat history so it persists across reruns."""
    logger.log("info", "Displaying the chat history.")
    if "messages" not in st.session_state:
        st.session_state.messages = []
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])
    logger.log("info", f"Displayed {len(st.session_state.messages)} messages from the chat history.")


def handle_user_input():
    """Read a new prompt, gate it by topical similarity, and respond."""
    logger.log("info", "Waiting for user input...")
    prompt = st.chat_input("Ask me anything related to physiotherapy. E.g., 'How can I recover from a sprained ankle?'")

    if not prompt:
        return

    with st.chat_message("user"):
        st.markdown(prompt)
    # Guard in case display_chat() has not initialised the history yet.
    st.session_state.setdefault("messages", [])
    st.session_state.messages.append({"role": "user", "content": prompt})

    similarity = embedding_model.get_cosine_similarity(prompt)
    logger.log("info", f"Cosine similarity for the prompt: {similarity}")
    if similarity >= 0.2:
        with st.spinner("Processing your query..."):
            try:
                response = model.get_physiotherapy_assistant_response(prompt)
                with st.chat_message("assistant"):
                    st.markdown(response)
                st.session_state.messages.append({"role": "assistant", "content": response})
                logger.log("info", f"Assistant response: {response}")
            except Exception as e:
                st.error(f"An error occurred while processing your query: {str(e)}")
                logger.log("error", f"Error processing user query: {str(e)}")
    else:
        # Off-topic prompts get a fixed redirect instead of a model call.
        response = "Please ask me anything related to physiotherapy"
        with st.chat_message("assistant"):
            st.markdown(response)
        st.session_state.messages.append({"role": "assistant", "content": response})
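

# --- Example wiring (a minimal sketch) ---
# The entry point is not part of this module; the main() below is an assumed
# illustration of one plausible call order for the functions defined above,
# not the application's actual driver.
def main():
    page_config()
    page_title()
    get_or_greet_user_name()
    if st.session_state.get("user_name"):
        display_chat()        # replay stored history before accepting new input
        handle_user_input()   # chat_input renders at the bottom of the page


if __name__ == "__main__":
    main()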