sainathBelagavi committed · verified
Commit efa753c · 1 Parent(s): 88dec7e

Update app.py

Files changed (1):
  1. app.py +11 -12
app.py CHANGED
@@ -1,7 +1,6 @@
 import streamlit as st
 from huggingface_hub import InferenceClient
 import os
-import sys
 import pickle
 
 st.title("CODEFUSSION ☄")
@@ -52,7 +51,7 @@ def reset_conversation():
     '''
     st.session_state.conversation = []
     st.session_state.messages = []
-    save_conversation_history([])
+    save_conversation_history([])  # Clear the conversation history file
     return None
 
 def load_conversation_history():
@@ -69,6 +68,14 @@ def save_conversation_history(conversation_history):
     with open(history_file, "wb") as f:
         pickle.dump(conversation_history, f)
 
+# Ensure session state is initialized
+if "conversation" not in st.session_state:
+    st.session_state.conversation = []
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+if "prev_option" not in st.session_state:
+    st.session_state.prev_option = None
+
 models = [key for key in model_links.keys()]
 selected_model = st.sidebar.selectbox("Select Model", models)
 temp_values = st.sidebar.slider('Select a temperature value', 0.0, 1.0, (0.5))
@@ -80,23 +87,16 @@ st.sidebar.image(model_info[selected_model]['logo'])
 
 st.sidebar.markdown("\*Generating the code might go slow if you are using low power resources \*")
 
-if "prev_option" not in st.session_state:
-    st.session_state.prev_option = selected_model
-
 if st.session_state.prev_option != selected_model:
     st.session_state.messages = []
     st.session_state.prev_option = selected_model
 
-if "conversation" not in st.session_state:
-    st.session_state.conversation = []
-
-reset_conversation()
-
 repo_id = model_links[selected_model]
 st.subheader(f'{selected_model}')
 
 # Load the conversation history from the file
-st.session_state.messages = load_conversation_history()
+if not st.session_state.messages:
+    st.session_state.messages = load_conversation_history()
 
 for message in st.session_state.messages:
     with st.chat_message(message["role"]):
@@ -107,7 +107,6 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, How can I help you today?"
     with st.chat_message("user"):
         st.markdown(prompt)
 
-
     st.session_state.messages.append({"role": "user", "content": prompt})
     conversation_history = [(message["role"], message["content"]) for message in st.session_state.messages]
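
For context, a minimal sketch of the pickle-backed persistence pattern this commit builds on, with the new session-state guards and the conditional load in place. The history_file path and the body of load_conversation_history() are assumptions here (only the pickle.dump write inside save_conversation_history() is visible in the hunks above), so treat it as an illustration of the pattern, not the file's actual contents.

import os
import pickle
import streamlit as st

# Assumed filename; the real path is defined outside the hunks shown above.
history_file = "conversation_history.pickle"

def load_conversation_history():
    # Assumed behaviour: return the saved history, or an empty list if none exists yet.
    if os.path.exists(history_file):
        with open(history_file, "rb") as f:
            return pickle.load(f)
    return []

def save_conversation_history(conversation_history):
    # Matches the write shown in the diff: pickle the message list to disk.
    with open(history_file, "wb") as f:
        pickle.dump(conversation_history, f)

# Guards added by this commit: create the keys once, before any widgets read them.
if "messages" not in st.session_state:
    st.session_state.messages = []
if "prev_option" not in st.session_state:
    st.session_state.prev_option = None

# Conditional load added by this commit: only read the saved history when the
# current session has no messages, so a rerun does not overwrite them.
if not st.session_state.messages:
    st.session_state.messages = load_conversation_history()

With this layout, reset_conversation() can call save_conversation_history([]) (as the second hunk now comments) to clear both the in-memory messages and the file on disk.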