awacke1 committed
Commit ef75150 · 1 Parent(s): 0bf5e04

Update app.py

Files changed (1)
  1. app.py +14 -14
app.py CHANGED
@@ -107,34 +107,34 @@ def main():
         file_content = read_file_content(uploaded_file)
         document_sections.append(file_content)
 
+    with col2:
+        if st.button('💬 Chat'):
+            st.write('Thinking and Reasoning with your inputs...')
+            response = chat_with_model(user_prompt, ''.join(list(document_sections)))
+            st.write('Response:')
+            st.write(response)
+            filename = generate_filename(user_prompt, choice)
+            create_file(filename, user_prompt, response)
+            st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
+
     if len(document_sections) > 0:
         with col2:
             st.markdown("**Chat with the model:**")
-            for i, section in enumerate(list(document_sections)):
+            for i, section in reversed(list(enumerate(list(document_sections)))):
                 if i in document_responses:
-                    st.markdown(f"**Section {i+1} Response:**\n{document_responses[i]}")
                     st.markdown(f"**Section {i+1} Content:**\n{section}")
+                    st.markdown(f"**Section {i+1} Response:**\n{document_responses[i]}")
                 else:
                     if st.button(f"Chat about Section {i+1}"):
                         st.write('Thinking and Reasoning with your inputs...')
                         response = chat_with_model(user_prompt, section)
                         document_responses[i] = response
-                        st.markdown(f"**Section {i+1} Response:**\n{response}")
                         st.markdown(f"**Section {i+1} Content:**\n{section}")
+                        st.markdown(f"**Section {i+1} Response:**\n{response}")
                         filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
                         create_file(filename, user_prompt, response)
                         st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
 
-    if st.button('💬 Chat'):
-        st.write('Thinking and Reasoning with your inputs...')
-        response = chat_with_model(user_prompt, ''.join(list(document_sections)))
-        st.write('Response:')
-        st.write(response)
-
-        filename = generate_filename(user_prompt, choice)
-        create_file(filename, user_prompt, response)
-        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
-
     all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
     for file in all_files:
         col1, col2 = st.sidebar.columns([4,1])  # adjust the ratio as needed
@@ -146,4 +146,4 @@ def main():
                 st.experimental_rerun()
 
 if __name__ == "__main__":
-    main()
+    main()
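
For context, here is a minimal, self-contained sketch of the layout the new code converges on: the whole-document 💬 Chat button and the per-section loop both sit in col2, and the reversed(list(enumerate(...))) walk renders the most recent section first. The chat_with_model stub, the "responses" key in st.session_state, and the uploader handling are assumptions for illustration only; the real app.py calls the OpenAI API, keeps a plain document_responses dict, and also writes each response to a downloadable file via generate_filename / create_file / get_table_download_link.

```python
# Minimal sketch of the pattern in this commit (assumed names, stubbed model call).
import streamlit as st

def chat_with_model(prompt: str, context: str) -> str:
    # Stand-in for the app's OpenAI call.
    return f"(model reply to '{prompt}' over {len(context)} chars of context)"

st.title("Per-section chat sketch")

# Cache responses across reruns; the app itself rebuilds a plain
# document_responses dict on every run.
if "responses" not in st.session_state:
    st.session_state.responses = {}

user_prompt = st.text_area("Prompt", "Summarize this section.")
uploaded = st.file_uploader("Add a document section", type=["txt", "md"],
                            accept_multiple_files=True)
document_sections = [f.read().decode("utf-8", errors="ignore") for f in (uploaded or [])]

col1, col2 = st.columns([1, 2])

with col2:
    # Whole-document chat button, grouped with the other chat controls.
    if st.button("💬 Chat"):
        st.write("Thinking and Reasoning with your inputs...")
        st.write(chat_with_model(user_prompt, "".join(document_sections)))

    if document_sections:
        st.markdown("**Chat with the model:**")
        # reversed(...) walks sections newest-first, so the latest upload
        # and its response appear at the top of the column.
        for i, section in reversed(list(enumerate(document_sections))):
            if i in st.session_state.responses:
                st.markdown(f"**Section {i+1} Content:**\n{section}")
                st.markdown(f"**Section {i+1} Response:**\n{st.session_state.responses[i]}")
            elif st.button(f"Chat about Section {i+1}"):
                response = chat_with_model(user_prompt, section)
                st.session_state.responses[i] = response
                st.markdown(f"**Section {i+1} Content:**\n{section}")
                st.markdown(f"**Section {i+1} Response:**\n{response}")
```

The st.session_state cache is the one deliberate deviation in this sketch: Streamlit reruns the whole script on every button click, so a dict created inside main() starts empty on each rerun, whereas session state lets previously answered sections keep showing their responses.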