awacke1 committed on
Commit
7eb442c
·
1 Parent(s): ef75150

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +39 -19
app.py CHANGED
@@ -18,6 +18,30 @@ st.set_page_config(
18
  page_title="GPT Streamlit Document Reasoner",
19
  layout="wide")
20
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  menu = ["txt", "htm", "md", "py"]
22
  choice = st.sidebar.selectbox("Output file type:", menu)
23
  choicePrefix = "Output file type is "
@@ -97,7 +121,7 @@ def main():
97
  col1, col2 = st.columns([1, 2])
98
 
99
  with col1:
100
- user_prompt = st.text_area("Your question:", '', height=120)
101
  uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
102
 
103
  document_sections = deque()
@@ -111,29 +135,25 @@ def main():
111
  if st.button('💬 Chat'):
112
  st.write('Thinking and Reasoning with your inputs...')
113
  response = chat_with_model(user_prompt, ''.join(list(document_sections)))
114
- st.write('Response:')
115
- st.write(response)
116
  filename = generate_filename(user_prompt, choice)
117
  create_file(filename, user_prompt, response)
118
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
119
 
120
  if len(document_sections) > 0:
121
- with col2:
122
- st.markdown("**Chat with the model:**")
123
- for i, section in reversed(list(enumerate(list(document_sections)))):
124
- if i in document_responses:
125
- st.markdown(f"**Section {i+1} Content:**\n{section}")
126
- st.markdown(f"**Section {i+1} Response:**\n{document_responses[i]}")
127
- else:
128
- if st.button(f"Chat about Section {i+1}"):
129
- st.write('Thinking and Reasoning with your inputs...')
130
- response = chat_with_model(user_prompt, section)
131
- document_responses[i] = response
132
- st.markdown(f"**Section {i+1} Content:**\n{section}")
133
- st.markdown(f"**Section {i+1} Response:**\n{response}")
134
- filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
135
- create_file(filename, user_prompt, response)
136
- st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
137
 
138
  all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
139
  for file in all_files:
 
18
  page_title="GPT Streamlit Document Reasoner",
19
  layout="wide")
20
 
21
+ # Custom CSS to increase scrollbar width
22
+ st.markdown("""
23
+ <style>
24
+ ::-webkit-scrollbar {
25
+ width: 20px;
26
+ }
27
+
28
+ /* Track */
29
+ ::-webkit-scrollbar-track {
30
+ background: #f1f1f1;
31
+ }
32
+
33
+ /* Handle */
34
+ ::-webkit-scrollbar-thumb {
35
+ background: #888;
36
+ }
37
+
38
+ /* Handle on hover */
39
+ ::-webkit-scrollbar-thumb:hover {
40
+ background: #555;
41
+ }
42
+ </style>
43
+ """, unsafe_allow_html=True)
44
+
45
  menu = ["txt", "htm", "md", "py"]
46
  choice = st.sidebar.selectbox("Output file type:", menu)
47
  choicePrefix = "Output file type is "
 
121
  col1, col2 = st.columns([1, 2])
122
 
123
  with col1:
124
+ user_prompt = st.text_area("Your question:", '', height=150) # Increased height for question text box
125
  uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
126
 
127
  document_sections = deque()
 
135
  if st.button('💬 Chat'):
136
  st.write('Thinking and Reasoning with your inputs...')
137
  response = chat_with_model(user_prompt, ''.join(list(document_sections)))
138
+ response_area = st.text_area('Response:', value=response, height=400)
 
139
  filename = generate_filename(user_prompt, choice)
140
  create_file(filename, user_prompt, response)
141
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
142
 
143
  if len(document_sections) > 0:
144
+ for i, section in reversed(list(enumerate(list(document_sections)))):
145
+ if i in document_responses:
146
+ st.markdown(f"**Section {i+1} Content:**\n{section}")
147
+ response_area = st.text_area(f"Section {i+1} Response:", value=document_responses[i], height=400)
148
+ else:
149
+ if st.button(f"Chat about Section {i+1}"):
150
+ st.write('Thinking and Reasoning with your inputs...')
151
+ response = chat_with_model(user_prompt, section)
152
+ document_responses[i] = response
153
+ response_area = st.text_area(f"Section {i+1} Response:", value=response, height=400)
154
+ filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
155
+ create_file(filename, user_prompt, response)
156
+ st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
 
 
 
157
 
158
  all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
159
  for file in all_files: