awacke1 committed on
Commit
049626b
·
1 Parent(s): 7eb442c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -47
app.py CHANGED
@@ -3,7 +3,6 @@ import openai
3
  import os
4
  import base64
5
  import glob
6
- import json
7
  import mistune
8
  import pytz
9
  import math
@@ -18,30 +17,6 @@ st.set_page_config(
18
  page_title="GPT Streamlit Document Reasoner",
19
  layout="wide")
20
 
21
- # Custom CSS to increase scrollbar width
22
- st.markdown("""
23
- <style>
24
- ::-webkit-scrollbar {
25
- width: 20px;
26
- }
27
-
28
- /* Track */
29
- ::-webkit-scrollbar-track {
30
- background: #f1f1f1;
31
- }
32
-
33
- /* Handle */
34
- ::-webkit-scrollbar-thumb {
35
- background: #888;
36
- }
37
-
38
- /* Handle on hover */
39
- ::-webkit-scrollbar-thumb:hover {
40
- background: #555;
41
- }
42
- </style>
43
- """, unsafe_allow_html=True)
44
-
45
  menu = ["txt", "htm", "md", "py"]
46
  choice = st.sidebar.selectbox("Output file type:", menu)
47
  choicePrefix = "Output file type is "
@@ -54,6 +29,9 @@ elif choice == "md":
54
  elif choice == "py":
55
  st.sidebar.write(choicePrefix + "Python Code.")
56
 
 
 
 
57
  def generate_filename(prompt, file_type):
58
  central = pytz.timezone('US/Central')
59
  safe_date_time = datetime.now(central).strftime("%m%d_%I%M")
@@ -71,6 +49,9 @@ def create_file(filename, prompt, response):
71
  with open(filename, 'w') as file:
72
  file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
73
 
 
 
 
74
  def chat_with_model(prompt, document_section):
75
  model = "gpt-3.5-turbo"
76
  conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
@@ -106,7 +87,7 @@ def read_file_content(file):
106
  elif file.type == "application/xml" or file.type == "text/xml":
107
  tree = ET.parse(file)
108
  root = tree.getroot()
109
- xml = ET.tostring(root, encoding='unicode')
110
  return xml
111
  elif file.type == "text/markdown" or file.type == "text/md":
112
  md = mistune.create_markdown()
@@ -118,43 +99,37 @@ def read_file_content(file):
118
  return ""
119
 
120
  def main():
121
- col1, col2 = st.columns([1, 2])
122
-
123
- with col1:
124
- user_prompt = st.text_area("Your question:", '', height=150) # Increased height for question text box
125
- uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
126
 
127
  document_sections = deque()
128
  document_responses = {}
129
 
130
  if uploaded_file is not None:
131
  file_content = read_file_content(uploaded_file)
132
- document_sections.append(file_content)
133
-
134
- with col2:
135
- if st.button('💬 Chat'):
136
- st.write('Thinking and Reasoning with your inputs...')
137
- response = chat_with_model(user_prompt, ''.join(list(document_sections)))
138
- response_area = st.text_area('Response:', value=response, height=400)
139
- filename = generate_filename(user_prompt, choice)
140
- create_file(filename, user_prompt, response)
141
- st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
142
 
143
  if len(document_sections) > 0:
144
- for i, section in reversed(list(enumerate(list(document_sections)))):
145
  if i in document_responses:
146
- st.markdown(f"**Section {i+1} Content:**\n{section}")
147
- response_area = st.text_area(f"Section {i+1} Response:", value=document_responses[i], height=400)
148
  else:
149
- if st.button(f"Chat about Section {i+1}"):
150
- st.write('Thinking and Reasoning with your inputs...')
151
  response = chat_with_model(user_prompt, section)
152
  document_responses[i] = response
153
- response_area = st.text_area(f"Section {i+1} Response:", value=response, height=400)
154
  filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
155
  create_file(filename, user_prompt, response)
156
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
157
 
 
 
 
 
 
 
 
158
  all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
159
  for file in all_files:
160
  col1, col2 = st.sidebar.columns([4,1]) # adjust the ratio as needed
 
3
  import os
4
  import base64
5
  import glob
 
6
  import mistune
7
  import pytz
8
  import math
 
17
  page_title="GPT Streamlit Document Reasoner",
18
  layout="wide")
19
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  menu = ["txt", "htm", "md", "py"]
21
  choice = st.sidebar.selectbox("Output file type:", menu)
22
  choicePrefix = "Output file type is "
 
29
  elif choice == "py":
30
  st.sidebar.write(choicePrefix + "Python Code.")
31
 
32
+ # Create 3 columns with column 2 being twice as large
33
+ col1, col2, col3 = st.columns([1, 2, 1])
34
+
35
  def generate_filename(prompt, file_type):
36
  central = pytz.timezone('US/Central')
37
  safe_date_time = datetime.now(central).strftime("%m%d_%I%M")
 
49
  with open(filename, 'w') as file:
50
  file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
51
 
52
def divide_document(document, max_length):
    """Split *document* into consecutive chunks of at most *max_length* characters.

    Returns a list of substrings in original order; an empty document
    yields an empty list. The final chunk may be shorter than *max_length*.
    """
    chunks = []
    start = 0
    total = len(document)
    while start < total:
        chunks.append(document[start:start + max_length])
        start += max_length
    return chunks
54
+
55
  def chat_with_model(prompt, document_section):
56
  model = "gpt-3.5-turbo"
57
  conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
 
87
  elif file.type == "application/xml" or file.type == "text/xml":
88
  tree = ET.parse(file)
89
  root = tree.getroot()
90
+ xml = CompressXML(ET.tostring(root, encoding='unicode'))
91
  return xml
92
  elif file.type == "text/markdown" or file.type == "text/md":
93
  md = mistune.create_markdown()
 
99
  return ""
100
 
101
  def main():
102
+ user_prompt = col1.text_area("Your question:", '', height=150)
103
+ uploaded_file = col1.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
104
+ max_length = 4000
 
 
105
 
106
  document_sections = deque()
107
  document_responses = {}
108
 
109
  if uploaded_file is not None:
110
  file_content = read_file_content(uploaded_file)
111
+ document_sections.extend(divide_document(file_content, max_length))
 
 
 
 
 
 
 
 
 
112
 
113
  if len(document_sections) > 0:
114
+ for i, section in enumerate(list(document_sections)):
115
  if i in document_responses:
116
+ col2.text(f"Section {i+1} Response")
117
+ col2.text_area('', document_responses[i], height=200)
118
  else:
119
+ if col3.button(f"Chat about Section {i+1}"):
 
120
  response = chat_with_model(user_prompt, section)
121
  document_responses[i] = response
 
122
  filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
123
  create_file(filename, user_prompt, response)
124
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
125
 
126
+ if col3.button('💬 Chat'):
127
+ response = chat_with_model(user_prompt, ''.join(list(document_sections)))
128
+ document_responses['aggregate'] = response
129
+ filename = generate_filename(user_prompt, choice)
130
+ create_file(filename, user_prompt, response)
131
+ st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
132
+
133
  all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
134
  for file in all_files:
135
  col1, col2 = st.sidebar.columns([4,1]) # adjust the ratio as needed