awacke1 committed on
Commit
0da1c62
·
1 Parent(s): 049626b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -12
app.py CHANGED
@@ -3,6 +3,7 @@ import openai
3
  import os
4
  import base64
5
  import glob
 
6
  import mistune
7
  import pytz
8
  import math
@@ -29,8 +30,7 @@ elif choice == "md":
29
  elif choice == "py":
30
  st.sidebar.write(choicePrefix + "Python Code.")
31
 
32
- # Create 3 columns with column 2 being twice as large
33
- col1, col2, col3 = st.columns([1, 2, 1])
34
 
35
  def generate_filename(prompt, file_type):
36
  central = pytz.timezone('US/Central')
@@ -49,6 +49,9 @@ def create_file(filename, prompt, response):
49
  with open(filename, 'w') as file:
50
  file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
51
 
 
 
 
52
  def divide_document(document, max_length):
53
  return [document[i:i+max_length] for i in range(0, len(document), max_length)]
54
 
@@ -60,6 +63,7 @@ def chat_with_model(prompt, document_section):
60
  response = openai.ChatCompletion.create(model=model, messages=conversation)
61
  return response['choices'][0]['message']['content']
62
 
 
63
  def get_table_download_link(file_path):
64
  with open(file_path, 'r') as file:
65
  data = file.read()
@@ -77,7 +81,15 @@ def get_table_download_link(file_path):
77
  href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
78
  return href
79
 
80
- def read_file_content(file):
 
 
 
 
 
 
 
 
81
  if file.type == "application/json":
82
  content = json.load(file)
83
  return str(content)
@@ -99,37 +111,48 @@ def read_file_content(file):
99
  return ""
100
 
101
  def main():
102
- user_prompt = col1.text_area("Your question:", '', height=150)
103
- uploaded_file = col1.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
104
  max_length = 4000
105
 
106
  document_sections = deque()
107
  document_responses = {}
108
 
109
  if uploaded_file is not None:
110
- file_content = read_file_content(uploaded_file)
111
  document_sections.extend(divide_document(file_content, max_length))
112
 
113
  if len(document_sections) > 0:
 
 
 
 
 
114
  for i, section in enumerate(list(document_sections)):
115
  if i in document_responses:
116
- col2.text(f"Section {i+1} Response")
117
- col2.text_area('', document_responses[i], height=200)
118
  else:
119
- if col3.button(f"Chat about Section {i+1}"):
 
120
  response = chat_with_model(user_prompt, section)
 
 
121
  document_responses[i] = response
122
  filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
123
  create_file(filename, user_prompt, response)
124
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
125
 
126
- if col3.button('💬 Chat'):
 
 
127
  response = chat_with_model(user_prompt, ''.join(list(document_sections)))
128
- document_responses['aggregate'] = response
 
 
129
  filename = generate_filename(user_prompt, choice)
130
  create_file(filename, user_prompt, response)
131
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
132
-
133
  all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
134
  for file in all_files:
135
  col1, col2 = st.sidebar.columns([4,1]) # adjust the ratio as needed
 
3
  import os
4
  import base64
5
  import glob
6
+ import json
7
  import mistune
8
  import pytz
9
  import math
 
30
  elif choice == "py":
31
  st.sidebar.write(choicePrefix + "Python Code.")
32
 
33
+ max_length = st.sidebar.slider("Max document length", min_value=1000, max_value=32000, value=2000, step=1000)
 
34
 
35
  def generate_filename(prompt, file_type):
36
  central = pytz.timezone('US/Central')
 
49
  with open(filename, 'w') as file:
50
  file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
51
 
52
def truncate_document(document, length):
    """Clip *document* to at most its first *length* characters.

    Documents already within the limit are returned unchanged.
    """
    if length >= len(document):
        return document
    return document[:length]
54
+
55
def divide_document(document, max_length):
    """Break *document* into consecutive slices of at most *max_length*
    characters each; the final slice may be shorter.

    Returns a list of string sections (empty list for an empty document).
    """
    starts = range(0, len(document), max_length)
    return [document[pos:pos + max_length] for pos in starts]
57
 
 
63
  response = openai.ChatCompletion.create(model=model, messages=conversation)
64
  return response['choices'][0]['message']['content']
65
 
66
+
67
  def get_table_download_link(file_path):
68
  with open(file_path, 'r') as file:
69
  data = file.read()
 
81
  href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
82
  return href
83
 
84
+
85
def CompressXML(xml_text):
    """Re-serialize *xml_text* with comment-like elements stripped out.

    Removes every element whose tag is a string containing 'Comment'
    (the default ET parser already discards real XML comments, so this
    targets elements literally tagged e.g. <Comment>).

    Parameters:
        xml_text: str -- a well-formed XML document.

    Returns:
        str -- the document serialized back to unicode XML.

    Raises:
        xml.etree.ElementTree.ParseError -- if *xml_text* is not well-formed.
    """
    root = ET.fromstring(xml_text)
    # BUG FIX: ElementTree elements have no .parent attribute, so the
    # original `elem.parent.remove(elem)` raised AttributeError on the
    # first match. Build a child->parent map once, then remove through it.
    parent_map = {child: parent for parent in root.iter() for child in parent}
    for elem in list(root.iter()):
        if isinstance(elem.tag, str) and 'Comment' in elem.tag:
            parent = parent_map.get(elem)
            # The root itself has no parent; leave it in place.
            if parent is not None:
                parent.remove(elem)
    return ET.tostring(root, encoding='unicode', method="xml")
91
+
92
+ def read_file_content(file,max_length):
93
  if file.type == "application/json":
94
  content = json.load(file)
95
  return str(content)
 
111
  return ""
112
 
113
  def main():
114
+ user_prompt = st.text_area("Your question:", '', height=120)
115
+ uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
116
  max_length = 4000
117
 
118
  document_sections = deque()
119
  document_responses = {}
120
 
121
  if uploaded_file is not None:
122
+ file_content = read_file_content(uploaded_file, max_length)
123
  document_sections.extend(divide_document(file_content, max_length))
124
 
125
  if len(document_sections) > 0:
126
+ st.markdown("**Sections of the uploaded file:**")
127
+ for i, section in enumerate(list(document_sections)):
128
+ st.markdown(f"**Section {i+1}**\n{section}")
129
+
130
+ st.markdown("**Chat with the model:**")
131
  for i, section in enumerate(list(document_sections)):
132
  if i in document_responses:
133
+ st.markdown(f"**Section {i+1}**\n{document_responses[i]}")
 
134
  else:
135
+ if st.button(f"Chat about Section {i+1}"):
136
+ st.write('Thinking and Reasoning with your inputs...')
137
  response = chat_with_model(user_prompt, section)
138
+ st.write('Response:')
139
+ st.write(response)
140
  document_responses[i] = response
141
  filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
142
  create_file(filename, user_prompt, response)
143
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
144
 
145
+
146
+ if st.button('💬 Chat'):
147
+ st.write('Thinking and Reasoning with your inputs...')
148
  response = chat_with_model(user_prompt, ''.join(list(document_sections)))
149
+ st.write('Response:')
150
+ st.write(response)
151
+
152
  filename = generate_filename(user_prompt, choice)
153
  create_file(filename, user_prompt, response)
154
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
155
+
156
  all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
157
  for file in all_files:
158
  col1, col2 = st.sidebar.columns([4,1]) # adjust the ratio as needed