Spaces: Runtime error

Update app.py

app.py CHANGED
@@ -30,8 +30,6 @@ elif choice == "md":
 elif choice == "py":
     st.sidebar.write(choicePrefix + "Python Code.")
 
-max_length = st.sidebar.slider("Max document length", min_value=1000, max_value=32000, value=2000, step=1000)
-
 def generate_filename(prompt, file_type):
     central = pytz.timezone('US/Central')
     safe_date_time = datetime.now(central).strftime("%m%d_%I%M")
@@ -49,12 +47,6 @@ def create_file(filename, prompt, response):
     with open(filename, 'w') as file:
         file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
 
-def truncate_document(document, length):
-    return document[:length]
-
-def divide_document(document, max_length):
-    return [document[i:i+max_length] for i in range(0, len(document), max_length)]
-
 def chat_with_model(prompt, document_section):
     model = "gpt-3.5-turbo"
     conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
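Note: with `truncate_document`, `divide_document`, and the `max_length` slider gone, uploads now reach `chat_with_model` unsplit, so a large file can exceed gpt-3.5-turbo's context window at the `openai.ChatCompletion.create` call. A minimal sketch of character-based chunking in the spirit of the deleted helper, should splitting be needed again (`MAX_CHARS` is an assumed budget, not a value from this commit):

```python
# Sketch only: split long uploads into fixed-size character slices,
# mirroring the deleted divide_document helper.
MAX_CHARS = 8000  # assumed rough proxy for the model's token limit

def divide_document(document: str, max_length: int = MAX_CHARS) -> list[str]:
    # Slice the document into max_length-character chunks.
    return [document[i:i + max_length] for i in range(0, len(document), max_length)]
```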
@@ -63,7 +55,6 @@ def chat_with_model(prompt, document_section):
     response = openai.ChatCompletion.create(model=model, messages=conversation)
     return response['choices'][0]['message']['content']
 
-
 def get_table_download_link(file_path):
     with open(file_path, 'r') as file:
         data = file.read()
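The diff skips lines 61-62 of the old file, where `conversation` presumably picks up the user's prompt and the document section before the `ChatCompletion` call shown above. A sketch of the full function under that assumption (the two appended messages are inferred, not shown in this commit):

```python
import openai  # legacy openai<1.0 client, as used elsewhere in this app

def chat_with_model(prompt, document_section):
    model = "gpt-3.5-turbo"
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    # Assumed content of the elided lines: attach the user's question and the
    # uploaded document section as user-role messages.
    conversation.append({'role': 'user', 'content': prompt})
    conversation.append({'role': 'user', 'content': document_section})
    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']
```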
@@ -81,15 +72,7 @@ def get_table_download_link(file_path):
     href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
     return href
 
-
-def CompressXML(xml_text):
-    root = ET.fromstring(xml_text)
-    for elem in list(root.iter()):
-        if isinstance(elem.tag, str) and 'Comment' in elem.tag:
-            elem.parent.remove(elem)
-    return ET.tostring(root, encoding='unicode', method="xml")
-
-def read_file_content(file,max_length):
+def read_file_content(file):
     if file.type == "application/json":
         content = json.load(file)
         return str(content)
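A note for anyone restoring the deleted `CompressXML`: `xml.etree.ElementTree` elements have no `.parent` attribute, so `elem.parent.remove(elem)` raises `AttributeError` as soon as a matching element is found, which is a plausible source of this Space's "Runtime error" status. A corrected sketch (name and approach are illustrative, not part of this commit) that strips comments through an explicit parent map:

```python
import xml.etree.ElementTree as ET

def compress_xml(xml_text: str) -> str:
    # The default parser drops comments during parsing, so opt in to
    # keeping them; otherwise there would be nothing to strip.
    parser = ET.XMLParser(target=ET.TreeBuilder(insert_comments=True))
    root = ET.fromstring(xml_text, parser=parser)
    # ElementTree has no parent pointers: build a child -> parent map up front.
    parent_map = {child: parent for parent in root.iter() for child in parent}
    for elem in list(root.iter()):
        if elem.tag is ET.Comment:  # comment nodes carry ET.Comment as their tag
            parent_map[elem].remove(elem)
    return ET.tostring(root, encoding='unicode', method="xml")
```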
@@ -99,7 +82,7 @@ def read_file_content(file,max_length):
     elif file.type == "application/xml" or file.type == "text/xml":
         tree = ET.parse(file)
         root = tree.getroot()
-        xml = CompressXML(ET.tostring(root, encoding='unicode'))
+        xml = ET.tostring(root, encoding='unicode')
     return xml
     elif file.type == "text/markdown" or file.type == "text/md":
         md = mistune.create_markdown()
@@ -111,44 +94,49 @@ def read_file_content(file,max_length):
     return ""
 
 def main():
-    [3 deleted lines truncated in the page render]
+    col1, col2, col3 = st.columns([1, 1, 1])
+
+    with col1:
+        user_prompt = st.text_area("Your question:", '', height=120)
+        uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])
 
     document_sections = deque()
     document_responses = {}
 
     if uploaded_file is not None:
-        file_content = read_file_content(uploaded_file, max_length)
-        document_sections.extend(divide_document(file_content, max_length))
+        file_content = read_file_content(uploaded_file)
+        document_sections.append(file_content)
 
     if len(document_sections) > 0:
-        [19 deleted lines truncated in the page render; only an "st." fragment survives]
+        with col2:
+            st.markdown("**Sections of the uploaded file:**")
+            for i, section in enumerate(list(document_sections)):
+                st.markdown(f"**Section {i+1}**\n{section}")
+
+        st.markdown("**Chat with the model:**")
+        for i, section in enumerate(list(document_sections)):
+            if i in document_responses:
+                st.markdown(f"**Section {i+1}**\n{document_responses[i]}")
+            else:
+                if st.button(f"Chat about Section {i+1}"):
+                    st.write('Thinking and Reasoning with your inputs...')
+                    response = chat_with_model(user_prompt, section)
+                    document_responses[i] = response
+
+        with col3:
+            st.markdown("**Responses from the model:**")
+            for i, response in enumerate(document_responses.values()):
+                st.markdown(f"**Response to Section {i+1}**\n{response}")
+                filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
+                create_file(filename, user_prompt, response)
+                st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
 
     if st.button('💬 Chat'):
         st.write('Thinking and Reasoning with your inputs...')
         response = chat_with_model(user_prompt, ''.join(list(document_sections)))
         st.write('Response:')
         st.write(response)
-
+
     filename = generate_filename(user_prompt, choice)
     create_file(filename, user_prompt, response)
     st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)
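One caveat about the new `main()`: Streamlit reruns the whole script on every widget interaction, so `document_responses = {}` is recreated on each rerun, and a response written by a "Chat about Section" button survives only for the run triggered by that click; on the next interaction the col3 column renders from an empty dict again. If responses are meant to accumulate, `st.session_state` is the usual fix; a minimal sketch (not part of this commit):

```python
import streamlit as st

# Persist per-section responses across Streamlit reruns;
# a plain dict literal is reset every time the script re-executes.
if 'document_responses' not in st.session_state:
    st.session_state['document_responses'] = {}
document_responses = st.session_state['document_responses']
```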