import streamlit as st
import openai
import os
import base64
import glob
import json
import mistune
import pytz
from datetime import datetime
from xml.etree import ElementTree as ET
from bs4 import BeautifulSoup
from collections import deque

openai.api_key = os.getenv('OPENAI_KEY')
st.set_page_config(
    page_title="GPT Streamlit Document Reasoner",
    layout="wide")

menu = ["txt", "htm", "md", "py"]
choice = st.sidebar.selectbox("Output file type:", menu)
choicePrefix = "Output file type is "
if choice == "txt":
    st.sidebar.write(choicePrefix + "Text File.")
elif choice == "htm":
    st.sidebar.write(choicePrefix + "HTML5.")
elif choice == "md":
    st.sidebar.write(choicePrefix + "Markdown.")
elif choice == "py":
    st.sidebar.write(choicePrefix + "Python Code.")

def generate_filename(prompt, file_type):
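    """Build a filesystem-safe filename from the prompt, stamped with the current US/Central time."""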
    central = pytz.timezone('US/Central')
    safe_date_time = datetime.now(central).strftime("%m%d_%I%M")  
    safe_prompt = "".join(x for x in prompt if x.isalnum())[:28]
    return f"{safe_date_time}_{safe_prompt}.{file_type}"

def create_file(filename, prompt, response):
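    """Write the prompt and response to disk in the format implied by the filename extension."""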
    if filename.endswith(".txt"):
        with open(filename, 'w') as file:
            file.write(f"Prompt:\n{prompt}\nResponse:\n{response}")
    elif filename.endswith(".htm"):
        with open(filename, 'w') as file:
            file.write(f"<h1>Prompt:</h1> <p>{prompt}</p> <h1>Response:</h1> <p>{response}</p>")
    elif filename.endswith(".md"):
        with open(filename, 'w') as file:
            file.write(f"# Prompt:\n{prompt}\n# Response:\n{response}")
    else:  # any other extension (e.g. .py) falls back to a plain-text dump so a file always exists
        with open(filename, 'w') as file:
            file.write(f"Prompt:\n{prompt}\nResponse:\n{response}")

def chat_with_model(prompt, document_section):
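    """Send the prompt plus a document section to the OpenAI ChatCompletion API and return the reply text."""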
    model = "gpt-3.5-turbo"
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.append({'role': 'user', 'content': prompt})
    conversation.append({'role': 'assistant', 'content': document_section})
    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']

def get_table_download_link(file_path):
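    """Return an HTML anchor that embeds the file contents as a base64 data URI for download."""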
    with open(file_path, 'r') as file:
        data = file.read()
    b64 = base64.b64encode(data.encode()).decode()  
    file_name = os.path.basename(file_path)
    ext = os.path.splitext(file_name)[1]  # get the file extension
    if ext == '.txt':
        mime_type = 'text/plain'
    elif ext == '.htm':
        mime_type = 'text/html'
    elif ext == '.md':
        mime_type = 'text/markdown'
    else:
        mime_type = 'application/octet-stream'  # general binary data type
    href = f'<a href="data:{mime_type};base64,{b64}" target="_blank" download="{file_name}">{file_name}</a>'
    return href

def read_file_content(file):
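    """Extract text content from an uploaded JSON, HTML, XML, Markdown, or plain-text file."""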
    if file.type == "application/json":
        content = json.load(file)
        return str(content)
    elif file.type == "text/html" or file.type == "text/htm":
        content = BeautifulSoup(file, "html.parser")
        return content.text
    elif file.type == "application/xml" or file.type == "text/xml":
        tree = ET.parse(file)
        root = tree.getroot()
        xml = ET.tostring(root, encoding='unicode')
        return xml
    elif file.type == "text/markdown" or file.type == "text/md":
        md = mistune.create_markdown()
        content = md(file.read().decode())
        return content
    elif file.type == "text/plain":
        return file.getvalue().decode()
    else:
        return ""

def main():
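    """Lay out the three-column UI (prompt input, document sections, model responses) plus sidebar file links."""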
    col1, col2, col3 = st.columns([1, 1, 1])

    with col1:
        user_prompt = st.text_area("Your question:", '', height=120)
        uploaded_file = st.file_uploader("Choose a file", type=["xml", "json", "html", "htm", "md", "txt"])

    document_sections = deque()
    document_responses = {}
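    # NOTE: section/response state lives only for the current Streamlit rerun; it is not persisted across interactions.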

    if uploaded_file is not None:
        file_content = read_file_content(uploaded_file)
        document_sections.append(file_content)

    if len(document_sections) > 0:
        with col2:
            st.markdown("**Sections of the uploaded file:**")
            for i, section in enumerate(list(document_sections)):
                st.markdown(f"**Section {i+1}**\n{section}")

            st.markdown("**Chat with the model:**")
            for i, section in enumerate(list(document_sections)):
                if i in document_responses:
                    st.markdown(f"**Section {i+1}**\n{document_responses[i]}")
                else:
                    if st.button(f"Chat about Section {i+1}"):
                        st.write('Thinking and Reasoning with your inputs...')
                        response = chat_with_model(user_prompt, section)
                        document_responses[i] = response

        with col3:
            st.markdown("**Responses from the model:**")
            for i, response in document_responses.items():  # keep the original section index for labeling
                st.markdown(f"**Response to Section {i+1}**\n{response}")
                filename = generate_filename(f"{user_prompt}_section_{i+1}", choice)
                create_file(filename, user_prompt, response)
                st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)

    if st.button('💬 Chat'):
        st.write('Thinking and Reasoning with your inputs...')
        response = chat_with_model(user_prompt, ''.join(list(document_sections)))
        st.write('Response:')
        st.write(response)

        filename = generate_filename(user_prompt, choice)
        create_file(filename, user_prompt, response)
        st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)

    all_files = glob.glob("*.txt") + glob.glob("*.htm") + glob.glob("*.md")
    for file in all_files:
        col1, col2 = st.sidebar.columns([4,1])  # adjust the ratio as needed
        with col1:
            st.markdown(get_table_download_link(file), unsafe_allow_html=True)
        with col2:
            if st.button("🗑️", key=file):
                os.remove(file)
                st.experimental_rerun()

if __name__ == "__main__":
    main()