import streamlit as st
import openai
import os
import base64
import glob
from datetime import datetime
from dotenv import load_dotenv

# Load environment variables and configure the OpenAI API key
load_dotenv()
openai.api_key = os.getenv('OPENAI_KEY')

def chat_with_model(prompts):
    """Send the collected prompts to the chat model and return the reply text."""
    model = "gpt-3.5-turbo"

    # Build the conversation: a system message followed by one user message per prompt
    conversation = [{'role': 'system', 'content': 'You are a helpful assistant.'}]
    conversation.extend([{'role': 'user', 'content': prompt} for prompt in prompts])

    response = openai.ChatCompletion.create(model=model, messages=conversation)
    return response['choices'][0]['message']['content']

def generate_filename(prompt):
    # Timestamp plus an alphanumeric-only slice of the prompt keeps the filename filesystem-safe
    safe_date_time = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    safe_prompt = "".join(x for x in prompt if x.isalnum())[:50]
    return f"{safe_date_time}_{safe_prompt}.htm"

def create_file(filename, prompt, response):
    # Write the prompt/response pair to a simple HTML file
    with open(filename, 'w') as file:
        file.write(f"<h1>Prompt:</h1> <p>{prompt}</p> <h1>Response:</h1> <p>{response}</p>")

def get_table_download_link(file_path):
    # Embed the file as a base64 data URI so Streamlit can render it as a download link
    with open(file_path, 'r') as file:
        data = file.read()
    b64 = base64.b64encode(data.encode()).decode()
    href = f'<a href="data:file/htm;base64,{b64}" download="{os.path.basename(file_path)}">{os.path.basename(file_path)}</a>'
    return href

def main():
    st.title("Chat with AI")

    # Pre-defined prompts sent along with the user's question
    prompts = ["How's the weather?", 'Tell me a joke.', 'What is the meaning of life?']

    # User prompt input
    user_prompt = st.text_input("Your question:", '')

    if user_prompt:
        prompts.append(user_prompt)

    if st.button('Chat'):
        st.write('Chatting with GPT-3.5...')
        response = chat_with_model(prompts)
        st.write('Response:')
        st.write(response)

        # Save the exchange to an .htm file and offer it as a download link
        filename = generate_filename(user_prompt)
        create_file(filename, user_prompt, response)

        st.markdown(get_table_download_link(filename), unsafe_allow_html=True)

    # Offer download links for all previously saved conversations
    htm_files = glob.glob("*.htm")
    for file in htm_files:
        st.markdown(get_table_download_link(file), unsafe_allow_html=True)

if __name__ == "__main__":
    main()