Commit ba30e63
Parent(s): c2cc9d4
Upload GradioChatbot_with_ChatHistory.py
GradioChatbot_with_ChatHistory.py (ADDED)
@@ -0,0 +1,64 @@
import gradio as gr
from gpt_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain import OpenAI
import os

# Folder containing the documents to index
directory_path = r'C:\Users\paddy\Open_AI\Data'

# Set your OpenAI API key here
os.environ["OPENAI_API_KEY"] = ''

def construct_index():
    # set maximum input size
    max_input_size = 4096
    # set number of output tokens
    num_outputs = 256
    # set maximum chunk overlap
    max_chunk_overlap = 0
    # set chunk size limit
    chunk_size_limit = 600

    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="gpt-3.5-turbo", max_tokens=num_outputs))

    documents = SimpleDirectoryReader(directory_path).load_data()

    index = GPTSimpleVectorIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)

    index.save_to_disk('index.json')

    return index

def chatbot_response(query_input, chat_history=None):
    if chat_history is None:
        chat_history = []
    # NOTE: this rebuilds (and re-embeds) the whole index on every query
    index = construct_index()
    response = index.query(query_input, response_mode="compact")
    chat_history.append(("You:\n", query_input))
    chat_history.append(("Bot:", response.response))
    # Render up to the last eight history entries; the newest bot reply is
    # excluded here because it is appended separately below
    chat_history_text = "\n\n".join(["{}{}".format(item[0], item[1]) for item in chat_history[-9:-1]])
    response_text = f"{chat_history_text}\n\nBot:{response.response}"
    return response_text

chat_history = []
query_input = gr.Textbox(label="What do you want to ask the bot?")
output_text = gr.Textbox(label="Bot Response and Chat History")


def get_chatbot_response(query_input):
    global chat_history
    response_text = chatbot_response(query_input, chat_history)
    # chat_history = chat_history[-10:-1]
    return response_text

interface = gr.Interface(
    fn=get_chatbot_response,
    inputs=query_input,
    outputs=output_text,
    title="Chatbot",
    description="Ask the bot any question and it will give you a response.",
    examples=[
        "What are some popular tourist destinations in Europe?",
        "What are some famous landmarks in Paris?",
        "What can visitors do in Rome, Italy?",
        "What are some must-see sights in Rome?",
    ],
)

interface.launch()
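One design note: chatbot_response() calls construct_index() on every query, so the source documents are re-read and re-embedded each time. A minimal sketch of one way to cache the index between queries, assuming the same gpt_index version (load_from_disk is the counterpart of the save_to_disk call above; the get_index helper name is ours, not from the commit):

    import os
    from gpt_index import GPTSimpleVectorIndex

    _index = None  # built or loaded at most once per process

    def get_index():
        global _index
        if _index is None:
            if os.path.exists('index.json'):
                # Reuse the index that construct_index() previously saved;
                # depending on the gpt_index version, llm_predictor and
                # prompt_helper may need to be passed here again.
                _index = GPTSimpleVectorIndex.load_from_disk('index.json')
            else:
                _index = construct_index()
        return _index

With that in place, chatbot_response() would call get_index() instead of construct_index(), so each query pays for a single embedding lookup rather than a full re-index. Running the script as-is starts the UI on Gradio's default local URL (http://127.0.0.1:7860); interface.launch(share=True) would additionally create a temporary public link.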