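# Gradio chatbot over a local document folder.
# Builds a GPTSimpleVectorIndex (gpt_index / early LlamaIndex API) from the files in
# directory_path, saves it to index.json, and answers questions about those documents
# through a simple Gradio interface.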
import os

import gradio as gr
from gpt_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain import OpenAI

# Folder containing the documents to index.
directory_path = r'C:\Users\paddy\Open_AI\Data'

# OpenAI API key (left blank here; fill in before running).
os.environ["OPENAI_API_KEY"] = ''

def construct_index():
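    """Build a vector index over the documents in directory_path and save it to index.json."""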
    # set maximum input size
    max_input_size = 4096
    # set number of output tokens
    num_outputs = 256
    # set maximum chunk overlap
    max_chunk_overlap = 0
    # set chunk size limit
    chunk_size_limit = 600

    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="gpt-3.5-turbo", max_tokens=num_outputs))

    documents = SimpleDirectoryReader(directory_path).load_data()

    index = GPTSimpleVectorIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)

    index.save_to_disk('index.json')

    return index

def chatbot_response(query_input, chat_history=[]):
    # Reuse the saved index when it exists; rebuilding it on every query would
    # re-embed all of the documents each time.
    if os.path.exists('index.json'):
        index = GPTSimpleVectorIndex.load_from_disk('index.json')
    else:
        index = construct_index()
    response = index.query(query_input, response_mode="compact")
    chat_history.append(("You:\n", query_input))
    chat_history.append(("Bot:", response.response))
    # Show the most recent turns (excluding the reply just appended, which is
    # added separately below) followed by the latest answer.
    chat_history_text = "\n\n".join("{}{}".format(speaker, text) for speaker, text in chat_history[-9:-1])
    response_text = f"{chat_history_text}\n\nBot: {response.response}"
    return response_text

chat_history = []
query_input = gr.Textbox(label="What do you want to ask the bot?")
output_text = gr.Textbox(label="Bot Response and Chat History")


def get_chatbot_response(query_input):
    global chat_history
    response_text = chatbot_response(query_input, chat_history)
    return response_text

interface = gr.Interface(
    fn=get_chatbot_response,
    inputs=query_input,
    outputs=output_text,
    title="Chatbot",
    description="Ask the bot any question and it will give you a response.",
    examples=[
        "What are some popular tourist destinations in Europe?",
        "What are some famous landmarks in Paris?",
        "What can visitors do in Rome, Italy?",
        "What are some must-see sights in Rome?",
    ],
)

interface.launch()
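# Note: launch() serves the app locally (by default at http://127.0.0.1:7860);
# passing share=True to launch() would also create a temporary public Gradio link.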