import os
import gradio as gr

from llama_index import GPTSimpleVectorIndex, SimpleDirectoryReader, ServiceContext, LLMPredictor
from langchain.chat_models import ChatOpenAI
#from llama_index.llm_predictor.chatgpt import ChatGPTLLMPredictor  # only needed for the commented-out ChatGPTLLMPredictor below
import huggingface_hub
from huggingface_hub import Repository
from datetime import datetime
import csv

# Hugging Face dataset repo and file used to persist the generated index
DATASET_REPO_URL = "https://huggingface.co/datasets/diazcalvi/kionlinde"  # "https://huggingface.co/datasets/julien-c/persistent-space-dataset"
DATA_FILENAME = "kion.json"
DATA_FILE = os.path.join("data", DATA_FILENAME)

HF_TOKEN = os.environ.get("HF_TOKEN")
print("is none?", HF_TOKEN is None)

print("hfh", huggingface_hub.__version__)



#os.system("git config --global user.name \"Carlos Diaz\"")
#os.system("git config --global user.email \"[email protected]\"")


##repo = Repository(
#    local_dir="data", clone_from=DATASET_REPO_URL, use_auth_token=HF_TOKEN
#)


index_name = "./data/kion.json"
documents_folder = "./documents"
#@st.experimental_memo
#@st.cache_resource
def initialize_index(index_name, documents_folder):
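    """Load the vector index from disk if it already exists; otherwise build it
    from the documents folder and save it to both `index_name` and `DATA_FILE`
    (which point to the same ./data/kion.json here)."""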
    #llm_predictor = ChatGPTLLMPredictor()
    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo"))  # alternative: model_name="text-davinci-003"
    
    service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor)
    if os.path.exists(index_name):
        index = GPTSimpleVectorIndex.load_from_disk(index_name)
    else:
        documents = SimpleDirectoryReader(documents_folder).load_data()
        index = GPTSimpleVectorIndex.from_documents(documents)
        index.save_to_disk(index_name)
        print(DATA_FILE)
        index.save_to_disk(DATA_FILE)

    return index

#@st.experimental_memo
#@st.cache_data(max_entries=200, persist=True)
def query_index(_index, query_text):
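    """Query the index with `query_text` and return the response as a string."""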
    response = _index.query(query_text)
    return str(response)

def generate_html() -> str:
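    """Render the stored messages in DATA_FILE as a simple HTML chat log.

    Assumes DATA_FILE holds CSV rows with 'name' and 'message' columns; note that
    initialize_index() also writes the serialized index to the same path.
    """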
    with open(DATA_FILE) as csvfile:
        reader = csv.DictReader(csvfile)
        rows = []
        for row in reader:
            rows.append(row)
        rows.reverse()
        if len(rows) == 0:
            return "no messages yet"
        else:
            html = "<div class='chatbot'>"
            for row in rows:
                html += "<div>"
                html += f"<span>{row['name']}</span>"
                html += f"<span class='message'>{row['message']}</span>"
                html += "</div>"
            html += "</div>"
            return html


def store_message(name: str, message: str):
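    """Append a (name, message, time) CSV row to DATA_FILE and push it to the dataset repo.

    Returns the commit URL, or an empty string if nothing was stored.
    """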
    commit_url = ""
    if name and message:
        print(DATA_FILE)
        print(DATA_FILENAME)
        print(DATASET_REPO_URL)
        with open(DATA_FILE, "a") as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=["name", "message", "time"])
            writer.writerow(
                {"name": name, "message": message, "time": str(datetime.now())}
            )
        # NOTE: `repo` is only defined if the Repository clone above is uncommented.
        commit_url = repo.push_to_hub()
        print(commit_url)

    return commit_url #generate_html()



def greet(text):
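    """Prefix the question with a KION equipment expert instruction and query the index."""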
    response = query_index(index, "Act as a KION equipment expert:" + text)
    return response




# Read the OpenAI API key and build the index once at startup.
index = None
api_key = os.environ.get("OPENAI_API_KEY")  # st.text_input("Enter your OpenAI API key here:", type="password")
if api_key:
    os.environ['OPENAI_API_KEY'] = api_key
    index = initialize_index(index_name, documents_folder)


if index is None:
    st.warning("Please enter your api key first.")



# Wrap greet() in a Gradio interface; the hosted Space also serves it as a REST API.
gradio_interface = gr.Interface(
  fn=greet,
  inputs="text",
  outputs="text",
  examples=[
    ["What is the track width of the P30 (b11 mm)?"],
    ["What is the acceleration of the P30 (km/h)?"]
  ],
  title="REST API with Gradio and Huggingface Spaces",
  description="This is a demo of how to build an AI powered REST API with Gradio and Huggingface Spaces – for free! Based on [this article](https://www.tomsoderlund.com/ai/building-ai-powered-rest-api). See the **Use via API** link at the bottom of this page.",
  article="© Tom Söderlund 2022"
)
gradio_interface.launch()
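
# Example client call (a rough sketch, not part of this app): a Space running this
# file exposes the function over HTTP. With Gradio 3.x the default JSON endpoint is
# /run/predict (the exact path may differ by Gradio version), and the Space URL
# below is a placeholder.
#
#   import requests
#
#   resp = requests.post(
#       "https://<your-space>.hf.space/run/predict",
#       json={"data": ["What is the track width of the P30 (b11 mm)?"]},
#   )
#   print(resp.json()["data"][0])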