Update chat.py
chat.py
@@ -84,20 +84,9 @@ retriever = vectorstore_persisted.as_retriever(
 
 def make_completion(messages: List[Message]) -> Optional[str]:
 
+    prompt = [{'role':'system', 'content': qna_system_message}] + messages
+
     try:
-        prompt = [{'role':'system', 'content': qna_system_message}] + messages
-        response = client.chat.completions.create(
-            model=qna_model,
-            messages=prompt,
-            temperature=0
-        )
-
-        prediction = response.choices[0].message.content.strip()
-    except Exception as e:
-        # Clip long messages to the last two
-        messages = messages[:-2]
-
-        prompt = [{'role':'system', 'content': qna_system_message}] + messages
 
         response = client.chat.completions.create(
             model=qna_model,
@@ -106,8 +95,7 @@ def make_completion(messages: List[Message]) -> Optional[str]:
         )
 
         prediction = response.choices[0].message.content.strip()
-
-    else:
+    except Exception as e:
         prediction = f'Sorry, please contact our hotline: 1-800-AWESOMEINSURER'
 
     return prediction
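After this change, make_completion builds the prompt once before the try block, makes a single call to the chat completions API, and falls back to the hotline message in the except handler instead of retrying with a clipped message list. The sketch below is a hypothetical usage example, not part of the commit; it assumes Message is a dict-like object with 'role' and 'content' keys and that client, qna_model, and qna_system_message are initialized elsewhere in chat.py.

# Hypothetical usage sketch of the updated make_completion.
messages = [
    {'role': 'user', 'content': 'Does my policy cover windshield damage?'}
]

prediction = make_completion(messages)

if prediction is not None:
    # Either the model's answer or the hotline fallback if the API call failed.
    print(prediction)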