Update app.py
app.py CHANGED
@@ -13,14 +13,12 @@ print("OpenAI client initialized.")
 
 def respond(
     message,
-    history: list[tuple[str, str]],
-    system_message
+    history: list[tuple[str, str]]
 ):
     print(f"Received message: {message}")
     print(f"History: {history}")
-    print(f"System message: {system_message}")
 
-    messages = [
+    messages = []
 
     for val in history:
         if val[0]:
@@ -30,7 +28,7 @@ def respond(
 
     messages.append({"role": "user", "content": message})
 
-    model_to_use = "meta-llama/Llama-3.
+    model_to_use = "meta-llama/Llama-3.1-8B-Instruct"
 
     response = ""
 
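For context, the commit drops the `system_message` parameter (and its print statement), starts `messages` as an empty list, and pins the model to `meta-llama/Llama-3.1-8B-Instruct`. Below is a minimal sketch of what the updated `respond` function and its surroundings might look like after this change. The diff does not show the client setup or the UI wiring, so the `client` construction (base URL, `HF_TOKEN`), the streaming chat-completions call, and the `gr.ChatInterface` hookup are assumptions for illustration, not the Space's actual code.

```python
# Sketch of app.py after this commit. Assumptions: the OpenAI-compatible
# client setup and the Gradio ChatInterface wiring are not in the diff and
# are reconstructed here as one plausible shape.
import os

import gradio as gr
from openai import OpenAI

# Assumed setup; the diff only shows "OpenAI client initialized." being printed.
client = OpenAI(
    base_url="https://router.huggingface.co/v1",  # hypothetical endpoint
    api_key=os.environ.get("HF_TOKEN"),           # hypothetical secret name
)
print("OpenAI client initialized.")


def respond(
    message,
    history: list[tuple[str, str]]
):
    print(f"Received message: {message}")
    print(f"History: {history}")

    messages = []

    # Replay prior (user, assistant) turns as OpenAI-style chat messages.
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    messages.append({"role": "user", "content": message})

    model_to_use = "meta-llama/Llama-3.1-8B-Instruct"

    response = ""

    # Stream tokens back to the UI as they arrive (assumed streaming call).
    for chunk in client.chat.completions.create(
        model=model_to_use,
        messages=messages,
        stream=True,
    ):
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response


# Assumed front end: a plain ChatInterface with no system-message textbox,
# consistent with the removal of the system_message parameter above.
demo = gr.ChatInterface(respond)

if __name__ == "__main__":
    demo.launch()
```

With the system-message input gone, `respond(message, history)` matches Gradio's default chat signature, so no extra input components are needed; the model choice is now hard-coded rather than derived from user input.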