Update app.py
app.py CHANGED
@@ -45,18 +45,18 @@ def get_system_tokens(model):
     return get_message_tokens(model, **system_message)


-repo_name = "
-model_name = "openbuddy-
+repo_name = "nold/openbuddy-gemma-7b-v19.1-4k-GGUF"
+model_name = "openbuddy-gemma-7b-v19.1-4k_Q5_K_M.gguf"

 snapshot_download(repo_id=repo_name, local_dir=".", allow_patterns=model_name)

 model = Llama(
     model_path=model_name,
-    n_ctx=
+    n_ctx=4000,
     n_parts=1,
 )

-max_new_tokens =
+max_new_tokens = 2500

 def user(message, history):
     new_history = history + [[message, None]]