Update app.py
app.py
CHANGED
@@ -4,6 +4,7 @@ import threading
 import time
 from pathlib import Path
 from huggingface_hub import hf_hub_download
+from huggingface_hub import login

 # Try to import llama-cpp-python, fallback to instructions if not available
 try:
@@ -20,6 +21,9 @@ model_loaded = False
 # HuggingFace repository information
 HF_REPO_ID = "Axcel1/MMed-llama-alpaca-Q4_K_M-GGUF"
 HF_FILENAME = "mmed-llama-alpaca-q4_k_m.gguf"
+hf_token = os.environ.get("HF_TOKEN")
+
+login(token = hf_token)

 def find_gguf_file(directory="."):
     """Find GGUF files in the specified directory"""