Update app.py
app.py CHANGED
@@ -17,19 +17,21 @@ import os
 app = FastAPI()
 
 # ✅ Local HF cache configuration for Hugging Face
-
+# ✅ Define an allowed cache path (inside the Hugging Face container)
+CACHE_DIR = "/app/cache"
 os.environ["HF_HOME"] = CACHE_DIR
 os.environ["TRANSFORMERS_CACHE"] = CACHE_DIR
 os.environ["HF_MODULES_CACHE"] = CACHE_DIR
 os.environ["HF_HUB_CACHE"] = CACHE_DIR
 
+
 # ✅ Local embedding model configuration (e.g. BGE / Nomic / GTE, etc.)
 MODEL_NAME = "BAAI/bge-small-en-v1.5"
 
-
-tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, cache_dir=os.getenv("HF_HOME"))
+tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, cache_dir=CACHE_DIR)
 model = AutoModel.from_pretrained(MODEL_NAME, cache_dir=CACHE_DIR)
 
+
 def get_embedding(text: str):
     with torch.no_grad():
         inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
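The change itself is small: the old revision referenced CACHE_DIR without any definition visible in this hunk, which is consistent with the Space's runtime-error status (an undefined name, or a cache path the container is not allowed to write to). The new revision defines CACHE_DIR = "/app/cache" before the environment variables use it and points the tokenizer at the same directory as the model. For the path to work at runtime it should also exist and be writable, and the cache variables should be set before transformers reads them. A minimal sketch of that setup; the os.makedirs call and the explicit import ordering are illustrative assumptions, not lines from this commit:

import os

# Assumption: create the cache directory up front so the first
# from_pretrained() call cannot fail on a missing or read-only path.
CACHE_DIR = "/app/cache"
os.makedirs(CACHE_DIR, exist_ok=True)

# Point every Hugging Face cache variable at the writable location
# before importing transformers, so the library picks them up.
os.environ["HF_HOME"] = CACHE_DIR
os.environ["TRANSFORMERS_CACHE"] = CACHE_DIR
os.environ["HF_MODULES_CACHE"] = CACHE_DIR
os.environ["HF_HUB_CACHE"] = CACHE_DIR

from transformers import AutoModel, AutoTokenizer  # imported after the env vars are set

MODEL_NAME = "BAAI/bge-small-en-v1.5"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, cache_dir=CACHE_DIR)
model = AutoModel.from_pretrained(MODEL_NAME, cache_dir=CACHE_DIR)

Passing cache_dir=CACHE_DIR explicitly makes the download location independent of whichever environment variable transformers happens to consult, which is presumably why the commit also switches the tokenizer call from os.getenv("HF_HOME") to the constant.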
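The hunk ends inside get_embedding(), right after tokenization. For reference, a typical completion for a BGE-style encoder is sketched below; the pooling choice ([CLS] token plus L2 normalization, as recommended for BAAI/bge models) and the list return format are assumptions, not code from this commit:

import torch
import torch.nn.functional as F

def get_embedding(text: str):
    # Run the encoder without tracking gradients.
    with torch.no_grad():
        inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
        outputs = model(**inputs)
        # BGE-style pooling: take the [CLS] token's hidden state ...
        embedding = outputs.last_hidden_state[:, 0]
        # ... and L2-normalize it so cosine similarity reduces to a dot product.
        embedding = F.normalize(embedding, p=2, dim=1)
    # Return a plain list so FastAPI can serialize it as JSON.
    return embedding[0].tolist()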