Update app.py
Browse files
app.py
CHANGED
@@ -4,15 +4,15 @@ import os
|
|
4 |
|
5 |
app = FastAPI()
|
6 |
|
7 |
-
# Récupérer le token Hugging Face depuis
|
8 |
hf_token = os.getenv("yoyo")
|
9 |
if not hf_token:
|
10 |
-
raise ValueError("HF_TOKEN n’est pas défini
|
11 |
|
12 |
# Charger Llama 2 avec le token
|
13 |
model_name = "meta-llama/Llama-2-7b-chat-hf"
|
14 |
-
tokenizer = AutoTokenizer.from_pretrained(model_name,
|
15 |
-
model = AutoModelForCausalLM.from_pretrained(model_name,
|
16 |
|
17 |
@app.get("/")
|
18 |
async def root():
|
@@ -23,8 +23,8 @@ async def root():
|
|
23 |
async def summarize_text(file: UploadFile = File(...)):
|
24 |
content = await file.read()
|
25 |
text = content.decode("utf-8")
|
26 |
-
prompt = f"Summarize this text in 3 short sentences: {text}"
|
27 |
inputs = tokenizer(prompt, return_tensors="pt")
|
28 |
-
outputs = model.generate(**inputs, max_length=
|
29 |
summary = tokenizer.decode(outputs[0], skip_special_tokens=True)
|
30 |
return {"summary": summary}
|
|
|
app = FastAPI()

# Read the Hugging Face access token from the Space's secrets.
# NOTE(review): the original read env var "yoyo", but both the error message
# below and the surrounding comments refer to HF_TOKEN. Try the documented
# name first and keep "yoyo" as a backward-compatible fallback so an existing
# deployment that really uses that secret name keeps working.
hf_token = os.getenv("HF_TOKEN") or os.getenv("yoyo")
if not hf_token:
    raise ValueError("HF_TOKEN n’est pas défini dans les secrets de Hugging Face Spaces.")

# Load Llama 2 with the token (gated model: authentication is required
# to download the weights from the Hub).
model_name = "meta-llama/Llama-2-7b-chat-hf"
tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
model = AutoModelForCausalLM.from_pretrained(model_name, token=hf_token)
|
16 |
|
17 |
@app.get("/")
|
18 |
async def root():
|
|
|
async def summarize_text(file: UploadFile = File(...)):
    """Summarize an uploaded UTF-8 text file in 3 short sentences with Llama 2.

    Args:
        file: Uploaded file; its raw bytes are decoded as UTF-8 text.

    Returns:
        A dict with a single "summary" key holding the generated summary.

    Raises:
        UnicodeDecodeError: If the uploaded file is not valid UTF-8.
    """
    content = await file.read()
    text = content.decode("utf-8")
    # Llama-2-chat instruction format.
    prompt = f"[INST] Summarize this text in 3 short sentences: {text} [/INST]"
    inputs = tokenizer(prompt, return_tensors="pt")
    # max_new_tokens bounds only the generated continuation. The previous
    # max_length=150 counted the prompt tokens too, so any non-trivial input
    # left little or no room for the model to produce a summary.
    outputs = model.generate(**inputs, max_new_tokens=150, num_return_sequences=1)
    # Decode only the newly generated tokens: decoding outputs[0] whole would
    # echo the entire [INST] prompt (including the input text) back to the
    # caller instead of just the summary.
    generated_tokens = outputs[0][inputs["input_ids"].shape[1]:]
    summary = tokenizer.decode(generated_tokens, skip_special_tokens=True)
    return {"summary": summary}
|