|
from fastapi import FastAPI, UploadFile, File |
|
from transformers import AutoModelForCausalLM, AutoTokenizer |
|
|
|
app = FastAPI()

# DeepSeek-V2-Chat publishes custom modeling/tokenizer code on the Hub, so
# from_pretrained requires trust_remote_code=True; without it the Auto*
# loaders refuse to instantiate the model and startup fails.
# NOTE(review): this model is very large — eager loading at import time
# assumes the host has sufficient memory/GPU; confirm the deployment target.
model = AutoModelForCausalLM.from_pretrained(
    "deepseek-ai/DeepSeek-V2-Chat", trust_remote_code=True
)

tokenizer = AutoTokenizer.from_pretrained(
    "deepseek-ai/DeepSeek-V2-Chat", trust_remote_code=True
)
|
|
|
@app.post("/summarization/text")
async def summarize_text(file: UploadFile = File(...)):
    """Summarize an uploaded UTF-8 text file.

    Reads the entire upload, wraps it in a French summarization prompt,
    generates a short summary with the model, and returns it as JSON:
    ``{"summary": "..."}``.

    Raises:
        UnicodeDecodeError: if the uploaded bytes are not valid UTF-8.
    """
    content = await file.read()
    text = content.decode("utf-8")

    prompt = f"Résume ce texte en 3 phrases courtes : {text}"
    inputs = tokenizer(prompt, return_tensors="pt")

    # Bug fix: max_length counts prompt + generation together, so any input
    # longer than ~100 tokens left no room for the summary (or errored).
    # max_new_tokens bounds only the generated continuation.
    outputs = model.generate(**inputs, max_new_tokens=100)

    # generate() returns prompt tokens followed by the continuation; slice
    # off the prompt so the response contains only the generated summary
    # instead of echoing the whole input text back.
    prompt_len = inputs["input_ids"].shape[1]
    summary = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)

    return {"summary": summary}