from transformers import pipeline
from tools.asr_tool import transcribe_audio
from tools.excel_tool import analyze_excel
from tools.search_tool import search_duckduckgo
import mimetypes


class GaiaAgent:
    """Simple rule-based agent that routes a query to an audio, Excel,
    web-search, or local-LLM handler and returns (final_answer, trace)."""

    def __init__(self):
        print("Loading model...")
        self.model = pipeline(
            "text2text-generation",
            model="MBZUAI/LaMini-Flan-T5-783M",
            tokenizer="MBZUAI/LaMini-Flan-T5-783M",
        )
        print("Model loaded.")

    def __call__(self, query):
        trace = ""
        final_answer = ""
        # Try to identify whether the query refers to a file
        if isinstance(query, str) and (query.endswith(".mp3") or query.endswith(".wav")):
            trace = "Detected audio file. Transcribing..."
            final_answer = transcribe_audio(query)
        elif isinstance(query, str) and (query.endswith(".xls") or query.endswith(".xlsx")):
            trace = "Detected Excel file. Analyzing..."
            final_answer = analyze_excel(query)
        elif isinstance(query, str) and "http" in query:
            trace = "Detected URL or web reference. Performing search..."
            final_answer = search_duckduckgo(query)
        else:
            trace = "General question. Using local model..."
            output = self.model(query, max_new_tokens=128)
            final_answer = output[0]["generated_text"].strip()
        return final_answer, trace
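

# Minimal usage sketch: illustrates how queries are routed, assuming the
# tools/ package and the MBZUAI/LaMini-Flan-T5-783M checkpoint are available
# in the Space. The example queries below are hypothetical inputs.
if __name__ == "__main__":
    agent = GaiaAgent()

    # A plain question falls through to the local text2text model.
    answer, trace = agent("What is the capital of France?")
    print(trace)
    print(answer)

    # A query that looks like a file path is routed to the matching tool,
    # e.g. agent("interview.mp3") would call transcribe_audio().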