Update app.py
app.py
CHANGED
@@ -301,43 +301,6 @@ class ModelRegistry:
 # Instancia global del registro
 model_registry = ModelRegistry()
 
-'''
-# Modelos de Claude disponibles
-CLAUDE_MODELS = {
-    "claude-opus-4-20250514": {
-        "name": "Claude Opus 4 (Latest)",
-        "description": "Modelo más potente para desafíos complejos",
-        "max_tokens": 4000,
-        "best_for": "Análisis muy detallados y complejos"
-    },
-    "claude-sonnet-4-20250514": {
-        "name": "Claude Sonnet 4 (Latest)",
-        "description": "Modelo inteligente y eficiente para uso cotidiano",
-        "max_tokens": 4000,
-        "best_for": "Análisis general, recomendado para la mayoría de casos"
-    },
-    "claude-3-5-haiku-20241022": {
-        "name": "Claude 3.5 Haiku (Latest)",
-        "description": "Modelo más rápido para tareas diarias",
-        "max_tokens": 4000,
-        "best_for": "Análisis rápidos y económicos"
-    },
-    "claude-3-7-sonnet-20250219": {
-        "name": "Claude 3.7 Sonnet",
-        "description": "Modelo avanzado de la serie 3.7",
-        "max_tokens": 4000,
-        "best_for": "Análisis equilibrados con alta calidad"
-    },
-    "claude-3-5-sonnet-20241022": {
-        "name": "Claude 3.5 Sonnet (Oct 2024)",
-        "description": "Excelente balance entre velocidad y capacidad",
-        "max_tokens": 4000,
-        "best_for": "Análisis rápidos y precisos"
-    }
-}
-
-'''
-
 CLAUDE_MODELS = {
     "Qwen/Qwen3-14B": {
         "name": "Qwen 3-14B",
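For context, this hunk drops the commented-out Claude catalog and keeps a single CLAUDE_MODELS dict keyed by Hugging Face model IDs. The sketch below shows the entry shape this implies and a hypothetical lookup helper; everything past the "name" field, and the get_model_config function itself, are assumptions, since the diff only shows the first lines of the new dict.

    # Sketch of the registry shape implied by the new hunk (not the full file).
    # Only the "Qwen/Qwen3-14B" key and its "name" value appear in the diff;
    # the remaining fields mirror the removed Claude entries and are assumed.
    CLAUDE_MODELS = {
        "Qwen/Qwen3-14B": {
            "name": "Qwen 3-14B",
            "description": "Open-weights general-purpose model",  # assumed
            "max_tokens": 4000,                                   # assumed
            "best_for": "Balanced, everyday analysis",            # assumed
        },
    }

    def get_model_config(model_id: str) -> dict:
        """Hypothetical helper: fetch a model entry, falling back to the default."""
        return CLAUDE_MODELS.get(model_id, CLAUDE_MODELS["Qwen/Qwen3-14B"])
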
@@ -589,13 +552,6 @@ class AIAnalyzer:
                 max_tokens=10,
                 messages=[{"role": "user", "content": f"{prompt}\n{content[:1000]}"}]
             )
-
-            #Cliente Anthropic
-            #response = self.client.messages.create(
-            #    model="claude-3-haiku-20240307",
-            #    max_tokens=10,
-            #    messages=[{"role": "user", "content": f"{prompt}\n\n{content[:1000]}"}]
-            #)
 
             #result = response.content[0].text.strip().upper()
             result = response.choices[0].message.content.strip().upper()
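The second hunk deletes the leftover Anthropic call path; the line that survives parses an OpenAI-style chat-completions response. A rough sketch of the call that would produce that response shape is below; the client construction, base_url, and model argument are assumptions, since the hunk only shows max_tokens, messages, and the response parsing.

    from openai import OpenAI

    # Assumed setup: an OpenAI-compatible endpoint stands in for self.client.
    # The base_url, api_key and model name are placeholders, not from the diff.
    client = OpenAI(base_url="https://example-endpoint/v1", api_key="...")

    prompt = "..."   # classification prompt built elsewhere in AIAnalyzer
    content = "..."  # document text being analyzed

    response = client.chat.completions.create(
        model="Qwen/Qwen3-14B",  # assumed; the hunk does not show the model arg
        max_tokens=10,
        messages=[{"role": "user", "content": f"{prompt}\n{content[:1000]}"}],
    )

    # OpenAI-style parsing kept by the commit; the removed Anthropic form was
    # response.content[0].text.strip().upper()
    result = response.choices[0].message.content.strip().upper()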