Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -13,10 +13,15 @@ from langchain_core.language_models import LLM
|
|
13 |
from typing import Optional, List
|
14 |
import requests
|
15 |
|
|
|
|
|
|
|
|
|
16 |
class GeminiLLM(LLM):
|
17 |
"""Wrapper para usar Google Gemini como un LLM de LangChain."""
|
18 |
|
19 |
api_key: str = os.getenv("GEMINI")
|
|
|
20 |
model_name: str = "gemini-2.0-flash"
|
21 |
temperature: float = 0.1
|
22 |
|
@@ -24,15 +29,11 @@ class GeminiLLM(LLM):
|
|
24 |
def _llm_type(self) -> str:
|
25 |
return "google-gemini-llm"
|
26 |
|
27 |
-
def
|
28 |
-
"""Envía el prompt a la API de Gemini y devuelve la respuesta."""
|
29 |
-
if not self.api_key:
|
30 |
-
raise ValueError("Debes proporcionar una API Key válida de Gemini.")
|
31 |
-
|
32 |
url = f"https://generativelanguage.googleapis.com/v1beta/models/{self.model_name}:generateContent"
|
33 |
headers = {
|
34 |
"Content-Type": "application/json",
|
35 |
-
"X-goog-api-key":
|
36 |
}
|
37 |
data = {
|
38 |
"contents": [
|
@@ -48,9 +49,24 @@ class GeminiLLM(LLM):
|
|
48 |
"temperature": self.temperature
|
49 |
}
|
50 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
51 |
|
|
|
|
|
|
|
|
|
|
|
|
|
52 |
|
53 |
-
response = requests.post(url, headers=headers, json=data)
|
54 |
if response.status_code == 200:
|
55 |
result = response.json()
|
56 |
return result["candidates"][0]["content"]["parts"][0]["text"]
|
|
|
13 |
from typing import Optional, List
|
14 |
import requests
|
15 |
|
16 |
+
import os
|
17 |
+
import requests
|
18 |
+
from typing import Optional, List
|
19 |
+
|
20 |
class GeminiLLM(LLM):
|
21 |
"""Wrapper para usar Google Gemini como un LLM de LangChain."""
|
22 |
|
23 |
api_key: str = os.getenv("GEMINI")
|
24 |
+
fallback_api_key: str = os.getenv("GEMINI_API_KEY")
|
25 |
model_name: str = "gemini-2.0-flash"
|
26 |
temperature: float = 0.1
|
27 |
|
|
|
29 |
def _llm_type(self) -> str:
    """Return the identifier LangChain uses to label this LLM implementation."""
    return "google-gemini-llm"
|
31 |
|
32 |
+
def _make_request(self, api_key: str, prompt: str) -> requests.Response:
|
|
|
|
|
|
|
|
|
33 |
url = f"https://generativelanguage.googleapis.com/v1beta/models/{self.model_name}:generateContent"
|
34 |
headers = {
|
35 |
"Content-Type": "application/json",
|
36 |
+
"X-goog-api-key": api_key
|
37 |
}
|
38 |
data = {
|
39 |
"contents": [
|
|
|
49 |
"temperature": self.temperature
|
50 |
}
|
51 |
}
|
52 |
+
return requests.post(url, headers=headers, json=data)
|
53 |
+
|
54 |
+
def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
    """Send *prompt* to the Gemini API and return the generated text.

    If the primary key's quota is exhausted (HTTP 403 whose body mentions
    "quota"), retries once with the fallback API key.

    Args:
        prompt: Text sent to the model.
        stop: Accepted for LangChain LLM interface compatibility; not used here.

    Returns:
        The generated text on success, or a human-readable ``"Error ..."``
        string describing the HTTP failure.

    Raises:
        ValueError: If no primary API key is configured.
    """
    if not self.api_key:
        raise ValueError("Debes proporcionar una API Key válida de Gemini.")

    response = self._make_request(self.api_key, prompt)

    # Retry with the fallback key only on quota exhaustion of the primary key.
    if response.status_code == 403 and "quota" in response.text.lower():
        if self.fallback_api_key:
            response = self._make_request(self.fallback_api_key, prompt)
        else:
            return f"Error {response.status_code}: {response.text} (no hay API key alternativa)"

    if response.status_code == 200:
        result = response.json()
        return result["candidates"][0]["content"]["parts"][0]["text"]

    # Fix: the original fell through here and implicitly returned None for any
    # remaining failure (non-quota errors, or a failed fallback request),
    # breaking the declared -> str contract. Surface the error explicitly,
    # matching the error-string style used in the quota branch above.
    return f"Error {response.status_code}: {response.text}"
|