Update app.py
app.py CHANGED
@@ -25,10 +25,11 @@ class GeminiLLM(LLM):
     model_name: str = "gemini-2.0-flash"
     temperature: float = 0.1
 
+
     @property
     def _llm_type(self) -> str:
         return "google-gemini-llm"
-
+
     def _make_request(self, api_key: str, prompt: str) -> requests.Response:
         url = f"https://generativelanguage.googleapis.com/v1beta/models/{self.model_name}:generateContent"
         headers = {
@@ -39,25 +40,25 @@ class GeminiLLM(LLM):
             "contents": [
                 {
                     "role": "system",
-                    "
-                    "
-
+                    "parts": [
+                        {"text": "You are an agent. Please respond concisely only with the answer, no extra explanations."}
+                    ]
                 },
                 {
                     "role": "user",
-                    "
-                    "
-
+                    "parts": [
+                        {"text": prompt}
+                    ]
                 }
             ],
             "generationConfig": {
                 "temperature": self.temperature
             }
         }
+
         return requests.post(url, headers=headers, json=data)
 
 
-
     def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
         """Sends the prompt to the Gemini API and returns the response.
         If the quota is exceeded, it retries with the alternate API key."""
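For reference, the snippet below is a minimal standalone sketch of the request body that the updated _make_request now builds: each message wraps its text in a "parts" list, which is the shape the generateContent REST endpoint expects for message content. The x-goog-api-key header, the GEMINI_API_KEY environment variable, and the sample question are illustrative assumptions, not taken from the diff (the headers dict is truncated in the hunk above); the rest of the payload mirrors the diff verbatim.

import os
import requests

# Assumptions for illustration: the env var name and the API-key header are not
# shown in the diff; the Gemini REST API commonly accepts the key via x-goog-api-key.
API_KEY = os.environ.get("GEMINI_API_KEY", "")
MODEL_NAME = "gemini-2.0-flash"
URL = f"https://generativelanguage.googleapis.com/v1beta/models/{MODEL_NAME}:generateContent"

payload = {
    "contents": [
        {
            "role": "system",
            "parts": [
                {"text": "You are an agent. Please respond concisely only with the answer, no extra explanations."}
            ],
        },
        {
            "role": "user",
            # Sample question stands in for the prompt argument of _make_request.
            "parts": [{"text": "What is 2 + 2?"}],
        },
    ],
    "generationConfig": {"temperature": 0.1},
}

response = requests.post(URL, headers={"x-goog-api-key": API_KEY}, json=payload)
print(response.status_code, response.text)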