Tim Luka Horstmann committed
Commit · a9456f8
Parent(s): ae2bc6b

Changed gemini system prompt integration
Files changed:
- app.py +11 -5
- test_gemini_integration.py +14 -5
app.py
CHANGED

@@ -162,16 +162,21 @@ async def stream_response_gemini(query, history):
         f"CV: {full_cv_text}"
     )
 
-    # Build messages for Gemini
-    messages = [
+    # Build messages for Gemini (no system role - embed instructions in first user message)
+    messages = []
 
     # Add conversation history
     for msg in history:
         role = "user" if msg["role"] == "user" else "model"
-        messages.append(types.Content(role=role, parts=[types.Part(text=msg["content"])]))
+        messages.append(types.Content(role=role, parts=[types.Part.from_text(text=msg["content"])]))
 
-    # Add current query
-
+    # Add current query with system prompt embedded
+    if not history:  # If no history, include system prompt with the first message
+        combined_query = f"{system_prompt}\n\nUser question: {query}"
+    else:
+        combined_query = query
+
+    messages.append(types.Content(role="user", parts=[types.Part.from_text(text=combined_query)]))
 
     try:
         response = gemini_client.models.generate_content_stream(
@@ -181,6 +186,7 @@ async def stream_response_gemini(query, history):
                 temperature=0.3,
                 top_p=0.7,
                 max_output_tokens=512,
+                response_mime_type="text/plain",
             )
         )
 
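Taken together, the two hunks replace the previously malformed message builder (note the old dangling "messages = [") with one that folds the system prompt into the first user turn: the contents list sent on this path carries only "user" and "model" roles, so there is no system message to attach it to. Below is a minimal standalone sketch of the resulting flow; build_messages and its parameters are illustrative names, while the types.Content / types.Part.from_text calls mirror the google-genai usage in the diff above.

# Sketch only: build_messages is an illustrative helper, not code from this commit.
from google.genai import types

def build_messages(system_prompt, query, history):
    messages = []

    # Replay prior turns; any non-"user" role becomes "model", since the
    # contents list has no system role on this call path.
    for msg in history:
        role = "user" if msg["role"] == "user" else "model"
        messages.append(
            types.Content(role=role, parts=[types.Part.from_text(text=msg["content"])])
        )

    # Only the opening turn carries the system prompt; later turns send the
    # raw query, because the prompt already sits in the stored history.
    if not history:
        combined_query = f"{system_prompt}\n\nUser question: {query}"
    else:
        combined_query = query

    messages.append(
        types.Content(role="user", parts=[types.Part.from_text(text=combined_query)])
    )
    return messages

These messages presumably feed the streaming call in the second hunk. The middle of that call sits between the two hunks, so the following is a hedged reconstruction: MODEL_ID is a placeholder, and the contents=/config= keywords are the standard google-genai signature, assumed rather than read from the diff; only the config fields shown above are certain.

# Hedged reconstruction of the call site, not the commit's exact code.
response = gemini_client.models.generate_content_stream(
    model=MODEL_ID,  # placeholder: the real model id is outside the shown hunks
    contents=messages,
    config=types.GenerateContentConfig(
        temperature=0.3,
        top_p=0.7,
        max_output_tokens=512,
        response_mime_type="text/plain",  # the field this commit adds
    ),
)
for chunk in response:
    print(chunk.text, end="")  # chunks stream in as they are generated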
test_gemini_integration.py
CHANGED

@@ -30,6 +30,10 @@ class MockTypes:
     class Part:
         def __init__(self, text):
             self.text = text
+
+        @classmethod
+        def from_text(cls, text):
+            return cls(text)
 
     class GenerateContentConfig:
         def __init__(self, temperature, top_p, max_output_tokens):
@@ -73,16 +77,21 @@ async def test_gemini_integration():
         f"CV: {full_cv_text}"
     )
 
-    # Build messages for Gemini
-    messages = [
+    # Build messages for Gemini (no system role - embed instructions in first user message)
+    messages = []
 
     # Add conversation history
     for msg in history:
         role = "user" if msg["role"] == "user" else "model"
-        messages.append(types.Content(role=role, parts=[types.Part(text=msg["content"])]))
+        messages.append(types.Content(role=role, parts=[types.Part.from_text(text=msg["content"])]))
 
-    # Add current query
-
+    # Add current query with system prompt embedded
+    if not history:  # If no history, include system prompt with the first message
+        combined_query = f"{system_prompt}\n\nUser question: {query}"
+    else:
+        combined_query = query
+
+    messages.append(types.Content(role="user", parts=[types.Part.from_text(text=combined_query)]))
 
     print(f"System prompt length: {len(system_prompt)}")
     print(f"Number of messages: {len(messages)}")
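The mock change mirrors the production switch from types.Part(text=...) to types.Part.from_text(text=...): without the added classmethod, MockTypes.Part would raise AttributeError as soon as the updated builder runs under the mock. A quick illustrative check (hypothetical snippet, not part of the test file):

# Hypothetical check: the mock now accepts the same call shape as the real SDK.
part = MockTypes.Part.from_text(text="hello")
assert part.text == "hello"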