Update app.py
app.py CHANGED

@@ -47,7 +47,7 @@ Please provide a comprehensive analysis with deep reasoning. Think through all i
         model="deepseek-r1-distill-llama-70b",
         messages=[{"role": "user", "content": prompt}],
         temperature=0.6,
-        max_completion_tokens=
+        max_completion_tokens=4096,
         top_p=0.95,
         reasoning_format="raw"
     )
@@ -89,7 +89,7 @@ Be decisive and solution-focused."""
         messages=[{"role": "user", "content": prompt}],
         temperature=0.6,
         top_p=0.95,
-        max_completion_tokens=
+        max_completion_tokens=4096,
         reasoning_effort="default"
     )

@@ -129,7 +129,7 @@ Be extremely thorough and leave no stone unturned."""
         messages=[{"role": "user", "content": prompt}],
         temperature=0.6,
         top_p=0.95,
-        max_completion_tokens=
+        max_completion_tokens=4096,
         reasoning_format="parsed"
     )

@@ -175,7 +175,7 @@ Format your response as a well-structured final solution that leverages all thre
         model="qwen/qwen3-32b",
         messages=[{"role": "user", "content": synthesis_prompt}],
         temperature=0.7,
-        max_completion_tokens=
+        max_completion_tokens=4096,
         top_p=0.9
     )
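The commit sets a cap of 4096 generated tokens on each of the four chat-completion calls in app.py. As a rough illustration, the first updated call might look like the sketch below once the surrounding code is filled in; the client construction, the prompt text, and anything else outside the diff are assumptions, not part of the commit.

import os
from groq import Groq

# Assumed setup: app.py's actual client construction is not visible in this diff.
client = Groq(api_key=os.environ["GROQ_API_KEY"])

# Placeholder prompt; the real prompt is built elsewhere in app.py.
prompt = "Please provide a comprehensive analysis with deep reasoning."

completion = client.chat.completions.create(
    model="deepseek-r1-distill-llama-70b",
    messages=[{"role": "user", "content": prompt}],
    temperature=0.6,
    max_completion_tokens=4096,   # token cap added by this commit
    top_p=0.95,
    reasoning_format="raw",       # keep the model's reasoning inline in the reply
)

print(completion.choices[0].message.content)

The other three calls follow the same pattern, differing only in the model, the prompt variable, and whether they pass reasoning_format ("raw"/"parsed") or reasoning_effort ("default"). Without an explicit max_completion_tokens, a reasoning model can keep generating until it reaches its output limit, so pinning the value to 4096 bounds both latency and cost per call.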