Fix: correct max_tokens key and simplify prompt logic
summarizer.py  CHANGED  (+3 -15)
@@ -19,18 +19,7 @@ def build_prompt(text, mode, lang_mode="Otomatik", is_table=False):
 
     if is_table:
         instruction = "Aşağıdaki tabloyu analiz et ve teknik bir şekilde özetle."
-
-        if "Çevir" in lang_mode:
-            if "Türkçeye" in lang_mode:
-                instruction += " Çıktı Türkçe olsun."
-            elif "İngilizceye" in lang_mode:
-                instruction += " Çıktı İngilizce olsun."
-        elif lang_mode == "Otomatik":
-            instruction += " Uygun dilde özetle."
-
-        return f"{instruction}\n\n{text}"
-
-
+        return f"{instruction}{lang_instruction}\n\n{text}"
 
     if "Karma" in mode:
         instruction = """
@@ -53,7 +42,6 @@ Aşağıdaki metni 3 ayrı biçimde özetle:
 
     return f"{instruction}{lang_instruction}\n\nMetin:\n{text}"
 
-
 def summarize_text(text, mode, model_name="anthropic/claude-3-haiku", lang_mode="Otomatik", is_table=False):
     url = "https://openrouter.ai/api/v1/chat/completions"
     headers = {
@@ -64,9 +52,9 @@ def summarize_text(text, mode, model_name="anthropic/claude-3-haiku", lang_mode=
     payload = {
         "model": model_name,
         "messages": [
-            {"role": "user", "content": build_prompt(text, mode,lang_mode, is_table)}
+            {"role": "user", "content": build_prompt(text, mode, lang_mode, is_table)}
         ],
-        "
+        "max_tokens": 800
     }
 
     try:
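
Before this change the payload ended in a stray " instead of a "max_tokens" entry, which is a Python syntax error, so the module could not even be imported. For context, here is a minimal sketch of the request that the corrected payload produces, assuming the standard OpenRouter chat-completions flow, the requests library, and an OPENROUTER_API_KEY environment variable; this is not the Space's exact code, and the prompt text is a placeholder:

import os
import requests

# Same endpoint and payload shape as in summarize_text; values here are illustrative.
url = "https://openrouter.ai/api/v1/chat/completions"
headers = {
    "Authorization": f"Bearer {os.environ['OPENROUTER_API_KEY']}",
    "Content-Type": "application/json",
}
payload = {
    "model": "anthropic/claude-3-haiku",
    "messages": [
        {"role": "user", "content": "Aşağıdaki metni özetle: ..."}
    ],
    "max_tokens": 800,  # the key this commit corrects; caps the completion length
}
resp = requests.post(url, headers=headers, json=payload, timeout=60)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])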