Fix: Add max_tokens to avoid token limit error
summarizer.py  CHANGED  (+8 -4)
@@ -7,7 +7,10 @@ api_key = os.getenv("OPENROUTER_API_KEY")
 if not api_key or not api_key.strip():
     raise RuntimeError("❌ OPENROUTER_API_KEY bulunamadı. Hugging Face Secrets kısmına eklenmeli.")
 
-def build_prompt(text, mode, lang_mode="Otomatik"):
+def build_prompt(text, mode, lang_mode="Otomatik", is_table=False):
+    if is_table:
+        return f"Aşağıdaki tabloyu analiz et ve teknik bir şekilde özetle:\n\n{text}"
+
     if "Karma" in mode:
         instruction = """
 Aşağıdaki metni 3 ayrı biçimde özetle:
@@ -37,7 +40,7 @@ Aşağıdaki metni 3 ayrı biçimde özetle:
 
     return f"{instruction}\n\nMetin:\n{text}"
 
-def summarize_text(text, mode, model_name="anthropic/claude-3-haiku", lang_mode="Otomatik"):
+def summarize_text(text, mode, model_name="anthropic/claude-3-haiku", lang_mode="Otomatik", is_table=False):
     url = "https://openrouter.ai/api/v1/chat/completions"
     headers = {
         "Authorization": f"Bearer {api_key.strip()}",
@@ -47,8 +50,9 @@ def summarize_text(text, mode, model_name="anthropic/claude-3-haiku", lang_mode=
     payload = {
         "model": model_name,
         "messages": [
-            {"role": "user", "content": build_prompt(text, mode,lang_mode)}
-        ]
+            {"role": "user", "content": build_prompt(text, mode, lang_mode, is_table)}
+        ],
+        "max_tokens": 2000
     }
 
     try:
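For orientation, the sketch below is not part of the Space's code; it is a minimal, self-contained illustration of the request shape this commit produces: an OpenAI-style chat completions call to the OpenRouter endpoint with a max_tokens cap, plus the new table-specific prompt branch. The function name summarize_demo is hypothetical, the endpoint URL and default model come from the diff, and the response is assumed to follow the OpenAI-compatible choices/message/content layout. The Turkish strings in the diff translate roughly as "Analyze the table below and summarize it technically", "Summarize the text below in 3 separate formats", and "OPENROUTER_API_KEY not found. It must be added under Hugging Face Secrets."

# Minimal sketch (not the Space's actual module): mirrors the patched payload.
import os
import requests

API_URL = "https://openrouter.ai/api/v1/chat/completions"

def summarize_demo(text: str, is_table: bool = False) -> str:
    api_key = os.getenv("OPENROUTER_API_KEY")
    if not api_key or not api_key.strip():
        raise RuntimeError("OPENROUTER_API_KEY is not set.")

    # Same idea as build_prompt(..., is_table=...): tables get a dedicated prompt.
    if is_table:
        prompt = f"Analyze the following table and summarize it technically:\n\n{text}"
    else:
        prompt = f"Summarize the following text:\n\n{text}"

    payload = {
        "model": "anthropic/claude-3-haiku",
        "messages": [{"role": "user", "content": prompt}],
        # The cap added by this commit: without it a long input can push the
        # completion past the model's limit and the request fails.
        "max_tokens": 2000,
    }
    headers = {"Authorization": f"Bearer {api_key.strip()}"}

    resp = requests.post(API_URL, headers=headers, json=payload, timeout=60)
    resp.raise_for_status()
    # Assumes OpenRouter's OpenAI-compatible response shape.
    return resp.json()["choices"][0]["message"]["content"]

if __name__ == "__main__":
    print(summarize_demo("| yıl | gelir |\n| 2023 | 10M |\n| 2024 | 14M |", is_table=True))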