Spaces:
Sleeping
Sleeping
tomas.helmfridsson
committed on
Commit
·
b155241
1
Parent(s):
324c351
LLM AI-Sweden-Models/gpt-sw3-1.3B
Browse files
app.py
CHANGED
@@ -17,7 +17,11 @@ EMB_MODEL = "KBLab/sentence-bert-swedish-cased"
|
|
17 |
#LLM_MODEL = "bigscience/bloom-560m" # Dålig
|
18 |
#LLM_MODEL = "NbAiLab/nb-gpt-j-6B" #- Restricted
|
19 |
#LLM_MODEL = "datificate/gpt2-small-swedish" # Finns ej på Hugging face
|
20 |
-
LLM_MODEL = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
|
|
|
|
|
|
|
|
|
21 |
|
22 |
CHUNK_SIZE = 400
|
23 |
CHUNK_OVERLAP = 40
|
@@ -142,7 +146,7 @@ def chat_fn(q, temp, max_new_tokens, k, ctx_tok_max, history):
|
|
142 |
future = executor.submit(generate)
|
143 |
ans = future.result(timeout=GEN_TIMEOUT) # Timeout in seconds
|
144 |
except concurrent.futures.TimeoutError:
|
145 |
-
ans = "⏰ Ingen respons från modellen inom {GEN_TIMEOUT} sekunder."
|
146 |
except Exception as e:
|
147 |
log.exception("Genererings‑fel")
|
148 |
ans = f"❌ Fel vid generering: {type(e).__name__}: {e}\n\nPrompt:\n{prompt}"
|
|
|
17 |
#LLM_MODEL = "bigscience/bloom-560m" # Dålig
|
18 |
#LLM_MODEL = "NbAiLab/nb-gpt-j-6B" #- Restricted
|
19 |
#LLM_MODEL = "datificate/gpt2-small-swedish" # Finns ej på Hugging face
|
20 |
+
#LLM_MODEL = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
|
21 |
+
# timpal0l/mdeberta-v3-base-squad2 liten och möjlig på Svenska
|
22 |
+
LLM_MODEL = "AI-Sweden-Models/gpt-sw3-1.3B" # finns olika varianter 126M, 356M, 1.3B, 6.7B, 20B, 40B
|
23 |
+
# LLM_MODEL = AI-Sweden-Models/Llama-3-8B-instruct # kanske för stor
|
24 |
+
# https://www.ai.se/en/ai-labs/natural-language-understanding/models-resources
|
25 |
|
26 |
CHUNK_SIZE = 400
|
27 |
CHUNK_OVERLAP = 40
|
|
|
146 |
future = executor.submit(generate)
|
147 |
ans = future.result(timeout=GEN_TIMEOUT) # Timeout in seconds
|
148 |
except concurrent.futures.TimeoutError:
|
149 |
+
ans = f"⏰ Ingen respons från modellen inom {GEN_TIMEOUT} sekunder."
|
150 |
except Exception as e:
|
151 |
log.exception("Genererings‑fel")
|
152 |
ans = f"❌ Fel vid generering: {type(e).__name__}: {e}\n\nPrompt:\n{prompt}"
|