Update app.py
app.py
CHANGED
@@ -26,12 +26,16 @@ model_id = "mistralai/Mistral-7B-Instruct-v0.3"
 # Explicitly define the sentiment analysis model
 sentiment_analyzer = pipeline("sentiment-analysis", model="distilbert/distilbert-base-uncased-finetuned-sst-2-english", revision="714eb0f")
 
-def get_llm_hf_inference(model_id=model_id, max_new_tokens=128, temperature=0.
+def get_llm_hf_inference(model_id=model_id, max_new_tokens=128, temperature=0.1):
+    """
+    Initializes the Hugging Face text generation model with correct settings.
+    """
     return HuggingFaceEndpoint(
         repo_id=model_id,
+        task="text-generation",  # Explicitly define the task
         max_new_tokens=max_new_tokens,
         temperature=temperature,
-        token=os.getenv("HF_TOKEN")  # Hugging Face API Token
+        token=os.getenv("HF_TOKEN")  # Ensure your Hugging Face API Token is set
     )
 
 def get_nasa_apod():
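For reference, a minimal usage sketch of the updated helper. It assumes the imports already present in app.py (os, pipeline from transformers, HuggingFaceEndpoint from langchain_huggingface) and an HF_TOKEN environment variable; the prompt text and generation settings below are only illustrative, not part of the commit.

# Build the endpoint with the explicit "text-generation" task added in this commit.
llm = get_llm_hf_inference(max_new_tokens=64, temperature=0.1)

# HuggingFaceEndpoint implements the LangChain Runnable interface,
# so a plain string prompt can be passed to invoke().
response = llm.invoke("Summarize the NASA Astronomy Picture of the Day in one sentence.")
print(response)

# The pinned sentiment pipeline returns a list of {"label", "score"} dicts.
sentiment = sentiment_analyzer("The nebula image is stunning!")[0]
print(sentiment["label"], sentiment["score"])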