import os
import requests
import streamlit as st
from langchain_huggingface import HuggingFaceEndpoint
from langchain_core.prompts import PromptTemplate
from langchain_core.output_parsers import StrOutputParser
from transformers import pipeline
from config import NASA_API_KEY # Ensure this file exists with your NASA API Key
# Set up Streamlit UI
st.set_page_config(page_title="HAL - NASA ChatBot", page_icon="🚀")
# --- Ensure Session State Variables are Initialized ---
if "chat_history" not in st.session_state:
st.session_state.chat_history = [{"role": "assistant", "content": "Hello! How can I assist you today?"}]
if "response_ready" not in st.session_state:
st.session_state.response_ready = False # Tracks whether HAL has responded
if "follow_up" not in st.session_state:
st.session_state.follow_up = "" # Stores follow-up question
if "last_topic" not in st.session_state:
st.session_state.last_topic = "" # Stores last user topic
# --- Set Up Model & API Functions ---
model_id = "mistralai/Mistral-7B-Instruct-v0.3"
# Initialize sentiment analysis pipeline
sentiment_analyzer = pipeline("sentiment-analysis")
def get_llm_hf_inference(model_id=model_id, max_new_tokens=128, temperature=0.7):
    """Returns a HuggingFaceEndpoint client for the given model."""
    return HuggingFaceEndpoint(
        repo_id=model_id,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        token=os.getenv("HF_TOKEN")  # Hugging Face API token
    )
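# Example (assumption, not confirmed by this file): the PromptTemplate and StrOutputParser
# imports suggest the endpoint is composed into an LCEL chain along these lines:
#
#     prompt = PromptTemplate.from_template(
#         "You are HAL, a helpful NASA assistant.\nUser: {question}\nHAL:"
#     )
#     chain = prompt | get_llm_hf_inference(max_new_tokens=256) | StrOutputParser()
#     reply = chain.invoke({"question": "What is the James Webb Space Telescope?"})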
def get_nasa_apod():
    """Fetches NASA's Astronomy Picture of the Day (image URL, title, explanation)."""
    url = f"https://api.nasa.gov/planetary/apod?api_key={NASA_API_KEY}"
    response = requests.get(url)
    if response.status_code == 200:
        data = response.json()
        return data.get("url", ""), data.get("title", ""), data.get("explanation", "")
    else:
        return "", "NASA Data Unavailable", "I couldn't fetch data from NASA right now. Please try again later."
def analyze_sentiment(user_text):
    """Returns the sentiment label (e.g., POSITIVE/NEGATIVE) for the user's text."""
    result = sentiment_analyzer(user_text)[0]
    return result['label']
def predict_action(user_text):
    """Routes NASA/space-related questions to the NASA handler; everything else is a general query."""
    if "NASA" in user_text or "space" in user_text:
        return "nasa_info"
    return "general_query"
def generate_follow_up(user_text):
    """
    Generates a concise and conversational follow-up question related to the user's input.
    """
    prompt_text = (
        f"Given the user's question: '{user_text}', generate a single friendly follow-up question. "
        "Make it short, conversational, and natural—like a human would ask. "
        "Example: If the user asks 'What is a quark?', respond with something like "
        "'Would