# =============================================================================
# DON'T STEAL THE FREE CODE OF DEVS! Use it for free and do not touch credits!
# If you steal this code, in the future you will pay for apps like this!
# A bit of respect goes a long way – all rights reserved under German law.
# Copyright Volkan Kücükbudak https://github.com/volkansah
# Repo URL: https://github.com/AiCodeCraft
# =============================================================================
import datetime
import json
import logging
import os
from datetime import timedelta

import openai
import streamlit as st
from datasets import load_dataset, Dataset, concatenate_datasets
# ------------------ Configure logging ------------------
# Module-level logger; basicConfig sets a sane default format for the app.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
# ------------------ Initialisation ------------------
def main():
    """Run the memory-backed customer-support chat app (Streamlit entry point).

    Steps:
      1. Resolve the Hugging Face token (env var first, then Streamlit secrets).
      2. Ask the user for an OpenAI API key; stop rendering until provided.
      3. Build a support agent whose conversation memory lives in a HF dataset.
      4. Drive the chat UI, persisting each Q/A pair back to the Hub.
    """
    logger.info("App-Initialisierung gestartet...")

    # ------------------ Hugging Face token ------------------
    try:
        # The environment variable takes precedence over Streamlit secrets.
        HF_TOKEN_MEMORY = os.getenv("HF_TOKEN_MEMORY", "")
        if not HF_TOKEN_MEMORY:
            try:
                HF_TOKEN_MEMORY = st.secrets["HF_TOKEN_MEMORY"]
                logger.info("Token aus Streamlit Secrets geladen")
            except Exception as e:
                logger.warning(f"Token nicht in Streamlit Secrets gefunden: {str(e)}")
    except Exception as e:
        logger.warning(f"Fehler beim Laden des Tokens: {str(e)}")
        HF_TOKEN_MEMORY = ""

    # Debug output (development only — remove later). Never log the full token.
    if HF_TOKEN_MEMORY:
        token_preview = HF_TOKEN_MEMORY[:4] + "..." if len(HF_TOKEN_MEMORY) > 4 else "Ungültig"
        logger.info(f"HF Token gefunden. Startet mit: {token_preview}")
    else:
        logger.warning("Kein HF Token gefunden!")
        st.warning("⚠️ Hugging Face Token fehlt. Dataset-Funktionen werden nicht verfügbar sein.")

    # ------------------ Streamlit UI ------------------
    st.title("AI Customer Support Agent with Memory 🛒")
    st.caption("Chat with an assistant who remembers past interactions")

    # OpenAI key input; halt the script until a key is provided.
    openai_api_key = st.text_input("Enter OpenAI API Key", type="password", key="openai_key")
    if not openai_api_key:
        st.warning("⚠️ API-Key benötigt")
        st.stop()
    # NOTE(review): module-level key + the ChatCompletion call below use the
    # legacy openai<1.0 interface, removed in openai>=1.0 — likely the cause
    # of the Space's runtime error. Migrate to openai.OpenAI(api_key=...) if
    # the installed SDK is >=1.0; confirm the pinned version.
    openai.api_key = openai_api_key

    # ------------------ Dataset functions ------------------
    # Correct repository name (fixes the earlier typo "AiCodeCarft").
    DATASET_REPO = "AiCodeCraft/customer_memory"

    def load_memory_dataset():
        """Load the memory dataset from the HF Hub.

        Falls back to a fresh, empty dataset (pushed to the Hub when
        possible) if loading fails, or to a purely local one when no
        token is available.
        """
        if not HF_TOKEN_MEMORY:
            logger.warning("Kein HF Token vorhanden, verwende lokales Dataset")
            # No token: return an empty, local-only dataset with the schema.
            return Dataset.from_dict({"user_id": [], "query": [], "response": [], "timestamp": []})
        try:
            logger.info(f"Versuche Dataset {DATASET_REPO} zu laden...")
            ds = load_dataset(DATASET_REPO, split="train", token=HF_TOKEN_MEMORY)
            logger.info(f"Dataset erfolgreich geladen mit {len(ds)} Einträgen.")
            return ds
        except Exception as e:
            logger.warning(f"Fehler beim Laden des Datasets: {str(e)}")
            # Create a new, empty dataset with the expected schema.
            logger.info("Erstelle neues Dataset...")
            data = {"user_id": [], "query": [], "response": [], "timestamp": []}
            ds = Dataset.from_dict(data)
            try:
                ds.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)
                logger.info("Neues Dataset erfolgreich erstellt und gepusht.")
                return ds
            except Exception as push_error:
                logger.error(f"Fehler beim Pushen des Datasets: {str(push_error)}")
                st.error("Konnte kein Dataset erstellen. Bitte überprüfe deine Berechtigungen.")
                # Push failed: still usable as a local dataset.
                return ds

    # ------------------ AI agent class ------------------
    class CustomerSupportAIAgent:
        """Support agent that augments GPT answers with per-user history."""

        def __init__(self):
            # Conversation memory: columns user_id / query / response / timestamp.
            self.memory = load_memory_dataset()

        def handle_query(self, query, user_id):
            """Answer `query` for `user_id`, using and updating the memory."""
            # Fetch this user's past interactions from the dataset.
            user_history = self.memory.filter(lambda x: x["user_id"] == user_id)
            # Build the context block injected into the system prompt.
            context = "Previous interactions:\n" + "\n".join(
                [f"Q: {h['query']}\nA: {h['response']}"
                 for h in user_history]
            ) if len(user_history) > 0 else "No previous interactions"
            # API request (legacy openai<1.0 interface — see note in main()).
            response = openai.ChatCompletion.create(
                model="gpt-3.5-turbo",
                messages=[
                    {"role": "system", "content": f"You are a support agent. Context:\n{context}"},
                    {"role": "user", "content": query}
                ]
            )
            answer = response.choices[0].message.content
            # Append the new Q/A pair, stamped with the current time.
            current_time = datetime.datetime.now().isoformat()
            new_entry = Dataset.from_dict({
                "user_id": [user_id],
                "query": [query],
                "response": [answer],
                "timestamp": [current_time]
            })
            self.memory = concatenate_datasets([self.memory, new_entry])
            # Persist only when a token is available. NOTE(review): this
            # re-uploads the entire dataset on every turn — acceptable for
            # small memories, consider batching for larger ones.
            if HF_TOKEN_MEMORY:
                try:
                    self.memory.push_to_hub(DATASET_REPO, token=HF_TOKEN_MEMORY)
                    logger.info(f"Memory aktualisiert für User {user_id}")
                except Exception as e:
                    logger.error(f"Fehler beim Aktualisieren des Datasets: {str(e)}")
                    st.warning("Konnte Conversation History nicht speichern, aber Antwort ist verfügbar.")
            return answer

    # ------------------ App logic ------------------
    support_agent = CustomerSupportAIAgent()

    # Customer ID handling — required before the chat can start.
    customer_id = st.sidebar.text_input("Customer ID", key="cust_id")
    if not customer_id:
        st.sidebar.error("Bitte Customer ID eingeben")
        st.stop()

    # Per-session chat history (survives Streamlit reruns).
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay previous messages.
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Handle new user input.
    if prompt := st.chat_input("Your question"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)
        with st.spinner("Denke nach..."):
            response = support_agent.handle_query(prompt, customer_id)
            st.session_state.messages.append({"role": "assistant", "content": response})
            st.chat_message("assistant").write(response)
# ------------------ Main entry point ------------------
# Run the app only when executed directly (not on import).
if __name__ == "__main__":
    main()