import streamlit as st
import os
import json
import datetime
import openai
from datasets import load_dataset, Dataset, concatenate_datasets
from huggingface_hub import login

# -- Settings for the Hugging Face dataset repository --
# Replace "your_username/customer_memory" with your own repository name!
DATASET_REPO = "AiCodeCarft/customer_memory"

# Hugging Face login (token entered in the sidebar)
hf_token = st.sidebar.text_input("Enter your Hugging Face Token", type="password")
if hf_token:
    login(token=hf_token)
    st.sidebar.success("Logged in to Hugging Face!")


def load_memory_dataset():
    """Load the memory dataset from the HF Hub, creating an empty one if absent.

    Returns:
        Dataset with columns: user_id, query, response.
    """
    try:
        ds = load_dataset(DATASET_REPO, split="train")
        st.write("Dataset loaded from HF Hub.")
    except Exception:
        # Dataset does not exist yet (or could not be fetched) — bootstrap an
        # empty one and push it so subsequent loads succeed.
        st.write("Dataset not found on HF Hub. Creating a new one...")
        data = {"user_id": [], "query": [], "response": []}
        ds = Dataset.from_dict(data)
        ds.push_to_hub(DATASET_REPO)
        st.write("New dataset created and pushed to HF Hub.")
    return ds


def add_to_memory(user_id, query, response):
    """Append one interaction to the memory dataset and push it to the HF Hub.

    Args:
        user_id: Customer identifier the interaction belongs to.
        query: The customer's message.
        response: The assistant's reply.
    """
    ds = load_memory_dataset()
    # Wrap the new entry as a single-row Dataset so it can be concatenated.
    new_entry = Dataset.from_dict({
        "user_id": [user_id],
        "query": [query],
        "response": [response],
    })
    updated_ds = concatenate_datasets([ds, new_entry])
    # Persist the updated dataset back to the HF Hub.
    updated_ds.push_to_hub(DATASET_REPO)
    st.write("Memory updated.")


def get_memory(user_id):
    """Return the subset of the memory dataset belonging to *user_id*."""
    ds = load_memory_dataset()
    return ds.filter(lambda x: x["user_id"] == user_id)


def generate_response(prompt):
    """Ask GPT-4 for a support reply to *prompt* and return the message text."""
    response = openai.ChatCompletion.create(
        model="gpt-4",
        messages=[
            {"role": "system", "content": "You are a customer support AI for TechGadgets.com."},
            {"role": "user", "content": prompt},
        ],
    )
    return response.choices[0].message.content


# ---------------------------- Streamlit App UI ----------------------------
st.title("AI Customer Support Agent with Memory 🛒")
st.caption("Chat with a customer support assistant who remembers your past interactions.")

# OpenAI API key input
openai_api_key = st.text_input("Enter OpenAI API Key", type="password")
if openai_api_key:
    os.environ["OPENAI_API_KEY"] = openai_api_key
    openai.api_key = openai_api_key

# Sidebar: customer ID and options
st.sidebar.title("Enter your Customer ID:")
customer_id = st.sidebar.text_input("Customer ID")

# Optional: generate synthetic example data for the current customer
if st.sidebar.button("Generate Synthetic Data"):
    if customer_id:
        synthetic_data = {
            "name": "Max Mustermann",
            "recent_order": {
                "product": "High-end Smartphone",
                "order_date": (datetime.datetime.now() - datetime.timedelta(days=10)).strftime("%B %d, %Y"),
                "delivery_date": (datetime.datetime.now() + datetime.timedelta(days=2)).strftime("%B %d, %Y"),
                "order_number": "ORD123456",
            },
            "previous_orders": [
                {"product": "Laptop", "order_date": "January 12, 2025"},
                {"product": "Tablet", "order_date": "March 01, 2025"},
            ],
            "customer_service_interactions": [
                "Asked about order status",
                "Inquired about warranty",
            ],
        }
        st.session_state.customer_data = synthetic_data
        st.sidebar.success("Synthetic data generated!")
    else:
        st.sidebar.error("Please enter a customer ID first.")

if st.sidebar.button("View Customer Profile"):
    if "customer_data" in st.session_state and st.session_state.customer_data:
        st.sidebar.json(st.session_state.customer_data)
    else:
        st.sidebar.info("No synthetic data available.")

if st.sidebar.button("View Memory Info"):
    if customer_id:
        memories = get_memory(customer_id)
        st.sidebar.write(f"Memory for customer **{customer_id}**:")
        for mem in memories:
            st.sidebar.write(f"**Query:** {mem['query']}\n**Response:** {mem['response']}\n---")
    else:
        st.sidebar.error("Please enter a customer ID.")

# Initialize the chat history in session_state
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the chat history so far
for message in st.session_state.messages:
    st.chat_message(message["role"]).markdown(message["content"])

# Main chat: user input
query = st.chat_input("How can I assist you today?")

if query:
    # Guard clauses: both an API key and a customer ID are required before we
    # can call the model.  (Previously the API-key warning fired on every
    # rerun without input, and a missing key caused a runtime error.)
    if not openai_api_key:
        st.warning("Please enter your OpenAI API key to use the customer support agent.")
    elif not customer_id:
        st.error("Please enter a customer ID to start the chat.")
    else:
        # Build the memory context from this customer's past interactions.
        memories = get_memory(customer_id)
        context = "".join(
            f"Query: {mem['query']}\nResponse: {mem['response']}\n" for mem in memories
        )
        # Combine the context with the current request.
        full_prompt = context + f"\nCustomer: {query}\nSupport Agent:"

        st.chat_message("user").markdown(query)
        with st.spinner("Generating response..."):
            answer = generate_response(full_prompt)

        # Update the chat history.
        st.session_state.messages.append({"role": "user", "content": query})
        st.session_state.messages.append({"role": "assistant", "content": answer})
        st.chat_message("assistant").markdown(answer)

        # Persist the interaction in memory (dataset on the HF Hub).
        add_to_memory(customer_id, query, answer)