File size: 8,896 Bytes
32d7156
 
 
 
 
 
 
 
0281aec
9c206b8
7078460
32d7156
9c206b8
36c9568
32d7156
 
 
 
 
 
 
 
 
 
 
36c9568
 
32d7156
 
9c206b8
 
 
32d7156
9246354
 
 
32d7156
 
 
 
 
 
 
9c206b8
7078460
32d7156
7078460
9c206b8
32d7156
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6dc2e31
32d7156
 
 
 
 
0281aec
9965e63
4b5e244
9965e63
32d7156
7078460
0281aec
 
 
7078460
32d7156
 
37df822
275cd2b
6a04711
8e7aaf2
 
 
 
 
 
 
 
 
32d7156
 
 
 
 
9c206b8
32d7156
 
7078460
 
 
 
 
 
 
4b5e244
9965e63
7078460
9965e63
9c206b8
32d7156
9c206b8
0281aec
7078460
0281aec
 
 
 
9c206b8
 
 
 
 
7078460
 
 
 
 
9965e63
 
9c206b8
 
32d7156
 
 
 
 
 
 
 
 
36c9568
32d7156
7078460
32d7156
 
 
36c9568
32d7156
 
 
36c9568
32d7156
 
9c206b8
36c9568
 
37df822
7078460
37df822
 
 
7078460
9c206b8
 
7078460
 
 
 
9c206b8
 
37df822
 
 
32d7156
36c9568
32d7156
7078460
32d7156
 
 
 
 
 
37df822
32d7156
275cd2b
32d7156
 
 
 
36c9568
9c206b8
32d7156
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import List, Dict, Any
from pymongo import MongoClient
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
import spacy
import os
import logging
import re
import torch
import random  # For response variety

# Set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

app = FastAPI()

# MongoDB Setup
# SECURITY NOTE(review): the fallback URI embeds credentials directly in source.
# Prefer requiring MONGO_URI from the environment and failing fast if unset.
connection_string = os.getenv("MONGO_URI", "mongodb+srv://clician:[email protected]/?retryWrites=true&w=majority&appName=Hutterdev")
client = MongoClient(connection_string)
db = client["test"]
products_collection = db["products"]

# BlenderBot Setup
# First run: download tokenizer + model from the HF Hub repo (subfolder layout)
# and cache them under model_dir. Subsequent runs load straight from disk.
# NOTE: this runs at import time, so first startup blocks on the download.
model_repo = "SyedHutter/blenderbot_model"
model_subfolder = "blenderbot_model"
model_dir = "/home/user/app/blenderbot_model"

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
logger.info(f"Using device: {device}")

if not os.path.exists(model_dir):
    logger.info(f"Downloading {model_repo}/{model_subfolder} to {model_dir}...")
    tokenizer = BlenderbotTokenizer.from_pretrained(model_repo, subfolder=model_subfolder)
    model = BlenderbotForConditionalGeneration.from_pretrained(model_repo, subfolder=model_subfolder)
    os.makedirs(model_dir, exist_ok=True)
    tokenizer.save_pretrained(model_dir)
    model.save_pretrained(model_dir)
    logger.info("Model download complete.")
else:
    logger.info(f"Loading pre-existing model from {model_dir}.")
# Always (re)load from the local cache so both branches end in the same state.
tokenizer = BlenderbotTokenizer.from_pretrained(model_dir)
model = BlenderbotForConditionalGeneration.from_pretrained(model_dir).to(device)
model.eval()

# Static Context
# Persona sentence prepended to every model prompt in process_prompt.
context_msg = "I am Hutter, your shopping guide for Hutter Products GmbH, here to help you find sustainable products."

# spaCy Setup
# Loads a vendored en_core_web_sm pipeline from a fixed path (no pip-installed model).
spacy_model_path = "/home/user/app/en_core_web_sm-3.8.0"
nlp = spacy.load(spacy_model_path)

# Pydantic Models
class PromptRequest(BaseModel):
    """Request body for POST /process/."""
    # Raw user message to process.
    input_text: str
    # Prior turns, oldest first; only consulted by format_response for follow-ups.
    conversation_history: List[str] = []

class CombinedResponse(BaseModel):
    """Response body for POST /process/: keywords, answer payload, product matches."""
    # {"extracted_keywords": [...]} — nouns/proper nouns found in the input.
    ner: Dict[str, Any]
    # {"question": ..., "answer": ..., "score": ...} — score is a fixed placeholder.
    qa: Dict[str, Any]
    # Products whose names matched the extracted keywords.
    products_matched: List[Dict[str, Any]]

# Helper Functions
def extract_keywords(text: str) -> List[str]:
    """Return the distinct nouns and proper nouns found in *text*.

    Runs the module-level spaCy pipeline and keeps each token tagged
    NOUN or PROPN. Order of the returned list is unspecified.
    """
    parsed = nlp(text)
    nouns = {tok.text for tok in parsed if tok.pos_ in ("NOUN", "PROPN")}
    return list(nouns)

def detect_intent(text: str) -> str:
    """Classify *text* into one of five coarse intents.

    Returns one of: "recommend_product", "company_info", "ask_name",
    "math_query", or "chat" (general-conversation fallback).

    Fix: the original `"you" in doc and "about" in doc` compared strings
    against a spaCy Doc of Token objects, which never matches; membership
    is now tested against the set of token texts.
    """
    text_lower = text.lower()
    doc = nlp(text_lower)
    tokens = {token.text for token in doc}

    shopping_words = {"buy", "shop", "find", "recommend", "product", "products", "item", "store", "catalog"}
    # "what ..." near the start of the message also counts as a product query.
    if tokens & shopping_words or "what" in text_lower.split()[:2]:
        return "recommend_product"
    if tokens & {"company", "who", "do"}:
        return "company_info"
    if "name" in text_lower or "yourself" in text_lower or ("you" in tokens and "about" in tokens):
        return "ask_name"
    # Simple arithmetic like "2 + 2" anywhere in the message.
    if re.search(r"\d+\s*[\+\-\*/]\s*\d+", text_lower):
        return "math_query"
    return "chat"  # Fallback for general conversation

def search_products_by_keywords(keywords: List[str]) -> List[Dict[str, Any]]:
    """Case-insensitive whole-word search of the products collection by name.

    Builds a MongoDB $or query of word-boundary regexes, one per keyword,
    and returns a list of simplified product dicts (id, name, skuNumber,
    description). Returns [] when no keywords are given.

    Fix: keywords are user-derived (spaCy tokens), so each one is passed
    through re.escape before being embedded in the $regex — otherwise
    metacharacters (e.g. "C++", "(") would break or widen the query.
    """
    if not keywords:
        return []
    query = {"$or": [{"name": {"$regex": f"\\b{re.escape(keyword)}\\b", "$options": "i"}} for keyword in keywords]}
    matched_products = [
        {
            "id": str(p["_id"]),
            "name": p.get("name", "Unknown"),
            "skuNumber": p.get("skuNumber", "N/A"),
            "description": p.get("description", "No description available")
        }
        for p in products_collection.find(query)
    ]
    return matched_products

def get_product_context(products: List[Dict]) -> str:
    """Summarize up to two matched products as a single prompt fragment.

    Returns "" when there are no products, otherwise a string of the form
    "Products: 'name' - description, 'name' - description".
    """
    if not products:
        return ""
    summaries = (f"'{item['name']}' - {item['description']}" for item in products[:2])
    return "Products: " + ", ".join(summaries)

def format_response(response: str, products: List[Dict], intent: str, input_text: str, history: List[str]) -> str:
    """Turn the raw model output into the final user-facing reply, keyed on intent.

    - recommend_product: pitch the first match, or (randomly) one of three
      catalog prompts when nothing matched.
    - company_info / ask_name: fixed canned replies.
    - math_query: evaluate a simple "a op b" expression from input_text.
    - chat: follow up on a hat mention in the last history turn, else wrap
      the model's response with a shopping nudge.
    - any other intent: fall through to the model response, optionally with
      a product teaser appended.
    """
    fallback_prompts = [
        "I’d love to recommend something! What are you looking for in our sustainable catalog?",
        "Our sustainable catalog has lots to offer—what catches your interest?",
        "Tell me what you’re after, and I’ll find something great from our eco-friendly range!"
    ]

    if intent == "recommend_product":
        if not products:
            return random.choice(fallback_prompts)
        top = products[0]
        return f"Check out our '{top['name']}'—it’s {top['description'].lower()}. Want more options?"

    if intent == "company_info":
        return "Hutter Products GmbH offers sustainable products like recycled textiles and ocean plastic goods."

    if intent == "ask_name":
        return "I’m Hutter, your shopping guide for Hutter Products GmbH. I’m here to help you find eco-friendly products—how can I assist?"

    if intent == "math_query":
        expr = re.search(r"(\d+)\s*([\+\-\*/])\s*(\d+)", input_text.lower())
        if expr:
            left = int(expr.group(1))
            op = expr.group(2)
            right = int(expr.group(3))
            if op == "+":
                return f"{left} + {right} = {left + right}. Need shopping help?"
            if op == "-":
                return f"{left} - {right} = {left - right}. Anything else?"
            if op == "*":
                return f"{left} * {right} = {left * right}. Explore our products?"
            if op == "/":
                if right == 0:
                    return "Can’t divide by zero! Try our products?"
                return f"{left} / {right} = {left / right}."
        return "I can do math—try '2 + 2'. What else can I help with?"

    if intent == "chat":
        # Use BlenderBot’s response if appropriate, else nudge toward shopping.
        if "yes" in input_text.lower() and history and "hat" in history[-1].lower():
            return "Great! Besides the Bucket Hat, we have other sustainable items—want to hear more?"
        if response:
            return f"{response} How can I assist with our sustainable products today?"
        return "I’m here to help—anything on your mind?"

    # Unknown intent: echo the model, with a product teaser when one matched.
    if products:
        top = products[0]
        return f"{response} Also, check out '{top['name']}'—it’s {top['description'].lower()}."
    return response if response else "How can I assist with our sustainable products?"

# Endpoints
@app.get("/")
async def root():
    """Landing / health-check endpoint."""
    return {"message": "Welcome to the NER and Chat API!"}

@app.post("/process/", response_model=CombinedResponse)
async def process_prompt(request: PromptRequest):
    """Main pipeline: intent detection -> keyword extraction -> product lookup
    -> BlenderBot generation -> intent-aware response formatting.

    Returns a CombinedResponse-shaped dict; raises HTTP 500 on any internal
    failure (the original message is echoed in the detail for debugging).

    Fix: removed the dead locals `history`/`history_str` — they were computed
    but never included in the model prompt; conversation history reaches the
    reply only via format_response(request.conversation_history).
    """
    try:
        logger.info(f"Processing request: {request.input_text}")
        input_text = request.input_text

        intent = detect_intent(input_text)
        keywords = extract_keywords(input_text)
        logger.info(f"Intent: {intent}, Keywords: {keywords}")

        products = search_products_by_keywords(keywords)
        product_context = get_product_context(products)
        logger.info(f"Products matched: {len(products)}")

        # Prompt = static persona, plus matched-product context when available.
        full_input = f"{context_msg} || {product_context} || {input_text}" if product_context else f"{context_msg} || {input_text}"
        logger.info(f"Full input to model: {full_input}")

        logger.info("Tokenizing input...")
        inputs = tokenizer(full_input, return_tensors="pt", truncation=True, max_length=64).to(device)
        logger.info("Input tokenized successfully.")

        logger.info("Generating model response...")
        with torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_new_tokens=30,
                do_sample=True,
                top_p=0.95,  # Slightly higher for more variety
                temperature=0.8,  # Slightly higher for creativity
                no_repeat_ngram_size=2
            )
        logger.info("Model generation complete.")

        logger.info("Decoding model output...")
        response = tokenizer.decode(outputs[0], skip_special_tokens=True)
        logger.info(f"Model response: {response}")

        enhanced_response = format_response(response, products, intent, input_text, request.conversation_history)
        qa_response = {
            "question": input_text,
            "answer": enhanced_response,
            "score": 1.0  # Placeholder confidence; no real QA scoring is performed.
        }

        logger.info("Returning response...")
        return {
            "ner": {"extracted_keywords": keywords},
            "qa": qa_response,
            "products_matched": products
        }
    except Exception as e:
        logger.error(f"Error processing request: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Oops, something went wrong: {str(e)}")

@app.on_event("startup")
async def startup_event():
    """Log readiness once FastAPI finishes starting up."""
    logger.info("API is running with BlenderBot-400M-distill, connected to MongoDB.")

@app.on_event("shutdown")
def shutdown_event():
    """Close the shared MongoDB client on application shutdown."""
    client.close()