SyedHutter committed on
Commit 36c9568 · verified · 1 Parent(s): 9246354

app.py Beta 2 (Push 3)

Files changed (1)
  1. app.py +11 -4
app.py CHANGED
@@ -7,8 +7,8 @@ import spacy
 import os
 import logging
 
-# Set up logging
-logging.basicConfig(level=logging.INFO)
+# Set up logging with detailed output
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
 
 app = FastAPI()
@@ -20,8 +20,8 @@ db = client["test"]
 products_collection = db["products"]
 
 # BlenderBot Setup
-model_repo = "SyedHutter/blenderbot_model" # Repo ID
-model_subfolder = "blenderbot_model" # Subdirectory within repo
+model_repo = "SyedHutter/blenderbot_model"
+model_subfolder = "blenderbot_model"
 model_dir = "/home/user/app/blenderbot_model"
 
 if not os.path.exists(model_dir):
@@ -99,21 +99,27 @@ async def root():
 @app.post("/process/", response_model=CombinedResponse)
 async def process_prompt(request: PromptRequest):
     try:
+        logger.info(f"Processing request: {request.input_text}")
         input_text = request.input_text
         history = request.conversation_history[-3:] if request.conversation_history else []
 
         intent = detect_intent(input_text)
         keywords = extract_keywords(input_text)
         ner_response = {"extracted_keywords": keywords}
+        logger.info(f"Intent: {intent}, Keywords: {keywords}")
 
         products = search_products_by_keywords(keywords)
         product_context = get_product_context(products)
+        logger.info(f"Products matched: {len(products)}")
 
         history_str = " || ".join(history)
         full_input = f"{history_str} || {product_context} {context_msg} || {input_text}" if history else f"{product_context} {context_msg} || {input_text}"
+        logger.info(f"Full input to model: {full_input}")
+
         inputs = tokenizer(full_input, return_tensors="pt", truncation=True, max_length=512)
         outputs = model.generate(**inputs, max_length=150, num_beams=5, no_repeat_ngram_size=2)
         response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+        logger.info(f"Model response: {response}")
 
         enhanced_response = format_response(response, products, intent)
         qa_response = {
@@ -128,6 +134,7 @@ async def process_prompt(request: PromptRequest):
             "products_matched": products
         }
     except Exception as e:
+        logger.error(f"Error processing request: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Oops, something went wrong: {str(e)}. Try again!")
 
 @app.on_event("startup")
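
Note on the BlenderBot setup touched above: the diff keeps a repo id plus subfolder (model_repo, model_subfolder) next to a local model_dir guard. The actual download/load logic sits outside the hunk, so the following is only a hedged sketch of how such a repo/subfolder layout can be loaded straight from the Hub with transformers, not a quote from app.py:

from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_repo = "SyedHutter/blenderbot_model"   # repo id as in the diff
model_subfolder = "blenderbot_model"         # subdirectory within the repo, as in the diff

# Load tokenizer and seq2seq model directly from the Hub subfolder
# (illustrative only; app.py itself works against a local model_dir).
tokenizer = AutoTokenizer.from_pretrained(model_repo, subfolder=model_subfolder)
model = AutoModelForSeq2SeqLM.from_pretrained(model_repo, subfolder=model_subfolder)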
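
To exercise the newly instrumented /process/ endpoint, a minimal client sketch follows. The field names (input_text, conversation_history) and the products_matched key come from the diff; the URL, the example payload values, and the rest of the response schema are assumptions:

import requests

url = "http://localhost:7860/process/"  # assumed local URL; a deployed Space exposes its own host

payload = {
    "input_text": "Do you have any cotton shirts?",            # PromptRequest field used in the diff
    "conversation_history": ["Hi", "Hello! How can I help?"],  # optional; only the last 3 turns are used
}

resp = requests.post(url, json=payload, timeout=120)
resp.raise_for_status()
print(resp.json().get("products_matched"))  # key returned by the endpoint per the diff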