kevinhug committed
Commit ab83b05 · Parent: 781f0d3
Files changed (4)
  1. app.py +12 -1
  2. knowledge.py +1 -1
  3. rag.py +2 -1
  4. tool.py +2 -1
app.py CHANGED
@@ -126,6 +126,10 @@ If your product discovery experience isn’t working as hard as your marketing b
  ================================================
  - Retrieval: Public Product Data using Tavily Search
  - Recommend: Competing Product
+
+ ### Benefits
+ - Removes friction in research, saving labour time
+ - Improves insight quality by identifying competitors
  """)
  in_verbatim = gr.Textbox(label="Verbatim")
  out_product = gr.Textbox(label="Product")
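The hunk above describes a Retrieval → Recommend flow: Tavily search over public product data feeding a competing-product suggestion. Below is a minimal sketch of how such a flow could be wired with DSPy and the TavilyClient already used in tool.py; the `web_search` helper, the signature string, and the field names are illustrative assumptions, not code from this repo.

```python
# Hedged sketch, not the repo's implementation: expose Tavily search as a DSPy tool
# so a ReAct agent can pull public product data and suggest a competing product.
import os
import dspy
from tavily import TavilyClient

dspy.configure(lm=dspy.LM('groq/gemma-7b-it'))                # same LM as tool.py after this commit
search_client = TavilyClient(api_key=os.environ["T_TOKEN"])   # same env var as tool.py

def web_search(query: str) -> str:
    """Return the top public product-data snippets for a query."""
    results = search_client.search(query)["results"]
    return "\n".join(r["content"] for r in results[:5])

# Illustrative signature: the field names are assumptions.
recommend = dspy.ReAct("verbatim -> competing_product", tools=[web_search])
# prediction = recommend(verbatim="I wish my card earned better travel rewards")
# print(prediction.competing_product)
```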
@@ -237,6 +241,7 @@ Uses customer data and behavior to craft messages that resonate with specific se
  gr.Markdown("""
  Objective: Transform Personal Pain Points into Actionable Insights with a Dynamic Knowledge Graph Framework
  =====================================
+ - Identify which channels the customer prefers
  """)
  in_verbatim = gr.Textbox(label="Question")
  out_product = gr.JSON(label="Knowledge Graph")
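Since the output component is gr.JSON, this tab presumably renders a node/edge structure. A purely illustrative example of how the new "preferred channel" signal might surface in that output; the real schema lives in knowledge.py, and every name below is made up for illustration.

```python
# Illustrative output shape only; not produced by the app. It shows a channel
# preference appearing as a node plus an edge alongside a pain point.
example_graph = {
    "nodes": [
        {"id": 1, "label": "Customer"},
        {"id": 2, "label": "Pain point: long branch wait times"},
        {"id": 3, "label": "Preferred channel: mobile app"},
    ],
    "edges": [
        {"source": 1, "target": 2, "label": "complains_about"},
        {"source": 1, "target": 3, "label": "prefers"},
    ],
}
```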
@@ -371,13 +376,18 @@ Allows downstream tasks (like sentiment analysis or topic modeling) to focus on
  """)


- with gr.Tab("Classify"):
+ with gr.Tab("Segmentation"):
  gr.Markdown("""
  Objective: Streamline Customer Insights: Auto-Classify Feedback for Product Optimization
  ================================================
  - Multi-class classification; one piece of feedback can carry multiple labels
  - Fixed label set in this use case: online banking, card, auto finance, mortgage, insurance
  - LLM judge to evaluate relevancy
+
+ Business use case: customer segmentation for A/B testing
+ ------------------------------------------------
+ - Acquisition: behavior clusters let us predict not only who is likely to click, but who is likely to retain
+ - Activation: segment users on behavioral signals such as browsing activity, time since last engagement, or declining open/click rates
  """)
  in_verbatim = gr.Textbox(label="Customer Feedback separate by ;")
  out_product = gr.Textbox(label="Classification & Evaluation")
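A minimal sketch of the multi-label classification plus LLM-judge loop this tab describes, written with DSPy signatures; the class names, field names, and sample feedback are assumptions, not the app's actual code.

```python
# Hedged sketch: multi-label classification over a fixed label set, scored by an LLM judge.
import dspy

dspy.configure(lm=dspy.LM('groq/gemma-7b-it'))   # same LM as rag.py / tool.py after this commit

LABELS = ["online banking", "card", "auto finance", "mortgage", "insurance"]

class ClassifyFeedback(dspy.Signature):
    """Assign every applicable product label to one piece of customer feedback."""
    feedback: str = dspy.InputField()
    labels: list[str] = dspy.OutputField(desc=f"any subset of {LABELS}")

class JudgeRelevancy(dspy.Signature):
    """Rate how relevant the assigned labels are to the feedback (0 = off-topic, 1 = exact)."""
    feedback: str = dspy.InputField()
    labels: list[str] = dspy.InputField()
    relevancy: float = dspy.OutputField()

classify = dspy.Predict(ClassifyFeedback)
judge = dspy.Predict(JudgeRelevancy)

verbatim = "card declined while travelling; mortgage renewal rate seems high"  # feedback separated by ";"
for item in verbatim.split(";"):
    pred = classify(feedback=item.strip())
    score = judge(feedback=item.strip(), labels=pred.labels)
    print(item.strip(), pred.labels, score.relevancy)
```

The same label-plus-relevancy output could then be grouped into the acquisition/activation segments mentioned in the hunk to form A/B test cohorts.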
@@ -506,6 +516,7 @@ For example, Comcast reduced repeat service calls by 17% after deploying entity
  gr.Markdown("""
  Objective: Leveraging Human Feedback to Deliver Personalized Content that Proactively Solves Customer Pain Points
  ================================================
+ - Replace the human rater with a reward/penalty function; ranking the solutions gives you RLHF
  """)

  in_verbatim = gr.Textbox(label="Persona")
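The added bullet compresses a lot, so here is a small sketch of the idea: the human rater is swapped for a reward/penalty function whose ranking over candidate messages stands in for preference labels in an RLHF-style loop. Both functions below are illustrative assumptions, not repo code.

```python
# Hedged sketch: a reward/penalty function replaces the human rater, and its ranking
# over candidate messages plays the role of human preference data.
def score(persona: str, candidate: str) -> float:
    reward = 1.0 if any(word in candidate.lower() for word in persona.lower().split()) else 0.0
    penalty = 0.01 * max(0, len(candidate.split()) - 120)   # discourage overly long content
    return reward - penalty

def rank(persona: str, candidates: list[str]) -> list[str]:
    """Best-first ordering; top-vs-bottom pairs can serve as preference pairs for tuning."""
    return sorted(candidates, key=lambda c: score(persona, c), reverse=True)

best, *rest = rank("busy small-business owner", [
    "A 3-minute mobile deposit walkthrough for busy small-business owners.",
    "Our full 40-page guide to every deposit product we offer.",
])
```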
 
knowledge.py CHANGED
@@ -88,7 +88,7 @@ def generate_graph(q, input=KnowledgeGraph()) -> KnowledgeGraph:
  messages=[
  {
  "role": "user",
- "content": dedent(f"""As a world class iterative knowledge graph builder and a Marketing Data Scientist for delivery personalized solution in Personal and Commercial Banking. Help me understand this person pain points and needs by describing the interaction as a detailed knowledge graph:
+ "content": dedent(f"""As a world-class iterative knowledge graph builder and a Marketing Data Scientist delivering personalized solutions in Personal and Commercial Banking, help me understand this person's pain points, contact preferences, and needs by describing the interaction as a detailed knowledge graph:
  ### Interaction: {q}
  ### Merge from existing KnowledgeGraph, Here is the current state of the graph:
  {input.model_dump_json()}
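For context, generate_graph(q, input=KnowledgeGraph()) merges each interaction into an existing graph and serializes it with model_dump_json(), so KnowledgeGraph is a Pydantic model. A sketch of what that schema could look like; the Node/Edge field names are assumptions, and only the KnowledgeGraph name and the merge behaviour come from this file.

```python
# Hedged sketch of a Pydantic schema compatible with the prompt above; field names are assumed.
from pydantic import BaseModel, Field

class Node(BaseModel):
    id: int
    label: str    # e.g. "Customer", "Pain point", "Preferred channel"

class Edge(BaseModel):
    source: int
    target: int
    label: str    # e.g. "prefers", "complains_about"

class KnowledgeGraph(BaseModel):
    nodes: list[Node] = Field(default_factory=list)
    edges: list[Edge] = Field(default_factory=list)
```

Passing the current graph back into the prompt is what makes the builder iterative: each call returns a merged graph rather than a fresh one.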
 
rag.py CHANGED
@@ -4,7 +4,8 @@ import pandas as pd

  #lm = dspy.LM('ollama_chat/deepseek-r1', api_base='http://localhost:11434', api_key='')
  #lm = dspy.LM('huggingface/Qwen/Qwen2.5-Coder-32B-Instruct')
- lm = dspy.LM('huggingface/meta-llama/Llama-3.2-1B')
+ #lm = dspy.LM('huggingface/meta-llama/Llama-3.2-1B')
+ lm = dspy.LM('groq/gemma-7b-it')
  dspy.configure(lm=lm)

  df = pd.read_csv("product2.csv")
 
tool.py CHANGED
@@ -6,7 +6,8 @@ from tavily import TavilyClient

  #lm = dspy.LM('ollama_chat/deepseek-r1', api_base='http://localhost:11434', api_key='')
  #lm = dspy.LM('huggingface/Qwen/Qwen2.5-Coder-32B-Instruct')
- lm = dspy.LM('huggingface/meta-llama/Llama-3.2-1B')
+ #lm = dspy.LM('huggingface/meta-llama/Llama-3.2-1B')
+ lm = dspy.LM('groq/gemma-7b-it')
  dspy.configure(lm=lm)

  search_client = TavilyClient(api_key=os.environ["T_TOKEN"])
 
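Both rag.py and tool.py now point dspy.LM at groq/gemma-7b-it. dspy.LM routes provider-prefixed model names through LiteLLM, so the Space needs a Groq key in its environment alongside T_TOKEN; a quick hedged sanity check (GROQ_API_KEY is LiteLLM's convention, not something set in this commit):

```python
# Hedged sanity check for the new backend; assumes LiteLLM's GROQ_API_KEY convention.
import os
import dspy

assert "GROQ_API_KEY" in os.environ, "set GROQ_API_KEY in the Space secrets"
lm = dspy.LM('groq/gemma-7b-it')
dspy.configure(lm=lm)
print(lm("Reply with one word: ready?"))   # dspy.LM instances are directly callable
```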