siddhartharyaai committed on
Commit c404ec8 · verified · 1 Parent(s): e19b8c2

Update utils.py

Files changed (1)
  1. utils.py +55 -32
utils.py CHANGED
@@ -496,11 +496,11 @@ def run_research_agent(
  """
  Low-Call approach:
  1) Tavily search (up to 20 URLs).
- 2) Firecrawl scrape => combined text
- 3) Truncate to 12k tokens total
- 4) Split into chunks (each 4500 tokens) => Summarize each chunk individually => summaries
- 5) Single final merge => final PDF
- => 2 or more total LLM calls (but no more than 10) to reduce the chance of rate limit errors.
+ 2) Firecrawl scrape => combined text from the URLs.
+ 3) (No truncation) Use the full richness of the scraped materials.
+ 4) Split the text into chunks (each 4500 tokens) and summarize each chunk individually.
+ 5) Use a single final merge call to produce a comprehensive, detailed, and exhaustive research report.
+    The final report must adhere to world-class research report guidelines.
  """
  print(f"[LOG] Starting LOW-CALL research agent for topic: {topic}")
 
@@ -541,16 +541,17 @@ def run_research_agent(
  print("[LOG] Could not retrieve content from any search results. Exiting.")
  return "Could not retrieve content from any of the search results."

- # Step 3: Truncate to 12k tokens total
- print("[LOG] Step 3: Truncating combined text to 12,000 tokens if needed.")
- combined_content = truncate_text_tokens(combined_content, max_tokens=12000)
+ # Input Sanitization: Remove any chain-of-thought markers from the scraped content.
+ combined_content = re.sub(r"<think>.*?</think>", "", combined_content, flags=re.DOTALL)
+
+ # Note: The previous truncation to 12,000 tokens is removed so the full content is used.

  # Step 4: Splitting text into chunks (4500 tokens each) and summarizing each chunk.
  print("[LOG] Step 4: Splitting text into chunks (4500 tokens each). Summarizing each chunk.")
  tokenizer = tiktoken.get_encoding("cl100k_base")
  tokens = tokenizer.encode(combined_content)
  chunk_size = 4500 # Reduced chunk size to avoid exceeding the LLM's TPM limit.
- max_chunks = 10 # Allow up to 10 chunks (and thus 10 LLM calls).
+ max_chunks = 10 # Allow up to 10 chunks (and thus up to 10 LLM calls).
  summaries = []
  start = 0
  chunk_index = 1
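The two additions in this hunk, the <think> scrub and the token-based chunking setup, can be exercised in isolation. A self-contained sketch follows; the sample text and the tiny chunk size are made up for illustration, while the real code uses 4500-token chunks capped at max_chunks = 10.

import re
import tiktoken

sample = "Useful finding.<think>\nhidden planning\n</think> Another useful finding."

# Strip chain-of-thought markers the same way the new code does; DOTALL lets the
# pattern match across newlines inside the tags.
clean = re.sub(r"<think>.*?</think>", "", sample, flags=re.DOTALL)

# Split the cleaned text into fixed-size token windows with cl100k_base, mirroring the
# Step 4 setup (a tiny chunk size here only so the demo yields more than one chunk).
tokenizer = tiktoken.get_encoding("cl100k_base")
tokens = tokenizer.encode(clean)
chunk_size = 8
chunks = [tokenizer.decode(tokens[i:i + chunk_size]) for i in range(0, len(tokens), chunk_size)]

print(clean)
print(f"{len(tokens)} tokens -> {len(chunks)} chunks")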
@@ -564,8 +565,7 @@ def run_research_agent(
  prompt = f"""
  You are a specialized summarization engine. Summarize the following text
  for a professional research report. Provide accurate details but do not
- include chain-of-thought or internal reasoning. Keep it concise, but
- include key data points and context:
+ include any chain-of-thought or internal planning. Keep it concise, yet capture all key points:
  {chunk_text}
  """
  data = {
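The loop body around this prompt is not part of the diff, so the following is only an illustrative reconstruction of the Step 4 loop. It assumes the real code walks the token list in chunk_size windows, sends each chunk through the retry-wrapped chat call, and collects the stripped responses; the llm callable stands in for call_llm_with_retry(groq_client, **data) and an OpenAI-style response object.

from typing import Callable, List
import tiktoken

# Sketch of the chunk-summarization loop; helper names and payload shape are assumptions,
# since the unchanged lines between these hunks are not shown in the diff.
def summarize_in_chunks(text: str, llm: Callable[[str], str],
                        chunk_size: int = 4500, max_chunks: int = 10) -> List[str]:
    tokenizer = tiktoken.get_encoding("cl100k_base")
    tokens = tokenizer.encode(text)
    summaries: List[str] = []
    start, chunk_index = 0, 1
    while start < len(tokens) and chunk_index <= max_chunks:
        end = min(start + chunk_size, len(tokens))
        chunk_text = tokenizer.decode(tokens[start:end])
        prompt = (
            "You are a specialized summarization engine. Summarize the following text "
            "for a professional research report. Provide accurate details but do not "
            "include any chain-of-thought or internal planning. Keep it concise, yet "
            f"capture all key points:\n{chunk_text}"
        )
        summaries.append(llm(prompt).strip())  # one LLM call per chunk, at most max_chunks calls
        start = end
        chunk_index += 1
    return summaries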
@@ -580,36 +580,56 @@ include key data points and context:
  start = end
  chunk_index += 1

- # Step 5: Single final merge call
- print("[LOG] Step 5: Doing one final merge of chunk summaries.")
+ # Step 5: Single final merge call with enhanced instructions.
+ print("[LOG] Step 5: Merging chunk summaries into the final research report.")
  references_text = "\n".join(f"- {url}" for url in references_list) if references_list else "None"
  truncated_summaries = [truncate_text_for_llm(s, max_tokens=1000) for s in summaries]
  merged_input = "\n\n".join(truncated_summaries)

+ # Enhanced final prompt including world-class report guidelines.
  final_prompt = f"""
- IMPORTANT: Do NOT include chain-of-thought or hidden planning.
- Produce a long, academic-style research paper with the following structure:
- - Title Page (concise descriptive title)
- - Table of Contents
- - Executive Summary
- - Introduction
- - Historical or Contextual Background
- - Multiple Thematic Sections (with subheadings)
- - Detailed Analysis (multi-paragraph sections)
- - Footnotes or inline citations referencing the URLs
- - Conclusion
- - References / Bibliography (list these URLs at the end)
- Requirements:
- - Minimal bullet points, prefer multi-paragraph
- - Each section at least 2-3 paragraphs
- - Aim for 1500+ words if possible
- - Under 6000 tokens total
- - Professional, academic tone
+ IMPORTANT: Do NOT include any chain-of-thought, internal planning, or hidden reasoning in the final output.
+ Draft a professional, world-class research report that adheres to the following tenets:
+
+ I. Essential Principles and Qualities:
+ - Accuracy: Present accurate facts with no spelling or grammatical errors.
+ - Clarity: Use clear, straightforward language.
+ - Brevity: Be concise yet complete.
+ - Objectivity: Avoid personal bias.
+ - Simplicity: Use simple language, and explain any necessary technical jargon briefly.
+ - Logical Sequence: Arrange points in a logical order with proper planning.
+ - Proper Form and Presentation: Follow required formats with an attractive presentation.
+ - Selectiveness: Include only necessary content.
+ - Comprehensiveness: Provide complete and detailed coverage.
+ - Reliability, Coherence, and Relevance: Ensure a logical flow and relevance to the research questions.
+
+ II. Structure the Report as Follows:
+ - Title Page (with a concise descriptive title)
+ - Table of Contents
+ - Executive Summary
+ - Introduction (clearly outlining the research purpose and objectives)
+ - Historical or Contextual Background
+ - Detailed Findings organized into coherent thematic sections
+ - Conclusion (with recommendations and insights)
+ - References/Bibliography (listing the provided URLs)
+
+ III. Content and Writing Style:
+ - Use consistent and clear language.
+ - Support arguments with reliable evidence.
+ - Write in active voice with clear headings and a logical flow.
+ - Develop each section in multiple detailed paragraphs.
+
+ IV. Steps for Writing the Report:
+ - Write a clear thesis statement.
+ - Prepare an outline and develop content sequentially.
+
+ Use the following partial summaries and references as source materials to produce a detailed and exhaustive research report.
  Partial Summaries:
  {merged_input}
  References (URLs):
  {references_text}
- Now, merge these partial summaries into one thoroughly expanded research paper:
+
+ Now, merge these partial summaries into one thoroughly expanded, detailed, and exhaustive research report:
  """
  final_data = {
  "model": MODEL_COMBINATION,
@@ -620,6 +640,9 @@ Now, merge these partial summaries into one thoroughly expanded research paper:
  final_response = call_llm_with_retry(groq_client, **final_data)
  final_text = final_response.choices[0].message.content.strip()

+ # Post-process final_text to remove any lingering chain-of-thought markers.
+ final_text = re.sub(r"<think>.*?</think>", "", final_text, flags=re.DOTALL)
+
  # Step 6: PDF generation
  print("[LOG] Step 6: Generating final PDF from the merged text.")
  final_report = generate_report(final_text)
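The same <think> scrub now runs twice, once on the scraped content and once on the merged LLM output. One way to keep the two call sites in sync would be a small helper like the sketch below; this is an editorial suggestion, not something the commit adds.

import re

def strip_think_blocks(text: str) -> str:
    """Remove <think>...</think> chain-of-thought markers, including multi-line ones."""
    return re.sub(r"<think>.*?</think>", "", text, flags=re.DOTALL)

# Example: both the scraped content and the final LLM text could go through the same scrub.
print(strip_think_blocks("kept <think>\nhidden planning\n</think>also kept"))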
 