import gradio as gr
import requests
from bs4 import BeautifulSoup
from transformers import pipeline

# Load summarization pipeline from Hugging Face
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")

def scrape_website(url):
    """Extracts text from a website."""
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()  # Surface HTTP errors (403, 404, ...) instead of parsing an error page
        soup = BeautifulSoup(response.text, "html.parser")
        paragraphs = soup.find_all("p")
        text = " ".join([p.get_text() for p in paragraphs])
        return text if text else "No content found."
    except Exception as e:
        return f"Error: {str(e)}"

def summarize_website(url):
    """Scrapes website and summarizes the extracted content."""
    extracted_text = scrape_website(url)
    
    if "Error:" in extracted_text or len(extracted_text.split()) < 50:
        return "Could not extract enough text to summarize."
    
    # Summarize using the Hugging Face model; truncate the input to the model's
    # maximum sequence length (1024 tokens for BART) so long pages don't raise an error
    summary = summarizer(extracted_text, max_length=200, min_length=50, do_sample=False, truncation=True)
    return f"**Summary:**\n\n{summary[0]['summary_text']}"

# Gradio interface
iface = gr.Interface(
    fn=summarize_website,
    inputs="text",
    outputs="markdown",
    title="AI-Powered Website Summarizer",
    description="Enter a website URL, and this tool will summarize its content using an AI model."
)

iface.launch()