import torch
import gradio as gr
# Use a pipeline as a high-level helper
from transformers import pipeline

# torch_dtype=torch.bfloat16 loads the weights in half precision, which
# reduces memory use without a noticeable drop in output quality.
text_summary = pipeline("summarization",
                        model="sshleifer/distilbart-xsum-1-1",
                        torch_dtype=torch.bfloat16)

# To load the model from a local snapshot instead of the Hub:
# model_path = "../Models/models--sshleifer--distilbart-xsum-1-1/snapshots/891968fcbb0e421075cc2c3dfc8da8d4b24d54a4"
# text_summary = pipeline("summarization", model=model_path,
#                         torch_dtype=torch.bfloat16)

text = ("Elon Reeve Musk (/ˈiːlɒn/ EE-lon; born June 28, 1971) is a businessman known for his "
        "leadership of Tesla, SpaceX, and X (formerly Twitter). Since 2025, he has been a senior "
        "advisor to United States President Donald Trump and the de facto head of the Department "
        "of Government Efficiency (DOGE). Musk is the wealthiest person in the world; as of March "
        "2025, Forbes estimates his net worth to be US$345 billion. He was named Time magazine's "
        "Person of the Year in 2021.")
# print(text_summary(text))

# Gradio wraps a function: whatever the user types into the input components
# is passed to the function, and its return value is shown in the outputs.
def summary(input):
    output = text_summary(input)
    return output[0]['summary_text']

gr.close_all()

# demo = gr.Interface(fn=summary, inputs="text", outputs="text")
demo = gr.Interface(fn=summary,
                    inputs=[gr.Textbox(label="Input text to summarize", lines=6)],
                    outputs=[gr.Textbox(label="Summarized text", lines=4)],
                    title="GenAI Text Summarizer",
                    description="This application creates a summary of the input text.")
demo.launch(share=True)
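
# Optional sketch: the summarization pipeline also accepts generation arguments
# such as max_length, min_length, and do_sample on each call. The values below
# are illustrative assumptions, not tuned settings; swap this function in for
# `summary` above if you want tighter control over summary length.
# def summary_short(input):
#     output = text_summary(input, max_length=60, min_length=15, do_sample=False)
#     return output[0]['summary_text']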