import gradio as gr
from huggingface_hub import InferenceClient
from PyPDF2 import PdfReader
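# Note: PyPDF2 is no longer maintained; its successor, pypdf, exposes the same
# PdfReader API if a swap is ever needed.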
# Models Setup
models = {
    "Job Consultant (Zephyr)": {
        "client": InferenceClient(model="HuggingFaceH4/zephyr-7b-beta"),
    },
    "PDF Summarizer (T5)": {
        "client": InferenceClient(model="aaliyaan/t5-small-finetuned-career"),
    },
    "Broken Answer (T0pp)": {
        "client": InferenceClient(model="bigscience/T0p"),
    },
}
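# Note: zephyr-7b-beta is a conversational model; the T5 and T0 entries are
# text2text models, so chat_completion may not be supported for them on the
# serverless Inference API (presumably why one entry is labeled "Broken Answer").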
# Chat Function with Context
def chat_with_model(model_choice, user_message, chat_history, file=None):
    # Attach extracted PDF text when the PDF summarizer is selected
    # (the key must match the entry in the models dict)
    if model_choice == "PDF Summarizer (T5)" and file is not None:
        pdf_text = extract_text_from_pdf(file)
        user_message += f"\n\nPDF Content:\n{pdf_text}"
    if not user_message.strip():
        return chat_history, ""
    model_info = models[model_choice]
    client = model_info["client"]
    # Prepare messages for the InferenceClient, including the chat history
    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    # Add the previous conversation to the messages
    for user_msg, bot_msg in chat_history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    # Add the current user message
    messages.append({"role": "user", "content": user_message})
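    # At this point `messages` looks like, for example:
    # [{"role": "system", "content": "You are a helpful assistant."},
    #  {"role": "user", "content": "Hi"},
    #  {"role": "assistant", "content": "Hello! How can I help?"},
    #  {"role": "user", "content": "<current user_message>"}]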
    # Generate the response, streamed token by token
    response = ""
    for message in client.chat_completion(
        messages,
        max_tokens=150,
        stream=True,
        temperature=0.7,
        top_p=0.95,
    ):
        # Some stream chunks carry an empty delta (e.g. the final chunk), so guard against None
        token = message.choices[0].delta.content
        if token:
            response += token
    # Update the chat history shown in the UI and clear the input box
    chat_history.append((user_message, response))
    return chat_history, ""
# Function to extract text from a PDF
def extract_text_from_pdf(file):
    # Gradio may hand over a filepath string or a tempfile-like object, depending on version
    path = file if isinstance(file, str) else file.name
    reader = PdfReader(path)
    # Extract each page once, skipping pages with no recoverable text
    text = "\n".join(
        page_text for page in reader.pages if (page_text := page.extract_text())
    )
    return text
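# Note: PdfReader.extract_text() only reads an embedded text layer; scanned,
# image-only pages typically yield an empty string and are skipped above.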
# Interface setup
def create_chat_interface():
    with gr.Blocks(css="""
        .chatbox {
            background-color: #f7f7f8;
            border-radius: 12px;
            padding: 16px;
            font-family: 'Segoe UI', Tahoma, sans-serif;
        }
        .chat-title {
            font-size: 24px;
            font-weight: bold;
            text-align: center;
            margin-bottom: 12px;
            color: #3a9fd6;
        }
    """) as interface:
        gr.Markdown("<div class='chat-title'>Job Consultant AI</div>")
        with gr.Row():
            model_choice = gr.Dropdown(
                choices=list(models.keys()),
                value="Job Consultant (Zephyr)",
                label="Select Model"
            )
        # Chatbot history is a list of (user_message, bot_message) tuples
        chat_history = gr.Chatbot(label="Chat History", elem_classes="chatbox")
        user_message = gr.Textbox(
            placeholder="Type your message here and press Enter...",
            show_label=False,
            elem_classes="chatbox",
        )
        file_input = gr.File(label="Upload PDF", visible=False, file_types=[".pdf"])

        # Show the PDF upload box only for the summarizer model (key must match the models dict)
        def toggle_pdf_input(selected_model):
            return gr.update(visible=(selected_model == "PDF Summarizer (T5)"))

        model_choice.change(fn=toggle_pdf_input, inputs=model_choice, outputs=file_input)
        # Link the input box to send messages on Enter
        user_message.submit(
            chat_with_model,
            inputs=[model_choice, user_message, chat_history, file_input],
            outputs=[chat_history, user_message],
        )
    return interface
if __name__ == "__main__":
    interface = create_chat_interface()
    interface.launch(server_name="0.0.0.0", server_port=7860)