Update app.py
app.py CHANGED
@@ -6,7 +6,6 @@ from transformers import pipeline, BloomForCausalLM, BloomTokenizerFast
 from huggingface_hub import login
 import requests
 import os
-
 from models import evaluate_with_gpt,evaluate_with_gemma,evaluate_with_bloom,evaluate_with_jabir,evaluate_with_llama
 
 
@@ -32,7 +31,7 @@ def evaluate_all_models(pdf_file, job_description):
 
 iface = gr.Interface(
     # fn=lambda pdf, jd, model: evaluate_with_gpt(pdf, jd) if model == "GPT-4o" else evaluate_with_gemma(pdf, jd) if model == "Gemma" else evaluate_with_bloom(pdf, jd) if model == "Bloom" else evaluate_with_jabir(pdf, jd) if model == "jabir" else evaluate_all_models(pdf, jd) if model == "llama" else evaluate_all_models(pdf, jd),
-    fn=lambda pdf, jd, model: evaluate_with_llama(pdf, jd)
+    fn=lambda pdf, jd, model: evaluate_with_llama(pdf, jd)
 
     inputs=[
         gr.File(label="Upload Resume PDF"),
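
For context, a minimal, self-contained sketch of the Gradio app this commit edits. It assumes models.py exposes evaluate_with_llama(pdf, jd) as the import line suggests; the stub below stands in for that function, and the job-description textbox, model dropdown, and output textbox are assumptions inferred from the commented-out lambda rather than code shown in the diff. Note that the fn=... argument needs a trailing comma before inputs=[ for the call to parse.

import gradio as gr


# Hypothetical stand-in for models.evaluate_with_llama; in the Space the real
# implementation is imported from models.py.
def evaluate_with_llama(pdf_file, job_description):
    # The real function would read the resume PDF and score it against the
    # job description with a Llama model; this stub only echoes its inputs.
    name = getattr(pdf_file, "name", pdf_file)
    return f"Evaluating resume {name} against the provided job description."


# Mirrors the wiring in the diff: a model dropdown is still offered, but the
# lambda routes every choice to evaluate_with_llama.
iface = gr.Interface(
    fn=lambda pdf, jd, model: evaluate_with_llama(pdf, jd),
    inputs=[
        gr.File(label="Upload Resume PDF"),
        gr.Textbox(label="Job Description"),  # assumed second input
        gr.Dropdown(
            ["GPT-4o", "Gemma", "Bloom", "jabir", "llama"],  # choices inferred from the commented-out lambda
            label="Model",
        ),
    ],
    outputs=gr.Textbox(label="Evaluation"),  # assumed output component
)

if __name__ == "__main__":
    iface.launch()

After this change the Model dropdown no longer affects dispatch: every selection is routed to evaluate_with_llama, while the commented-out lambda preserves the earlier per-model routing to evaluate_with_gpt, evaluate_with_gemma, evaluate_with_bloom, and evaluate_with_jabir.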