Update app.py
app.py CHANGED
@@ -8,6 +8,8 @@ from bs4 import BeautifulSoup
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
 def extract_text_from_pdf(file):
+    if file is None:
+        return ""
     reader = PdfReader(file)
     text = ""
     for page in reader.pages:
@@ -24,7 +26,7 @@ def ats_friendly_checker(file):
         max_tokens=512,
         temperature=0.7,
         top_p=0.95
-    ).choices[0].message
+    ).choices[0].message.content
 
     score = response.split("\n")[0].split(":")[-1].strip()
     feedback = "\n".join(response.split("\n")[1:])
@@ -47,7 +49,7 @@ def resume_match_checker(file, job_url):
         max_tokens=512,
         temperature=0.7,
         top_p=0.95
-    ).choices[0].message
+    ).choices[0].message.content
 
     match_score = response.split(":")[-1].strip()
     return match_score
@@ -55,17 +57,19 @@ def resume_match_checker(file, job_url):
 def resume_quality_score(file):
     resume_text = extract_text_from_pdf(file)
     # Implement resume quality scoring logic using LLM
-    system_message = "Evaluate the following resume for overall quality and provide a score."
+    system_message = "Evaluate the following resume for overall quality and provide a score and interpretation."
     message = resume_text
     response = client.chat_completion(
         [{"role": "system", "content": system_message}, {"role": "user", "content": message}],
         max_tokens=512,
         temperature=0.7,
         top_p=0.95
-    ).choices[0].message
+    ).choices[0].message.content
 
-
-
+    score_lines = response.split("\n")
+    quality_score = score_lines[0].split(":")[-1].strip()
+    interpretation = "\n".join(score_lines[1:])
+    return quality_score, interpretation
 
 def text_to_overleaf(resume_text):
     # Implement the conversion to Overleaf code using LLM
@@ -76,7 +80,7 @@ def text_to_overleaf(resume_text):
         max_tokens=512,
         temperature=0.7,
         top_p=0.95
-    ).choices[0].message
+    ).choices[0].message.content
 
     overleaf_code = response
     return overleaf_code
@@ -103,7 +107,8 @@ with gr.Blocks() as demo:
         with gr.Row():
            resume = gr.File(label="Upload your Resume (PDF)")
            quality_score = gr.Number(label="Quality Score", interactive=False)
-
+            interpretation = gr.Textbox(label="Interpretation", interactive=False)
+        resume.upload(resume_quality_score, resume, [quality_score, interpretation])
 
     with gr.Tab("Text to Overleaf Code"):
         with gr.Row():
@@ -114,4 +119,4 @@ with gr.Blocks() as demo:
     gr.Markdown("---\nBuilt with love by [Bahae Eddine HALIM](https://www.linkedin.com/in/halimbahae/)")
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch(share=True)
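For reference, a minimal standalone sketch of the response-handling pattern this commit converges on: chat_completion() on huggingface_hub.InferenceClient returns a completion object whose generated text lives at .choices[0].message.content, which is why each bare .choices[0].message above gains .content. The model name, prompt, and parsing below are taken from the diff; the function name score_resume and the surrounding scaffolding are illustrative only, not part of the Space.

# Illustrative sketch (not part of the commit): the corrected
# ".choices[0].message.content" access pattern used throughout the diff.
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

def score_resume(resume_text):
    # Mirrors the updated resume_quality_score(): request a score plus an
    # interpretation, then split the reply on newlines.
    system_message = "Evaluate the following resume for overall quality and provide a score and interpretation."
    response = client.chat_completion(
        [{"role": "system", "content": system_message}, {"role": "user", "content": resume_text}],
        max_tokens=512,
        temperature=0.7,
        top_p=0.95,
    ).choices[0].message.content  # .content holds the generated string

    lines = response.split("\n")
    score = lines[0].split(":")[-1].strip()
    interpretation = "\n".join(lines[1:])
    return score, interpretation

The Gradio side of the change follows the same shape: upload() on a gr.File component is a standard event listener taking (fn, inputs, outputs), so resume.upload(resume_quality_score, resume, [quality_score, interpretation]) maps the function's two return values onto the Number and Textbox components.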