Garvitj committed
Commit 2aa09ae · verified · 1 parent: 4fbc74b

Update app.py

Files changed (1)
  1. app.py (+146, -44)
app.py CHANGED
@@ -1,48 +1,152 @@
  import gradio as gr
- from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
- import torch
- import numpy as np
- import cv2
- from PIL import Image
  import pytesseract
  from sentence_transformers import SentenceTransformer, util
- import io
  from typing import List
-
- def extract_text_from_image(filepath: str, languages: List[str]):
-     image = Image.open(filepath)
-     return pytesseract.image_to_string(image=image, lang=', '.join(languages))
-
- # tess.pytesseract.tesseract_cmd = r"tesseract"
-
  import requests

  API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"
- headers = {"Authorization": "hf_TsCTtXxnvpmhFKABqKmcVLyLEhjQPsITSVx"}

  def query(payload):
-     response = requests.post(API_URL, headers=headers, json=payload)
-     return response.json()
-
- # output = query({
- #     "inputs": "Can you please let us know more details about your ",
- # })

  def generate_response(prompt):
-     # Generate response from the API
-     response = query({"inputs":prompt})
      return response[0]['generated_text']


  def get_embedding(text):
      return model1.encode(text, convert_to_tensor=True)

  def calculate_similarity(text1, text2):
      embedding1 = get_embedding(text1)
      embedding2 = get_embedding(text2)
      similarity = util.pytorch_cos_sim(embedding1, embedding2)
      return similarity.item()

  def get_grade(similarity_score):
      if similarity_score >= 0.9:
          return 5
@@ -55,45 +159,43 @@ def get_grade(similarity_score):
      else:
          return 1

-
- def evaluate_answer(image,languages):
-     student_answer = extract_text_from_image(image,languages)
      model_answer = "The process of photosynthesis helps plants produce glucose using sunlight."
      similarity_score = calculate_similarity(student_answer, model_answer)
      grade = get_grade(similarity_score)
      feedback = f"Student's answer: {student_answer}\nTeacher's answer: {model_answer}"
      return grade, similarity_score * 100, feedback

- def generate_response(prompt):
-     # Generate response from the new model using the pipeline
-     response = pipe(prompt, max_length=150, temperature=0.7)
-     return response[0]['generated_text']
-
- def gradio_interface(image, languages: List[str]):
-     grade, similarity_score, feedback = evaluate_answer(image,languages)
      response = generate_response(prompt)
-     return grade, similarity_score, response

- # # Define Gradio interface
- # interface = gr.Interface(
- #     fn=gradio_interface,
- #     inputs=[gr.Image(type="pil"), gr.Textbox(lines=2, placeholder="Enter your prompt here")],
- #     outputs=[gr.Label(), gr.Label(), gr.Textbox(), gr.Textbox()],
- #     live=True
- # )
  language_choices = pytesseract.get_languages()

  interface = gr.Interface(
      fn=gradio_interface,
      inputs=[
          gr.Image(type="filepath", label="Input"),
-         gr.CheckboxGroup(language_choices, type="value", value=['eng'], label='language')
-     ],
-     outputs=[gr.Text(label="Grade"), gr.Number(label="Similarity Score (%)"), gr.Text(label="Feedback")],
      title="Automated Grading System",
      description="Upload an image of your answer sheet to get a grade from 1 to 5, similarity score, and feedback based on the model answer.",
      live=True
  )
-

  if __name__ == "__main__":
      interface.launch()
 
 
+ # import gradio as gr
+ # from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+ # import torch
+ # import numpy as np
+ # import cv2
+ # from PIL import Image
+ # import pytesseract
+ # from sentence_transformers import SentenceTransformer, util
+ # import io
+ # from typing import List
+
+ # def extract_text_from_image(filepath: str, languages: List[str]):
+ #     image = Image.open(filepath)
+ #     return pytesseract.image_to_string(image=image, lang=', '.join(languages))
+
+ # # tess.pytesseract.tesseract_cmd = r"tesseract"
+
+ # import requests
+
+ # API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"
+ # headers = {"Authorization": "hf_TsCTtXxnvpmhFKABqKmcVLyLEhjQPsITSVx"}
+
+ # def query(payload):
+ #     response = requests.post(API_URL, headers=headers, json=payload)
+ #     return response.json()
+
+ # # output = query({
+ # #     "inputs": "Can you please let us know more details about your ",
+ # # })
+
+ # def generate_response(prompt):
+ #     # Generate response from the API
+ #     response = query({"inputs":prompt})
+ #     return response[0]['generated_text']
+
+
+ # def get_embedding(text):
+ #     return model1.encode(text, convert_to_tensor=True)
+
+ # def calculate_similarity(text1, text2):
+ #     embedding1 = get_embedding(text1)
+ #     embedding2 = get_embedding(text2)
+ #     similarity = util.pytorch_cos_sim(embedding1, embedding2)
+ #     return similarity.item()
+
+ # def get_grade(similarity_score):
+ #     if similarity_score >= 0.9:
+ #         return 5
+ #     elif similarity_score >= 0.8:
+ #         return 4
+ #     elif similarity_score >= 0.7:
+ #         return 3
+ #     elif similarity_score >= 0.6:
+ #         return 2
+ #     else:
+ #         return 1
+
+
+ # def evaluate_answer(image,languages):
+ #     student_answer = extract_text_from_image(image,languages)
+ #     model_answer = "The process of photosynthesis helps plants produce glucose using sunlight."
+ #     similarity_score = calculate_similarity(student_answer, model_answer)
+ #     grade = get_grade(similarity_score)
+ #     feedback = f"Student's answer: {student_answer}\nTeacher's answer: {model_answer}"
+ #     return grade, similarity_score * 100, feedback
+
+ # def generate_response(prompt):
+ #     # Generate response from the new model using the pipeline
+ #     response = pipe(prompt, max_length=150, temperature=0.7)
+ #     return response[0]['generated_text']
+
+ # def gradio_interface(image, languages: List[str]):
+ #     grade, similarity_score, feedback = evaluate_answer(image,languages)
+ #     response = generate_response(prompt)
+ #     return grade, similarity_score, response
+
+ # # # Define Gradio interface
+ # # interface = gr.Interface(
+ # #     fn=gradio_interface,
+ # #     inputs=[gr.Image(type="pil"), gr.Textbox(lines=2, placeholder="Enter your prompt here")],
+ # #     outputs=[gr.Label(), gr.Label(), gr.Textbox(), gr.Textbox()],
+ # #     live=True
+ # # )
+ # language_choices = pytesseract.get_languages()
+ # interface = gr.Interface(
+ #     fn=gradio_interface,
+ #     inputs=[
+ #         gr.Image(type="filepath", label="Input"),
+ #         gr.CheckboxGroup(language_choices, type="value", value=['eng'], label='language')
+ #     ],
+ #     outputs=[gr.Text(label="Grade"), gr.Number(label="Similarity Score (%)"), gr.Text(label="Feedback")],
+ #     title="Automated Grading System",
+ #     description="Upload an image of your answer sheet to get a grade from 1 to 5, similarity score, and feedback based on the model answer.",
+ #     live=True
+ # )
+
+
+ # if __name__ == "__main__":
+ #     interface.launch()
+
+
+
+
+
+
+
  import gradio as gr
+ from transformers import pipeline
  import pytesseract
  from sentence_transformers import SentenceTransformer, util
+ from PIL import Image
  from typing import List
  import requests

+ # Initialize sentence transformer model
+ model1 = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
+
+ # Hugging Face API details
  API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"
+ headers = {"Authorization": "Bearer hf_TsCTtXxnvpmhFKABqKmcVLyLEhjQPsITSVx"}

+ # Function to interact with Hugging Face API for GPT-2
  def query(payload):
+     response = requests.post(API_URL, headers=headers, json=payload)
+     return response.json()

+ # Function to generate text response from GPT-2 model using Hugging Face API
  def generate_response(prompt):
+     response = query({"inputs": prompt})
      return response[0]['generated_text']

+ # Extract text from an image using Tesseract
+ def extract_text_from_image(filepath: str, languages: List[str]):
+     image = Image.open(filepath)
+     lang_str = '+'.join(languages)  # Join languages for Tesseract
+     return pytesseract.image_to_string(image=image, lang=lang_str)

+ # Function to get embeddings for text using SentenceTransformer
  def get_embedding(text):
      return model1.encode(text, convert_to_tensor=True)

+ # Calculate similarity between two texts using cosine similarity
  def calculate_similarity(text1, text2):
      embedding1 = get_embedding(text1)
      embedding2 = get_embedding(text2)
      similarity = util.pytorch_cos_sim(embedding1, embedding2)
      return similarity.item()

+ # Assign grades based on similarity score
  def get_grade(similarity_score):
      if similarity_score >= 0.9:
          return 5

      else:
          return 1

+ # Function to evaluate student's answer by comparing it to a model answer
+ def evaluate_answer(image, languages):
+     student_answer = extract_text_from_image(image, languages)
      model_answer = "The process of photosynthesis helps plants produce glucose using sunlight."
      similarity_score = calculate_similarity(student_answer, model_answer)
      grade = get_grade(similarity_score)
      feedback = f"Student's answer: {student_answer}\nTeacher's answer: {model_answer}"
      return grade, similarity_score * 100, feedback

+ # Main interface function for Gradio
+ def gradio_interface(image, languages: List[str], prompt):
+     grade, similarity_score, feedback = evaluate_answer(image, languages)
      response = generate_response(prompt)
+     return grade, similarity_score, feedback, response

+ # Get available Tesseract languages
  language_choices = pytesseract.get_languages()
+
+ # Define Gradio interface
  interface = gr.Interface(
      fn=gradio_interface,
      inputs=[
          gr.Image(type="filepath", label="Input"),
+         gr.CheckboxGroup(language_choices, type="value", value=['eng'], label='language'),
+         gr.Textbox(lines=2, placeholder="Enter your prompt here", label="Prompt")
+     ],
+     outputs=[
+         gr.Text(label="Grade"),
+         gr.Number(label="Similarity Score (%)"),
+         gr.Text(label="Feedback"),
+         gr.Text(label="Generated Response")
+     ],
      title="Automated Grading System",
      description="Upload an image of your answer sheet to get a grade from 1 to 5, similarity score, and feedback based on the model answer.",
      live=True
  )

  if __name__ == "__main__":
      interface.launch()
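
The OCR helper in the new version joins the selected languages with '+', which is the multi-language selector format Tesseract expects (e.g. "eng+hin"), rather than a comma-separated string. A minimal standalone sketch of that call, assuming a placeholder image path answer.png and that the chosen traineddata packs are installed (pytesseract.get_languages() lists what is actually available):

import pytesseract
from PIL import Image

# Placeholder inputs: any local image and any installed Tesseract language packs.
filepath = "answer.png"
languages = ["eng", "hin"]

# Tesseract takes a single '+'-separated string of language codes, e.g. "eng+hin".
lang_str = '+'.join(languages)
print(pytesseract.image_to_string(Image.open(filepath), lang=lang_str))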
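
The Authorization header for the Inference API now carries the required "Bearer " prefix. A minimal sketch of the same GPT-2 call with basic error handling, assuming a valid token in place of the placeholder and the usual text-generation response shape of [{"generated_text": ...}]:

import requests

API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"
headers = {"Authorization": "Bearer <your_hf_token>"}  # placeholder token

def generate_response(prompt: str) -> str:
    # POST the prompt and surface HTTP errors instead of indexing into an error payload.
    response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
    response.raise_for_status()
    return response.json()[0]["generated_text"]

print(generate_response("Photosynthesis is"))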