import requests
import time
from PIL import Image
import gradio as gr


# Timestamp of the most recent endpoint error, used to estimate how long users should wait.
hold_time = time.time()

API_URL = "https://cm7kxsqi3sekfih7.us-east-1.aws.endpoints.huggingface.cloud"
headers = {
	"Accept" : "application/json",
	"Content-Type": "application/json" 
}



def query(payload):
	"""POST the payload to the Inference Endpoint and return the parsed JSON, or None on failure."""
	global hold_time
	response = requests.post(API_URL, headers=headers, json=payload)
	if response.status_code != 200:
		# The endpoint is not ready yet (e.g. a scale-to-zero cold start). Record when the
		# error happened so run_model can estimate a wait time to show the user.
		print('Sleeping due to API error')
		if (time.time() - hold_time) > 60:
			hold_time = time.time()
		return None
	return response.json()
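# Request/response shape used throughout this app (taken from the calls below):
#   payload  = {"inputs": "<dialectal Arabic text>", "parameters": {}}
#   response = [{"generated_text": "<Modern Standard Arabic text>"}]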


def run_model(Dialects):
	"""Convert dialectal Arabic to MSA, or return a status message while the endpoint loads."""
	global hold_time
	output = query({
		"inputs": Dialects,
		"parameters": {}
	})
	if output:
		# Successful call: reset hold_time so the next cold start is timed from scratch.
		hold_time = 1
		return output[0]['generated_text']
	else:
		# Endpoint still loading: estimate the remaining wait from the last recorded error.
		wait_time = int((hold_time - time.time()) + 35)
		if wait_time >= 0:
			return f'Model is being loaded, please try again in {wait_time} seconds.'
		else:
			return 'Taking longer than usual to load, please wait.'
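# For example, run_model("kef al7al") returns the model's MSA output once the endpoint is warm,
# and a "Model is being loaded, please try again in N seconds." message during a cold start.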
		
	



# Warm-up request: ping the endpoint once at startup so a scaled-to-zero instance
# begins loading the model before the first user submits text.
response = requests.post(API_URL, headers=headers, json={
	"inputs": 'احا',
	"parameters": {}
})
if response.status_code != 200:
	print('Sleeping while the model is being loaded')
	time.sleep(40)


# Example inputs shown in the UI: dialectal Arabic, English, and Arabizi.
examples_text = [
	["ما ابغا أروح الإمتحان"],  # "I don't want to go to the exam"
	["أييد أن انام ف لبيتنا"],  # "I want to sleep at our house"
	["Hello how are you today"],
	["kef al7al"]  # Arabizi for "how are you"
]


def mode_run(text):
	# Thin wrapper used as the Gradio event callback.
	return run_model(text)

# Anchor-tag template for the footer links.
link = '<a href="{}" target="_blank" style="cursor: pointer; font-size: 18px;">{}</a>'

# gr.themes.GoogleFont is a font, not a theme, so wrap it in a theme that uses it.
with gr.Blocks(theme=gr.themes.Default(font=gr.themes.GoogleFont('ali'))) as demo:



	gr.Markdown(
		"""
		## Dialects to MSA Transformer
		Start typing dialectal (non-standard) Arabic to convert it into Modern Standard Arabic.
		"""
	)

	with gr.Row():
		with gr.Column():
			input = gr.Textbox(label='Dialects')
		with gr.Column():
			output = gr.Textbox(label='MSA')
		
		

	with gr.Row():
		button = gr.Button('Submit', variant='primary')
		clear = gr.ClearButton(input)

	# cache_examples=True precomputes the example outputs with mode_run when the app starts.
	examples = gr.Examples(examples_text, input, output, mode_run, cache_examples=True)
	
	
	with gr.Row():
		gr.Markdown(
			"""
			## Model Overview
			This model converts text written in various non-standard Arabic dialects into Modern Standard Arabic. It was fine-tuned on 0.8M sentence pairs generated with OpenAI's gpt-4o-mini text-generation model via the OpenAI API. Besides converting dialects into MSA, it can also be used for other NLP tasks such as text correction, diacritization, and sentence punctuation.
			"""
		)
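	# A minimal local-usage sketch, not part of this app (it assumes the Hub repo linked in the
	# footer loads as a transformers text2text-generation pipeline):
	#
	#   from transformers import pipeline
	#   dialects_to_msa = pipeline("text2text-generation", model="HamzaNaser/Dialects-to-MSA-Transformer")
	#   print(dialects_to_msa("kef al7al")[0]["generated_text"])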

	


	with gr.Row():
		gr.Markdown(
			"""
			## Dialects the Model Was Trained On
			The image below shows an estimate of the dialects the model was trained on.
			"""
		)
	with gr.Row():
		with gr.Column(scale=3):
			gr.Image(Image.open('Dialects by Region.png'), height=300, container=False)
		with gr.Column(scale=3):
			# Empty column so the image only takes up half of the row width.
			pass



	with gr.Row():
		with gr.Column(scale=1):
			pass
		with gr.Column(scale=2):
			gr.HTML(
				'<div style="text-align: center;">' +
				link.format('https://huggingface.co/HamzaNaser/Dialects-to-MSA-Transformer', 'Model Card') + ' -- ' + link.format('https://www.linkedin.com/in/hamza-naser-0b4b90160/', 'LinkedIn') +
				'</div>'
			)
		with gr.Column(scale=1):
			pass


	# Run the conversion when the user presses Enter in the textbox or clicks Submit.
	input.submit(mode_run, input, output)
	button.click(mode_run, input, output)

demo.launch()