Spaces:
Sleeping
Sleeping
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import pickle
|
3 |
+
import re
|
4 |
+
|
5 |
+
import gradio as gr
|
6 |
+
import matplotlib.pyplot as plt
|
7 |
+
import networkx as nx
|
8 |
+
from tqdm import tqdm
|
9 |
+
|
10 |
+
from Utility.utils import load_json_from_path
|
11 |
+
|
12 |
+
|
class Visualizer:
    """Interactive visualizer for pairwise language-distance measures.

    Loads three precomputed distance lookups from *cache_root* — family-tree
    distance, globally normalized geographic distance, and angular
    phoneme-frequency distance — plus an ISO-639-3 code-to-name mapping, and
    renders the nearest neighbors of a chosen language as a spring-layout
    graph.
    """

    def __init__(self, cache_root="."):
        """Load and preprocess all cached distance lookups.

        Args:
            cache_root: Directory containing the cached JSON/pickle files.
        """
        tree_lookup_path = os.path.join(cache_root, "lang_1_to_lang_2_to_tree_dist.json")
        self.tree_dist = load_json_from_path(tree_lookup_path)

        map_lookup_path = os.path.join(cache_root, "lang_1_to_lang_2_to_map_dist.json")
        self.map_dist = load_json_from_path(map_lookup_path)
        # Normalize geographic distances into [0, 1] by the global maximum.
        largest_value_map_dist = 0.0
        for values in self.map_dist.values():
            for value in values.values():
                largest_value_map_dist = max(largest_value_map_dist, value)
        if largest_value_map_dist == 0.0:
            largest_value_map_dist = 1.0  # degenerate data: avoid ZeroDivisionError
        for key1 in self.map_dist:
            for key2 in self.map_dist[key1]:
                self.map_dist[key1][key2] = self.map_dist[key1][key2] / largest_value_map_dist

        # NOTE(review): pickle.load is only safe because asp_dict.pkl ships
        # with the app — never point this at untrusted input.
        asp_dict_path = os.path.join(cache_root, "asp_dict.pkl")
        with open(asp_dict_path, 'rb') as dictfile:
            asp_sim = pickle.load(dictfile)
        # Convert similarity rows into an upper-triangular distance dict:
        # the measure is symmetric, so each unordered pair is stored once.
        lang_list = list(asp_sim.keys())
        self.asp_dist = dict()
        seen_langs = set()
        for lang_1 in lang_list:
            if lang_1 not in seen_langs:
                seen_langs.add(lang_1)
                self.asp_dist[lang_1] = dict()
                for index, lang_2 in enumerate(lang_list):
                    if lang_2 not in seen_langs:  # it's symmetric
                        self.asp_dist[lang_1][lang_2] = 1 - asp_sim[lang_1][index]

        self.iso_codes_to_names = load_json_from_path(os.path.join(cache_root, "iso_to_fullname.json"))
        for code in self.iso_codes_to_names:
            # Strip parenthesized qualifiers from full names. Raw string for
            # the pattern — the original "\(" was an invalid escape sequence
            # (SyntaxWarning on modern CPython).
            self.iso_codes_to_names[code] = re.sub(r"\(.*?\)", "", self.iso_codes_to_names[code])

    def visualize(self, distance_type, neighbor, num_neighbors):
        """Draw the nearest neighbors of *neighbor* under *distance_type*.

        Args:
            distance_type: One of the three supported distance descriptions.
            neighbor: Full language name whose neighborhood is drawn.
            num_neighbors: How many nearest neighbors to display.

        Returns:
            The matplotlib Figure holding the spring-layout neighborhood graph.

        Raises:
            ValueError: If no distances are recorded for *neighbor*.
        """
        # Close old figures instead of clf(): each call opens a fresh figure,
        # so leaving previous ones alive leaks memory across Gradio calls.
        plt.close('all')
        plt.figure(figsize=(12, 12))

        assert distance_type in ["Physical Distance between Language Centroids on the Globe",
                                 "Distance to the Lowest Common Ancestor in the Language Family Tree",
                                 "Angular Distance between the Frequencies of Phonemes"]
        if distance_type == "Distance to the Lowest Common Ancestor in the Language Family Tree":
            distance_measure = self.tree_dist
        elif distance_type == "Angular Distance between the Frequencies of Phonemes":
            distance_measure = self.asp_dist
        elif distance_type == "Physical Distance between Language Centroids on the Globe":
            distance_measure = self.map_dist

        # Collect (name_1, name_2, distance) triples for every pair whose ISO
        # codes both have a known full name.
        distances = list()
        for lang_1 in distance_measure:
            if lang_1 not in self.iso_codes_to_names:
                continue
            for lang_2 in distance_measure[lang_1]:
                if lang_2 not in self.iso_codes_to_names:
                    continue
                distances.append((self.iso_codes_to_names[lang_1], self.iso_codes_to_names[lang_2], distance_measure[lang_1][lang_2]))

        G = nx.Graph()
        min_dist = min(d for _, _, d in distances)
        max_dist = max(d for _, _, d in distances)
        # Guard against a zero span (all distances equal) before normalizing.
        span = (max_dist - min_dist) or 1.0
        normalized_distances = [(entity1, entity2, (d - min_dist) / span) for entity1, entity2, d in distances]

        # All distances from the selected language to any other language.
        d_dist = list()
        for entity1, entity2, d in tqdm(normalized_distances):
            if neighbor == entity2 or neighbor == entity1:
                if entity1 != entity2:
                    d_dist.append(d)
        if not d_dist:
            raise ValueError(f"no distances recorded for {neighbor!r}")
        # Clamp the requested neighbor count: the UI slider goes up to 100,
        # which used to raise IndexError for languages with fewer pairs.
        thresh = sorted(d_dist)[min(num_neighbors, len(d_dist) - 1)]

        # Edges from the selected language to its nearest neighbors; the edge
        # weight acts as spring tension (closer languages pull harder).
        neighbors = set()
        for entity1, entity2, d in tqdm(normalized_distances):
            if d < thresh and (neighbor == entity2 or neighbor == entity1) and (entity1 != entity2):
                neighbors.add(entity1)
                neighbors.add(entity2)
                spring_tension = (thresh - d) * 10  # for vis purposes
                G.add_edge(entity1, entity2, weight=spring_tension)
        # discard, not remove: no KeyError when no edge qualified (e.g. 0 neighbors).
        neighbors.discard(neighbor)
        # Also connect the neighbors among themselves (weaker tension) so the
        # layout reflects their mutual distances.
        for entity1, entity2, d in tqdm(normalized_distances):
            if entity2 in neighbors and entity1 in neighbors:
                if entity1 != entity2:
                    spring_tension = thresh - d
                    G.add_edge(entity1, entity2, weight=spring_tension)

        pos = nx.spring_layout(G, weight="weight")  # Positions for all nodes
        edges = G.edges(data=True)
        nx.draw_networkx_nodes(G, pos, node_size=1, alpha=0.01)
        edges_connected_to_specific_node = [(u, v) for u, v in G.edges() if u == neighbor or v == neighbor]
        nx.draw_networkx_edges(G, pos, edgelist=edges_connected_to_specific_node, edge_color='orange', alpha=0.4, width=3)
        for u, v, d in edges:
            if u == neighbor or v == neighbor:
                nx.draw_networkx_edge_labels(G, pos, edge_labels={(u, v): round((thresh - (d['weight'] / 10)) * 10, 2)}, font_color="red", alpha=0.4)  # reverse modifications
        nx.draw_networkx_labels(G, pos, font_size=14, font_family='sans-serif', font_color='green')
        nx.draw_networkx_labels(G, pos, labels={neighbor: neighbor}, font_size=14, font_family='sans-serif', font_color='red')
        plt.title(f'Graph of {distance_type}')
        plt.subplots_adjust(left=0, right=1, top=0.9, bottom=0)
        plt.tight_layout()
        return plt.gcf()
112 |
+
|
113 |
+
|
if __name__ == '__main__':
    # Build the visualizer from the caches in the working directory and
    # expose it through a Gradio interface.
    vis = Visualizer(cache_root=".")
    language_names = [vis.iso_codes_to_names[iso_code] for iso_code in vis.iso_codes_to_names]

    distance_choices = ["Physical Distance between Language Centroids on the Globe",
                        "Distance to the Lowest Common Ancestor in the Language Family Tree",
                        "Angular Distance between the Frequencies of Phonemes"]
    distance_dropdown = gr.Dropdown(distance_choices,
                                    type="value",
                                    value='Physical Distance between Language Centroids on the Globe',
                                    label="Select the Type of Distance")
    language_dropdown = gr.Dropdown(language_names,
                                    type="value",
                                    value="German",
                                    label="Select the second Language (type on your keyboard to find it quickly)")
    neighbor_slider = gr.Slider(minimum=0, maximum=100, step=1,
                                value=12,
                                label="How many Nearest Neighbors should be displayed?")
    demo_description = ("<br><br> This demo allows you to find the nearest neighbors of a language from the ISO 639-3 list according to several distance measurement functions. "
                        "For more information, check out our paper: https://arxiv.org/abs/2406.06403 and our text-to-speech tool, in which we make use of "
                        "this technique: https://github.com/DigitalPhonetics/IMS-Toucan <br><br>")

    iface = gr.Interface(fn=vis.visualize,
                         inputs=[distance_dropdown,
                                 language_dropdown,
                                 neighbor_slider],
                         outputs=[gr.Plot(label="", show_label=False, format="png", container=True)],
                         description=demo_description,
                         fill_width=True,
                         allow_flagging="never")
    iface.launch()