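"""Gradio Space for visualizing the PLAID VKI-LS59 dataset.

For a chosen training sample and field name, the app decodes the pickled PLAID
Sample, colors the mesh nodes with the selected field, writes a vertex-colored
.obj file, and displays it in a gr.Model3D viewer next to the sample's scalars.
"""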
import os

# PYOPENGL_PLATFORM must be set to "osmesa" or "egl" *before* pyrender
# (and PyOpenGL) is imported, so that offscreen rendering works headlessly.
os.environ["PYOPENGL_PLATFORM"] = "egl"

import pickle

import gradio as gr
import matplotlib as mpl
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import numpy as np
import pyrender
from datasets import load_dataset
from plaid.containers.sample import Sample
from trimesh import Trimesh
# os.system("wget https://zenodo.org/records/10124594/files/Tensile2d.tar.gz") | |
# os.system("tar -xvf Tensile2d.tar.gz") | |
hf_dataset = load_dataset("PLAID-datasets/VKI-LS59", split="all_samples")

nb_samples = 671  # only the training samples have all fields
field_names_train = ["mach", "ro", "rou", "rov", "roe", "nut", "sdf"]

_HEADER_ = '''
<h2><b>Visualization demo of the <a href='https://huggingface.co/datasets/PLAID-datasets/VKI-LS59' target='_blank'>VKI-LS59 dataset</a></b></h2>
'''
def round_num(num)->str: | |
return '%s' % float('%.3g' % num) | |
def sample_info(sample_id_str, fieldn):
    """Build the info text and the vertex-colored .obj mesh for one sample and field."""
    # samples are stored as pickled PLAID Sample objects in the HF dataset
    sample_ = hf_dataset[int(sample_id_str)]["sample"]
    plaid_sample = Sample.model_validate(pickle.loads(sample_))
    # plaid_sample = Sample.load_from_dir("Tensile2d/dataset/samples/sample_" + str(sample_id_str).zfill(9))

    nodes = plaid_sample.get_nodes(base_name="Base_2_2")
    field = plaid_sample.get_field(fieldn, base_name="Base_2_2")

    # if nodes.shape[1] == 2:
    #     # pad 2D coordinates with a zero z-component
    #     nodes__ = np.zeros((nodes.shape[0], nodes.shape[1] + 1))
    #     nodes__[:, :-1] = nodes
    #     nodes = nodes__
    # map the selected field to per-node RGB colors with the viridis colormap
    norm = (field - field.min()) / (field.max() - field.min())
    colormap_func = plt.get_cmap('viridis')
    rgb_colors = colormap_func(norm)[:, :3]

    nb_nodes = nodes.shape[0]
    quads = plaid_sample.get_elements(base_name="Base_2_2")['QUAD_4']
    nb_quads = quads.shape[0]
    assert field.shape[0] == nb_nodes
with open("visu.obj", 'w') as f: | |
for i in range(nb_nodes): | |
f.write(f"v {-nodes[i,0]} {nodes[i,1]} {0.} {rgb_colors[i,0]} {rgb_colors[i,1]} {rgb_colors[i,2]}\n") | |
for i in range(nb_quads): | |
f.write(f"f {quads[i,0] + 1} {quads[i,3] + 1} {quads[i,2] + 1} {quads[i,1] + 1}\n") | |
# with open("visu.obj", 'w') as f: | |
# for i in range(nb_nodes): | |
# f.write(f"v {nodes[i,0]} {nodes[i,1]} {0.} {rgb_colors[i,0]} {rgb_colors[i,1]} {rgb_colors[i,2]}\n") | |
# for i in range(nb_quads): | |
# f.write(f"f {quads[i,0] + 1} {quads[i,1] + 1} {quads[i,2] + 1} {quads[i,3] + 1}\n") | |
# quads = plaid_sample.get_elements()['QUAD_4'] | |
# # generate colormap | |
# if np.linalg.norm(field) > 0: | |
# norm = mpl.colors.Normalize(vmin=np.min(field), vmax=np.max(field)) | |
# cmap = cm.nipy_spectral#cm.coolwarm | |
# m = cm.ScalarMappable(norm=norm, cmap=cmap) | |
# vertex_colors = m.to_rgba(field)[:,:3] | |
# else: | |
# vertex_colors = 1+np.zeros((field.shape[0], 3)) | |
# vertex_colors[:,0] = 0.2298057 | |
# vertex_colors[:,1] = 0.01555616 | |
# vertex_colors[:,2] = 0.15023281 | |
# # generate mesh | |
# trimesh = Trimesh(vertices = nodes, faces = quads) | |
# trimesh.visual.vertex_colors = vertex_colors | |
# mesh = pyrender.Mesh.from_trimesh(trimesh, smooth=False) | |
# # compose scene | |
# scene = pyrender.Scene(ambient_light=[.1, .1, .3], bg_color=[0, 0, 0]) | |
# camera = pyrender.PerspectiveCamera( yfov=np.pi / 6.0) | |
# light = pyrender.DirectionalLight(color=[1,1,1], intensity=1000.) | |
# scene.add(mesh, pose= np.eye(4)) | |
# scene.add(light, pose= np.eye(4)) | |
# scene.add(camera, pose=[[ 1, 0, 0, 0.02], | |
# [ 0, 1, 0, 0.21], | |
# [ 0, 0, 1, 0.19], | |
# [ 0, 0, 0, 1]]) | |
# # render scene | |
# r = pyrender.OffscreenRenderer(1024, 1024) | |
# color, _ = r.render(scene) | |
str__ = f"Training sample {sample_id_str}\n" | |
str__ += str(plaid_sample)+"\n" | |
if len(hf_dataset.description['in_scalars_names'])>0: | |
str__ += "\ninput scalars:\n" | |
for sname in hf_dataset.description['in_scalars_names']: | |
str__ += f"- {sname}: {round_num(plaid_sample.get_scalar(sname))}\n" | |
if len(hf_dataset.description['out_scalars_names'])>0: | |
str__ += "\noutput scalars:\n" | |
for sname in hf_dataset.description['out_scalars_names']: | |
str__ += f"- {sname}: {round_num(plaid_sample.get_scalar(sname))}\n" | |
# str__ += f"\n\nMesh number of nodes: {nodes.shape[0]}\n" | |
# if len(hf_dataset.description['in_fields_names'])>0: | |
# str__ += "\ninput fields:\n" | |
# for fname in hf_dataset.description['in_fields_names']: | |
# str__ += f"- {fname}\n" | |
# if len(hf_dataset.description['out_fields_names'])>0: | |
# str__ += "\noutput fields:\n" | |
# for fname in hf_dataset.description['out_fields_names']: | |
# str__ += f"- {fname}\n" | |
return str__, "./visu.obj" | |
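
# Minimal local-testing sketch (not part of the Space UI): calling sample_info
# directly writes visu.obj and returns the info text, e.g.
#   info, obj_path = sample_info("0", "mach")
#   print(info)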
if __name__ == "__main__":
    with gr.Blocks(fill_width=True) as demo:
        gr.Markdown(_HEADER_)
        with gr.Row(variant="panel"):
            with gr.Column(scale=1):
                d1 = gr.Slider(0, nb_samples - 1, value=0, label="Training sample id",
                               info="Choose between 0 and " + str(nb_samples - 1))
                output1 = gr.Text(label="Training sample info")
            with gr.Column(scale=2, min_width=300):
                d2 = gr.Dropdown(field_names_train, value=field_names_train[0], label="Field name")
                # output2 = gr.Image(label="Training sample visualization")
                output2 = gr.Model3D(label="Training sample visualization")

        # re-render the info text and the 3D view whenever the sample id or field changes
        d1.input(sample_info, [d1, d2], [output1, output2])
        d2.input(sample_info, [d1, d2], [output1, output2])

    demo.launch()