Update app.py
app.py
CHANGED
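This commit adds a block of manifold-analysis helpers (species-boundary estimation, the enhanced 3D manifold plot, a 2D projection, a density heatmap, feature distributions, a correlation matrix, summary statistics, and the `update_manifold_visualization` driver) and a new "Universal Manifold Explorer" tab in the Gradio UI that wires those helpers to interactive controls.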
@@ -292,6 +292,473 @@ df_combined["chaos_score"] = np.log1p(df_combined.get("diag_srl_gamma", 0)) / (d
 _audio_path_cache = {}
 _cmt_data_cache = {}

+# Advanced manifold analysis functions
+def calculate_species_boundary(df_combined):
+    """Calculate the geometric boundary between species using support vector machines."""
+    from sklearn.svm import SVC
+
+    # Prepare data for boundary calculation
+    human_data = df_combined[df_combined['source'] == 'Human'][['x', 'y', 'z']].values
+    dog_data = df_combined[df_combined['source'] == 'Dog'][['x', 'y', 'z']].values
+
+    # Create binary classification data
+    X = np.vstack([human_data, dog_data])
+    y = np.hstack([np.ones(len(human_data)), np.zeros(len(dog_data))])
+
+    # Fit SVM for boundary
+    svm = SVC(kernel='rbf', probability=True)
+    svm.fit(X, y)
+
+    # Create boundary surface
+    x_range = np.linspace(X[:, 0].min(), X[:, 0].max(), 20)
+    y_range = np.linspace(X[:, 1].min(), X[:, 1].max(), 20)
+    z_range = np.linspace(X[:, 2].min(), X[:, 2].max(), 20)
+
+    xx, yy = np.meshgrid(x_range, y_range)
+    boundary_points = []
+
+    for z_val in z_range:
+        grid_points = np.c_[xx.ravel(), yy.ravel(), np.full(xx.ravel().shape, z_val)]
+        probabilities = svm.predict_proba(grid_points)[:, 1]
+
+        # Find points near decision boundary (probability ~ 0.5)
+        boundary_mask = np.abs(probabilities - 0.5) < 0.05
+        if np.any(boundary_mask):
+            boundary_points.extend(grid_points[boundary_mask])
+
+    return np.array(boundary_points) if boundary_points else None
+
+def create_enhanced_manifold_plot(df_filtered, lens_selected, color_scheme, point_size,
+                                  show_boundary, show_trajectories):
+    """Create the main 3D manifold visualization with all advanced features."""
+
+    # Get CMT diagnostic values for the selected lens
+    alpha_col = f"diag_alpha_{lens_selected}"
+    srl_col = f"diag_srl_{lens_selected}"
+
+    # Determine color values based on scheme
+    if color_scheme == "Species":
+        color_values = [1 if s == "Human" else 0 for s in df_filtered['source']]
+        colorscale = [[0, '#1f77b4'], [1, '#ff7f0e']]  # Blue for Dog, Orange for Human
+        colorbar_title = "Species (Blue=Dog, Orange=Human)"
+    elif color_scheme == "Emotion":
+        unique_emotions = df_filtered['label'].unique()
+        emotion_map = {emotion: i for i, emotion in enumerate(unique_emotions)}
+        color_values = [emotion_map[label] for label in df_filtered['label']]
+        colorscale = 'Viridis'
+        colorbar_title = "Emotional State"
+    elif color_scheme == "CMT_Alpha":
+        color_values = df_filtered[alpha_col].values
+        colorscale = 'Plasma'
+        colorbar_title = f"CMT Alpha ({lens_selected})"
+    elif color_scheme == "CMT_SRL":
+        color_values = df_filtered[srl_col].values
+        colorscale = 'Turbo'
+        colorbar_title = f"SRL Complexity ({lens_selected})"
+    else:  # Cluster
+        color_values = df_filtered['cluster'].values
+        colorscale = 'Set3'
+        colorbar_title = "Cluster ID"
+
+    # Create hover text with rich information
+    hover_text = []
+    for _, row in df_filtered.iterrows():
+        hover_info = f"""
+        <b>{row['source']}</b>: {row['label']}<br>
+        File: {row['filepath']}<br>
+        <b>CMT Diagnostics ({lens_selected}):</b><br>
+        α: {row[alpha_col]:.4f}<br>
+        SRL: {row[srl_col]:.4f}<br>
+        Coordinates: ({row['x']:.3f}, {row['y']:.3f}, {row['z']:.3f})
+        """
+        hover_text.append(hover_info)
+
+    # Create main scatter plot
+    fig = go.Figure()
+
+    # Add main data points
+    fig.add_trace(go.Scatter3d(
+        x=df_filtered['x'],
+        y=df_filtered['y'],
+        z=df_filtered['z'],
+        mode='markers',
+        marker=dict(
+            size=point_size,
+            color=color_values,
+            colorscale=colorscale,
+            showscale=True,
+            colorbar=dict(title=colorbar_title),
+            opacity=0.8,
+            line=dict(width=0.5, color='rgba(50,50,50,0.5)')
+        ),
+        text=hover_text,
+        hovertemplate='%{text}<extra></extra>',
+        name='Communications'
+    ))
+
+    # Add species boundary if requested
+    if show_boundary:
+        boundary_points = calculate_species_boundary(df_filtered)
+        if boundary_points is not None and len(boundary_points) > 0:
+            fig.add_trace(go.Scatter3d(
+                x=boundary_points[:, 0],
+                y=boundary_points[:, 1],
+                z=boundary_points[:, 2],
+                mode='markers',
+                marker=dict(
+                    size=2,
+                    color='red',
+                    opacity=0.3
+                ),
+                name='Species Boundary',
+                hovertemplate='Species Boundary<extra></extra>'
+            ))
+
+    # Add trajectories if requested
+    if show_trajectories:
+        # Create trajectories between similar emotional states
+        for emotion in df_filtered['label'].unique():
+            emotion_data = df_filtered[df_filtered['label'] == emotion]
+            if len(emotion_data) > 1:
+                # Connect points within each emotional state
+                x_coords = emotion_data['x'].values
+                y_coords = emotion_data['y'].values
+                z_coords = emotion_data['z'].values
+
+                fig.add_trace(go.Scatter3d(
+                    x=x_coords,
+                    y=y_coords,
+                    z=z_coords,
+                    mode='lines',
+                    line=dict(width=2, color='rgba(100,100,100,0.3)'),
+                    name=f'{emotion} trajectory',
+                    showlegend=False,
+                    hovertemplate='%{fullData.name}<extra></extra>'
+                ))
+
+    # Update layout
+    fig.update_layout(
+        title={
+            'text': "Universal Interspecies Communication Manifold<br><sub>First mathematical map of cross-species communication geometry</sub>",
+            'x': 0.5,
+            'xanchor': 'center'
+        },
+        scene=dict(
+            xaxis_title='Manifold Dimension 1',
+            yaxis_title='Manifold Dimension 2',
+            zaxis_title='Manifold Dimension 3',
+            camera=dict(
+                eye=dict(x=1.5, y=1.5, z=1.5)
+            ),
+            bgcolor='rgba(0,0,0,0)',
+            aspectmode='cube'
+        ),
+        margin=dict(l=0, r=0, b=0, t=60),
+        legend=dict(
+            yanchor="top",
+            y=0.99,
+            xanchor="left",
+            x=0.01
+        )
+    )
+
+    return fig
+
+def create_2d_projection_plot(df_filtered, color_scheme):
+    """Create 2D projection for easier analysis."""
+    fig = go.Figure()
+
+    # Create color mapping
+    if color_scheme == "Species":
+        color_values = df_filtered['source']
+        color_map = {'Human': '#ff7f0e', 'Dog': '#1f77b4'}
+    else:
+        color_values = df_filtered['label']
+        unique_labels = df_filtered['label'].unique()
+        colors = ['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd', '#8c564b']
+        color_map = {label: colors[i % len(colors)] for i, label in enumerate(unique_labels)}
+
+    for value in color_values.unique():
+        data_subset = df_filtered[color_values == value]
+        fig.add_trace(go.Scatter(
+            x=data_subset['x'],
+            y=data_subset['y'],
+            mode='markers',
+            marker=dict(
+                size=8,
+                color=color_map.get(value, '#1f77b4'),
+                opacity=0.7
+            ),
+            name=str(value),
+            text=[f"{row['source']}: {row['label']}" for _, row in data_subset.iterrows()],
+            hovertemplate='%{text}<br>X: %{x:.3f}<br>Y: %{y:.3f}<extra></extra>'
+        ))
+
+    fig.update_layout(
+        title="2D Manifold Projection (X-Y Plane)",
+        xaxis_title="Manifold Dimension 1",
+        yaxis_title="Manifold Dimension 2",
+        height=400
+    )
+
+    return fig
+
+def create_density_heatmap(df_filtered):
+    """Create density heatmap showing communication hotspots."""
+    from scipy.stats import gaussian_kde
+
+    # Create 2D density estimation
+    x = df_filtered['x'].values
+    y = df_filtered['y'].values
+
+    # Create grid for density calculation
+    x_grid = np.linspace(x.min(), x.max(), 50)
+    y_grid = np.linspace(y.min(), y.max(), 50)
+    X_grid, Y_grid = np.meshgrid(x_grid, y_grid)
+    positions = np.vstack([X_grid.ravel(), Y_grid.ravel()])
+
+    # Calculate density
+    values = np.vstack([x, y])
+    kernel = gaussian_kde(values)
+    density = np.reshape(kernel(positions).T, X_grid.shape)
+
+    fig = go.Figure(data=go.Heatmap(
+        z=density,
+        x=x_grid,
+        y=y_grid,
+        colorscale='Viridis',
+        colorbar=dict(title="Communication Density")
+    ))
+
+    # Overlay actual points
+    fig.add_trace(go.Scatter(
+        x=x, y=y,
+        mode='markers',
+        marker=dict(size=4, color='white', opacity=0.6),
+        name='Actual Communications',
+        hovertemplate='X: %{x:.3f}<br>Y: %{y:.3f}<extra></extra>'
+    ))
+
+    fig.update_layout(
+        title="Communication Density Heatmap",
+        xaxis_title="Manifold Dimension 1",
+        yaxis_title="Manifold Dimension 2",
+        height=400
+    )
+
+    return fig
+
+def create_feature_distributions(df_filtered, lens_selected):
+    """Create feature distribution plots comparing species."""
+    alpha_col = f"diag_alpha_{lens_selected}"
+    srl_col = f"diag_srl_{lens_selected}"
+
+    fig = make_subplots(
+        rows=2, cols=2,
+        subplot_titles=[
+            f'CMT Alpha Distribution ({lens_selected})',
+            f'SRL Distribution ({lens_selected})',
+            'Manifold X Coordinate',
+            'Manifold Y Coordinate'
+        ]
+    )
+
+    # Alpha distribution
+    for species in ['Human', 'Dog']:
+        data = df_filtered[df_filtered['source'] == species][alpha_col]
+        fig.add_trace(
+            go.Histogram(x=data, name=f'{species} Alpha', opacity=0.7, nbinsx=20),
+            row=1, col=1
+        )
+
+    # SRL distribution
+    for species in ['Human', 'Dog']:
+        data = df_filtered[df_filtered['source'] == species][srl_col]
+        fig.add_trace(
+            go.Histogram(x=data, name=f'{species} SRL', opacity=0.7, nbinsx=20),
+            row=1, col=2
+        )
+
+    # X coordinate distribution
+    for species in ['Human', 'Dog']:
+        data = df_filtered[df_filtered['source'] == species]['x']
+        fig.add_trace(
+            go.Histogram(x=data, name=f'{species} X', opacity=0.7, nbinsx=20),
+            row=2, col=1
+        )
+
+    # Y coordinate distribution
+    for species in ['Human', 'Dog']:
+        data = df_filtered[df_filtered['source'] == species]['y']
+        fig.add_trace(
+            go.Histogram(x=data, name=f'{species} Y', opacity=0.7, nbinsx=20),
+            row=2, col=2
+        )
+
+    fig.update_layout(
+        height=300,
+        title_text="Feature Distributions by Species",
+        showlegend=True
+    )
+
+    return fig
+
+def create_correlation_matrix(df_filtered, lens_selected):
+    """Create correlation matrix of CMT features."""
+    # Select relevant columns for correlation
+    feature_cols = ['x', 'y', 'z'] + [col for col in df_filtered.columns if col.startswith('feature_')]
+    cmt_cols = [f"diag_alpha_{lens_selected}", f"diag_srl_{lens_selected}"]
+
+    all_cols = feature_cols + cmt_cols
+    available_cols = [col for col in all_cols if col in df_filtered.columns]
+
+    if len(available_cols) < 2:
+        # Fallback with basic columns
+        available_cols = ['x', 'y', 'z']
+
+    # Calculate correlation matrix
+    corr_matrix = df_filtered[available_cols].corr()
+
+    fig = go.Figure(data=go.Heatmap(
+        z=corr_matrix.values,
+        x=corr_matrix.columns,
+        y=corr_matrix.columns,
+        colorscale='RdBu',
+        zmid=0,
+        colorbar=dict(title="Correlation"),
+        text=np.round(corr_matrix.values, 2),
+        texttemplate="%{text}",
+        textfont={"size": 10}
+    ))
+
+    fig.update_layout(
+        title="Cross-Species Feature Correlations",
+        height=300,
+        xaxis_title="Features",
+        yaxis_title="Features"
+    )
+
+    return fig
+
+def calculate_statistics(df_filtered, lens_selected):
+    """Calculate comprehensive statistics for the filtered data."""
+    alpha_col = f"diag_alpha_{lens_selected}"
+    srl_col = f"diag_srl_{lens_selected}"
+
+    stats = {}
+
+    # Overall statistics
+    stats['total_points'] = len(df_filtered)
+    stats['human_count'] = len(df_filtered[df_filtered['source'] == 'Human'])
+    stats['dog_count'] = len(df_filtered[df_filtered['source'] == 'Dog'])
+
+    # CMT statistics by species
+    for species in ['Human', 'Dog']:
+        species_data = df_filtered[df_filtered['source'] == species]
+        if len(species_data) > 0:
+            stats[f'{species.lower()}_alpha_mean'] = species_data[alpha_col].mean()
+            stats[f'{species.lower()}_alpha_std'] = species_data[alpha_col].std()
+            stats[f'{species.lower()}_srl_mean'] = species_data[srl_col].mean()
+            stats[f'{species.lower()}_srl_std'] = species_data[srl_col].std()
+
+    # Geometric separation
+    if stats['human_count'] > 0 and stats['dog_count'] > 0:
+        human_center = df_filtered[df_filtered['source'] == 'Human'][['x', 'y', 'z']].mean()
+        dog_center = df_filtered[df_filtered['source'] == 'Dog'][['x', 'y', 'z']].mean()
+        stats['geometric_separation'] = np.sqrt(((human_center - dog_center) ** 2).sum())
+
+    return stats
+
+def update_manifold_visualization(species_selection, emotion_selection, lens_selection,
+                                  alpha_range, srl_range, feature_range, point_size,
+                                  show_boundary, show_trajectories, color_scheme):
+    """Main update function for the manifold visualization."""
+
+    # Filter data based on selections
+    df_filtered = df_combined.copy()
+
+    # Species filter
+    if species_selection:
+        df_filtered = df_filtered[df_filtered['source'].isin(species_selection)]
+
+    # Emotion filter
+    if emotion_selection:
+        df_filtered = df_filtered[df_filtered['label'].isin(emotion_selection)]
+
+    # CMT diagnostic filters
+    alpha_col = f"diag_alpha_{lens_selection}"
+    srl_col = f"diag_srl_{lens_selection}"
+
+    if alpha_col in df_filtered.columns:
+        df_filtered = df_filtered[
+            (df_filtered[alpha_col] >= alpha_range[0]) &
+            (df_filtered[alpha_col] <= alpha_range[1])
+        ]
+
+    if srl_col in df_filtered.columns:
+        df_filtered = df_filtered[
+            (df_filtered[srl_col] >= srl_range[0]) &
+            (df_filtered[srl_col] <= srl_range[1])
+        ]
+
+    # Feature magnitude filter (using first few feature columns if they exist)
+    feature_cols = [col for col in df_filtered.columns if col.startswith('feature_')]
+    if feature_cols:
+        feature_magnitudes = np.sqrt(df_filtered[feature_cols[:3]].pow(2).sum(axis=1))
+        df_filtered = df_filtered[
+            (feature_magnitudes >= feature_range[0]) &
+            (feature_magnitudes <= feature_range[1])
+        ]
+
+    # Create visualizations
+    if len(df_filtered) == 0:
+        empty_fig = go.Figure().add_annotation(
+            text="No data points match the current filters",
+            xref="paper", yref="paper", x=0.5, y=0.5, showarrow=False
+        )
+        return (empty_fig, empty_fig, empty_fig, empty_fig, empty_fig,
+                "No data available", "No data available", "No data available")
+
+    # Main manifold plot
+    manifold_fig = create_enhanced_manifold_plot(
+        df_filtered, lens_selection, color_scheme, point_size,
+        show_boundary, show_trajectories
+    )
+
+    # Secondary plots
+    projection_fig = create_2d_projection_plot(df_filtered, color_scheme)
+    density_fig = create_density_heatmap(df_filtered)
+    distributions_fig = create_feature_distributions(df_filtered, lens_selection)
+    correlation_fig = create_correlation_matrix(df_filtered, lens_selection)
+
+    # Statistics
+    stats = calculate_statistics(df_filtered, lens_selection)
+
+    # Format statistics HTML
+    species_stats_html = f"""
+    <h4>Data Overview</h4>
+    <p><b>Total Points:</b> {stats['total_points']}</p>
+    <p><b>Human:</b> {stats['human_count']} | <b>Dog:</b> {stats['dog_count']}</p>
+    <p><b>Ratio:</b> {stats['human_count']/(stats['dog_count']+1):.2f}:1</p>
+    """
+
+    boundary_stats_html = f"""
+    <h4>Geometric Analysis</h4>
+    <p><b>Lens:</b> {lens_selection.title()}</p>
+    {"<p><b>Separation:</b> {:.3f}</p>".format(stats.get('geometric_separation', 0)) if 'geometric_separation' in stats else ""}
+    <p><b>Dimensions:</b> 3D UMAP</p>
+    """
+
+    similarity_html = f"""
+    <h4>Species Comparison</h4>
+    <p><b>Human α:</b> {stats.get('human_alpha_mean', 0):.3f} ± {stats.get('human_alpha_std', 0):.3f}</p>
+    <p><b>Dog α:</b> {stats.get('dog_alpha_mean', 0):.3f} ± {stats.get('dog_alpha_std', 0):.3f}</p>
+    <p><b>Overlap Index:</b> {1 / (1 + stats.get('geometric_separation', 1)):.3f}</p>
+    """
+
+    return (manifold_fig, projection_fig, density_fig, distributions_fig, correlation_fig,
+            species_stats_html, boundary_stats_html, similarity_html)
+
 def resolve_audio_path(row: pd.Series) -> str:
     """
     Intelligently reconstructs the full path to an audio file

@@ -691,13 +1158,191 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="teal", secondary_hue="cyan")) a
     default_primary = file_choices[0] if file_choices else ""

     with gr.Tabs():
-        with gr.TabItem("
-            gr.
-
-
-
-
-
+        with gr.TabItem("Universal Manifold Explorer"):
+            gr.Markdown("""
+            # **First Universal Interspecies Communication Map**
+            *Discover the hidden mathematical geometry underlying human and dog communication*
+            """)
+
+            with gr.Row():
+                with gr.Column(scale=1):
+                    gr.Markdown("### **Analysis Controls**")
+
+                    # Species filtering
+                    species_filter = gr.CheckboxGroup(
+                        label="Species Selection",
+                        choices=["Human", "Dog"],
+                        value=["Human", "Dog"],
+                        info="Select which species to display"
+                    )
+
+                    # Emotional state filtering
+                    emotion_filter = gr.CheckboxGroup(
+                        label="Emotional States",
+                        choices=list(df_combined['label'].unique()),
+                        value=list(df_combined['label'].unique()),
+                        info="Filter by emotional expression"
+                    )
+
+                    # CMT Lens selection for coloring
+                    lens_selector = gr.Dropdown(
+                        label="Mathematical Lens View",
+                        choices=["gamma", "zeta", "airy", "bessel"],
+                        value="gamma",
+                        info="Choose which mathematical lens to use for analysis"
+                    )
+
+                    # Advanced filtering sliders
+                    with gr.Accordion("Advanced CMT Filters", open=False):
+                        alpha_range = gr.RangeSlider(
+                            label="CMT Alpha Range (Geometric Consistency)",
+                            minimum=0, maximum=1, value=[0, 1], step=0.01,
+                            info="Filter by geometric consistency measure"
+                        )
+
+                        srl_range = gr.RangeSlider(
+                            label="SRL Range (Complexity Level)",
+                            minimum=0, maximum=100, value=[0, 100], step=1,
+                            info="Filter by spike response level (complexity)"
+                        )
+
+                        feature_magnitude = gr.RangeSlider(
+                            label="Feature Magnitude Range",
+                            minimum=-3, maximum=3, value=[-3, 3], step=0.1,
+                            info="Filter by overall feature strength"
+                        )
+
+                    # Visualization options
+                    with gr.Accordion("Visualization Options", open=True):
+                        point_size = gr.Slider(
+                            label="Point Size",
+                            minimum=2, maximum=15, value=6, step=1
+                        )
+
+                        show_species_boundary = gr.Checkbox(
+                            label="Show Species Boundary",
+                            value=True,
+                            info="Display geometric boundary between species"
+                        )
+
+                        show_trajectories = gr.Checkbox(
+                            label="Show Communication Trajectories",
+                            value=False,
+                            info="Display paths between related vocalizations"
+                        )
+
+                        color_scheme = gr.Dropdown(
+                            label="Color Scheme",
+                            choices=["Species", "Emotion", "CMT_Alpha", "CMT_SRL", "Cluster"],
+                            value="Species",
+                            info="Choose coloring strategy"
+                        )
+
+                    # Real-time analysis
+                    with gr.Accordion("Real-Time Analysis", open=False):
+                        analysis_button = gr.Button("Analyze Selected Region", variant="primary")
+
+                        selected_info = gr.HTML(
+                            label="Selection Analysis",
+                            value="<i>Select points on the manifold for detailed analysis</i>"
+                        )
+
+                with gr.Column(scale=3):
+                    # Main 3D manifold plot
+                    manifold_plot = gr.Plot(
+                        label="Universal Communication Manifold",
+                        height=600
+                    )
+
+                    # Statistics panel below the plot
+                    with gr.Row():
+                        with gr.Column():
+                            species_stats = gr.HTML(
+                                label="Species Statistics",
+                                value=""
+                            )
+
+                        with gr.Column():
+                            boundary_stats = gr.HTML(
+                                label="Boundary Analysis",
+                                value=""
+                            )
+
+                        with gr.Column():
+                            similarity_stats = gr.HTML(
+                                label="Cross-Species Similarity",
+                                value=""
+                            )
+
+                    # Secondary analysis views
+                    with gr.Row():
+                        with gr.Column():
+                            # 2D projection plot
+                            projection_2d = gr.Plot(
+                                label="2D Projection View",
+                                height=400
+                            )
+
+                        with gr.Column():
+                            # Density heatmap
+                            density_plot = gr.Plot(
+                                label="Communication Density Map",
+                                height=400
+                            )
+
+                    # Bottom analysis panel
+                    with gr.Row():
+                        with gr.Column():
+                            # Feature distribution plots
+                            feature_distributions = gr.Plot(
+                                label="CMT Feature Distributions",
+                                height=300
+                            )
+
+                        with gr.Column():
+                            # Correlation matrix
+                            correlation_matrix = gr.Plot(
+                                label="Cross-Species Feature Correlations",
+                                height=300
+                            )
+
+            # Wire up all the interactive components
+            manifold_inputs = [
+                species_filter, emotion_filter, lens_selector,
+                alpha_range, srl_range, feature_magnitude, point_size,
+                show_species_boundary, show_trajectories, color_scheme
+            ]
+
+            manifold_outputs = [
+                manifold_plot, projection_2d, density_plot,
+                feature_distributions, correlation_matrix,
+                species_stats, boundary_stats, similarity_stats
+            ]
+
+            # Set up event handlers for real-time updates
+            for component in manifold_inputs:
+                component.change(
+                    update_manifold_visualization,
+                    inputs=manifold_inputs,
+                    outputs=manifold_outputs
+                )
+
+            # Initialize the plots with default values
+            demo.load(
+                lambda: update_manifold_visualization(
+                    ["Human", "Dog"],                     # species_selection
+                    list(df_combined['label'].unique()),  # emotion_selection
+                    "gamma",                              # lens_selection
+                    [0, 1],                               # alpha_range
+                    [0, 100],                             # srl_range
+                    [-3, 3],                              # feature_range
+                    6,                                    # point_size
+                    True,                                 # show_boundary
+                    False,                                # show_trajectories
+                    "Species"                             # color_scheme
+                ),
+                outputs=manifold_outputs
+            )

         with gr.TabItem("Interactive Holography"):
             with gr.Row():
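The `demo.load` call above seeds the tab by invoking `update_manifold_visualization` once with the default control values. The same call can be exercised headlessly as a quick smoke test of the new helpers before launching the UI; the sketch below mirrors those defaults and assumes app.py's module globals (`df_combined` and the functions from the first hunk) are already in scope, for example in a REPL after running the module up to this point.

# Illustrative smoke test: run the new update function once with the same
# defaults that demo.load uses, without starting the Gradio server.
defaults = (
    ["Human", "Dog"],                     # species_selection
    list(df_combined['label'].unique()),  # emotion_selection
    "gamma",                              # lens_selection
    [0, 1],                               # alpha_range
    [0, 100],                             # srl_range
    [-3, 3],                              # feature_range
    6,                                    # point_size
    False,                                # show_boundary: skip the SVC fit here
    False,                                # show_trajectories
    "Species",                            # color_scheme
)
outputs = update_manifold_visualization(*defaults)
assert len(outputs) == 8  # five figures plus three HTML summaries

Note also that in this hunk `analysis_button` and `selected_info` are created but no click handler is attached to them, presumably left for a later commit.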
|