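"""Gradio demo: Fourier Neural Operator (FNO) inference on 2-D Navier-Stokes data.

The app downloads the vorticity dataset from Zenodo on first use, loads a
pre-trained FNO checkpoint, and plots a selected initial condition next to the
model's predicted solution.
"""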
import gradio as gr
import torch
from neuralop.models import FNO
import matplotlib.pyplot as plt
import numpy as np
import os
import requests  # Used to download the dataset file from Zenodo
from tqdm import tqdm  # Progress bar for the download
# --- Configuration ---
MODEL_PATH = "fno_ckpt_single_res"  # Pre-trained FNO checkpoint; must be present in the Space repository
# Zenodo direct download URL for the Navier-Stokes 2D dataset
DATASET_URL = "https://zenodo.org/record/12825163/files/navier_stokes_2d.pt?download=1"
LOCAL_DATASET_PATH = "navier_stokes_2d.pt" # Where the file will be saved locally in the Space
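# Note: the dataset loader below accepts either a raw tensor of initial conditions with
# shape (num_samples, height, width) or a dict holding that tensor under the key 'x'.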
# --- Global Variables for Model and Data (loaded once) ---
MODEL = None
FULL_DATASET_X = None
# --- Function to Download Dataset ---
def download_file(url, local_filename):
    """Downloads a file from a URL to a local path with a progress bar."""
    if os.path.exists(local_filename):
        print(f"{local_filename} already exists. Skipping download.")
        return
    print(f"Downloading {url} to {local_filename}...")
    try:
        response = requests.get(url, stream=True)
        response.raise_for_status()  # Raise an HTTPError for bad responses (4xx or 5xx)
        total_size = int(response.headers.get('content-length', 0))
        block_size = 1024  # 1 KB
        with open(local_filename, 'wb') as f:
            with tqdm(total=total_size, unit='iB', unit_scale=True, desc=local_filename) as pbar:
                for chunk in response.iter_content(chunk_size=block_size):
                    if chunk:
                        f.write(chunk)
                        pbar.update(len(chunk))
        print(f"Downloaded {local_filename} successfully.")
    except requests.exceptions.RequestException as e:
        print(f"Error downloading file: {e}")
        raise gr.Error(f"Failed to download dataset from Zenodo: {e}")
# --- 1. Model Loading Function ---
def load_model():
    """Loads the pre-trained FNO model."""
    global MODEL
    if MODEL is None:
        print("Loading FNO model...")
        try:
            # The checkpoint is unpickled as a full FNO module (weights_only=False)
            # and mapped to CPU so the demo runs on CPU-only hardware.
            MODEL = torch.load(MODEL_PATH, weights_only=False, map_location='cpu')
            MODEL.eval()
            print("Model loaded successfully.")
        except Exception as e:
            print(f"Error loading model: {e}")
            raise gr.Error(f"Failed to load model: {e}")
    return MODEL
# --- 2. Dataset Loading Function ---
def load_dataset():
    """Downloads and loads the initial conditions dataset."""
    global FULL_DATASET_X
    if FULL_DATASET_X is None:
        download_file(DATASET_URL, LOCAL_DATASET_PATH)  # Fetch from Zenodo on first use
        print("Loading dataset from local file...")
        try:
            data = torch.load(LOCAL_DATASET_PATH, map_location='cpu')
            if isinstance(data, dict) and 'x' in data:
                FULL_DATASET_X = data['x']
            elif isinstance(data, torch.Tensor):
                FULL_DATASET_X = data
            else:
                raise ValueError("Unknown dataset format or 'x' key missing.")
            print(f"Dataset loaded. Total samples: {FULL_DATASET_X.shape[0]}")
        except Exception as e:
            print(f"Error loading dataset: {e}")
            raise gr.Error(f"Failed to load dataset from local file: {e}")
    return FULL_DATASET_X
# --- 3. Inference Function for Gradio ---
def run_inference(sample_index: int):
    """
    Performs inference for a selected sample index from the dataset.
    Returns two Matplotlib figures: one for the input, one for the output.
    """
    model = load_model()
    dataset = load_dataset()  # Triggers the download and load if not already done
    if not (0 <= sample_index < dataset.shape[0]):
        raise gr.Error(f"Sample index out of range. Please choose between 0 and {dataset.shape[0]-1}.")
    # Select one sample and add a channel dimension: (1, H, W) -> (1, 1, H, W).
    single_initial_condition = dataset[sample_index:sample_index+1, :, :].unsqueeze(1)
    print(f"Running inference for sample index {sample_index}...")
    with torch.no_grad():
        predicted_solution = model(single_initial_condition)
    input_numpy = single_initial_condition.squeeze().cpu().numpy()
    output_numpy = predicted_solution.squeeze().cpu().numpy()
    fig_input, ax_input = plt.subplots()
    im_input = ax_input.imshow(input_numpy, cmap='viridis')
    ax_input.set_title(f"Initial Condition (Sample {sample_index})")
    fig_input.colorbar(im_input, ax=ax_input, label="Vorticity")
    plt.close(fig_input)  # Close so figures do not accumulate; Gradio renders the returned object
    fig_output, ax_output = plt.subplots()
    im_output = ax_output.imshow(output_numpy, cmap='viridis')
    ax_output.set_title("Predicted Solution")
    fig_output.colorbar(im_output, ax=ax_output, label="Vorticity")
    plt.close(fig_output)
    return fig_input, fig_output
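# Standalone usage sketch (assumes the checkpoint file and network access are available):
#   fig_in, fig_out = run_inference(0)
#   fig_in.savefig("initial_condition.png")
#   fig_out.savefig("predicted_solution.png")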
# --- Gradio Interface Setup ---
with gr.Blocks() as demo:
    gr.Markdown(
        """
        # Fourier Neural Operator (FNO) for Navier-Stokes Equations
        Select a sample index from the pre-loaded dataset to see the FNO's prediction
        of the vorticity field evolution.
        """
    )
    with gr.Row():
        with gr.Column():
            # The maximum could be derived from the dataset size at startup;
            # 9999 matches the 10,000-sample dataset.
            sample_input_slider = gr.Slider(
                minimum=0,
                maximum=9999,
                value=0,
                step=1,
                label="Select Sample Index"
            )
            run_button = gr.Button("Generate Solution")
        with gr.Column():
            input_image_plot = gr.Plot(label="Selected Initial Condition")
            output_image_plot = gr.Plot(label="Predicted Solution")
    run_button.click(
        fn=run_inference,
        inputs=[sample_input_slider],
        outputs=[input_image_plot, output_image_plot]
    )
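    # On page load, pre-compute the prediction for sample 0 so the model and dataset
    # are initialised before the user first presses the button.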
    def load_initial_data_and_predict():
        load_model()
        load_dataset()  # Downloads the dataset if it is not already present
        return run_inference(0)

    demo.load(load_initial_data_and_predict, inputs=None, outputs=[input_image_plot, output_image_plot])
if __name__ == "__main__":
    demo.launch()
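# Deployment note (assumption, not stated in this file): on Hugging Face Spaces the
# script is started automatically as the app entry point, and the Space's requirements
# must provide gradio, torch, neuraloperator (for the neuralop import), matplotlib,
# numpy, requests and tqdm so the imports above resolve.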