antitheft159 committed on
Commit 569d65e · verified · 1 Parent(s): ff46d12

Create securecypher.space.py

Files changed (1)
  1. securecypher.space.py +145 -0
securecypher.space.py ADDED
@@ -0,0 +1,145 @@
+ import torch
+ import torch.nn as nn
+ import numpy as np
+ import matplotlib.pyplot as plt
+ from matplotlib.animation import FuncAnimation
+
+
+ class SecureWaveformProcessor(nn.Module):
+     """Small two-layer network that maps a waveform to a processed waveform."""
+
+     def __init__(self, input_size, hidden_size, sampling_rate=1000):
+         super().__init__()
+         self.layer1 = nn.Linear(input_size, hidden_size)
+         self.layer2 = nn.Linear(hidden_size, input_size)
+         self.sampling_rate = sampling_rate
+
+     def forward(self, x):
+         x = torch.relu(self.layer1(x))
+         x = self.layer2(x)
+         return x
+
+
+ class WaveformVisualizer:
+     """Plots original vs. processed waveforms and spectrograms for a batch of signals."""
+
+     def __init__(self, processor, input_data, sampling_rate=1000):
+         self.processor = processor
+         self.input_data = input_data
+         self.sampling_rate = sampling_rate
+         # Time axis derived from the number of samples per signal.
+         self.time = np.arange(input_data.shape[1]) / sampling_rate
+
+     def plot_waveforms(self):
+         # Run the processor on the stored input and compare the first signal in the batch.
+         processed_data = self.processor(self.input_data)
+
+         fig = plt.figure(figsize=(15, 10))
+         gs = fig.add_gridspec(2, 2, hspace=0.3, wspace=0.3)
+
+         ax1 = fig.add_subplot(gs[0, 0])
+         self._plot_waveform(self.input_data[0], ax1, "Original Data")
+
+         ax2 = fig.add_subplot(gs[0, 1])
+         self._plot_waveform(processed_data[0], ax2, "Processed Data")
+
+         ax3 = fig.add_subplot(gs[1, 0])
+         self._plot_spectrogram(self.input_data[0], ax3, "Original Spectrogram")
+
+         ax4 = fig.add_subplot(gs[1, 1])
+         self._plot_spectrogram(processed_data[0], ax4, "Processed Spectrogram")
+
+         plt.tight_layout()
+         return fig
+
+     def _plot_waveform(self, data, ax, title):
+         data_np = data.detach().numpy()
+         ax.plot(self.time, data_np, 'b-', linewidth=1)
+         ax.set_title(title)
+         ax.set_xlabel('Time (s)')
+         ax.set_ylabel('Amplitude')
+         ax.grid(True)
+
+     def _plot_spectrogram(self, data, ax, title):
+         data_np = data.detach().numpy()
+         ax.specgram(data_np, Fs=self.sampling_rate, cmap='viridis')
+         ax.set_title(title)
+         ax.set_xlabel('Time (s)')
+         ax.set_ylabel('Frequency (Hz)')
+
+     def animate_processing(self, frames=50):
+         # Animate the original and processed signals being traced out over time.
+         fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 8))
+
+         processed_data = self.processor(self.input_data)
+         data_original = self.input_data[0].detach().numpy()
+         data_processed = processed_data[0].detach().numpy()
+
+         line1, = ax1.plot([], [], 'b-', label='Original')
+         line2, = ax2.plot([], [], 'r-', label='Processed')
+
+         def init():
+             ax1.set_xlim(0, self.time[-1])
+             ax1.set_ylim(data_original.min() * 1.2, data_original.max() * 1.2)
+             ax2.set_xlim(0, self.time[-1])
+             ax2.set_ylim(data_processed.min() * 1.2, data_processed.max() * 1.2)
+
+             ax1.set_title('Original Data')
+             ax2.set_title('Processed Data')
+             ax1.grid(True)
+             ax2.grid(True)
+             ax1.legend()
+             ax2.legend()
+
+             return line1, line2
+
+         def animate(frame):
+             # Reveal a growing prefix of each signal on every frame.
+             idx = int((frame / frames) * len(self.time))
+             line1.set_data(self.time[:idx], data_original[:idx])
+             line2.set_data(self.time[:idx], data_processed[:idx])
+             return line1, line2
+
+         anim = FuncAnimation(fig, animate, frames=frames,
+                              init_func=init, blit=True, interval=50)
+
+         plt.tight_layout()
+         return anim
+
+
+ if __name__ == "__main__":
+     input_size = 1000
+     batch_size = 32
+     sampling_rate = 1000
+
+     # Synthetic test signal: two sinusoids plus Gaussian noise, tiled into a batch.
+     t = np.linspace(0, 10, input_size)
+     base_signal = np.sin(2 * np.pi * 1 * t) + 0.5 * np.sin(2 * np.pi * 2 * t)
+     noise = np.random.normal(0, 0.1, input_size)
+     signal = base_signal + noise
+
+     input_data = torch.tensor(np.tile(signal, (batch_size, 1)), dtype=torch.float32)
+
+     processor = SecureWaveformProcessor(input_size=input_size, hidden_size=64,
+                                         sampling_rate=sampling_rate)
+     visualizer = WaveformVisualizer(processor, input_data, sampling_rate=sampling_rate)
+
+     fig_static = visualizer.plot_waveforms()
+     plt.show()
+
+     anim = visualizer.animate_processing()
+     plt.show()
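
A minimal usage sketch, not part of the commit itself: since animate_processing returns a Matplotlib FuncAnimation, the animation can also be written to disk with FuncAnimation.save, which needs an installed writer such as pillow or ffmpeg. The output filename below is hypothetical.

    # Hypothetical follow-up: persist the animation instead of only showing it.
    anim = visualizer.animate_processing(frames=100)
    anim.save("waveform_processing.gif", writer="pillow", fps=20)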