Update app.py
app.py
CHANGED
@@ -1,3 +1,4 @@
+
 import streamlit as st
 import numpy as np
 import tensorflow as tf
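The visible hunks also use keras.Model, keras.layers.Dense and random.sample, so the full app.py presumably carries further imports outside this diff's context. A sketch of what those lines likely look like (an assumption; they are not shown in this change):

import random
from tensorflow import keras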
@@ -17,14 +18,9 @@ def generate_dataset(task_id):
 class Net(keras.Model):
     def __init__(self):
         super(Net, self).__init__()
-        self.
-        self.
-        self.
-
-    def build(self, input_shape):
-        self.fc1 = keras.layers.Dense(self.units, activation='relu', input_shape=(10,))
-        self.fc2 = keras.layers.Dense(self.units2, activation='relu')
-        self.fc3 = keras.layers.Dense(self.units3)
+        self.fc1 = keras.layers.Dense(20, activation='relu', input_shape=(10,))
+        self.fc2 = keras.layers.Dense(10, activation='relu')
+        self.fc3 = keras.layers.Dense(2)
 
     def call(self, x):
         x = self.fc1(x)
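The hunk header points at generate_dataset(task_id), whose body is outside this diff. As a rough, hypothetical stand-in only, a version consistent with the shapes the model expects (10 input features, two classes, integer labels for the sparse categorical loss) could look like the following; the sample count and labeling rule are invented for illustration, and the module-level import numpy as np from the file is assumed:

def generate_dataset(task_id, n_samples=1000):
    # Hypothetical stand-in; the real function is defined elsewhere in app.py.
    rng = np.random.default_rng(task_id)
    X = rng.random((n_samples, 10)).astype("float32")   # 10 features, matching input_shape=(10,)
    y = (X[:, task_id % 10] > 0.5).astype("int32")      # binary labels, matching Dense(2) + the sparse loss
    split = int(0.8 * n_samples)
    return X[:split], X[split:], y[:split], y[split:]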
@@ -34,38 +30,43 @@ class Net(keras.Model):
 
 # Define a genetic algorithm class
 class GeneticAlgorithm:
-    def __init__(self, population_size
+    def __init__(self, population_size):
         self.population_size = population_size
-        self.task_id = task_id
         self.population = [Net() for _ in range(population_size)]
 
-    def selection(self):
-        X_train, X_test, y_train, y_test = generate_dataset(
+    def selection(self, task_id):
+        X_train, X_test, y_train, y_test = generate_dataset(task_id)
         fitness = []
-        for
-            net.build(input_shape=(None, 10))
+        for net in self.population:
             net.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
             net.fit(X_train, y_train, epochs=10, verbose=0)
             loss, accuracy = net.evaluate(X_test, y_test, verbose=0)
             fitness.append(accuracy)
-
-        self.population = [self.population[i] for i in np.argsort(fitness)[-self.population_size//2:]]
+        self.population = [self.population[i] for i in np.argsort(fitness)[-self.population_size//2:]]
 
     def crossover(self):
         offspring = []
+        X = np.random.rand(1, 10)  # dummy input to build the layers
         for _ in range(self.population_size//2):
             parent1, parent2 = random.sample(self.population, 2)
             child = Net()
-            child
+            child(X)  # build the layers
+            parent1(X)  # build the layers
+            parent2(X)  # build the layers
+
+            # Average the weights of the two parents
             parent1_weights = parent1.get_weights()
             parent2_weights = parent2.get_weights()
             child_weights = [(np.array(w1) + np.array(w2)) / 2 for w1, w2 in zip(parent1_weights, parent2_weights)]
             child.set_weights(child_weights)
+
             offspring.append(child)
         self.population += offspring
 
     def mutation(self):
+        X = np.random.rand(1, 10)  # dummy input to build the layers
         for net in self.population:
+            net(X)  # build the layers
             if random.random() < 0.1:
                 weights = net.get_weights()
                 new_weights = [np.array(w) + np.random.randn(*w.shape) * 0.1 for w in weights]
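The dummy forward passes added to crossover() and mutation() matter because a subclassed keras.Model builds its layer variables lazily: a freshly constructed Net() owns no weights until it has been called once, so get_weights()/set_weights() would have nothing to exchange. A minimal illustration of the pattern (not part of the app itself):

net = Net()
net(np.random.rand(1, 10))    # first call builds fc1/fc2/fc3 and creates their variables
weights = net.get_weights()   # now a list of six arrays (kernel and bias per Dense layer)
net.set_weights(weights)      # round-trips cleanly once the variables exist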
@@ -80,58 +81,25 @@ population_size = st.sidebar.slider("Population size", 10, 100, 50)
 num_tasks = st.sidebar.slider("Number of tasks", 1, 10, 5)
 num_generations = st.sidebar.slider("Number of generations", 1, 100, 10)
 
-gas = None
-
 # Run the evolution
 if st.button("Run evolution"):
-
+    ga = GeneticAlgorithm(population_size)
     for generation in range(num_generations):
-        for
-            ga.selection()
+        for task_id in range(num_tasks):
+            ga.selection(task_id)
             ga.crossover()
             ga.mutation()
         st.write(f"Generation {generation+1} complete")
 
     # Evaluate the final population
-
-
-
-
-
-
-
-
-
-
-
-            final_accuracy.append(np.mean(accuracy))
-
-    # Trade populations between tasks
-    if gas is not None:
-        for i in range(len(gas)):
-            for j in range(i+1, len(gas)):
-                ga1 = gas[i]
-                ga2 = gas[j]
-                population1 = ga1.population
-                population2 = ga2.population
-                num_trade = int(0.1 * population_size)
-                trade1 = random.sample(population1, num_trade)
-                trade2 = random.sample(population2, num_trade)
-                ga1.population = population1 + trade2
-                ga2.population = population2 + trade1
-
-    # Evaluate the final population after trading
-    if gas is not None:
-        final_accuracy_after_trade = []
-        for task_id, ga in enumerate(gas):
-            X_train, X_test, y_train, y_test = generate_dataset(task_id)
-            accuracy = []
-            for net in ga.population:
-                net.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
-                net.fit(X_train, y_train, epochs=10, verbose=0)
-                loss, acc = net.evaluate(X_test, y_test, verbose=0)
-                accuracy.append(acc)
-            final_accuracy_after_trade.append(np.mean(accuracy))
-    if len(final_accuracy) > 0 and len(final_accuracy_after_trade) > 0:
-        st.write(f"Final accuracy: {np.mean(final_accuracy)}")
-        st.write(f"Final accuracy after trading: {np.mean(final_accuracy_after_trade)}")
+    final_accuracy = []
+    for task_id in range(num_tasks):
+        X_train, X_test, y_train, y_test = generate_dataset(task_id)
+        accuracy = []
+        for net in ga.population:
+            net.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
+            net.fit(X_train, y_train, epochs=10, verbose=0)
+            loss, acc = net.evaluate(X_test, y_test, verbose=0)
+            accuracy.append(acc)
+        final_accuracy.append(np.mean(accuracy))
+    st.write(f"Final accuracy: {np.mean(final_accuracy)}")
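Net effect of the commit: the per-task populations (the old gas list, including the population trading between tasks) are replaced by a single GeneticAlgorithm instance whose selection() is run against one task_id at a time, and the final evaluation reports one averaged accuracy. With streamlit, tensorflow and numpy installed, the app can also be tried outside the Space with streamlit run app.py.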