Update app.py
app.py CHANGED

@@ -902,7 +902,7 @@ with ui.navset_card_tab(id="tab"):
         ui.panel_title("Kmer Distribution")
         with ui.layout_columns():
             with ui.card():
-                ui.input_slider("kmer", "kmer", 0, 10,
+                ui.input_slider("kmer", "kmer", 0, 10, 4)
                 ui.input_slider("top_k", "top:", 0, 1000, 15)
 
                 ui.input_selectize(
@@ -941,37 +941,38 @@ with ui.navset_card_tab(id="tab"):
             ax.set_xticklabels(df['kmer'], rotation=90)
             return fig
         #ui.output_plot("plot_micro_output")
-
-
-
+    with ui.nav_panel("Viral Model Training"):
+        ui.page_opts(fillable=True)
+        ui.panel_title("Does context size matter for a nucleotide model?")
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        def plot_loss_rates(df, type):
+            # interplot each column to be same number of points
+            x = np.linspace(0, 1, 1000)
+            loss_rates = []
+            labels = ['32', '64', '128', '256', '512', '1024']
+            #drop the column step
+            df = df.drop(columns=['Step'])
+            for col in df.columns:
+                y = df[col].dropna().astype('float', errors = 'ignore').dropna().values
+                f = interp1d(np.linspace(0, 1, len(y)), y)
+                loss_rates.append(f(x))
+            fig, ax = plt.subplots()
+            for i, loss_rate in enumerate(loss_rates):
+                ax.plot(x, loss_rate, label=labels[i])
+            ax.legend()
+            ax.set_title(f'Loss rates for a {type} parameter model')
+            ax.set_xlabel('Training steps')
+            ax.set_ylabel('Loss rate')
+            return fig
+
+        import matplotlib as mpl
+        @render.plot()
+        def plot_context_size_scaling():
+            fig = None
+            df = pd.read_csv('14m.csv')
+            mpl.rcParams.update(mpl.rcParamsDefault)
+            fig = plot_loss_rates(df, '14M')
+            return fig
 
 
         # @render.image
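The bulk of this commit is the new plot_loss_rates helper, which overlays training-loss curves recorded at different context sizes by interpolating every CSV column onto a common 1000-point grid with scipy's interp1d before plotting. Below is a minimal standalone sketch of that resampling step, assuming curves of unequal length that should share one axis; it uses synthetic data in place of the committed 14m.csv, and the function and variable names (resample_curves, n_points) are illustrative, not part of the app.

# Sketch of the resampling idea in plot_loss_rates: curves logged with
# different numbers of steps are interpolated onto a shared [0, 1] grid
# so they can be overlaid on a single axis.
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d

def resample_curves(curves, n_points=1000):
    """Interpolate each 1-D curve onto a common grid of n_points values."""
    x = np.linspace(0, 1, n_points)
    resampled = []
    for y in curves:
        y = np.asarray(y, dtype=float)
        f = interp1d(np.linspace(0, 1, len(y)), y)  # map original steps to [0, 1]
        resampled.append(f(x))
    return x, resampled

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    labels = ['32', '64', '128', '256', '512', '1024']
    # Fake loss curves of different lengths, roughly decaying with noise.
    curves = [3.0 * np.exp(-np.linspace(0, 5, n)) + 0.05 * rng.random(n)
              for n in (200, 300, 400, 500, 600, 700)]
    x, resampled = resample_curves(curves)
    fig, ax = plt.subplots()
    for label, y in zip(labels, resampled):
        ax.plot(x, y, label=label)
    ax.legend(title="context size")
    ax.set_xlabel("Training progress (normalized)")
    ax.set_ylabel("Loss")
    ax.set_title("Loss curves resampled onto a common grid")
    fig.savefig("context_scaling_sketch.png")

Resampling onto a normalized axis is what makes runs with different numbers of logged steps comparable, which is why the x-axis reflects training progress rather than raw step count.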