asigalov61 committed
Commit 20a4294 · verified · 1 Parent(s): 6b6f593

Update app.py

Files changed (1)
  1. app.py +26 -82
app.py CHANGED
@@ -156,58 +156,6 @@ def load_midi(input_midi, melody_patch=-1):
 
 #==================================================================================
 
-def save_midi(tokens, batch_number=None, model_selector=''):
-
-    song = tokens
-    song_f = []
-
-    time = 0
-    dur = 0
-    vel = 90
-    pitch = 0
-    channel = 0
-    patch = 0
-
-    patches = [0] * 16
-
-    for m in song:
-
-        if 0 <= m < 128:
-            time += m * 32
-
-        elif 128 < m < 256:
-            dur = (m-128) * 32
-
-        elif 256 < m < 384:
-            pitch = (m-256)
-
-            if model_selector == 'without velocity - 3 epochs' or model_selector == 'without velocity - 7 epochs':
-                song_f.append(['note', time, dur, 0, pitch, max(40, pitch), 0])
-
-        elif 384 < m < 512:
-            vel = (m-384)
-
-            if model_selector == 'with velocity - 3 epochs':
-                song_f.append(['note', time, dur, 0, pitch, vel, 0])
-
-    if batch_number == None:
-        fname = 'Guided-Accompaniment-Transformer-Music-Composition'
-
-    else:
-        fname = 'Guided-Accompaniment-Transformer-Music-Composition_'+str(batch_number)
-
-    data = TMIDIX.Tegridy_ms_SONG_to_MIDI_Converter(song_f,
-                                                    output_signature = 'Guided Accompaniment Transformer',
-                                                    output_file_name = fname,
-                                                    track_name='Project Los Angeles',
-                                                    list_of_MIDI_patches=patches,
-                                                    verbose=False
-                                                    )
-
-    return song_f
-
-#==================================================================================
-
 @spaces.GPU
 def Generate_Accompaniment(input_midi,
                            generation_type,
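
For reference, a minimal sketch (not part of the commit) of the per-token decoding the removed save_midi() helper performed, using made-up token values: 0-127 advance the start time, 129-255 set duration (both scaled by 32, milliseconds in the TMIDIX ms-song format), 257-383 set pitch, and 385-511 set velocity.

# Hypothetical example tokens for one note: [delta-time, duration, pitch, velocity]
tokens = [10, 128 + 32, 256 + 60, 384 + 90]

time = 0
for m in tokens:
    if 0 <= m < 128:
        time += m * 32        # start time -> 320
    elif 128 < m < 256:
        dur = (m - 128) * 32  # duration -> 1024
    elif 256 < m < 384:
        pitch = m - 256       # MIDI pitch -> 60 (middle C)
    elif 384 < m < 512:
        vel = m - 384         # velocity -> 90 ('with velocity' models only)

print(time, dur, pitch, vel)  # 320 1024 60 90
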
@@ -269,39 +217,40 @@ def Generate_Accompaniment(input_midi,
     time = 0
     dur = 0
     vel = 90
-    pitch = 60
+    pitch = 0
     channel = 0
     patch = 0
-
-    patches = [0] * 16
-
-    for ss in final_song:
-
-        if 0 <= ss < 256:
-
-            time += ss * 16
-
-        if 256 <= ss < 384:
-
-            pitch = ss-256
-
-        if 384 <= ss < 640:
-
-            dur = (ss-384) * 16
-
-        if 640 <= ss < 768:
-
-            vel = (ss-640)
-
+
+    channels_map = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 9, 12, 13, 14, 15]
+    patches_map = [40, 0, 10, 19, 24, 35, 40, 52, 56, 9, 65, 73, 0, 0, 0, 0]
+    velocities_map = [125, 80, 100, 80, 90, 100, 100, 80, 110, 110, 110, 110, 80, 80, 80, 80]
+
+    for m in final_song:
+
+        if 0 <= m < 128:
+            time += m * 32
+
+        elif 128 < m < 256:
+            dur = (m-128) * 32
+
+        elif 256 < m < 1792:
+            cha = (m-256) // 128
+            pitch = (m-256) % 128
+
+            channel = channels_map[cha]
+            patch = patches_map[channel]
+            vel = velocities_map[channel]
+
+
             song_f.append(['note', time, dur, channel, pitch, vel, patch])
 
-    fn1 = "Score-2-Performance-Transformer-Composition"
+    fn1 = "Guided-Accompaniment-Transformer-Composition"
 
     detailed_stats = TMIDIX.Tegridy_ms_SONG_to_MIDI_Converter(song_f,
-                                                              output_signature = 'Score 2 Performance Transformer',
+                                                              output_signature = 'Guided Accompaniment Transformer',
                                                               output_file_name = fn1,
                                                               track_name='Project Los Angeles',
-                                                              list_of_MIDI_patches=patches
+                                                              list_of_MIDI_patches=patches_map
                                                               )
 
     new_fn = fn1+'.mid'
@@ -319,19 +268,14 @@ def Generate_Accompaniment(input_midi,
 
     #========================================================
 
-    output_midi_title = str(fn1)
-    output_midi_summary = str(song_f[:3])
     output_midi = str(new_fn)
     output_audio = (16000, audio)
 
     output_plot = TMIDIX.plot_ms_SONG(song_f, plot_title=output_midi, return_plt=True)
 
     print('Output MIDI file name:', output_midi)
-    print('Output MIDI title:', output_midi_title)
-    print('Output MIDI summary:', output_midi_summary)
     print('=' * 70)
 
-
    #========================================================
 
     print('-' * 70)
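
For reference, a minimal sketch (not part of the commit) of how the new inline decoder added above interprets a single note token in the 257-1791 range: channel and pitch share one token, and patch and velocity are looked up per channel from the new maps. The token value below is a made-up example.

channels_map = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 9, 12, 13, 14, 15]
patches_map = [40, 0, 10, 19, 24, 35, 40, 52, 56, 9, 65, 73, 0, 0, 0, 0]
velocities_map = [125, 80, 100, 80, 90, 100, 100, 80, 110, 110, 110, 110, 80, 80, 80, 80]

m = 256 + (3 * 128) + 60           # hypothetical token: channel index 3, pitch 60

cha = (m - 256) // 128             # -> 3
pitch = (m - 256) % 128            # -> 60
channel = channels_map[cha]        # -> 3
patch = patches_map[channel]       # -> 19 (Church Organ)
vel = velocities_map[channel]      # -> 80

print(cha, pitch, channel, patch, vel)  # 3 60 3 19 80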