import gradio as gr
import os
import allin1
import time
from pathlib import Path
# Static HTML banner rendered at the top of the page via gr.HTML(HEADER):
# title, links to the Python package / paper / visual demo, and a list of
# what the analyzer predicts (BPM, beats, downbeats, segments).
HEADER = """
<header style="text-align: center;">
<h1>
All-In-One Music Structure Analyzer 🔮
</h1>
<p>
<a href="https://github.com/mir-aidj/all-in-one">[Python Package]</a>
<a href="https://arxiv.org/abs/2307.16425">[Paper]</a>
<a href="https://taejun.kim/music-dissector/">[Visual Demo]</a>
</p>
</header>
<main
style="display: flex; justify-content: center;"
>
<div
style="display: inline-block;"
>
<p>
This Space demonstrates the music structure analyzer predicts:
<ul
style="padding-left: 1rem;"
>
<li>BPM</li>
<li>Beats</li>
<li>Downbeats</li>
<li>Functional segment boundaries</li>
<li>Functional segment labels (e.g. intro, verse, chorus, bridge, outro)</li>
</ul>
</p>
<p>
For more information, please visit the links above ✨🧸
</p>
</div>
</main>
"""
# Example-caching toggle: enabled unless the CACHE_EXAMPLES env var is set
# to something other than '1'. NOTE(review): currently only referenced by
# the commented-out gr.Examples block below.
CACHE_EXAMPLES = os.getenv('CACHE_EXAMPLES', '1') == '1'
def analyze(path):
    """Run All-In-One music structure analysis on an uploaded audio file.

    Args:
        path: Filesystem path to the input audio (str, from gr.Audio
            with ``type='filepath'``).

    Returns:
        A 3-tuple matching the three outputs registered in ``button.click``:
        (BPM of the track, wall-clock inference time in seconds, a
        newline-joined string of the separated-stem file paths).
    """
    # Measure wall-clock time for the whole inference call.
    start = time.time()
    path = Path(path)
    result, bass_path, drums_path, other_path, vocals_path = allin1.analyze(
        path,
        out_dir='./struct',
        multiprocess=False,
        keep_byproducts=True,  # TODO: remove this
    )
    elapsed_time = time.time() - start
    # The UI registers a single 'paths' Textbox, so the four stem paths are
    # folded into one string; returning them as four separate values would
    # make Gradio raise on the 6-returns-vs-3-outputs mismatch.
    stem_paths = '\n'.join(
        str(p) for p in (bass_path, drums_path, other_path, vocals_path)
    )
    return result.bpm, elapsed_time, stem_paths
# Build the Gradio UI: banner, audio upload, an Analyze button, and three
# textbox outputs (BPM, overall inference time, byproduct file paths).
with gr.Blocks() as demo:
    gr.HTML(HEADER)
    input_audio_path = gr.Audio(
        label='Input',
        source='upload',  # NOTE(review): gradio 3.x-style kwargs; removed in gradio 4 — confirm pinned version
        type='filepath',  # hand the component value to analyze() as a path string
        format='mp3',
        show_download_button=False,
    )
    button = gr.Button('Analyze', variant='primary')
    #output_viz = gr.Plot(label='Visualization')
    with gr.Row():
        output_bpm = gr.Textbox(label='BPM', scale=1)
        #output_sonif = gr.Audio(
        #  label='Sonification',
        #  type='filepath',
        #  format='mp3',
        #  show_download_button=False,
        #  scale=9,
        #)
        elapsed_time = gr.Textbox(label='Overall inference time', scale=1)
        paths = gr.Textbox(label='paths', scale=1)
    #gr.Examples(
    #  examples=[
    #    './assets/NewJeans - Super Shy.mp3',
    #    './assets/Bruno Mars - 24k Magic.mp3'
    #  ],
    #  inputs=input_audio_path,
    #  outputs=[output_bpm, output_viz, output_sonif],
    #  fn=analyze,
    #  cache_examples=CACHE_EXAMPLES,
    #)
    # Wire the button to analyze(); also exposed over the API as 'analyze'.
    button.click(
        fn=analyze,
        inputs=input_audio_path,
        #outputs=[output_bpm, output_viz, output_sonif, elapsed_time],
        outputs=[output_bpm, elapsed_time, paths],
        api_name='analyze',
    )
# Launch only when executed as a script (Spaces imports and runs this module).
if __name__ == '__main__':
    demo.launch()