import os
from pathlib import Path

import allin1
import gradio as gr

HEADER = """
<header style="text-align: center;">
  <h1>All-In-One Music Structure Analyzer 🔮</h1>
  <p>
    <a href="https://github.com/mir-aidj/all-in-one">[Python Package]</a>
    <a href="https://arxiv.org/abs/2307.16425">[Paper]</a>
    <a href="https://taejun.kim/music-dissector/">[Visual Demo]</a>
  </p>
</header>
<main style="display: flex; justify-content: center;">
  <div style="display: inline-block;">
    <p>
      This Space demonstrates the All-In-One music structure analyzer, which predicts:
      <ul style="padding-left: 1rem;">
        <li>BPM</li>
        <li>Beats</li>
        <li>Downbeats</li>
        <li>Functional segment boundaries</li>
        <li>Functional segment labels (e.g. intro, verse, chorus, bridge, outro)</li>
      </ul>
    </p>
    <p>For more information, please visit the links above ✨🧸</p>
  </div>
</main>
"""

# Whether to pre-compute and cache the (currently disabled) gr.Examples below.
CACHE_EXAMPLES = os.getenv('CACHE_EXAMPLES', '1') == '1'


def analyze(path):
    """Run the all-in-one analysis on an uploaded audio file."""
    path = Path(path)

    # Structure analysis: BPM, beats, downbeats, and functional segments.
    result = allin1.analyze(
        path,
        out_dir='./struct',
        multiprocess=False,
        keep_byproducts=True,  # TODO: remove this
    )
    result_json_path = Path(f'./struct/{path.stem}.json').resolve().as_posix()

    # Plot the analysis result.
    fig = allin1.visualize(
        result,
        multiprocess=False,
    )
    fig.set_dpi(300)

    # Render an audio sonification of the analysis result.
    allin1.sonify(
        result,
        out_dir='./sonif',
        multiprocess=False,
    )
    sonif_path = Path(f'./sonif/{path.stem}.sonif{path.suffix}').resolve().as_posix()

    return result.bpm, fig, sonif_path, result_json_path


with gr.Blocks() as demo:
    gr.HTML(HEADER)

    input_audio_path = gr.Audio(
        label='Input',
        source='upload',
        type='filepath',
        format='mp3',
        show_download_button=False,
    )
    button = gr.Button('Analyze', variant='primary')

    output_viz = gr.Plot(label='Visualization')
    with gr.Row():
        output_bpm = gr.Textbox(label='BPM', scale=1)
        output_sonif = gr.Audio(
            label='Sonification',
            type='filepath',
            format='mp3',
            show_download_button=False,
            scale=9,
        )
    output_json = gr.File(
        label='Download JSON',
        type='file',
        show_download_button=True,
    )

    # gr.Examples(
    #     examples=[
    #         './assets/NewJeans - Super Shy.mp3',
    #         './assets/Bruno Mars - 24k Magic.mp3',
    #     ],
    #     inputs=input_audio_path,
    #     outputs=[output_bpm, output_viz, output_sonif],
    #     fn=analyze,
    #     cache_examples=CACHE_EXAMPLES,
    # )

    button.click(
        fn=analyze,
        inputs=input_audio_path,
        outputs=[output_bpm, output_viz, output_sonif, output_json],
        api_name='analyze',
    )

if __name__ == '__main__':
    demo.launch()
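
# ---------------------------------------------------------------------------
# Usage note (a hedged sketch, not part of the app): because the click handler
# above sets `api_name='analyze'`, a running deployment of this Space can be
# queried programmatically with `gradio_client`. The Space identifier and the
# audio path below are placeholders; substitute your own.
#
#   from gradio_client import Client
#
#   client = Client('<user>/<space-name>')   # hypothetical Space id
#   bpm, viz, sonif, json_file = client.predict(
#       'path/to/song.mp3',                  # local audio file to upload
#       api_name='/analyze',
#   )
#   print(bpm)        # estimated tempo
#   print(json_file)  # downloaded structure-analysis JSON
# ---------------------------------------------------------------------------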