@conference {189, title = {Web-based visualizations and acoustic rendering for multimodal data from orchestra performances using repovizz}, booktitle = {WAC - 1st Web Audio Conference}, year = {2015}, month = {01/2015}, publisher = {IRCAM/MOZILLA}, organization = {IRCAM/MOZILLA}, abstract = {

This demo presents a set of fully working web-based prototypes developed in the context of the EU FP7 PHENICX project (http://phenicx.upf.edu). PHENICX aims to innovate the classical music experience by providing audiences with multimodal, multi-perspective and multilayer interactive engagement before, during and after the concert. The prototypes presented in this demo are related to the post-concert experience.


We have recorded a set of classical pieces performed by top-level orchestras, capturing several data modalities: multi-channel audio, video, motion capture sensor data, MIDI and text. Once all data streams have been time-synchronized, we have performed the following analyses on the data:

- Low-level and high-level audio descriptors for each individual audio source
- Description of conductor gestures based on the motion capture data
- Score-to-performance alignment
- Audio source separation
- Musical structure analysis of the performed piece


All of this data is then uploaded to the repovizz web repository (repovizz.upf.edu), which allows the data to be visualized and shared over the network. A set of customized web-based visualizations has been designed to build the prototypes that will be shown in this demo. Multimodal data streams are accessed online using the repovizz web API, and HTML5 is used to visualize the multimodal data and the descriptors extracted from the performances. The Web Audio API handles the audio rendering in the client, mixing the audio channels obtained either from the individual recorded audio sources or from the automatic instrument isolation performed in the analysis step.
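
As an illustration of the client-side rendering described above, the following is a minimal TypeScript sketch of how per-instrument audio stems could be fetched, decoded and mixed with the Web Audio API, using one GainNode per channel; the stem names, URLs and helper functions are illustrative placeholders, not the actual repovizz API or prototype code.

    // Sketch: mix per-instrument audio stems in the browser with the Web Audio API.
    // Stem names and URLs are placeholders, not actual repovizz endpoints.
    const ctx = new AudioContext();

    interface Stem {
      name: string;
      url: string;        // in the real prototypes the streams come from the repovizz web API
      gain: GainNode;     // one gain node per channel for mixing and audio focus
    }

    const stems: Stem[] = [
      { name: "violins", url: "/audio/violins.ogg", gain: ctx.createGain() },
      { name: "cellos",  url: "/audio/cellos.ogg",  gain: ctx.createGain() },
    ];

    // Fetch and decode one stem and route it through its gain node to the output.
    async function prepare(stem: Stem): Promise<AudioBufferSourceNode> {
      const data = await (await fetch(stem.url)).arrayBuffer();
      const source = ctx.createBufferSource();
      source.buffer = await ctx.decodeAudioData(data);
      source.connect(stem.gain).connect(ctx.destination);
      return source;
    }

    // "Audio focus": solo one instrument by attenuating every other channel.
    function focusOn(name: string): void {
      for (const stem of stems) {
        stem.gain.gain.value = stem.name === name ? 1.0 : 0.1;
      }
    }

    // Decode everything first, then start all sources on the same clock so they stay in sync.
    Promise.all(stems.map(prepare)).then((sources) => {
      const startTime = ctx.currentTime + 0.1;
      sources.forEach((s) => s.start(startTime));
    });

In this sketch, calling focusOn("violins") after playback has started attenuates every channel except the violins, which is the basic mechanism behind the audio-focus feature listed below.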

The available visualizations (all web-based) include:

- Scrolling piano-roll visualization of the musical score during audio playback
- Orchestra layout visualization showing instrument activity and loudness during playback (see the sketch after this list)
- Audio focus to listen to individual instruments in isolation
- Multi-perspective video angle selection during the concert
- 3D rendering of the conductor's body
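
The following is a minimal sketch, under the same assumptions as the previous example, of how per-instrument activity and loudness could be derived in the client for the orchestra layout visualization: an AnalyserNode taps each channel after its gain stage and the RMS level of the current frame drives the opacity of a layout element. The element IDs and the mapping from RMS to opacity are assumptions for illustration, not the actual prototype implementation.

    // Sketch: estimate per-instrument loudness with an AnalyserNode and map it onto a
    // (hypothetical) element of the orchestra layout. IDs and mapping are assumptions.
    function attachLevelMeter(ctx: AudioContext, gain: GainNode, elementId: string): void {
      const analyser = ctx.createAnalyser();
      analyser.fftSize = 2048;
      gain.connect(analyser);                          // tap the channel after its gain stage
      const samples = new Float32Array(analyser.fftSize);

      const update = (): void => {
        analyser.getFloatTimeDomainData(samples);
        // Root-mean-square of the current frame, a rough proxy for loudness/activity.
        let sumSquares = 0;
        for (let i = 0; i < samples.length; i++) sumSquares += samples[i] * samples[i];
        const rms = Math.sqrt(sumSquares / samples.length);
        const el = document.getElementById(elementId);
        if (el) el.style.opacity = String(Math.min(1, 0.2 + 4 * rms)); // brighter when playing
        requestAnimationFrame(update);
      };
      requestAnimationFrame(update);
    }

For example, attachLevelMeter(ctx, stems[0].gain, "violins-section") would animate a hypothetical violins-section element of the layout while the violins channel is playing.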

A live example of the orchestra layout visualization, including instrument isolation, is available as a preview of one of the prototypes that will be shown during the demo: http://repovizz.upf.edu/phenicx

A video showing some of the repovizz orchestra visualizations developed in the context of PHENICX: https://www.youtube.com/watch?v=c7pmDvkKY7A#t=168

}, keywords = {Technical Demo}, author = {Mayor, O.} }

@conference {129, title = {What Really Moves Us in Music: Expressivity as a Challenge to Semantic Audio Research}, booktitle = {53rd AES Conference on Semantic Audio}, year = {2014}, month = {01/2014}, address = {London, UK}, author = {Widmer, Gerhard} }