@inproceedings{273,
  title     = {A Personality-based Adaptive System for Visualizing Classical Music Performances},
  booktitle = {Proceedings of the 7th {ACM} Multimedia Systems Conference ({MMSys})},
  year      = {2016},
  month     = may,
  address   = {Klagenfurt, Austria},
  author    = {Schedl, Markus and Melenhorst, Mark and Liem, Cynthia C. S. and Martorell, Agust{\'\i}n and Mayor, {\'O}scar and Tkal{\v c}i{\v c}, Marko}
}

@inproceedings{274,
  title     = {Personalized Retrieval and Browsing of Classical Music and Supporting Multimedia Material},
  booktitle = {Proceedings of the {ACM} International Conference on Multimedia Retrieval ({ICMR})},
  year      = {2016},
  month     = jun,
  address   = {New York, USA},
  author    = {Tkal{\v c}i{\v c}, Marko and Schedl, Markus and Liem, Cynthia C. S. and Melenhorst, Mark}
}

@incollection{173,
  title     = {Contextual set-class analysis},
  booktitle = {Computational Music Analysis},
  year      = {2015},
  pages     = {81--110},
  publisher = {Springer},
  chapter   = {4},
  address   = {Heidelberg},
  isbn      = {978-3-319-25929-1},
  doi       = {10.1007/978-3-319-25931-4},
  author    = {Martorell, Agust{\'\i}n and G{\'o}mez, Emilia},
  editor    = {Meredith, David}
}

@inproceedings{235,
  title     = {Improving score-informed source separation for classical music through note refinement},
  booktitle = {16th International Society for Music Information Retrieval Conference ({ISMIR})},
  year      = {2015},
  address   = {M{\'a}laga, Spain},
  author    = {Miron, Marius and Carabias, Julio Jos{\'e} and Janer, Jordi}
}

@inproceedings{272,
  title     = {Melovizz: A Web-based tool for Score-Informed Melody Extraction Visualization},
  booktitle = {{ISMIR} (Late Breaking Demo)},
  year      = {2015},
  author    = {Bosch, Juan J. and Mayor, {\'O}scar and G{\'o}mez, Emilia}
}

@inproceedings{245,
  title     = {Put the Concert Attendee in the Spotlight. A User-Centered Design and Development Approach for Classical Concert Applications},
  booktitle = {16th International Society for Music Information Retrieval Conference ({ISMIR})},
  year      = {2015},
  month     = oct,
  address   = {M{\'a}laga, Spain},
  url       = {http://ismir2015.uma.es/articles/67_Paper.pdf},
  author    = {Melenhorst, Mark S. and Liem, Cynthia C. S.}
}

@inproceedings{247,
  title     = {A Tablet App to Enrich the Live and Post-Live Experience of Classical Concerts},
  booktitle = {Proceedings of the 3rd International Workshop on Interactive Content Consumption ({WSICC}) at {TVX} 2015},
  year      = {2015},
  month     = jun,
  author    = {Melenhorst, Mark S. and van der Sterren, Ron and Arzt, Andreas and Martorell, Agust{\'\i}n and Liem, Cynthia C. S.}
}

@inproceedings{189,
  title     = {Web-based visualizations and acoustic rendering for multimodal data from orchestra performances using repovizz},
  booktitle = {{WAC} -- 1st Web Audio Conference},
  year      = {2015},
  month     = jan,
  publisher = {IRCAM/MOZILLA},
  organization = {IRCAM/MOZILLA},
  abstract  = {In the demo a set of fully working web-based prototypes developed in the context of the EU FP7 PHENICX Project (http://phenicx.upf.edu) will be presented. The Phenicx project is about innovating the classical music experience providing them with a multimodal, multi-perspective and multilayer interactive engagement, before, during and after the concert. In this demo we present some prototypes that are related with the post concert experience.
We have recorded a set of classical pieces performed by top level orchestras, including some data modalities like multi-channel audio, video, motion capture sensors, midi and text. Once all data streams have been time-synchronized, we have performed the following analysis on the data:
- Low-level and high-level audio descriptors for each individual audio source
- Description of conductor gestures based on the motion capture sensors
- Score to performance alignment
- Audio source separation
- Musical structure analysis of the performed piece
Then all these data is uploaded to the repovizz web repository (repovizz.upf.edu) that allows visualization and sharing of the data over the network. A set of customized web-based visualizations have been designed to build the prototypes that will be shown in this demo. Multimodal data streams are accessed on-line using the repovizz web API and html5 is used for the visualizations of the multimodal data and descriptors extracted from the performances. The web-audio API is used to handle the audio rendering in the client to mix between the different audio channels obtained from the different recorded audio sources or from the automatic isolation of instruments performed in the analysis step.
The visualizations available include (all web-based):
- Scrolling piano roll visualization of the musical score while audio is playing
- Orchestra layout visualization showing instrument activity and loudness while playing audio
- Audio focus to hear individual instruments playing alone
- Multi-perspective video angle selection during the concert
- 3D render of the conductor body
Here is a live example of the orchestra layout visualization including the isolation of instruments as an example of one of the prototypes that will be shown during the demo: http://repovizz.upf.edu/phenicx
Video showing some of the repovizz orchestra visualizations developed in the context of PHENICX: https://www.youtube.com/watch?v=c7pmDvkKY7A\#t=168
The FP7 PHENICX project focuses on creating a new digital classical concert experience, improving the accessibility of classical music concert performances by enhancing and enriching them in novel digital ways, In this paper, we present the project{\textquoteright}s foreseen use cases. Subsequently, we summarize initial use case feedback from two different user groups. Despite the early stage of the project, the feedback already gives important insight into real-world considerations to make for interactive music content consumption solutions.},
  keywords  = {interactivity, multimedia information systems, multimodality, music information retrieval, performing arts, social networks, user studies},
  internal-note = {review: the final paragraph of the abstract appears to be pasted from a different (PHENICX use-case) paper -- verify against the published demo abstract},
  author    = {Liem, Cynthia C. S. and van der Sterren, Ron and van Tilburg, Marcel and Saras{\'u}a, {\'A}lvaro and Bosch, Juan J. and Janer, Jordi and Melenhorst, Mark S. and G{\'o}mez, Emilia and Hanjalic, Alan}
}

@inproceedings{126,
  title     = {Low-latency Bass Separation using Harmonic-Percussion Decomposition},
  booktitle = {International Conference on Digital Audio Effects ({DAFx-13})},
  year      = {2013},
  month     = sep,
  address   = {Maynooth, Ireland},
  abstract  = {Many recent approaches to musical source separation rely on model-based inference methods that take into account the signal{\textquoteright}s harmonic structure. To address the particular case of low-latency bass separation, we propose a method that combines harmonic decomposition using a Tikhonov regularization-based algorithm, with the peak contrast analysis of the pitch likelihood function. Our experiment compares the separation performance of this method to a naive low-pass filter, a state-of-the-art NMF-based method and a near-optimal binary mask. The proposed low-latency method achieves results similar to the NMF-based high-latency approach at a lower computational cost. Therefore the method is valid for real-time implementations.},
  url       = {http://dafx13.nuim.ie/papers/11.dafx2013_submission_13.pdf},
  author    = {Marxer, R. and Janer, J.}
}

@phdthesis{123,
  title     = {Modelling tonal context dynamics by temporal multi-scale analysis},
  year      = {2013},
  school    = {Universitat Pompeu Fabra},
  address   = {Barcelona},
  abstract  = {Modern digital multimedia and internet technology have radically changed the ways people find entertainment and discover new interests online, seemingly without any physical or social barriers. Such new access paradigms are in sharp contrast with the traditional means of entertainment. An illustrative example of this is live music concert performances that are largely being attended by dedicated audiences only.
This papers introduces the PHENICX project, which aims at enriching traditional concert experiences by using state-of-the-art multimedia and internet technologies. The project focuses on classical music and its main goal is twofold: (a) to make live concerts appealing to potential new audience and (b) to maximize the quality of concert experience for everyone. Concerts will then become multimodal, multi-perspective and multilayer digital artifacts that can be easily explored, customized, personalized, (re)enjoyed and shared among the users. The paper presents the main scientific objectives on the project, provides a state of the art review on related research and presents the main challenges to be addressed.
Drums modelling is of special interest in musical source separation because of its widespread presence in western popular music. Current research has often focused on drums separation without specifically modelling the other sources present in the signal. This paper presents an extensive study of the use of regularizations and constraints to drive the factorization towards the separation between percussive and non-percussive music accompaniment. The proposed regularizations control the frequency smoothness of the basis components and the temporal sparseness of the gains. We also evaluated the use of temporal constraints on the gains to perform the separation, using both ground truth manual annotations (made publicly available) and automatically extracted transients. Objective evaluation of the results shows that, while optimal regularizations are highly dependent on the signal, drum event position contains enough information to achieve a high quality separation.},
  keywords  = {drums, NMF, source Separation},
  url       = {http://dafx13.nuim.ie/papers/16.dafx2013_submission_16.pdf},
  internal-note = {review: abstract, keywords, url, and author fields appear to be pasted from unrelated DAFx-13 conference papers and do not match this thesis title -- verify against the actual thesis record before citing},
  author    = {Marxer, R. and Janer, J.}
}