journalarticles.bib

@comment{{This file has been generated by bib2bib 1.99}}
@comment{{Command line: bib2bib -ob ./publications/journalarticles.bib -c 'keywords : ".*journal-article.*" and keywords : ".*refereed.*" and not keywords : ".*non-refereed.*"' ../publications.bib}}
@article{Martin:2020aa,
  abstract = {Machine-learning models of music often exist outside the worlds of musical performance practice and abstracted from the physical gestures of musicians. In this work, we consider how a recurrent neural network (RNN) model of simple music gestures may be integrated into a physical instrument so that predictions are sonically and physically entwined with the performer's actions. We introduce EMPI, an embodied musical prediction interface that simplifies musical interaction and prediction to just one dimension of continuous input and output. The predictive model is a mixture density RNN trained to estimate the performer's next physical input action and the time at which this will occur. Predictions are represented sonically, through synthesised audio, and physically, with a motorised output indicator. We use EMPI to investigate how performers understand and exploit different predictive models to make music through a controlled study of performances with different models and levels of physical feedback. We show that while performers often favour a model trained on human-sourced data, they find different musical affordances in models trained on synthetic, and even random, data. Physical representation of predictions seemed to affect the length of performances. This work contributes new understandings of how musicians use generative ML models in real-time performance backed up by experimental evidence. We argue that a constrained musical interface can expose the affordances of embodied predictive interactions.},
  author = {Charles Patrick Martin and Kyrre Glette and T{\o}nnes Frostad Nygaard and Jim Torresen},
  date-added = {2020-02-10 09:42:30 +1100},
  date-modified = {2020-07-25 13:45:40 +1000},
  doi = {10.3389/frai.2020.00006},
  journal = {Frontiers in Artificial Intelligence},
  keywords = {journal-article, refereed},
  month = feb,
  title = {Understanding Musical Predictions with an Embodied Interface for Musical Machine Learning},
  video = {https://youtu.be/tvgqxmHr9wU},
  year = {2020}
}
@article{Martin:2019ab,
  archiveprefix = {arXiv},
  author = {Charles Patrick Martin and Jim Torresen},
  date-added = {2019-02-04 11:20:55 +0100},
  date-modified = {2020-07-25 13:45:52 +1000},
  eprint = {1902.00680},
  journal = {Computer Music Journal},
  keywords = {journal-article, refereed},
  preprint = {https://metatonetransfer.com/preprints/2019-DataDrivenAnalysisMicroJam.pdf},
  status = {in-press},
  title = {Data Driven Analysis of Tiny Touchscreen Performance with MicroJam},
  url = {https://arxiv.org/abs/1902.00680},
  video = {https://youtu.be/HhI47-XzrtI},
  year = {2020},
  bdsk-url-1 = {https://arxiv.org/abs/1902.00680}
}
@article{Martin:2017af,
  author = {Charles P. Martin},
  date-modified = {2019-09-06 15:25:59 +1000},
  doi = {10.1080/07494467.2017.1370794},
  journal = {Contemporary Music Review},
  keywords = {journal-article, refereed},
  month = sep,
  number = {1--2},
  pages = {64--85},
  preprint = {https://metatonetransfer.com/preprints/2017-PercussionistCentredDesign.pdf},
  status = {published},
  title = {Percussionist-Centred Design for Touchscreen Digital Musical Instruments},
  volume = {36},
  year = {2017},
  bdsk-url-1 = {https://dx.doi.org/10.1080/07494467.2017.1370794}
}