Conference paper Open Access

Semi-Automated Mappings for Object-Manipulating Gestural Control of Electronic Music

de las Pozas, Virginia


MARC21 XML Export

<?xml version='1.0' encoding='UTF-8'?>
<record xmlns="http://www.loc.gov/MARC21/slim">
  <leader>00000nam##2200000uu#4500</leader>
  <controlfield tag="005">20210530022043.0</controlfield>
  <controlfield tag="001">4813232</controlfield>
  <datafield tag="711" ind1=" " ind2=" ">
    <subfield code="a">International Conference on New Interfaces for Musical Expression</subfield>
    <subfield code="c">Birmingham, UK</subfield>
  </datafield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="4">edt</subfield>
    <subfield code="a">Michon, Romain</subfield>
  </datafield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="4">edt</subfield>
    <subfield code="a">Schroeder, Franziska</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">472817</subfield>
    <subfield code="z">md5:d75b126f3cf972ffb2e2d3dd9b2e03d4</subfield>
    <subfield code="u">https://zenodo.org/record/4813232/files/nime2020_paper124.pdf</subfield>
  </datafield>
  <datafield tag="542" ind1=" " ind2=" ">
    <subfield code="l">open</subfield>
  </datafield>
  <datafield tag="260" ind1=" " ind2=" ">
    <subfield code="c">2020-06-01</subfield>
  </datafield>
  <datafield tag="909" ind1="C" ind2="O">
    <subfield code="p">openaire</subfield>
    <subfield code="p">user-nime_conference</subfield>
    <subfield code="o">oai:zenodo.org:4813232</subfield>
  </datafield>
  <datafield tag="100" ind1=" " ind2=" ">
    <subfield code="a">de las Pozas, Virginia</subfield>
  </datafield>
  <datafield tag="245" ind1=" " ind2=" ">
    <subfield code="a">Semi-Automated Mappings for Object-Manipulating Gestural Control of Electronic Music</subfield>
  </datafield>
  <datafield tag="980" ind1=" " ind2=" ">
    <subfield code="a">user-nime_conference</subfield>
  </datafield>
  <datafield tag="540" ind1=" " ind2=" ">
    <subfield code="u">https://creativecommons.org/licenses/by/4.0/legalcode</subfield>
    <subfield code="a">Creative Commons Attribution 4.0 International</subfield>
  </datafield>
  <datafield tag="650" ind1="1" ind2="7">
    <subfield code="a">cc-by</subfield>
    <subfield code="2">opendefinition.org</subfield>
  </datafield>
  <datafield tag="520" ind1=" " ind2=" ">
    <subfield code="a">This paper describes a system for automating the generation of mapping schemes between human interaction with extramusical objects and electronic dance music. These mappings are determined through the comparison of sensor input to a synthesized matrix of sequenced audio. The goal of the system is to facilitate live performances that feature quotidian objects in the place of traditional musical instruments. The practical and artistic applications of musical control with quotidian objects is discussed. The associated object-manipulating gesture vocabularies are mapped to musical output so that the objects themselves may be perceived as DMIs. This strategy is used in a performance to explore the liveness qualities of the system.</subfield>
  </datafield>
  <datafield tag="773" ind1=" " ind2=" ">
    <subfield code="n">issn</subfield>
    <subfield code="i">isPartOf</subfield>
    <subfield code="a">2220-4806</subfield>
  </datafield>
  <datafield tag="773" ind1=" " ind2=" ">
    <subfield code="n">doi</subfield>
    <subfield code="i">isVersionOf</subfield>
    <subfield code="a">10.5281/zenodo.4813231</subfield>
  </datafield>
  <datafield tag="024" ind1=" " ind2=" ">
    <subfield code="a">10.5281/zenodo.4813232</subfield>
    <subfield code="2">doi</subfield>
  </datafield>
  <datafield tag="980" ind1=" " ind2=" ">
    <subfield code="a">publication</subfield>
    <subfield code="b">conferencepaper</subfield>
  </datafield>
</record>
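
For reference, the exported record can be read with any namespace-aware XML parser. The sketch below uses Python's standard xml.etree.ElementTree to pull a few fields out of the MARC21-slim record above; the local filename record.xml and the subfield() helper are assumptions for illustration, not part of the Zenodo export.

import xml.etree.ElementTree as ET

# MARC21-slim namespace, as declared on the <record> element above.
MARC_NS = {"marc": "http://www.loc.gov/MARC21/slim"}

# Parse the exported record; "record.xml" is an assumed local filename.
record = ET.parse("record.xml").getroot()

def subfield(tag, code):
    # Return the first <subfield> value for the given datafield tag/code pair,
    # or None if the record has no such field.
    for df in record.findall(f"marc:datafield[@tag='{tag}']", MARC_NS):
        sf = df.find(f"marc:subfield[@code='{code}']", MARC_NS)
        if sf is not None:
            return sf.text
    return None

print("Title:  ", subfield("245", "a"))
print("Author: ", subfield("100", "a"))
print("DOI:    ", subfield("024", "a"))
print("License:", subfield("540", "a"))
print("PDF URL:", subfield("856", "u"))

Note that the record carries two related identifiers: tag 024 holds this version's DOI (10.5281/zenodo.4813232), while the 773 field marked isVersionOf points to the concept DOI (10.5281/zenodo.4813231), which Zenodo resolves to the latest version of the record.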
Views and downloads:

                    All versions    This version
Views                        114             114
Downloads                     75              75
Data volume              35.5 MB         35.5 MB
Unique views                  96              96
Unique downloads              68              68
