Dataset Open Access

MAD-EEG: an EEG dataset for decoding auditory attention to a target instrument in polyphonic music

Giorgia Cantisani; Gabriel Trégoat; Slim Essid; Gaël Richard


MARC21 XML Export
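The MARC21 XML export below can be processed with any standard XML library. As a minimal sketch (assuming the export has been saved locally under the hypothetical filename madeeg_marc21.xml), the following Python snippet pulls out the title, the creators, and the file URLs with their MD5 checksums:

import xml.etree.ElementTree as ET

NS = {"marc": "http://www.loc.gov/MARC21/slim"}

# Assumes the export shown below has been saved locally, e.g. as
# "madeeg_marc21.xml" (hypothetical filename).
root = ET.parse("madeeg_marc21.xml").getroot()

def subfields(tag, code):
    # All values of subfield `code` across datafields with the given MARC tag.
    xpath = f".//marc:datafield[@tag='{tag}']/marc:subfield[@code='{code}']"
    return [sf.text for sf in root.findall(xpath, NS)]

title = subfields("245", "a")[0]
creators = subfields("100", "a") + subfields("700", "a")
files = zip(subfields("856", "u"), subfields("856", "z"))

print(title)
print(", ".join(creators))
for url, md5 in files:
    print(md5, url)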

<?xml version='1.0' encoding='UTF-8'?>
<record xmlns="http://www.loc.gov/MARC21/slim">
  <leader>00000nmm##2200000uu#4500</leader>
  <datafield tag="041" ind1=" " ind2=" ">
    <subfield code="a">eng</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Auditory attention decoding</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">EEG</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Polyphonic music</subfield>
  </datafield>
  <controlfield tag="005">20210903112629.0</controlfield>
  <controlfield tag="001">4537751</controlfield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="a">Gabriel Trégoat</subfield>
  </datafield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="u">LTCI, Télécom Paris, Institut Polytechnique de Paris</subfield>
    <subfield code="a">Slim Essid</subfield>
  </datafield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="u">LTCI, Télécom Paris, Institut Polytechnique de Paris</subfield>
    <subfield code="a">Gaël Richard</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">13693</subfield>
    <subfield code="z">md5:bcd8f706f0c1ab0eee8fe3211f0d8cfc</subfield>
    <subfield code="u">https://zenodo.org/record/4537751/files/behavioural_data.xlsx</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">3725304231</subfield>
    <subfield code="z">md5:92f9c3684fe72203160b0838ff2bb0f7</subfield>
    <subfield code="u">https://zenodo.org/record/4537751/files/madeeg_preprocessed.hdf5</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">153880</subfield>
    <subfield code="z">md5:1d093597df6fb1bada04903e20b06201</subfield>
    <subfield code="u">https://zenodo.org/record/4537751/files/madeeg_preprocessed.yaml</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">702084981</subfield>
    <subfield code="z">md5:795d7eea8f66550898ca2afeec55767c</subfield>
    <subfield code="u">https://zenodo.org/record/4537751/files/madeeg_raw.hdf5</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">77459</subfield>
    <subfield code="z">md5:cda4950e78da7c9fe94b3c139b4d711f</subfield>
    <subfield code="u">https://zenodo.org/record/4537751/files/madeeg_raw.yaml</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">300700</subfield>
    <subfield code="z">md5:815475b21c289107a03cb355f2d24e96</subfield>
    <subfield code="u">https://zenodo.org/record/4537751/files/madeeg_sequences_raw.yaml</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">288402242</subfield>
    <subfield code="z">md5:6165f80d0bc09ece2c42b10098434533</subfield>
    <subfield code="u">https://zenodo.org/record/4537751/files/stimuli.zip</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">1411092</subfield>
    <subfield code="z">md5:67f281aba3e38caeabb627ef16a60830</subfield>
    <subfield code="u">https://zenodo.org/record/4537751/files/tutorial-MAD-EEG.ipynb</subfield>
  </datafield>
  <datafield tag="542" ind1=" " ind2=" ">
    <subfield code="l">open</subfield>
  </datafield>
  <datafield tag="260" ind1=" " ind2=" ">
    <subfield code="c">2019-09-19</subfield>
  </datafield>
  <datafield tag="909" ind1="C" ind2="O">
    <subfield code="p">openaire_data</subfield>
    <subfield code="p">user-mir</subfield>
    <subfield code="p">user-ieee</subfield>
    <subfield code="o">oai:zenodo.org:4537751</subfield>
  </datafield>
  <datafield tag="100" ind1=" " ind2=" ">
    <subfield code="u">LTCI, Télécom Paris, Institut Polytechnique de Paris</subfield>
    <subfield code="a">Giorgia Cantisani</subfield>
  </datafield>
  <datafield tag="245" ind1=" " ind2=" ">
    <subfield code="a">MAD-EEG: an EEG dataset for decoding auditory attention to a target instrument in polyphonic music</subfield>
  </datafield>
  <datafield tag="980" ind1=" " ind2=" ">
    <subfield code="a">user-ieee</subfield>
  </datafield>
  <datafield tag="980" ind1=" " ind2=" ">
    <subfield code="a">user-mir</subfield>
  </datafield>
  <datafield tag="536" ind1=" " ind2=" ">
    <subfield code="c">765068</subfield>
    <subfield code="a">New Frontiers in Music Information Processing</subfield>
  </datafield>
  <datafield tag="540" ind1=" " ind2=" ">
    <subfield code="u">https://creativecommons.org/licenses/by-sa/4.0/legalcode</subfield>
    <subfield code="a">Creative Commons Attribution Share Alike 4.0 International</subfield>
  </datafield>
  <datafield tag="650" ind1="1" ind2="7">
    <subfield code="a">cc-by</subfield>
    <subfield code="2">opendefinition.org</subfield>
  </datafield>
  <datafield tag="520" ind1=" " ind2=" ">
    <subfield code="a">&lt;p&gt;The&amp;nbsp;&lt;em&gt;&lt;strong&gt;MAD-EEG&amp;nbsp;Dataset&lt;/strong&gt;&lt;/em&gt; is&amp;nbsp;a&amp;nbsp;research&amp;nbsp;corpus&amp;nbsp;for studying&amp;nbsp;EEG-based auditory attention decoding to a target instrument in polyphonic music.&amp;nbsp;&lt;/p&gt;

&lt;p&gt;The dataset&amp;nbsp;consists&amp;nbsp;of&amp;nbsp;20-channel&amp;nbsp;EEG&amp;nbsp;responses to music recorded from 8 subjects while attending to a particular instrument in&amp;nbsp;a music mixture.&amp;nbsp;&lt;/p&gt;

&lt;p&gt;For further details, please refer to the paper:&amp;nbsp;&lt;em&gt;&lt;a href="https://hal.archives-ouvertes.fr/hal-02291882/document"&gt;MAD-EEG: an EEG dataset for decoding auditory attention to a target instrument in polyphonic music&lt;/a&gt;.&lt;/em&gt;&lt;/p&gt;

&lt;p&gt;If you use the data in your research, please reference the paper (not just&amp;nbsp;the Zenodo record):&lt;/p&gt;

&lt;pre&gt;&lt;code&gt;@inproceedings{Cantisani2019,
  author={Giorgia Cantisani and Gabriel Trégoat and Slim Essid and Gaël Richard},
  title={{MAD-EEG: an EEG dataset for decoding auditory attention to a target instrument in polyphonic music}},
  year=2019,
  booktitle={Proc. SMM19, Workshop on Speech, Music and Mind 2019},
  pages={51--55},
  doi={10.21437/SMM.2019-11},
  url={http://dx.doi.org/10.21437/SMM.2019-11}
}&lt;/code&gt;&lt;/pre&gt;

&lt;p&gt;&amp;nbsp;&lt;/p&gt;</subfield>
  </datafield>
  <datafield tag="773" ind1=" " ind2=" ">
    <subfield code="n">doi</subfield>
    <subfield code="i">documents</subfield>
    <subfield code="a">10.21437/SMM.2019-11</subfield>
  </datafield>
  <datafield tag="773" ind1=" " ind2=" ">
    <subfield code="n">doi</subfield>
    <subfield code="i">isVersionOf</subfield>
    <subfield code="a">10.5281/zenodo.4537750</subfield>
  </datafield>
  <datafield tag="024" ind1=" " ind2=" ">
    <subfield code="a">10.5281/zenodo.4537751</subfield>
    <subfield code="2">doi</subfield>
  </datafield>
  <datafield tag="980" ind1=" " ind2=" ">
    <subfield code="a">dataset</subfield>
  </datafield>
</record>
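Each 856 field in the record above pairs a download URL with its size in bytes and an MD5 checksum, so files can be verified after download. Below is a minimal verification sketch in Python; only the smaller files are pre-filled, and the multi-gigabyte entries (madeeg_raw.hdf5, madeeg_preprocessed.hdf5, stimuli.zip) can be added to the same dictionary using the checksums listed above.

import hashlib
import urllib.request

# File URLs and MD5 checksums copied verbatim from the 856 fields of the record.
FILES = {
    "https://zenodo.org/record/4537751/files/behavioural_data.xlsx": "bcd8f706f0c1ab0eee8fe3211f0d8cfc",
    "https://zenodo.org/record/4537751/files/madeeg_preprocessed.yaml": "1d093597df6fb1bada04903e20b06201",
    "https://zenodo.org/record/4537751/files/madeeg_raw.yaml": "cda4950e78da7c9fe94b3c139b4d711f",
    "https://zenodo.org/record/4537751/files/madeeg_sequences_raw.yaml": "815475b21c289107a03cb355f2d24e96",
    "https://zenodo.org/record/4537751/files/tutorial-MAD-EEG.ipynb": "67f281aba3e38caeabb627ef16a60830",
}

def md5_of(path, chunk_size=1 << 20):
    # Stream the file through MD5 so large downloads never need to fit in memory.
    digest = hashlib.md5()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

for url, expected in FILES.items():
    name = url.rsplit("/", 1)[-1]
    urllib.request.urlretrieve(url, name)  # save next to the script
    actual = md5_of(name)
    status = "OK" if actual == expected else "MISMATCH"
    print(f"{name}: {status} ({actual})")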
                    All versions   This version
Views                        207            207
Downloads                    133            133
Data volume             122.7 GB       122.7 GB
Unique views                 175            175
Unique downloads              52             52
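According to the description, the preprocessed 20-channel EEG responses are distributed as an HDF5 container (madeeg_preprocessed.hdf5) with a YAML sidecar (madeeg_preprocessed.yaml), and the bundled tutorial-MAD-EEG.ipynb documents the intended workflow. The sketch below, which assumes the h5py and PyYAML packages, only inventories the file contents without presuming any particular internal layout; the notebook remains the authoritative reference.

import h5py
import yaml  # PyYAML

# Load the YAML sidecar describing the recordings.
with open("madeeg_preprocessed.yaml", "r", encoding="utf-8") as fh:
    metadata = yaml.safe_load(fh)
print(f"YAML metadata loaded: top-level type {type(metadata).__name__}")

# Walk the HDF5 container and print every group and dataset with shape/dtype,
# without assuming any specific group hierarchy.
with h5py.File("madeeg_preprocessed.hdf5", "r") as f:
    def describe(name, obj):
        if isinstance(obj, h5py.Dataset):
            print(f"{name}: shape={obj.shape}, dtype={obj.dtype}")
        else:
            print(f"{name}/")
    f.visititems(describe)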
