Dataset | Open Access

MAD-EEG: an EEG dataset for decoding auditory attention to a target instrument in polyphonic music

Giorgia Cantisani; Gabriel Trégoat; Slim Essid; Gaël Richard


JSON Export

{
  "files": [
    {
      "links": {
        "self": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0/behavioural_data.xlsx"
      }, 
      "checksum": "md5:bcd8f706f0c1ab0eee8fe3211f0d8cfc", 
      "bucket": "5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
      "key": "behavioural_data.xlsx", 
      "type": "xlsx", 
      "size": 13693
    }, 
    {
      "links": {
        "self": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0/madeeg_preprocessed.hdf5"
      }, 
      "checksum": "md5:92f9c3684fe72203160b0838ff2bb0f7", 
      "bucket": "5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
      "key": "madeeg_preprocessed.hdf5", 
      "type": "hdf5", 
      "size": 3725304231
    }, 
    {
      "links": {
        "self": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0/madeeg_preprocessed.yaml"
      }, 
      "checksum": "md5:1d093597df6fb1bada04903e20b06201", 
      "bucket": "5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
      "key": "madeeg_preprocessed.yaml", 
      "type": "yaml", 
      "size": 153880
    }, 
    {
      "links": {
        "self": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0/madeeg_raw.hdf5"
      }, 
      "checksum": "md5:795d7eea8f66550898ca2afeec55767c", 
      "bucket": "5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
      "key": "madeeg_raw.hdf5", 
      "type": "hdf5", 
      "size": 702084981
    }, 
    {
      "links": {
        "self": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0/madeeg_raw.yaml"
      }, 
      "checksum": "md5:cda4950e78da7c9fe94b3c139b4d711f", 
      "bucket": "5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
      "key": "madeeg_raw.yaml", 
      "type": "yaml", 
      "size": 77459
    }, 
    {
      "links": {
        "self": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0/madeeg_sequences_raw.yaml"
      }, 
      "checksum": "md5:815475b21c289107a03cb355f2d24e96", 
      "bucket": "5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
      "key": "madeeg_sequences_raw.yaml", 
      "type": "yaml", 
      "size": 300700
    }, 
    {
      "links": {
        "self": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0/stimuli.zip"
      }, 
      "checksum": "md5:6165f80d0bc09ece2c42b10098434533", 
      "bucket": "5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
      "key": "stimuli.zip", 
      "type": "zip", 
      "size": 288402242
    }, 
    {
      "links": {
        "self": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0/tutorial-MAD-EEG.ipynb"
      }, 
      "checksum": "md5:67f281aba3e38caeabb627ef16a60830", 
      "bucket": "5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
      "key": "tutorial-MAD-EEG.ipynb", 
      "type": "ipynb", 
      "size": 1411092
    }
  ], 
  "owners": [
    67250
  ], 
  "doi": "10.5281/zenodo.4537751", 
  "stats": {
    "version_unique_downloads": 52.0, 
    "unique_views": 172.0, 
    "views": 204.0, 
    "version_views": 204.0, 
    "unique_downloads": 52.0, 
    "version_unique_views": 172.0, 
    "volume": 122669652397.0, 
    "version_downloads": 133.0, 
    "downloads": 133.0, 
    "version_volume": 122669652397.0
  }, 
  "links": {
    "doi": "https://doi.org/10.5281/zenodo.4537751", 
    "conceptdoi": "https://doi.org/10.5281/zenodo.4537750", 
    "bucket": "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0", 
    "conceptbadge": "https://zenodo.org/badge/doi/10.5281/zenodo.4537750.svg", 
    "html": "https://zenodo.org/record/4537751", 
    "latest_html": "https://zenodo.org/record/4537751", 
    "badge": "https://zenodo.org/badge/doi/10.5281/zenodo.4537751.svg", 
    "latest": "https://zenodo.org/api/records/4537751"
  }, 
  "conceptdoi": "10.5281/zenodo.4537750", 
  "created": "2021-08-31T15:31:01.379777+00:00", 
  "updated": "2021-09-03T11:26:29.387025+00:00", 
  "conceptrecid": "4537750", 
  "revision": 5, 
  "id": 4537751, 
  "metadata": {
    "access_right_category": "success", 
    "doi": "10.5281/zenodo.4537751", 
    "version": "1.0.0", 
    "language": "eng", 
    "title": "MAD-EEG: an EEG dataset for decoding auditory attention to a target instrument in polyphonic music", 
    "license": {
      "id": "CC-BY-SA-4.0"
    }, 
    "related_identifiers": [
      {
        "scheme": "doi", 
        "identifier": "10.21437/SMM.2019-11", 
        "relation": "documents", 
        "resource_type": "publication-conferencepaper"
      }, 
      {
        "scheme": "doi", 
        "identifier": "10.5281/zenodo.4537750", 
        "relation": "isVersionOf"
      }
    ], 
    "relations": {
      "version": [
        {
          "count": 1, 
          "index": 0, 
          "parent": {
            "pid_type": "recid", 
            "pid_value": "4537750"
          }, 
          "is_last": true, 
          "last_child": {
            "pid_type": "recid", 
            "pid_value": "4537751"
          }
        }
      ]
    }, 
    "communities": [
      {
        "id": "ieee"
      }, 
      {
        "id": "mir"
      }
    ], 
    "grants": [
      {
        "code": "765068", 
        "links": {
          "self": "https://zenodo.org/api/grants/10.13039/501100000780::765068"
        }, 
        "title": "New Frontiers in Music Information Processing", 
        "acronym": "MIP-Frontiers", 
        "program": "H2020", 
        "funder": {
          "doi": "10.13039/501100000780", 
          "acronyms": [], 
          "name": "European Commission", 
          "links": {
            "self": "https://zenodo.org/api/funders/10.13039/501100000780"
          }
        }
      }
    ], 
    "keywords": [
      "Auditory attention decoding", 
      "EEG", 
      "Polyphonic music"
    ], 
    "publication_date": "2019-09-19", 
    "creators": [
      {
        "affiliation": "LTCI, T\u00e9l\u00e9com Paris, Institut Polytechnique de Paris", 
        "name": "Giorgia Cantisani"
      }, 
      {
        "name": "Gabriel Tr\u00e9goat"
      }, 
      {
        "affiliation": "LTCI, T\u00e9l\u00e9com Paris, Institut Polytechnique de Paris", 
        "name": "Slim Essid"
      }, 
      {
        "affiliation": "LTCI, T\u00e9l\u00e9com Paris, Institut Polytechnique de Paris", 
        "name": "Ga\u00ebl Richard"
      }
    ], 
    "access_right": "open", 
    "resource_type": {
      "type": "dataset", 
      "title": "Dataset"
    }, 
    "description": "<p>The&nbsp;<em><strong>MAD-EEG&nbsp;Dataset</strong></em> is&nbsp;a&nbsp;research&nbsp;corpus&nbsp;for studying&nbsp;EEG-based auditory attention decoding to a target instrument in polyphonic music.&nbsp;</p>\n\n<p>The dataset&nbsp;consists&nbsp;of&nbsp;20-channel&nbsp;EEG&nbsp;responses to music recorded from 8 subjects while attending to a particular instrument in&nbsp;a music mixture.&nbsp;</p>\n\n<p>For further details, please refer to the paper:&nbsp;<em><a href=\"https://hal.archives-ouvertes.fr/hal-02291882/document\">MAD-EEG: an EEG dataset for decoding auditory attention to a target instrument in polyphonic music</a>.</em></p>\n\n<p>If you use the data in your research, please reference the paper (not just&nbsp;the Zenodo record):</p>\n\n<pre><code>@inproceedings{Cantisani2019,\n  author={Giorgia Cantisani and Gabriel Tr\u00e9goat and Slim Essid and Ga\u00ebl Richard},\n  title={{MAD-EEG: an EEG dataset for decoding auditory attention to a target instrument in polyphonic music}},\n  year=2019,\n  booktitle={Proc. SMM19, Workshop on Speech, Music and Mind 2019},\n  pages={51--55},\n  doi={10.21437/SMM.2019-11},\n  url={http://dx.doi.org/10.21437/SMM.2019-11}\n}</code></pre>\n\n<p>&nbsp;</p>"
  }
}
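Each entry in the "files" list above exposes a direct download URL (links.self) together with an MD5 checksum, so downloads can be verified after transfer. Below is a minimal sketch of such a check, assuming Python 3 with the requests package installed; the bucket URL, file names and checksums are copied from the JSON export above, and this script is not part of the official dataset tooling.

import hashlib
import requests

# Bucket URL and (key, MD5) pairs copied from the "files" list in the JSON export
BUCKET = "https://zenodo.org/api/files/5905022b-3d32-4c1e-bfba-fea9514ee1b0"
FILES = [
    ("behavioural_data.xlsx", "bcd8f706f0c1ab0eee8fe3211f0d8cfc"),
    ("madeeg_preprocessed.yaml", "1d093597df6fb1bada04903e20b06201"),
]

def download_and_verify(key, expected_md5):
    """Stream one file from the Zenodo bucket and check its MD5 checksum."""
    md5 = hashlib.md5()
    with requests.get(f"{BUCKET}/{key}", stream=True) as resp:
        resp.raise_for_status()
        with open(key, "wb") as out:
            for chunk in resp.iter_content(chunk_size=1 << 20):
                out.write(chunk)
                md5.update(chunk)
    if md5.hexdigest() != expected_md5:
        raise ValueError(f"Checksum mismatch for {key}")
    print(f"{key}: checksum OK")

for key, checksum in FILES:
    download_and_verify(key, checksum)

The larger HDF5 files (madeeg_raw.hdf5 is roughly 0.7 GB, madeeg_preprocessed.hdf5 roughly 3.7 GB, per the sizes listed above) can be verified the same way; streaming in 1 MB chunks keeps memory use constant regardless of file size.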
Statistics         All versions   This version
Views              204            204
Downloads          133            133
Data volume        122.7 GB       122.7 GB
Unique views       172            172
Unique downloads   52             52
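The same record metadata and usage statistics can also be retrieved programmatically from the Zenodo REST API endpoint listed under links.latest in the JSON export. A brief sketch follows, again assuming Python with requests; the exact fields returned may vary across Zenodo API revisions.

import json
import requests

# Record endpoint taken from links["latest"] in the JSON export above
RECORD_URL = "https://zenodo.org/api/records/4537751"

record = requests.get(RECORD_URL).json()

# List the deposited files with their sizes in bytes
for f in record.get("files", []):
    print(f["key"], f["size"])

# Usage statistics (views, downloads, data volume) as reported by Zenodo
print(json.dumps(record.get("stats", {}), indent=2))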
