Dataset Open Access
Juan J. Bosch;
Ferdinand Fuhrmann;
Perfecto Herrera
{ "files": [ { "links": { "self": "https://zenodo.org/api/files/76ac96d0-1d06-479e-aedd-cf64760cb4cc/IRMAS-TestingData-Part1.zip" }, "checksum": "md5:5a2e65520dcedada565dff2050bb2a56", "bucket": "76ac96d0-1d06-479e-aedd-cf64760cb4cc", "key": "IRMAS-TestingData-Part1.zip", "type": "zip", "size": 2282264831 }, { "links": { "self": "https://zenodo.org/api/files/76ac96d0-1d06-479e-aedd-cf64760cb4cc/IRMAS-TestingData-Part2.zip" }, "checksum": "md5:afb0c8ea92f34ee653693106be95c895", "bucket": "76ac96d0-1d06-479e-aedd-cf64760cb4cc", "key": "IRMAS-TestingData-Part2.zip", "type": "zip", "size": 3387310839 }, { "links": { "self": "https://zenodo.org/api/files/76ac96d0-1d06-479e-aedd-cf64760cb4cc/IRMAS-TestingData-Part3.zip" }, "checksum": "md5:9b3fb2d0c89cdc98037121c25bd5b556", "bucket": "76ac96d0-1d06-479e-aedd-cf64760cb4cc", "key": "IRMAS-TestingData-Part3.zip", "type": "zip", "size": 2132712716 }, { "links": { "self": "https://zenodo.org/api/files/76ac96d0-1d06-479e-aedd-cf64760cb4cc/IRMAS-TrainingData.zip" }, "checksum": "md5:4fd9f5ed5a18d8e2687e6360b5f60afe", "bucket": "76ac96d0-1d06-479e-aedd-cf64760cb4cc", "key": "IRMAS-TrainingData.zip", "type": "zip", "size": 3181049879 } ], "owners": [ 46759 ], "doi": "10.5281/zenodo.1290750", "stats": { "version_unique_downloads": 7097.0, "unique_views": 8855.0, "views": 10064.0, "version_views": 10069.0, "unique_downloads": 7096.0, "version_unique_views": 8860.0, "volume": 68521908619521.0, "version_downloads": 24777.0, "downloads": 24776.0, "version_volume": 68525089669400.0 }, "links": { "doi": "https://doi.org/10.5281/zenodo.1290750", "conceptdoi": "https://doi.org/10.5281/zenodo.1290749", "bucket": "https://zenodo.org/api/files/76ac96d0-1d06-479e-aedd-cf64760cb4cc", "conceptbadge": "https://zenodo.org/badge/doi/10.5281/zenodo.1290749.svg", "html": "https://zenodo.org/record/1290750", "latest_html": "https://zenodo.org/record/1290750", "badge": "https://zenodo.org/badge/doi/10.5281/zenodo.1290750.svg", "latest": 
"https://zenodo.org/api/records/1290750" }, "conceptdoi": "10.5281/zenodo.1290749", "created": "2018-06-20T10:22:36.655450+00:00", "updated": "2020-01-24T19:26:01.990506+00:00", "conceptrecid": "1290749", "revision": 12, "id": 1290750, "metadata": { "access_right_category": "success", "doi": "10.5281/zenodo.1290750", "description": "<p>This dataset includes musical audio excerpts with annotations of the predominant instrument(s) present. It was used for the evaluation in the following article:</p>\n\n<blockquote>\n<p>Bosch, J. J., Janer, J., Fuhrmann, F., & Herrera, P. “<a href=\"http://ismir2012.ismir.net/event/papers/559_ISMIR_2012.pdf\">A Comparison of Sound Segregation Techniques for Predominant Instrument Recognition in Musical Audio Signals</a>”, in Proc. ISMIR (pp. 559-564), 2012</p>\n</blockquote>\n\n<p>Please Acknowledge IRMAS in Academic Research</p>\n\n<p>IRMAS is intended to be used for training and testing methods for the automatic recognition of predominant instruments in musical audio. The instruments considered are: cello, clarinet, flute, acoustic guitar, electric guitar, organ, piano, saxophone, trumpet, violin, and human singing voice. This dataset is derived from the one compiled by Ferdinand Fuhrmann in his <a href=\"http://www.dtic.upf.edu/~ffuhrmann/PhD/\">PhD thesis</a>, with the difference that we provide audio data in stereo format, the annotations in the testing dataset are limited to specific pitched instruments, and there is a different amount and length of excerpts.</p>\n\n<p><strong>Using this dataset</strong></p>\n\n<p>When IRMAS is used for academic research, we would highly appreciate if scientific publications of works partly based on the IRMAS dataset quote the above publication.</p>\n\n<p>We are interested in knowing if you find our datasets useful! 
If you use our dataset please email us at <a href=\"mailto:mtg-info@upf.edu\">mtg-info@upf.edu</a> and tell us about your research.</p>\n\n<p> </p>\n\n<p><a href=\"https://www.upf.edu/web/mtg/irmas\">https://www.upf.edu/web/mtg/irmas </a></p>", "license": { "id": "CC-BY-NC-SA-4.0" }, "title": "IRMAS: a dataset for instrument recognition in musical audio signals", "relations": { "version": [ { "count": 1, "index": 0, "parent": { "pid_type": "recid", "pid_value": "1290749" }, "is_last": true, "last_child": { "pid_type": "recid", "pid_value": "1290750" } } ] }, "version": "1.0", "communities": [ { "id": "mdm-dtic-upf" }, { "id": "mtgupf" } ], "publication_date": "2014-09-08", "creators": [ { "orcid": "0000-0003-4221-3517", "affiliation": "Music Technology Group, Universitat Pompeu Fabra, Barcelona, Spain", "name": "Juan J. Bosch" }, { "affiliation": "Music Technology Group, Universitat Pompeu Fabra, Barcelona, Spain", "name": "Ferdinand Fuhrmann" }, { "orcid": "0000-0003-2799-7675", "affiliation": "Music Technology Group, Universitat Pompeu Fabra, Barcelona, Spain", "name": "Perfecto Herrera" } ], "meeting": { "acronym": "ISMIR 2012", "dates": "08/10/2012", "place": "Porto, Portugal", "title": "13th International Society for Music Information Retrieval Conference" }, "access_right": "open", "resource_type": { "type": "dataset", "title": "Dataset" }, "related_identifiers": [ { "scheme": "doi", "identifier": "10.5281/zenodo.1290749", "relation": "isVersionOf" } ] } }
All versions | This version | |
---|---|---|
Views | 10,069 | 10,064 |
Downloads | 24,777 | 24,776 |
Data volume | 68.5 TB | 68.5 TB |
Unique views | 8,860 | 8,855 |
Unique downloads | 7,097 | 7,096 |