Conference paper Open Access

Framing Named Entity Linking Error Types

Brasoveanu, Adrian M. P.; Rizzo, Giuseppe; Kuntschick, Philipp; Weichselbraun, Albert; Nixon, Lyndon J. B.


DataCite XML Export

<?xml version='1.0' encoding='utf-8'?>
<resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://datacite.org/schema/kernel-4" xsi:schemaLocation="http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.1/metadata.xsd">
  <identifier identifierType="DOI">10.5281/zenodo.2543382</identifier>
  <creators>
    <creator>
      <creatorName>Brasoveanu, Adrian M. P.</creatorName>
      <givenName>Adrian M. P.</givenName>
      <familyName>Brasoveanu</familyName>
      <affiliation>MODUL Technology GmbH, Vienna, Austria</affiliation>
    </creator>
    <creator>
      <creatorName>Rizzo, Giuseppe</creatorName>
      <givenName>Giuseppe</givenName>
      <familyName>Rizzo</familyName>
      <affiliation>ISMB, Via Pier Carlo Boggio 61, 10138 Torino, Italy</affiliation>
    </creator>
    <creator>
      <creatorName>Kuntschick, Philipp</creatorName>
      <givenName>Philipp</givenName>
      <familyName>Kuntschick</familyName>
      <affiliation>Swiss Institute for Information Research, University of Applied Sciences Chur</affiliation>
    </creator>
    <creator>
      <creatorName>Weichselbraun, Albert</creatorName>
      <givenName>Albert</givenName>
      <familyName>Weichselbraun</familyName>
      <affiliation>Swiss Institute for Information Research, University of Applied Sciences Chur</affiliation>
    </creator>
    <creator>
      <creatorName>Nixon, Lyndon J. B.</creatorName>
      <givenName>Lyndon J. B.</givenName>
      <familyName>Nixon</familyName>
      <affiliation>MODUL Technology GmbH, Vienna, Austria</affiliation>
    </creator>
  </creators>
  <titles>
    <title>Framing Named Entity Linking Error Types</title>
  </titles>
  <publisher>Zenodo</publisher>
  <publicationYear>2018</publicationYear>
  <subjects>
    <subject>Named Entity Linking</subject>
    <subject>Linked Data Quality</subject>
    <subject>Corpora</subject>
    <subject>Evaluation</subject>
    <subject>Error Analysis</subject>
  </subjects>
  <dates>
    <date dateType="Issued">2018-05-09</date>
  </dates>
  <language>en</language>
  <resourceType resourceTypeGeneral="Text">Conference paper</resourceType>
  <alternateIdentifiers>
    <alternateIdentifier alternateIdentifierType="url">https://zenodo.org/record/2543382</alternateIdentifier>
  </alternateIdentifiers>
  <relatedIdentifiers>
    <relatedIdentifier relatedIdentifierType="DOI" relationType="IsVersionOf">10.5281/zenodo.2543381</relatedIdentifier>
    <relatedIdentifier relatedIdentifierType="URL" relationType="IsPartOf">https://zenodo.org/communities/invid-h2020</relatedIdentifier>
  </relatedIdentifiers>
  <rightsList>
    <rights rightsURI="http://creativecommons.org/licenses/by/4.0/legalcode">Creative Commons Attribution 4.0 International</rights>
    <rights rightsURI="info:eu-repo/semantics/openAccess">Open Access</rights>
  </rightsList>
  <descriptions>
    <description descriptionType="Abstract">&lt;p&gt;Named Entity Linking (NEL) and relation extraction form the backbone of Knowledge Base Population tasks. The recent rise of large open-source Knowledge Bases and the continuous focus on improving NEL performance have led to the creation of automated benchmark solutions during the last decade. Benchmarking NEL systems offers a valuable approach to understanding a NEL system&amp;rsquo;s performance quantitatively. However, identifying the causes of errors and thereby improving NEL methods usually requires a more thorough, qualitative error analysis. This paper proposes a taxonomy to frame common errors and applies this taxonomy in a survey study to assess the performance of four well-known Named Entity Linking systems on three recent gold standards.&lt;/p&gt;</description>
  </descriptions>
  <fundingReferences>
    <fundingReference>
      <funderName>European Commission</funderName>
      <funderIdentifier funderIdentifierType="Crossref Funder ID">10.13039/501100000780</funderIdentifier>
      <awardNumber awardURI="info:eu-repo/grantAgreement/EC/H2020/687786/">687786</awardNumber>
      <awardTitle>In Video Veritas – Verification of Social Media Video Content for the News Industry</awardTitle>
    </fundingReference>
  </fundingReferences>
</resource>
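
The export above follows the DataCite Metadata Schema 4 (all elements live in the http://datacite.org/schema/kernel-4 namespace). A minimal sketch, assuming the XML has been saved locally as "record.xml" (a hypothetical file name), of how the DOI, title, and creators could be read with Python's standard library:

import xml.etree.ElementTree as ET

# Default namespace declared on the <resource> root element
NS = {"dc": "http://datacite.org/schema/kernel-4"}

tree = ET.parse("record.xml")   # local copy of the DataCite XML export
root = tree.getroot()

# DOI and title
doi = root.findtext("dc:identifier", namespaces=NS)
title = root.findtext("dc:titles/dc:title", namespaces=NS)

# Creator names with their affiliations
creators = [
    (c.findtext("dc:creatorName", namespaces=NS),
     c.findtext("dc:affiliation", namespaces=NS))
    for c in root.findall("dc:creators/dc:creator", NS)
]

print(doi, title)
for name, affiliation in creators:
    print(f"{name} ({affiliation})")

This is only an illustration of the record's structure, not tooling shipped with the paper; the same metadata can also be retrieved directly from Zenodo via the DOI 10.5281/zenodo.2543382.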