Conference paper Open Access

Tri Automatique de la Littérature pour les Revues Systématiques

Norman, Christopher; Leeflang, Mariska; Zweigenbaum, Pierre; Névéol, Aurélie


DataCite XML Export

<?xml version="1.0" encoding="UTF-8"?>
<!-- DataCite 4.1 metadata export for Zenodo record 887606 (conference paper). -->
<resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://datacite.org/schema/kernel-4" xsi:schemaLocation="http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.1/metadata.xsd">
  <identifier identifierType="DOI">10.5281/zenodo.887606</identifier>
  <creators>
    <creator>
      <creatorName>Norman, Christopher</creatorName>
      <givenName>Christopher</givenName>
      <familyName>Norman</familyName>
      <affiliation>LIMSI, CNRS, Université Paris Saclay, 91405 Orsay, France</affiliation>
    </creator>
    <creator>
      <creatorName>Leeflang, Mariska</creatorName>
      <givenName>Mariska</givenName>
      <familyName>Leeflang</familyName>
      <affiliation>Academic Medical Center, University of Amsterdam, Amsterdam, the Netherlands</affiliation>
    </creator>
    <creator>
      <creatorName>Zweigenbaum, Pierre</creatorName>
      <givenName>Pierre</givenName>
      <familyName>Zweigenbaum</familyName>
      <affiliation>LIMSI, CNRS, Université Paris Saclay, 91405 Orsay, France</affiliation>
    </creator>
    <creator>
      <creatorName>Névéol, Aurélie</creatorName>
      <givenName>Aurélie</givenName>
      <familyName>Névéol</familyName>
      <affiliation>LIMSI, CNRS, Université Paris Saclay, 91405 Orsay, France</affiliation>
    </creator>
  </creators>
  <titles>
    <title>Tri Automatique de la Littérature pour les Revues Systématiques</title>
  </titles>
  <publisher>Zenodo</publisher>
  <publicationYear>2017</publicationYear>
  <subjects>
    <subject>Information Retrieval</subject>
    <subject>Supervised Classification</subject>
    <subject>Systematic Reviews</subject>
  </subjects>
  <dates>
    <date dateType="Issued">2017-06-30</date>
  </dates>
  <resourceType resourceTypeGeneral="Text">Conference paper</resourceType>
  <alternateIdentifiers>
    <!-- alternateIdentifierType uppercased to "URL" for consistency with
         relatedIdentifierType below and DataCite schema examples. -->
    <alternateIdentifier alternateIdentifierType="URL">https://zenodo.org/record/887606</alternateIdentifier>
  </alternateIdentifiers>
  <relatedIdentifiers>
    <relatedIdentifier relatedIdentifierType="DOI" relationType="IsVersionOf">10.5281/zenodo.887605</relatedIdentifier>
    <relatedIdentifier relatedIdentifierType="URL" relationType="IsPartOf">https://zenodo.org/communities/miror</relatedIdentifier>
  </relatedIdentifiers>
  <rightsList>
    <rights rightsURI="http://creativecommons.org/licenses/by/4.0/legalcode">Creative Commons Attribution 4.0 International</rights>
    <rights rightsURI="info:eu-repo/semantics/openAccess">Open Access</rights>
  </rightsList>
  <descriptions>
    <description descriptionType="Abstract">&lt;p&gt;Current approaches to document discovery for systematic reviews in biomedicine rely on exhaustive manual screening. We evaluate the performance of classifier based article discovery using different definitions of inclusion criteria. We test a logistic regressor on two datasets created from existing systematic reviews on clinical NLP and drug efficacy, using different criteria to generate positive and negative examples. The classification and ranking achieves an average AUC of 0.769 when relying on gold standard decisions based on title and abstracts of articles, and an AUC of 0.835 when relying on decisions based on full text. Results suggest that inclusion based on title and abstract generalizes to inclusion based on full text, so that references excluded in earlier stages are important for classification, and that common-off-the-shelves algorithms can partially automate the process.&lt;/p&gt;</description>
  </descriptions>
  <fundingReferences>
    <fundingReference>
      <funderName>European Commission</funderName>
      <funderIdentifier funderIdentifierType="Crossref Funder ID">10.13039/501100000780</funderIdentifier>
      <awardNumber awardURI="info:eu-repo/grantAgreement/EC/H2020/676207/">676207</awardNumber>
      <awardTitle>Methods in Research on Research</awardTitle>
    </fundingReference>
  </fundingReferences>
</resource>
28 views
13 downloads

                  All versions    This version
Views             28              28
Downloads         13              13
Data volume       5.3 MB          5.3 MB
Unique views      20              20
Unique downloads  13              13

Share

Cite as