Conference paper Open Access

Short-term Recognition of Human Activities using Convolutional Neural Networks

M. Papakostas; T. Giannakopoulos; F. Makedon; V. Karkaletsis


DataCite XML Export

<?xml version='1.0' encoding='utf-8'?>
<!-- DataCite Metadata Schema export (kernel-4 namespace, validated against the
     kernel-4.1 XSD) for Zenodo record 376482. Element order and content follow
     the DataCite kernel; do not reorder or edit values without re-validating. -->
<resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://datacite.org/schema/kernel-4" xsi:schemaLocation="http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.1/metadata.xsd">
  <!-- Primary persistent identifier: the DOI minted by Zenodo for this record. -->
  <identifier identifierType="DOI">10.5281/zenodo.376482</identifier>
  <!-- Authors in citation order; affiliation is free text in kernel-4
       (no affiliationIdentifier attribute at this schema version). -->
  <creators>
    <creator>
      <creatorName>M.Papakostas</creatorName>
      <affiliation>NCSRD</affiliation>
    </creator>
    <creator>
      <creatorName>T. Giannakopoulos</creatorName>
      <affiliation>NCSRD</affiliation>
    </creator>
    <creator>
      <creatorName>F. Makedon</creatorName>
      <affiliation>U Texas Arlington</affiliation>
    </creator>
    <creator>
      <creatorName>V. Karkaletsis</creatorName>
      <affiliation>NCSRD</affiliation>
    </creator>
  </creators>
  <titles>
    <title>Short-term Recognition of Human Activities using Convolutional Neural Networks</title>
  </titles>
  <publisher>Zenodo</publisher>
  <!-- publicationYear is the 4-digit year; the exact issue date is given
       separately below with dateType="Issued" (ISO 8601). -->
  <publicationYear>2017</publicationYear>
  <dates>
    <date dateType="Issued">2017-03-10</date>
  </dates>
  <resourceType resourceTypeGeneral="Text">Conference paper</resourceType>
  <!-- Landing-page URL for the record (non-DOI identifier). -->
  <alternateIdentifiers>
    <alternateIdentifier alternateIdentifierType="url">https://zenodo.org/record/376482</alternateIdentifier>
  </alternateIdentifiers>
  <!-- Membership link: this record belongs to the Zenodo "radio" community. -->
  <relatedIdentifiers>
    <relatedIdentifier relatedIdentifierType="URL" relationType="IsPartOf">https://zenodo.org/communities/radio</relatedIdentifier>
  </relatedIdentifiers>
  <!-- Two rights entries: the CC-BY 4.0 license plus the OpenAIRE
       info:eu-repo access-level marker. -->
  <rightsList>
    <rights rightsURI="http://creativecommons.org/licenses/by/4.0/legalcode">Creative Commons Attribution 4.0 International</rights>
    <rights rightsURI="info:eu-repo/semantics/openAccess">Open Access</rights>
  </rightsList>
  <!-- Abstract contains escaped HTML (&lt;p&gt;...) as stored by Zenodo;
       the entity escaping is intentional and must be preserved. -->
  <descriptions>
    <description descriptionType="Abstract">&lt;p&gt;This paper proposes a deep learning classification method for frame-wise recognition of human activities, using raw color (RGB) information. In particular, we present a Convolutional Neural Network (CNN) classification approach for recognising three basic motion activity classes, that cover the vast majority of human activities in the context of a home monitoring environment, namely: sitting, walking and standing up. A real-world fully annotated dataset has been compiled, in the context of an assisted living home environment. Through extensive experimentation we have highlighted the benefits of deep learning architectures against traditional shallow classifiers functioning on hand-crafted features, on the task of activity recognition. Our approach proves the robustness and the quality of CNN classifiers that lies on learning highly invariant features. Our ultimate goal is to tackle the challenging task of activity recognition in environments that are characterized with high levels of inherent noise.&lt;/p&gt;</description>
  </descriptions>
  <!-- Funding: EC Horizon 2020 grant 643892 (RADIO project), identified via
       the Crossref Funder ID and the OpenAIRE info:eu-repo award URI. -->
  <fundingReferences>
    <fundingReference>
      <funderName>European Commission</funderName>
      <funderIdentifier funderIdentifierType="Crossref Funder ID">10.13039/501100000780</funderIdentifier>
      <awardNumber awardURI="info:eu-repo/grantAgreement/EC/H2020/643892/">643892</awardNumber>
      <awardTitle>Robots in assisted living environments: Unobtrusive, efficient, reliable and modular solutions for independent ageing</awardTitle>
    </fundingReference>
  </fundingReferences>
</resource>
43 views
35 downloads
                   All versions   This version
Views                        43             43
Downloads                    35             35
Data volume             55.6 MB        55.6 MB
Unique views                 42             42
Unique downloads             33             33

Share

Cite as