Conference paper Open Access

Asynchronous Event-based Line Tracking for Time-to-Contact Maneuvers in UAS

A. Gomez Eguiluz; J. P. Rodríguez-Gómez; J. R. Martínez-de Dios; A. Ollero


DataCite XML Export

<?xml version='1.0' encoding='utf-8'?>
<resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://datacite.org/schema/kernel-4" xsi:schemaLocation="http://datacite.org/schema/kernel-4 http://schema.datacite.org/meta/kernel-4.1/metadata.xsd">
  <identifier identifierType="DOI">10.5281/zenodo.5082153</identifier>
  <creators>
    <creator>
      <creatorName>A. Gomez Eguiluz</creatorName>
      <nameIdentifier nameIdentifierScheme="ORCID" schemeURI="http://orcid.org/">0000-0002-2285-2605</nameIdentifier>
      <affiliation>University of Seville</affiliation>
    </creator>
    <creator>
      <creatorName>J. P. Rodríguez-Gómez</creatorName>
      <familyName>J. P. Rodríguez-Gómez</familyName>
      <nameIdentifier nameIdentifierScheme="ORCID" schemeURI="http://orcid.org/">0000-0001-7628-1660</nameIdentifier>
      <affiliation>University of Seville</affiliation>
    </creator>
    <creator>
      <creatorName>J. R. Martínez-de Dios</creatorName>
      <familyName>J. R. Martínez-de Dios</familyName>
      <nameIdentifier nameIdentifierScheme="ORCID" schemeURI="http://orcid.org/">0000-0001-9431-7831</nameIdentifier>
      <affiliation>University of Seville</affiliation>
    </creator>
    <creator>
      <creatorName>A. Ollero</creatorName>
      <nameIdentifier nameIdentifierScheme="ORCID" schemeURI="http://orcid.org/">0000-0003-2155-2472</nameIdentifier>
      <affiliation>University of Seville</affiliation>
    </creator>
  </creators>
  <titles>
    <title>Asynchronous Event-based Line Tracking for Time-to-Contact Maneuvers in UAS</title>
  </titles>
  <publisher>Zenodo</publisher>
  <publicationYear>2021</publicationYear>
  <subjects>
    <subject>Event camera</subject>
    <subject>Aerial robots</subject>
    <subject>Perception systems</subject>
    <subject>Time-to-contact</subject>
    <subject>Tau theory</subject>
    <subject>Event-based vision</subject>
    <subject>Line tracker</subject>
    <subject>Visual servoing</subject>
  </subjects>
  <dates>
    <date dateType="Issued">2021-01-23</date>
  </dates>
  <language>en</language>
  <resourceType resourceTypeGeneral="ConferencePaper"/>
  <alternateIdentifiers>
    <alternateIdentifier alternateIdentifierType="url">https://zenodo.org/record/5082153</alternateIdentifier>
  </alternateIdentifiers>
  <relatedIdentifiers>
    <relatedIdentifier relatedIdentifierType="DOI" relationType="IsVersionOf">10.5281/zenodo.4459505</relatedIdentifier>
  </relatedIdentifiers>
  <version>3</version>
  <rightsList>
    <rights rightsURI="https://creativecommons.org/licenses/by/4.0/legalcode">Creative Commons Attribution 4.0 International</rights>
    <rights rightsURI="info:eu-repo/semantics/openAccess">Open Access</rights>
  </rightsList>
  <descriptions>
    <description descriptionType="Abstract">&lt;p&gt;This paper presents a bio-inspired event-based perception scheme for agile aerial robot maneuvering. It tries to mimic birds, which perform purposeful maneuvers by closing the separation in the retinal image (w.r.t. the goal) to follow time-to-contact trajectories. The proposed approach is based on event cameras, also called artificial retinas, which provide fast response and robustness against motion blur and lighting conditions. Our scheme guides the robot by only adjusting the position of features extracted in the event image plane to their goal positions at a predefined time using smooth time-to-contact trajectories. The proposed scheme is robust, efficient and can be added on top of commonly-used aerial robot velocity controllers. It has been validated on-board a UAV with real-time computation in low-cost hardware during sets of experiments with different descent maneuvers and lighting conditions.&lt;/p&gt;</description>
    <description descriptionType="Other">This work was supported by the ARM-EXTEND (DPI2017-8979-R) project funded by the Spanish National R&amp;D Plan.</description>
  </descriptions>
  <fundingReferences>
    <fundingReference>
      <funderName>European Commission</funderName>
      <funderIdentifier funderIdentifierType="Crossref Funder ID">10.13039/100010661</funderIdentifier>
      <awardNumber awardURI="info:eu-repo/grantAgreement/EC/H2020/788247/">788247</awardNumber>
      <awardTitle>General compliant aerial Robotic manipulation system Integrating Fixed and Flapping wings to INcrease range and safety</awardTitle>
    </fundingReference>
    <fundingReference>
      <funderName>European Commission</funderName>
      <funderIdentifier funderIdentifierType="Crossref Funder ID">10.13039/100010661</funderIdentifier>
      <awardNumber awardURI="info:eu-repo/grantAgreement/EC/H2020/871479/">871479</awardNumber>
      <awardTitle>AERIAL COgnitive integrated multi-task Robotic system with Extended operation range and safety</awardTitle>
    </fundingReference>
  </fundingReferences>
</resource>
123
152
views
downloads
All versions This version
Views: 123 (all versions) | 35 (this version)
Downloads: 152 (all versions) | 40 (this version)
Data volume: 905.4 MB (all versions) | 238.4 MB (this version)
Unique views: 85 (all versions) | 29 (this version)
Unique downloads: 136 (all versions) | 33 (this version)

Share

Cite as