Conference paper Open Access

CoConUT: context collection for non-stationary user testing

Schröder, Svenja; Hirschl, Jakob; Reichl, Peter


MARC21 XML Export

<?xml version='1.0' encoding='UTF-8'?>
<!-- MARC21/slim bibliographic record as exported by Zenodo (record 851647).
     Tag semantics follow the Library of Congress MARC 21 Bibliographic format;
     9XX tags are local/implementation-defined (Invenio/Zenodo) and are hedged below. -->
<record xmlns="http://www.loc.gov/MARC21/slim">
  <!-- Leader: positional fixed-length header; 'nam' = new monographic record.
       Zenodo pads positions with '0'/'#' rather than computing real lengths. -->
  <leader>00000nam##2200000uu#4500</leader>
  <!-- 942: local (Zenodo) field; presumably an embargo/availability date.
       NOTE(review): 2017-09-06 here differs from the 260$c publication date
       (2016-09-06). Cannot tell from this export which is intended; verify
       against the Zenodo record before relying on either. -->
  <datafield tag="942" ind1=" " ind2=" ">
    <subfield code="a">2017-09-06</subfield>
  </datafield>
  <!-- 653 (repeatable): uncontrolled index terms, i.e. free-form keywords. -->
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Human Factors</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Usability</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Field Studies</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Mobile Context</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Context</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Attention</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Mobile HCI</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">App</subfield>
  </datafield>
  <!-- 005: date/time of latest record transaction (YYYYMMDDHHMMSS.0). -->
  <controlfield tag="005">20191104071151.0</controlfield>
  <!-- 001: control number; here the Zenodo record identifier. -->
  <controlfield tag="001">851647</controlfield>
  <!-- 700 (repeatable): added entry, personal name (co-authors).
       $a = name (surname, forename), $u = affiliation. -->
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="u">University of Vienna</subfield>
    <subfield code="a">Hirschl, Jakob</subfield>
  </datafield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="u">University of Vienna</subfield>
    <subfield code="a">Reichl, Peter</subfield>
  </datafield>
  <!-- 856 4#: electronic location and access. $s = file size in bytes,
       $z = public note (here an MD5 checksum of the file), $u = URI.
       NOTE(review): 'camery-ready' in the URL looks like a typo for
       'camera-ready', but it is the actual stored filename; do NOT "correct"
       it here or the link will break. -->
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">2084595</subfield>
    <subfield code="z">md5:2150590d28c653aaf946b3427e605fb7</subfield>
    <subfield code="u">https://zenodo.org/record/851647/files/camery-ready-coconut-schroeder-hirschl-reichl.pdf</subfield>
  </datafield>
  <!-- 542: information relating to copyright status; $l here carries the
       access status ("open" = Open Access) in Zenodo's usage. -->
  <datafield tag="542" ind1=" " ind2=" ">
    <subfield code="l">open</subfield>
  </datafield>
  <!-- 260: publication information; $c = date of publication. -->
  <datafield tag="260" ind1=" " ind2=" ">
    <subfield code="c">2016-09-06</subfield>
  </datafield>
  <!-- 909 CO: local Invenio field carrying the OAI-PMH identifier. -->
  <datafield tag="909" ind1="C" ind2="O">
    <subfield code="o">oai:zenodo.org:851647</subfield>
  </datafield>
  <!-- 100: main entry, personal name (first author); $u = affiliation. -->
  <datafield tag="100" ind1=" " ind2=" ">
    <subfield code="u">University of Vienna</subfield>
    <subfield code="a">Schröder, Svenja</subfield>
  </datafield>
  <!-- 245: title statement. -->
  <datafield tag="245" ind1=" " ind2=" ">
    <subfield code="a">CoConUT: context collection for non-stationary user testing</subfield>
  </datafield>
  <!-- 536: funding information; $c = grant number (EU project 688156),
       $a = project/award title. -->
  <datafield tag="536" ind1=" " ind2=" ">
    <subfield code="c">688156</subfield>
    <subfield code="a">Symbiosis of smart objects across IoT environments</subfield>
  </datafield>
  <!-- 540: terms governing use; the license name and its canonical URL. -->
  <datafield tag="540" ind1=" " ind2=" ">
    <subfield code="u">http://creativecommons.org/licenses/by/4.0/legalcode</subfield>
    <subfield code="a">Creative Commons Attribution 4.0 International</subfield>
  </datafield>
  <!-- 650 17: subject term with source specified in $2 (opendefinition.org
       license keyword "cc-by"). -->
  <datafield tag="650" ind1="1" ind2="7">
    <subfield code="a">cc-by</subfield>
    <subfield code="2">opendefinition.org</subfield>
  </datafield>
  <!-- 520: summary/abstract. The value is HTML escaped into character data
       (&lt;p&gt; ... &lt;/p&gt;); consumers must unescape before rendering. -->
  <datafield tag="520" ind1=" " ind2=" ">
    <subfield code="a">&lt;p&gt;CoConUT is an Android app for collecting the mobile context as well as the frequency of interactions during mobile field studies (for example usability studies) using sensor data on the test device. For evaluation purposes the recorded user trial sessions can be visually explored. This facilitates an assessment of the user's attention patterns and enables the detection of limited cognitive resources caused by distracting contextual factors. The app was tested in a preliminary study for technical feasibility and is planned to be extended in the near future.&lt;/p&gt;</subfield>
  </datafield>
  <!-- 024: other standard identifier; $2 names the scheme (DOI). -->
  <datafield tag="024" ind1=" " ind2=" ">
    <subfield code="a">10.1145/2957265.2962658</subfield>
    <subfield code="2">doi</subfield>
  </datafield>
  <!-- 980: local collection/resource-type field (Invenio/Zenodo):
       $a = upload type, $b = subtype. -->
  <datafield tag="980" ind1=" " ind2=" ">
    <subfield code="a">publication</subfield>
    <subfield code="b">conferencepaper</subfield>
  </datafield>
</record>
Views 21
Downloads 19
Data volume 39.6 MB
Unique views 20
Unique downloads 19

Share

Cite as