Journal article Open Access

Informative trees by visual pruning

Iorio, Carmela; Aria, Massimo; D'Ambrosio, Antonio; Siciliano, Roberta


MARC21 XML Export

<?xml version='1.0' encoding='UTF-8'?>
<record xmlns="http://www.loc.gov/MARC21/slim">
  <leader>00000nam##2200000uu#4500</leader>
  <datafield tag="041" ind1=" " ind2=" ">
    <subfield code="a">eng</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">CART</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Impurity proportional reduction</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Cost-complexity pruning</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Visualization</subfield>
  </datafield>
  <datafield tag="653" ind1=" " ind2=" ">
    <subfield code="a">Supervised statistical learning</subfield>
  </datafield>
  <controlfield tag="005">20200120135936.0</controlfield>
  <controlfield tag="001">3267338</controlfield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="u">University of Naples Federico II, Italy</subfield>
    <subfield code="a">Aria, Massimo</subfield>
  </datafield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="u">University of Naples Federico II, Italy</subfield>
    <subfield code="a">D'Ambrosio, Antonio</subfield>
  </datafield>
  <datafield tag="700" ind1=" " ind2=" ">
    <subfield code="u">University of Naples Federico II, Italy</subfield>
    <subfield code="a">Siciliano, Roberta</subfield>
  </datafield>
  <datafield tag="856" ind1="4" ind2=" ">
    <subfield code="s">4024332</subfield>
    <subfield code="z">md5:450e78a324d9fadfa938a40d34ea367c</subfield>
    <subfield code="u">https://zenodo.org/record/3267338/files/Iorio-et-al_2019_ESWA.pdf</subfield>
  </datafield>
  <datafield tag="542" ind1=" " ind2=" ">
    <subfield code="l">open</subfield>
  </datafield>
  <datafield tag="260" ind1=" " ind2=" ">
    <subfield code="c">2019-08-01</subfield>
  </datafield>
  <datafield tag="909" ind1="C" ind2="O">
    <subfield code="p">openaire</subfield>
    <subfield code="p">user-magic</subfield>
    <subfield code="o">oai:zenodo.org:3267338</subfield>
  </datafield>
  <datafield tag="909" ind1="C" ind2="4">
    <subfield code="c">228-240</subfield>
    <subfield code="v">127</subfield>
    <subfield code="p">Expert Systems with Applications</subfield>
  </datafield>
  <datafield tag="100" ind1=" " ind2=" ">
    <subfield code="u">University of Naples Federico II, Italy</subfield>
    <subfield code="a">Iorio, Carmela</subfield>
  </datafield>
  <datafield tag="245" ind1=" " ind2=" ">
    <subfield code="a">Informative trees by visual pruning</subfield>
  </datafield>
  <datafield tag="980" ind1=" " ind2=" ">
    <subfield code="a">user-magic</subfield>
  </datafield>
  <datafield tag="536" ind1=" " ind2=" ">
    <subfield code="c">689669</subfield>
    <subfield code="a">Moving Towards Adaptive Governance in Complexity: Informing Nexus Security</subfield>
  </datafield>
  <datafield tag="540" ind1=" " ind2=" ">
    <subfield code="u">https://creativecommons.org/licenses/by/4.0/legalcode</subfield>
    <subfield code="a">Creative Commons Attribution 4.0 International</subfield>
  </datafield>
  <datafield tag="650" ind1="1" ind2="7">
    <subfield code="a">cc-by</subfield>
    <subfield code="2">opendefinition.org</subfield>
  </datafield>
  <datafield tag="520" ind1=" " ind2=" ">
    <subfield code="a">&lt;p&gt;The aim of this study is to provide visual pruning and decision tree selection for classification and regression trees. Specifically, we introduce an unedited tree graph to be made informative for recursive tree data partitioning. A decision tree is visually selected through a dendrogram-like procedure or through automatic tree-size selection. Our proposal is a one-step procedure whereby the most predictive paths are visualized. This method appears to be useful in all real world cases where tree-path interpretation is crucial. Experimental evaluations using real world data sets are presented. The performance was very similar to Classification and Regression Trees (CART) benchmarking methodology, showing that our method is a valid alternative to the well-known method of cost-complexity pruning.&lt;/p&gt;</subfield>
  </datafield>
  <datafield tag="024" ind1=" " ind2=" ">
    <subfield code="a">10.1016/j.eswa.2019.03.018</subfield>
    <subfield code="2">doi</subfield>
  </datafield>
  <datafield tag="980" ind1=" " ind2=" ">
    <subfield code="a">publication</subfield>
    <subfield code="b">article</subfield>
  </datafield>
</record>
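
The MARC21 XML above is namespaced under http://www.loc.gov/MARC21/slim, so the bibliographic fields (245 title, 100/700 creators, 024 DOI, 653 keywords) can be pulled out with any XML parser. Below is a minimal sketch using Python's standard library; the file name record.xml is an assumption for wherever this export is saved, and the helper subfields() is illustrative rather than part of any MARC library.

import xml.etree.ElementTree as ET

NS = {"marc": "http://www.loc.gov/MARC21/slim"}

def subfields(record, tag, code):
    """Return every subfield `code` value found in datafields with the given `tag`."""
    return [
        sf.text
        for df in record.findall(f"marc:datafield[@tag='{tag}']", NS)
        for sf in df.findall(f"marc:subfield[@code='{code}']", NS)
    ]

record = ET.parse("record.xml").getroot()   # root element is <record>

title    = subfields(record, "245", "a")[0]                               # Informative trees by visual pruning
creators = subfields(record, "100", "a") + subfields(record, "700", "a")  # main entry plus added entries
doi      = subfields(record, "024", "a")[0]                               # 10.1016/j.eswa.2019.03.018
keywords = subfields(record, "653", "a")                                  # CART, Cost-complexity pruning, ...

print(title, doi)
print(creators)
print(keywords)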
Views 67
Downloads 49
Data volume 178.8 MB
Unique views 53
Unique downloads 42

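The abstract in the record benchmarks the proposed visual pruning against CART with cost-complexity pruning. As a concrete point of reference, the sketch below reproduces only that cost-complexity baseline with scikit-learn (assumed available); it is not the visual pruning procedure of the paper, and the breast-cancer dataset is just a stand-in for the real-world data sets mentioned in the abstract.

from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

# Placeholder data: any tabular classification dataset would do here.
X, y = load_breast_cancer(return_X_y=True)
X_tr, X_val, y_tr, y_val = train_test_split(X, y, random_state=0)

# Enumerate the effective complexity parameters (alphas) of the pruning path.
path = DecisionTreeClassifier(random_state=0).cost_complexity_pruning_path(X_tr, y_tr)

# Refit one pruned tree per alpha and keep the one that validates best.
best_alpha, best_score = 0.0, 0.0
for alpha in path.ccp_alphas:
    tree = DecisionTreeClassifier(random_state=0, ccp_alpha=alpha).fit(X_tr, y_tr)
    score = tree.score(X_val, y_val)
    if score > best_score:
        best_alpha, best_score = alpha, score

print(f"selected ccp_alpha = {best_alpha:.4f}, validation accuracy = {best_score:.3f}")

A fuller comparison would select the alpha by cross-validation, as cost-complexity pruning in CART normally does; the single hold-out split here only keeps the sketch short.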