Our goal is to accurately model human class separation judgements in color-coded scatterplots. Towards this goal, we propose a set of 2002 visual separation measures, by systematically combining 17 neighborhood graphs and 14 class purity functions, with different parameterizations. Using a Machine Learning framework, we evaluate these measures based on how well they predict human separation judgements. We found that more than 58% of the 2002 new measures outperform the best state-of-the-art Distance Consistency (DSC) measure. Among the 2002, the best measure is the average proportion of same-class neighbors among the 0.35-Observable Neighbors of each point of the target class (short GONG 0.35 DIR CPT), with a prediction accuracy of 92.9%, which is 11.7% better than DSC. We also discuss alternative, well-performing measures and give guidelines when to use which.
%0 Conference Paper
%1 conf/apvis/AupetitS16
%A Aupetit, Michaël
%A Sedlmair, Michael
%B Proceedings of the IEEE Pacific Visualization Symposium (PacificVis)
%D 2016
%E Hansen, Chuck
%E Viola, Ivan
%E Yuan, Xiaoru
%I IEEE
%K 2016 A08 from:leonkokkoliadis sfbtrr161 visus visus:sedlmaml
%P 1-8
%R 10.1109/PACIFICVIS.2016.7465244
%T SepMe: 2002 New Visual Separation Measures.
%U https://ieeexplore.ieee.org/abstract/document/7465244
%X Our goal is to accurately model human class separation judgements in color-coded scatterplots. Towards this goal, we propose a set of 2002 visual separation measures, by systematically combining 17 neighborhood graphs and 14 class purity functions, with different parameterizations. Using a Machine Learning framework, we evaluate these measures based on how well they predict human separation judgements. We found that more than 58% of the 2002 new measures outperform the best state-of-the-art Distance Consistency (DSC) measure. Among the 2002, the best measure is the average proportion of same-class neighbors among the 0.35-Observable Neighbors of each point of the target class (short GONG 0.35 DIR CPT), with a prediction accuracy of 92.9%, which is 11.7% better than DSC. We also discuss alternative, well-performing measures and give guidelines when to use which.
%@ 978-1-5090-1451-4
@inproceedings{conf/apvis/AupetitS16,
  abstract    = {Our goal is to accurately model human class separation judgements in color-coded scatterplots. Towards this goal, we propose a set of 2002 visual separation measures, by systematically combining 17 neighborhood graphs and 14 class purity functions, with different parameterizations. Using a Machine Learning framework, we evaluate these measures based on how well they predict human separation judgements. We found that more than 58% of the 2002 new measures outperform the best state-of-the-art Distance Consistency (DSC) measure. Among the 2002, the best measure is the average proportion of same-class neighbors among the 0.35-Observable Neighbors of each point of the target class (short GONG 0.35 DIR CPT), with a prediction accuracy of 92.9%, which is 11.7% better than DSC. We also discuss alternative, well-performing measures and give guidelines when to use which.},
  added-at    = {2020-10-09T12:31:49.000+0200},
  author      = {Aupetit, Micha{\"e}l and Sedlmair, Michael},
  biburl      = {https://puma.ub.uni-stuttgart.de/bibtex/28b5c210ca73f60f2d68031aff2504370/mueller},
  booktitle   = {Proceedings of the {IEEE} Pacific Visualization Symposium ({PacificVis})},
  crossref    = {conf/apvis/2016},
  description = {SepMe: 2002 New Visual Separation Measures.},
  doi         = {10.1109/PACIFICVIS.2016.7465244},
  ee          = {http://doi.ieeecomputersociety.org/10.1109/PACIFICVIS.2016.7465244},
  interhash   = {ad0c3d8eb24092d9e8017e15c55a3e29},
  intrahash   = {8b5c210ca73f60f2d68031aff2504370},
  isbn        = {978-1-5090-1451-4},
  keywords    = {2016 A08 from:leonkokkoliadis sfbtrr161 visus visus:sedlmaml},
  pages       = {1--8},
  publisher   = {IEEE},
  timestamp   = {2020-10-09T10:31:49.000+0200},
  title       = {{SepMe}: 2002 New Visual Separation Measures},
  url         = {https://ieeexplore.ieee.org/abstract/document/7465244},
  year        = {2016}
}