We propose a screening approach to find reliable and effectively expert crowd workers in image quality assessment (IQA). Our method measures the users' ability to identify image degradations by using test questions, together with several relaxed reliability checks. We conduct multiple experiments, obtaining reproducible results with a high agreement between the expertise-screened crowd and the freelance experts of 0.95 Spearman rank order correlation (SROCC), with one restriction on the image type. Our contributions include a reliability screening method for uninformative users, a new type of test questions that rely on our proposed database of pristine and artificially distorted images, a group agreement extrapolation method and an analysis of the crowdsourcing experiments.
%0 Conference Paper
%1 hosu2018expertise
%A Hosu, V.
%A Lin, H.
%A Saupe, D.
%B Proceedings of the 10th International Conference on Quality of Multimedia Experience (QoMEX)
%D 2018
%I IEEE
%K 2018 A05 sfbtrr161
%P 276-281
%R 10.1109/QoMEX.2018.8463427
%T Expertise Screening in Crowdsourcing Image Quality
%U https://ieeexplore.ieee.org/document/8463427
%X We propose a screening approach to find reliable and effectively expert crowd workers in image quality assessment (IQA). Our method measures the users' ability to identify image degradations by using test questions, together with several relaxed reliability checks. We conduct multiple experiments, obtaining reproducible results with a high agreement between the expertise-screened crowd and the freelance experts of 0.95 Spearman rank order correlation (SROCC), with one restriction on the image type. Our contributions include a reliability screening method for uninformative users, a new type of test questions that rely on our proposed database of pristine and artificially distorted images, a group agreement extrapolation method and an analysis of the crowdsourcing experiments.
@inproceedings{hosu2018expertise,
  abstract  = {We propose a screening approach to find reliable and effectively expert crowd workers in image quality assessment (IQA). Our method measures the users' ability to identify image degradations by using test questions, together with several relaxed reliability checks. We conduct multiple experiments, obtaining reproducible results with a high agreement between the expertise-screened crowd and the freelance experts of 0.95 Spearman rank order correlation (SROCC), with one restriction on the image type. Our contributions include a reliability screening method for uninformative users, a new type of test questions that rely on our proposed database of pristine and artificially distorted images, a group agreement extrapolation method and an analysis of the crowdsourcing experiments.},
  added-at  = {2020-02-26T15:33:54.000+0100},
  author    = {Hosu, V. and Lin, H. and Saupe, D.},
  biburl    = {https://puma.ub.uni-stuttgart.de/bibtex/225216bd21379a19bdcebe49b6897a301/leonkokkoliadis},
  booktitle = {Proceedings of the 10th International Conference on Quality of Multimedia Experience ({QoMEX})},
  doi       = {10.1109/QoMEX.2018.8463427},
  interhash = {40aa053dd96077e89f8188a937b3ac61},
  intrahash = {25216bd21379a19bdcebe49b6897a301},
  keywords  = {2018 A05 sfbtrr161},
  pages     = {276--281},
  publisher = {IEEE},
  timestamp = {2020-02-26T14:33:54.000+0100},
  title     = {Expertise Screening in Crowdsourcing Image Quality},
  url       = {https://ieeexplore.ieee.org/document/8463427},
  year      = {2018}
}