@inproceedings{07bfc24ec7d84245b1b6675c94857e29,
  title     = {The Influence of Crowd Type and Task Complexity on Crowdsourced Work Quality},
  abstract  = {As the use of crowdsourcing spreads, the need to ensure the quality of crowdsourced work is magnified. While quality control in crowdsourcing has been widely studied, established mechanisms may still be improved to take into account other factors that affect quality. However, since crowdsourcing relies on humans, it is difficult to identify and consider all factors affecting quality. In this study, we conduct an initial investigation on the effect of crowd type and task complexity on work quality by crowdsourcing a simple and more complex version of a data extraction task to paid and unpaid crowds. We then measure the quality of the results in terms of its similarity to a gold standard data set. Our experiments show that the unpaid crowd produces results of high quality regardless of the type of task while the paid crowd yields better results in simple tasks. We intend to extend our work to integrate existing quality control mechanisms and perform more experiments with more varied crowd members.},
  keywords  = {Crowdsourcing, Task complexity, Text extraction},
  author    = {Borromeo, {Ria Mae} and Laurent, Thomas and Toyama, Motomichi},
  note      = {Publisher Copyright: {\textcopyright} ACM 2016.; 20th International Database Engineering and Applications Symposium, IDEAS 2016 ; Conference date: 11-07-2016 Through 13-07-2016},
  year      = {2016},
  month     = jul,
  day       = {11},
  doi       = {10.1145/2938503.2938511},
  language  = {English},
  series    = {{ACM} International Conference Proceeding Series},
  publisher = {Association for Computing Machinery},
  pages     = {70--76},
  editor    = {Desai, {Bipin C.} and Desai, Evan},
  booktitle = {Proceedings of the 20th International Database Engineering and Applications Symposium, {IDEAS} 2016},
}