@incollection{795c4db8072c4555b77e961b8c8c9a21,
  title     = {Crowdsourcing for Information Visualization: Promises and Pitfalls},
  abstract  = {Crowdsourcing offers great potential to overcome the limitations of controlled lab studies. To guide future designs of crowdsourcing-based studies for visualization, we review visualization research that has attempted to leverage crowdsourcing for empirical evaluations of visualizations. We discuss six core aspects for successful employment of crowdsourcing in empirical studies for visualization -- participants, study design, study procedure, data, tasks, and metrics \& measures. We then present four case studies, discussing potential mechanisms to overcome common pitfalls. This chapter will help the visualization community understand how to effectively and efficiently take advantage of the exciting potential crowdsourcing has to offer to support empirical visualization research.},
  author    = {Borgo, Rita and Lee, Bongshin and Bach, Benjamin and Fabrikant, Sara and Jianu, Radu and Kerren, Andreas and Kobourov, Stephen and McGee, Fintan and Micallef, Luana and von Landesberger, Tatiana and Ballweg, Katrin and Diehl, Stephan and Simonetto, Paolo and Zhou, Michelle},
  editor    = {Archambault, Daniel and Purchase, Helen and Ho{\ss}feld, Tobias},
  booktitle = {Evaluation in the Crowd. Crowdsourcing and Human-Centered Experiments},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer},
  pages     = {96--138},
  year      = {2017},
  month     = sep,
  day       = {28},
  doi       = {10.1007/978-3-319-66435-4},
  isbn      = {978-3-319-66434-7},
  language  = {English},
  url       = {http://www.dagstuhl.de/de/programm/kalender/semhp/?semnr=15481},
  note      = {Dagstuhl Seminar 15481; Conference date: 22-11-2015 Through 27-11-2015},
}