@incollection{0f9bcde8a9e64d498e49f449bc29effb,
  author    = {Hirth, Matthias and Jacques, Jason and Rodgers, Peter and Scekic, Ognjen and Wybrow, Michael},
  title     = {Crowdsourcing Technology to Support Academic Research},
  booktitle = {Evaluation in the Crowd},
  editor    = {Archambault, Daniel and Purchase, Helen and Ho{\ss}feld, Tobias},
  series    = {Lecture Notes in Computer Science},
  volume    = {10264},
  publisher = {Springer},
  year      = {2017},
  pages     = {70--95},
  doi       = {10.1007/978-3-319-66435-4_4},
  isbn      = {9783319664347},
  language  = {English},
  abstract  = {Current crowdsourcing platforms typically concentrate on simple microtasks and do not meet the needs of academic research well, where more complex, time consuming studies are required. This has led to the development of specialised software tools to support academic research on such platforms. However, the loose coupling of the software with the crowdsourcing site means that there is only limited access to the features of the platform. In addition, the specialised nature of the software tools means that technical knowledge is needed to operate them. Hence there is great potential to enrich the features of crowdsourcing platforms from an academic perspective. In this chapter we discuss the possibilities for practical improvement of academic crowdsourced studies through adaption of technological solutions.},
  note      = {Evaluation in the Crowd : Crowdsourcing and Human-Centered Experiments 2015 (Dagstuhl Seminar 15481) ; Conference date: 22-11-2015 Through 27-11-2015},
}