@inproceedings{5dfcc89bcedb435d84aeb7ad20d54aca,
title = "Can crowdworkers be used to rate the quality of ultrasound images?",
abstract = "The present study examined whether inexperienced individuals can evaluate ultrasonography (US) images using a new rating scale and visual AIDS. A US image assessment tool and visual AIDS in the form of alignable comparison standards (one good and one poor example) were developed to rate images of the hepatorenal interface along eight characteristics. Twenty novice participants from Amazon.com's Mechanical Turk service used the visual AIDS to evaluate 42 US images in good and poor categories. The results revealed that crowdworkers could distinguish the good from poor US images. Their ratings were highly and positively correlated with those of experts. However, the crowdworkers' ratings were more severe than those of the experts. These results suggest that inexperienced individuals can use a rating scale and visual AIDS to reliably distinguish good from poor US images but may require a more extensive set of visual cues or additional training to match ratings provide by experts.",
author = "Amanda Ashdown and Scerbo, {Mark W.} and Knapp, {Barry J.} and Felicia Toreno and Craig Goodmurphy and Don Byars",
note = "Publisher Copyright: {\textcopyright} 2018 Human Factors an Ergonomics Society Inc.. All rights reserved.; 62nd Human Factors and Ergonomics Society Annual Meeting, HFES 2018 ; Conference date: 01-10-2018 Through 05-10-2018",
year = "2018",
language = "English (US)",
series = "Proceedings of the Human Factors and Ergonomics Society",
publisher = "Human Factors and Ergonomics Society Inc.",
pages = "595--599",
booktitle = "62nd Human Factors and Ergonomics Society Annual Meeting, HFES 2018",
}