@inproceedings{becker2015microtrails,
  author    = {Becker, Martin and Borchert, Kathrin and Hirth, Matthias and Mewes, Hauke and Hotho, Andreas and Tran-Gia, Phuoc},
  title     = {MicroTrails: Comparing Hypotheses About Task Selection on a Crowdsourcing Platform},
  booktitle = {Proceedings of the 15th International Conference on Knowledge Technologies and Data-driven Business},
  series    = {i-KNOW '15},
  editor    = {Lindstaedt, Stefanie N. and Ley, Tobias and Sack, Harald},
  location  = {Graz, Austria},
  publisher = {ACM},
  address   = {New York, NY, USA},
  month     = oct,
  year      = {2015},
  pages     = {10:1--10:8},
  articleno = {10},
  numpages  = {8},
  isbn      = {978-1-4503-3721-2},
  doi       = {10.1145/2809563.2809608},
  url       = {https://doi.org/10.1145/2809563.2809608},
  abstract  = {To optimize the workflow on commercial crowdsourcing platforms like Amazon Mechanical Turk or Microworkers, it is important to understand how users choose their tasks. Current work usually explores the underlying processes by employing user studies based on surveys with a limited set of participants. In contrast, we formulate hypotheses based on the different findings in these studies and, instead of verifying them based on user feedback, we compare them directly on data from a commercial crowdsourcing platform. For evaluation, we use a Bayesian approach called HypTrails which allows us to give a relative ranking of the corresponding hypotheses. The hypotheses considered are, for example, based on task categories, monetary incentives, or semantic similarity of task descriptions. We find that, in our scenario, hypotheses based on employers as well as on the task descriptions work best. Overall, we objectively compare different factors influencing users when choosing their tasks. Our approach enables crowdsourcing companies to better understand their users in order to optimize their platforms, e.g., by incorporating the gained knowledge about these factors into task recommendation systems.}
}