@inproceedings{Acosta2013,
  author    = {Acosta, Maribel and Zaveri, Amrapali and Simperl, Elena and Kontokostas, Dimitris and Auer, S{\"o}ren and Lehmann, Jens},
  editor    = {Alani, Harith and Kagal, Lalana and Fokoue, Achille and Groth, Paul and Biemann, Chris and Parreira, Josiane Xavier and Aroyo, Lora and Noy, Natasha F. and Welty, Chris and Janowicz, Krzysztof},
  title     = {Crowdsourcing Linked Data Quality Assessment},
  booktitle = {The Semantic Web - {ISWC} 2013 - 12th International Semantic Web Conference, Sydney, NSW, Australia, October 21-25, 2013, Proceedings, Part {II}},
  series    = {Lecture Notes in Computer Science},
  volume    = {8219},
  pages     = {260--276},
  publisher = {Springer},
  year      = {2013},
  isbn      = {978-3-642-41337-7},
  doi       = {10.1007/978-3-642-41338-4\_17},
  url       = {https://doi.org/10.1007/978-3-642-41338-4\_17},
  abstract  = {In this paper we look into the use of crowdsourcing as a means to handle Linked Data quality problems that are challenging to be solved automatically. We analyzed the most common errors encountered in Linked Data sources and classified them according to the extent to which they are likely to be amenable to a specific crowdsourcing approach. Based on this analysis, we implemented and compared two quality assessment methods for Linked Data that leverage the wisdom of the crowds in different ways: (i) a contest format targeting an expert crowd of researchers and Linked Data enthusiasts; and (ii) paid microtasks published on Amazon Mechanical Turk. We evaluated the two methods empirically in terms of their capacity to spot quality issues in DBpedia and investigated how the contributions of the two crowds could be optimally integrated into Linked Data curation processes. The results showed that the two styles of crowdsourcing are complementary, and that crowdsourcing-enabled quality assessment is a promising and affordable way to enhance the quality of Linked Data sets.}
}