The number of scholarly publications grows steadily every year and it becomes harder to find, assess and compare scholarly knowledge effectively. Scholarly knowledge graphs have the potential to address these challenges. However, creating such graphs remains a complex task. We propose a method to crowdsource structured scholarly knowledge from paper authors with a web-based user interface supported by artificial intelligence. The interface enables authors to select key sentences for annotation. It integrates multiple machine learning algorithms to assist authors during the annotation, including class recommendation and key sentence highlighting. We envision that the interface is integrated in paper submission processes for which we define three main task requirements: The task has to be . We evaluated the interface with a user study in which participants were assigned the task to annotate one of their own articles. With the resulting data, we determined whether the participants were successfully able to perform the task. Furthermore, we evaluated the interface’s usability and the participant’s attitude towards the interface with a survey. The results suggest that sentence annotation is a feasible task for researchers and that they do not object to annotate their articles during the submission process.
Description
Crowdsourcing Scholarly Discourse Annotations | 26th International Conference on Intelligent User Interfaces
%0 Conference Paper
%1 Oelen_2021
%A Oelen, Allard
%A Stocker, Markus
%A Auer, Sören
%B 26th International Conference on Intelligent User Interfaces
%D 2021
%I ACM
%K annotation human-ai-collaboration human-in-the-loop iui2021 knowledge-graph
%P 464-474
%R 10.1145/3397481.3450685
%T Crowdsourcing Scholarly Discourse Annotations
%U https://doi.org/10.1145/3397481.3450685
%X The number of scholarly publications grows steadily every year and it becomes harder to find, assess and compare scholarly knowledge effectively. Scholarly knowledge graphs have the potential to address these challenges. However, creating such graphs remains a complex task. We propose a method to crowdsource structured scholarly knowledge from paper authors with a web-based user interface supported by artificial intelligence. The interface enables authors to select key sentences for annotation. It integrates multiple machine learning algorithms to assist authors during the annotation, including class recommendation and key sentence highlighting. We envision that the interface is integrated in paper submission processes for which we define three main task requirements: The task has to be . We evaluated the interface with a user study in which participants were assigned the task to annotate one of their own articles. With the resulting data, we determined whether the participants were successfully able to perform the task. Furthermore, we evaluated the interface’s usability and the participant’s attitude towards the interface with a survey. The results suggest that sentence annotation is a feasible task for researchers and that they do not object to annotate their articles during the submission process.
@inproceedings{Oelen_2021,
  abstract      = {The number of scholarly publications grows steadily every year and it becomes harder to find, assess and compare scholarly knowledge effectively. Scholarly knowledge graphs have the potential to address these challenges. However, creating such graphs remains a complex task. We propose a method to crowdsource structured scholarly knowledge from paper authors with a web-based user interface supported by artificial intelligence. The interface enables authors to select key sentences for annotation. It integrates multiple machine learning algorithms to assist authors during the annotation, including class recommendation and key sentence highlighting. We envision that the interface is integrated in paper submission processes for which we define three main task requirements: The task has to be . We evaluated the interface with a user study in which participants were assigned the task to annotate one of their own articles. With the resulting data, we determined whether the participants were successfully able to perform the task. Furthermore, we evaluated the interface’s usability and the participant’s attitude towards the interface with a survey. The results suggest that sentence annotation is a feasible task for researchers and that they do not object to annotate their articles during the submission process.},
  added-at      = {2021-04-17T21:08:05.000+0200},
  author        = {Oelen, Allard and Stocker, Markus and Auer, Sören},
  biburl        = {https://www.bibsonomy.org/bibtex/278e89274324a4192df5fc502529d9b57/brusilovsky},
  booktitle     = {26th International Conference on Intelligent User Interfaces},
  description   = {Crowdsourcing Scholarly Discourse Annotations | 26th International Conference on Intelligent User Interfaces},
  doi           = {10.1145/3397481.3450685},
  interhash     = {3c7d941ed6af95de57ff63385974ad0d},
  intrahash     = {78e89274324a4192df5fc502529d9b57},
  keywords      = {annotation human-ai-collaboration human-in-the-loop iui2021 knowledge-graph},
  month         = apr,
  pages         = {464--474},
  publisher     = {ACM},
  timestamp     = {2021-04-17T21:08:05.000+0200},
  title         = {Crowdsourcing Scholarly Discourse Annotations},
  url           = {https://doi.org/10.1145/3397481.3450685},
  year          = 2021,
  internal-note = {abstract sentence "The task has to be ." is truncated in the scraped source (likely lost italicized terms) -- verify against the published paper},
}