@article{dbpedia_live_2012,
  author   = {Morsey, Mohamed and Lehmann, Jens and Auer, S{\"o}ren and Stadler, Claus and Hellmann, Sebastian},
  title    = {{DB}pedia and the {L}ive {E}xtraction of {S}tructured {D}ata from {W}ikipedia},
  journal  = {Program: electronic library and information systems},
  volume   = {46},
  number   = {2},
  pages    = {157--181},
  year     = {2012},
  doi      = {10.1108/00330331211221828},
  url      = {http://svn.aksw.org/papers/2011/DBpedia_Live/public.pdf},
  abstract = {Purpose - DBpedia extracts structured information from Wikipedia, interlinks it with other knowledge bases and freely publishes the results on the Web using Linked Data and SPARQL. However, the DBpedia release process is heavy-weight and releases are sometimes based on data that is several months old. DBpedia-Live solves this problem by providing a live synchronization method based on the update stream of Wikipedia. Design/methodology/approach - Wikipedia provides DBpedia with a continuous stream of updates, i.e. a stream of recently updated articles. DBpedia-Live processes that stream on the fly to obtain RDF data and stores the extracted data back in DBpedia. DBpedia-Live publishes the newly added/deleted triples in files in order to enable synchronization between our DBpedia endpoint and other DBpedia mirrors. Findings - During the realization of DBpedia-Live we learned that it is crucial to process Wikipedia updates in a priority queue. Recently updated Wikipedia articles should have the highest priority, ahead of mapping changes and unmodified pages. An overall finding is that there are plenty of opportunities arising from the emerging Web of Data for librarians. Practical implications - DBpedia had and has a great effect on the Web of Data and became a crystallization point for it. Many companies and researchers use DBpedia and its public services to improve their applications and research approaches. The DBpedia-Live framework improves DBpedia further by timely synchronizing it with Wikipedia, which is relevant for many use cases requiring up-to-date information. Originality/value - The new DBpedia-Live framework adds new features to the old DBpedia-Live framework, e.g. abstract extraction, ontology changes, and changeset publication.}
}