With the increasing number of published Web services
providing similar functionalities, it is very tedious for a
service consumer to make a decision to select the appropriate one
according to her/his needs. In this paper, we explore several
probabilistic topic models: Probabilistic Latent Semantic Analysis
(PLSA), Latent Dirichlet Allocation (LDA) and Correlated
Topic Model (CTM) to extract latent factors from web service
descriptions. In our approach, topic models are used as efficient
dimension reduction techniques, which are able to capture semantic
relationships between word-topic and topic-service interpreted
in terms of probability distributions. To address the limitation of
keywords-based queries, we represent web service description as
a vector space and we introduce a new approach for discovering
and ranking web services using latent factors. In our experiment,
we evaluated our Service Discovery and Ranking approach
by calculating the precision (P@n) and normalized discounted
cumulative gain (NDCGn).
%0 Journal Article
%1 IJACSA.2013.040637
%A Aznag, Mustapha
%A Quafafou, Mohamed
%A Jarir, Zahi
%D 2013
%J International Journal of Advanced Computer Science and Applications (IJACSA)
%K Data Discovery, Learning, Machine Models, Web Ranking, Representation, Topic service
%N 6
%T Correlated Topic Model for Web Services Ranking
%U http://ijacsa.thesai.org/
%V 4
%X With the increasing number of published Web services
providing similar functionalities, it is very tedious for a
service consumer to make a decision to select the appropriate one
according to her/his needs. In this paper, we explore several
probabilistic topic models: Probabilistic Latent Semantic Analysis
(PLSA), Latent Dirichlet Allocation (LDA) and Correlated
Topic Model (CTM) to extract latent factors from web service
descriptions. In our approach, topic models are used as efficient
dimension reduction techniques, which are able to capture semantic
relationships between word-topic and topic-service interpreted
in terms of probability distributions. To address the limitation of
keywords-based queries, we represent web service description as
a vector space and we introduce a new approach for discovering
and ranking web services using latent factors. In our experiment,
we evaluated our Service Discovery and Ranking approach
by calculating the precision (P@n) and normalized discounted
cumulative gain (NDCGn).
@article{IJACSA.2013.040637,
  author        = {Aznag, Mustapha and Quafafou, Mohamed and Jarir, Zahi},
  title         = {{Correlated Topic Model} for {Web} Services Ranking},
  journal       = {International Journal of Advanced Computer Science and Applications (IJACSA)},
  year          = {2013},
  volume        = {4},
  number        = {6},
  doi           = {10.14569/IJACSA.2013.040637},
  url           = {http://ijacsa.thesai.org/},
  keywords      = {Data Discovery, Learning, Machine Models, Web Ranking, Representation, Topic service},
  abstract      = {With the increasing number of published Web services providing
similar functionalities, it's very tedious for a service consumer to make a
decision to select the appropriate one according to her/his needs. In this
paper, we explore several probabilistic topic models: Probabilistic Latent
Semantic Analysis (PLSA), Latent Dirichlet Allocation (LDA) and Correlated
Topic Model (CTM) to extract latent factors from web service descriptions. In
our approach, topic models are used as efficient dimension reduction
techniques, which are able to capture semantic relationships between
word-topic and topic-service interpreted in terms of probability
distributions. To address the limitation of keywords-based queries, we
represent web service description as a vector space and we introduce a new
approach for discovering and ranking web services using latent factors. In our
experiment, we evaluated our Service Discovery and Ranking approach by
calculating the precision (P@n) and normalized discounted cumulative gain
(NDCGn).},
  added-at      = {2014-02-21T08:00:08.000+0100},
  biburl        = {https://www.bibsonomy.org/bibtex/254887b52b18317b21d9a8b493311ef4f/thesaiorg},
  interhash     = {d681428272df8f9e10e87ca4128e3ba6},
  intrahash     = {54887b52b18317b21d9a8b493311ef4f},
  timestamp     = {2014-02-21T08:00:08.000+0100},
  internal-note = {Author list reconstructed from garbled export string
"Mustapha AZNAG Mohamed QUAFAFOU, Zahi JARIR" -- verify name order against the
published paper. DOI inferred from the entry key and the IJACSA
10.14569/IJACSA.YYYY.0VVVNN pattern -- confirm before publishing.},
}