Architectural performance models are a common approach to predict the performance properties of a software system. Parametric dependencies, which describe the relation between the input parameters of a component and its performance properties, significantly increase the prediction accuracy of architectural performance models. However, manually modeling parametric dependencies is time-intensive and requires expert knowledge. Existing automated extraction approaches require dedicated performance tests, which are often infeasible. In this paper, we introduce an approach to automatically identify parametric dependencies from monitoring data using feature selection techniques from the area of machine learning. We evaluate the applicability of three techniques selected from each of the three groups of feature selection methods: a filter method, an embedded method, and a wrapper method. Our evaluation shows that the filter technique outperforms the other approaches. Based on these results, we apply this technique to a distributed micro-service web-shop, where it correctly identifies 11 performance-relevant dependencies, achieving a precision of 91.7% based on a manually labeled gold-standard.
%0 Conference Paper
%1 GrEiElMaKiKo2019-MASCOTS-DependencyIdentification
%A Grohmann, Johannes
%A Eismann, Simon
%A Elflein, Sven
%A Mazkatli, Manar
%A von Kistowski, Jóakim
%A Kounev, Samuel
%B 2019 IEEE 27th International Symposium on Modeling, Analysis, and Simulation of Computer and Telecommunication Systems (MASCOTS)
%D 2019
%I IEEE Computer Society
%K Automated_model_learning DML PRISMA Performance Prediction Self-aware-computing Statistical_estimation_and_machine_learning descartes t_full
%P 309--322
%R 10.1109/MASCOTS.2019.00042
%T Detecting Parametric Dependencies for Performance Models Using Feature Selection Techniques
%U https://doi.org/10.1109/MASCOTS.2019.00042
%X Architectural performance models are a common approach to predict the performance properties of a software system. Parametric dependencies, which describe the relation between the input parameters of a component and its performance properties, significantly increase the prediction accuracy of architectural performance models. However, manually modeling parametric dependencies is time-intensive and requires expert knowledge. Existing automated extraction approaches require dedicated performance tests, which are often infeasible. In this paper, we introduce an approach to automatically identify parametric dependencies from monitoring data using feature selection techniques from the area of machine learning. We evaluate the applicability of three techniques selected from each of the three groups of feature selection methods: a filter method, an embedded method, and a wrapper method. Our evaluation shows that the filter technique outperforms the other approaches. Based on these results, we apply this technique to a distributed micro-service web-shop, where it correctly identifies 11 performance-relevant dependencies, achieving a precision of 91.7% based on a manually labeled gold-standard.
%@ 978-1-7281-4950-9
@inproceedings{GrEiElMaKiKo2019-MASCOTS-DependencyIdentification,
abstract = {Architectural performance models are a common approach to predict the performance properties of a software system. Parametric dependencies, which describe the relation between the input parameters of a component and its performance properties, significantly increase the prediction accuracy of architectural performance models. However, manually modeling parametric dependencies is time-intensive and requires expert knowledge. Existing automated extraction approaches require dedicated performance tests, which are often infeasible. In this paper, we introduce an approach to automatically identify parametric dependencies from monitoring data using feature selection techniques from the area of machine learning. We evaluate the applicability of three techniques selected from each of the three groups of feature selection methods: a filter method, an embedded method, and a wrapper method. Our evaluation shows that the filter technique outperforms the other approaches. Based on these results, we apply this technique to a distributed micro-service web-shop, where it correctly identifies 11 performance-relevant dependencies, achieving a precision of 91.7\% based on a manually labeled gold-standard.},
added-at = {2020-04-06T11:25:38.000+0200},
author = {Grohmann, Johannes and Eismann, Simon and Elflein, Sven and Mazkatli, Manar and von Kistowski, J{\'o}akim and Kounev, Samuel},
biburl = {https://www.bibsonomy.org/bibtex/206fe92b6b23da1ea2c70ec0f623ed314/se-group},
booktitle = {2019 IEEE 27th International Symposium on Modeling, Analysis, and Simulation of Computer and Telecommunication Systems (MASCOTS)},
doi = {10.1109/MASCOTS.2019.00042},
interhash = {fbad3bf1e2522fa1b93b5ebee18943f2},
intrahash = {06fe92b6b23da1ea2c70ec0f623ed314},
isbn = {978-1-7281-4950-9},
issn = {2375-0227},
keywords = {Automated_model_learning DML PRISMA Performance Prediction Self-aware-computing Statistical_estimation_and_machine_learning descartes t_full},
month = oct,
note = {Acceptance Rate: 23.8\% (29/122)},
pages = {309--322},
publisher = {IEEE Computer Society},
series = {MASCOTS '19},
timestamp = {2021-01-13T11:21:05.000+0100},
title = {Detecting Parametric Dependencies for Performance Models Using Feature Selection Techniques},
url = {https://doi.org/10.1109/MASCOTS.2019.00042},
venue = {Rennes, France},
year = 2019
}