This paper has two objectives. Firstly, to introduce a new framework XF-OPT/META for testing and comparing Hyperparameter Optimization (HPO) methods. The framework supports model-free methods, e.g., Random Search (RS), as well as model-based methods, such as Bayesian Optimization (BO), with various surrogate models. Due to the generalized and modular structure of the XF-OPT/META framework, it can be easily extended to other optimization methods for different optimization problems. The second objective is to empirically compare the performance of various HPO methods for population-based metaheuristics. For that the XF-OPT/META framework is…(mehr)
Bitte melden Sie sich an um selbst Rezensionen oder Kommentare zu erstellen.
Zitieren Sie diese Publikation
Mehr Zitationsstile
- bitte auswählen -
%0 Conference Paper
%1 10371833
%A Werner, Daniel
%A Turna, Fatma
%A Le, Hoang Thanh
%A Middendorf, Martin
%B 2023 IEEE Symposium Series on Computational Intelligence (SSCI)
%D 2023
%K imported nopdf
%P 1183-1188
%R 10.1109/SSCI52147.2023.10371833
%T XF-OPT/META: A Hyperparameter Optimization Framework Applied to the H-SPPBO Metaheuristic for the Dynamic TSP
%X This paper has two objectives. Firstly, to introduce a new framework XF-OPT/META for testing and comparing Hyperparameter Optimization (HPO) methods. The framework supports model-free methods, e.g., Random Search (RS), as well as model-based methods, such as Bayesian Optimization (BO), with various surrogate models. Due to the generalized and modular structure of the XF-OPT/META framework, it can be easily extended to other optimization methods for different optimization problems. The second objective is to empirically compare the performance of various HPO methods for population-based metaheuristics. For that the XF-OPT/META framework is used to apply HPO methods to the Hierarchical Simple Probabilistic Population-Based Optimization (H-SPPBO) metaheuristic for the Dynamic Traveling Salesperson Problem (DTSP) and to calculate high-performing parameter values for H-SPPBO. Promising results are obtained using the parameter values found by BO. In particular, a parameter set obtained with Gradient-Boosted Regression Trees (GBRT) outperforms a reference parameter set for H-SPPBO from an existing study.
@inproceedings{10371833,
  abstract  = {This paper has two objectives. Firstly, to introduce a new framework XF-OPT/META for testing and comparing Hyperparameter Optimization (HPO) methods. The framework supports model-free methods, e.g., Random Search (RS), as well as model-based methods, such as Bayesian Optimization (BO), with various surrogate models. Due to the generalized and modular structure of the XF-OPT/META framework, it can be easily extended to other optimization methods for different optimization problems. The second objective is to empirically compare the performance of various HPO methods for population-based metaheuristics. For that the XF-OPT/META framework is used to apply HPO methods to the Hierarchical Simple Probabilistic Population-Based Optimization (H-SPPBO) metaheuristic for the Dynamic Traveling Salesperson Problem (DTSP) and to calculate high-performing parameter values for H-SPPBO. Promising results are obtained using the parameter values found by BO. In particular, a parameter set obtained with Gradient-Boosted Regression Trees (GBRT) outperforms a reference parameter set for H-SPPBO from an existing study.},
  added-at  = {2024-12-10T12:41:16.000+0100},
  author    = {Werner, Daniel and Turna, Fatma and Le, Hoang Thanh and Middendorf, Martin},
  biburl    = {https://puma.scadsai.uni-leipzig.de/bibtex/21a6df07c19d8888dba39e2f01ea07847/scadsfct},
  booktitle = {2023 {IEEE} Symposium Series on Computational Intelligence ({SSCI})},
  doi       = {10.1109/SSCI52147.2023.10371833},
  interhash = {05f9d212c441361243b355845db9b4e6},
  intrahash = {1a6df07c19d8888dba39e2f01ea07847},
  issn      = {2472-8322},
  keywords  = {imported nopdf},
  month     = dec,
  pages     = {1183--1188},
  timestamp = {2025-02-27T11:51:31.000+0100},
  title     = {{XF-OPT/META}: A Hyperparameter Optimization Framework Applied to the {H-SPPBO} Metaheuristic for the Dynamic {TSP}},
  year      = {2023},
}