@article{Levchenko:15225,
      recid = {15225},
      author = {Levchenko, Denis and Rappos, Efstratios and Ataee, Shabnam and Nigro, Biagio and Robert-Nicoud, Stephan},
      title = {Chain-structured neural architecture search for financial time series forecasting},
      publisher = {Springer Nature},
      journal = {International Journal of Data Science and Analytics},
      address = {Berlin, Germany},
      year = {2024},
      month = {12},
      pages = {10 p.},
      abstract = {Neural architecture search (NAS) emerged as a way to automatically optimize neural networks for a specific task and dataset. Despite an abundance of research on NAS for image and natural language applications, similar studies for time series data are lacking. Among NAS search spaces, chain-structured spaces are the simplest and the most applicable to small datasets such as time series. We compare three popular NAS strategies on chain-structured search spaces: Bayesian optimization (specifically the Tree-structured Parzen Estimator), the hyperband method, and reinforcement learning, in the context of financial time series forecasting. These strategies were employed to optimize simple, well-understood neural architectures such as the MLP, 1D CNN, and RNN, with more complex temporal fusion transformers (TFT) and their own optimizers included for comparison. We find that Bayesian optimization and the hyperband method perform best among the strategies, and the RNN and 1D CNN best among the architectures, but all methods were very close to each other, with high variance due to the difficulty of working with financial datasets. We discuss our approach to overcoming this variance and provide implementation recommendations for future users and researchers.},
      url = {http://arodes.hes-so.ch/record/15225},
      doi = {10.1007/s41060-024-00690-y},
}