People

Batuhan Koyuncu
Saarland Informatics Campus
Building E1 1, Room 2.24
E: koyuncu@cs.uni-saarland.de
About me
I started my PhD in September 2021, supervised by Prof. Valera. I am working on building interpretable predictive and generative models for the temporal modeling of asynchronous multimodal data. I am an incoming ELLIS PhD student. I will be co-advised by Ole Winther from University of Copenhagen & Technical University of Denmark.
You can find my personal website here: https://batukoyuncu.com/
Publications
2026
Koyuncu, Batuhan; Kwon, Byeungchun; Lombardi, Marco Jacopo; Perez-Cruz, Fernando; Shin, Hyun Song
BISTRO: a general purpose oracle for macroeconomic time series Journal Article
In: 2026.
@article{koyuncu2026bistrooracle,
title = {{BISTRO}: a general purpose oracle for macroeconomic time series},
author = {Batuhan Koyuncu and Byeungchun Kwon and Marco Jacopo Lombardi and Fernando Perez-Cruz and Hyun Song Shin},
year = {2026},
date = {2026-03-16},
urldate = {2026-03-16},
abstract = {Predictions of macroeconomic variables are a key input to economic policy, yet traditional econometric approaches have the limitation that the model needs to be tailored to the specific task. The advent of large language models (LLMs) opens up the tantalising prospect that a single general model can tackle a wide variety of tasks. This article introduces the BIS Time-series Regression Oracle (BISTRO), a general purpose time series model for macroeconomic forecasting. Building on the transformer architecture underlying LLMs, BISTRO is fine-tuned on the large repository of macroeconomic data maintained at the BIS. We put the model through its paces by assessing how well it forecasts the 2021 inflation surge. In contrast to standard benchmarks, which mechanically project a reversion to the mean, BISTRO correctly anticipates the persistence of the inflation wave. This highlights its ability to adapt to unfamiliar patterns in the data. Thus, BISTRO holds promise for producing reliable baseline forecasts and for scenario analysis.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Koyuncu, Batuhan; Kwon, Byeungchun; Lombardi, Marco Jacopo; Perez-Cruz, Fernando; Shin, Hyun Song
Introducing BISTRO: a foundational model for unconditional and conditional forecasting of macroeconomic time series Journal Article
In: 2026.
@article{koyuncu2026bistrointro,
title = {Introducing {BISTRO}: a foundational model for unconditional and conditional forecasting of macroeconomic time series},
author = {Batuhan Koyuncu and Byeungchun Kwon and Marco Jacopo Lombardi and Fernando Perez-Cruz and Hyun Song Shin},
year = {2026},
date = {2026-03-13},
urldate = {2026-03-13},
abstract = {This article introduces the BIS Time-series Regression Oracle (BISTRO), a general purpose time series model for macroeconomic forecasting. Its edge over traditional econometric approaches lies in its ability to deal with generic unconditional and conditional forecasting tasks, without requiring to adjust the model to the macroeconomic tasks being tackled. Building on the transformer architecture underlying LLMs, BISTRO is fine-tuned on the large repository of macroeconomic data maintained at the BIS. We show that BISTRO provides reliable unconditional forecasts for key macroeconomic aggregates and illustrate how using it for conditional forecasting can help unveiling patterns of nonlinearity in the data.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Koyuncu, Batuhan; Kwon, Byeungchun; Lombardi, Marco; Perez-Cruz, Fernando; Shin, Hyun Song
A foundational model for macroeconomic times series forecasting and nowcasting Journal Article
In: 2026.
@article{koyuncu2026macrofoundation,
title = {A foundational model for macroeconomic times series forecasting and nowcasting},
author = {Batuhan Koyuncu and Byeungchun Kwon and Marco Lombardi and Fernando Perez-Cruz and Hyun Song Shin},
year = {2026},
date = {2026-02-20},
urldate = {2026-02-20},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2025
Peis, Ignacio; Koyuncu, Batuhan; Valera, Isabel; Frellsen, Jes
Hyper-Transforming Latent Diffusion Models Journal Article
In: CoRR, vol. abs/2504.16580, 2025.
@article{DBLP:journals/corr/abs-2504-16580,
  title     = {Hyper-Transforming Latent Diffusion Models},
  author    = {Ignacio Peis and Batuhan Koyuncu and Isabel Valera and Jes Frellsen},
  url       = {https://doi.org/10.48550/arXiv.2504.16580},
  doi       = {10.48550/ARXIV.2504.16580},
  year      = {2025},
  date      = {2025-01-01},
  urldate   = {2025-01-01},
  journal   = {CoRR},
  volume    = {abs/2504.16580},
  abstract  = {We introduce a novel generative framework for functions by integrating Implicit Neural Representations (INRs) and Transformer-based hypernetworks into latent variable models. Unlike prior approaches that rely on MLP-based hypernetworks with scalability limitations, our method employs a Transformer-based decoder to generate INR parameters from latent variables, addressing both representation capacity and computational efficiency. Our framework extends latent diffusion models (LDMs) to INR generation by replacing standard decoders with a Transformer-based hypernetwork, which can be trained either from scratch or via hyper-transforming: a strategy that fine-tunes only the decoder while freezing the pre-trained latent space. This enables efficient adaptation of existing generative models to INR-based representations without requiring full retraining. We validate our approach across multiple modalities, demonstrating improved scalability, expressiveness, and generalization over existing INR-based generative models. Our findings establish a unified and flexible framework for learning structured function representations.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Koyuncu, Batuhan; DeVries, Rachael; Winther, Ole; Valera, Isabel
Temporal Variational Implicit Neural Representations Journal Article
In: CoRR, vol. abs/2506.01544, 2025.
@article{DBLP:journals/corr/abs-2506-01544,
  title     = {Temporal Variational Implicit Neural Representations},
  author    = {Batuhan Koyuncu and Rachael DeVries and Ole Winther and Isabel Valera},
  url       = {https://doi.org/10.48550/arXiv.2506.01544},
  doi       = {10.48550/ARXIV.2506.01544},
  year      = {2025},
  date      = {2025-01-01},
  urldate   = {2025-01-01},
  journal   = {CoRR},
  volume    = {abs/2506.01544},
  abstract  = {We introduce Temporal Variational Implicit Neural Representations (TV-INRs), a probabilistic framework for modeling irregular multivariate time series that enables efficient individualized imputation and forecasting. By integrating implicit neural representations with latent variable models, TV-INRs learn distributions over time-continuous generator functions conditioned on signal-specific covariates. Unlike existing approaches that require extensive training, fine-tuning or meta-learning, our method achieves accurate individualized predictions through a single forward pass. Our experiments demonstrate that with a single TV-INRs instance, we can accurately solve diverse imputation and forecasting tasks, offering a computationally efficient and scalable solution for real-world applications. TV-INRs excel especially in low-data regimes, where it outperforms existing methods by an order of magnitude in mean squared error for imputation task.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
2024
Koyuncu, Batuhan; Bauerschmidt, Tim Nico; Valera, Isabel
E-ProTran: Efficient Probabilistic Transformers for Forecasting Proceedings Article
In: ICML 2024 Workshop on Structured Probabilistic Inference & Generative Modeling, 2024.
@inproceedings{koyuncu2024eprotran,
title = {{E-ProTran}: Efficient Probabilistic Transformers for Forecasting},
author = {Batuhan Koyuncu and Tim Nico Bauerschmidt and Isabel Valera},
url = {https://openreview.net/forum?id=Db5Ryi9Dr8},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {ICML 2024 Workshop on Structured Probabilistic Inference \& Generative Modeling},
abstract = {Time series forecasting involves predicting future data points based on historical patterns and is critical for applications in fields such as healthcare, financial markets, and weather forecasting, where scalability and efficiency, particularly in training and inference times, are paramount. Transformers, known for their ability to handle long-range dependencies in sequential data, have shown promise in time series analysis. However, the complexity of transformer models can lead to overparameterization, extended training times, and scalability challenges, which can become even more problematic if the assumptions of the underlying generative model are overly complicated. In this paper, we introduce E-ProTran by re-designing a state-of-the-art transformer for probabilistic time series forecasting. We empirically demonstrate that E-ProTran maintains high performance while significantly enhancing efficiency without necessarily reconstructing the conditioned history. Our model incorporates simplified attention layers and design adjustments that reduce computational overhead without compromising accuracy, offering a more efficient and scalable solution for time series forecasting.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Koyuncu, Batuhan; Kıran, Aleyna Dilan; Heilmann, Katja; Hamid, Laith; Buder, Anja; Engert, Veronika; Walter, Martin; Valera, Isabel
From Laboratory to Everyday Life: Personalized Stress Prediction via Smartwatches Proceedings Article
In: ICML'24 Workshop ML for Life and Material Science: From Theory to Industry Applications, 2024.
@inproceedings{koyuncu2024stress,
title = {From Laboratory to Everyday Life: Personalized Stress Prediction via Smartwatches},
author = {Batuhan Koyuncu and Aleyna Dilan Kıran and Katja Heilmann and Laith Hamid and Anja Buder and Veronika Engert and Martin Walter and Isabel Valera},
url = {https://openreview.net/forum?id=XBtTaeQOWs},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
booktitle = {ICML'24 Workshop ML for Life and Material Science: From Theory to Industry Applications},
abstract = {Accurate prediction of stress in everyday life is essential to prevent chronic stress and maintain health and well-being through early and personalized intervention. With the goal of enabling reliable prediction suitable for everyday life, we present MuStP, a two-stage machine learning pipeline designed to predict stress from low-resolution heart rate (HR) and high-resolution electrocardiography (ECG) measurements from commercial smartwatches. Our model is pre-trained with labeled data collected in a controlled laboratory stress study. Subsequently, we transfer the model for everyday use, enabling it to operate with everyday smartwatch data in various environments. The model transfer strategy effectively addresses the domain shift from laboratory data to highly imbalanced smartwatch data and allows personalization. The empirical results on smartwatch data show that MuStP can predict stress everyday with an F1 score of 0.52, despite the measurements having sparse labels for stress.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2023
Koyuncu, Batuhan; Sánchez-Martín, Pablo; Peis, Ignacio; Olmos, Pablo M.; Valera, Isabel
Variational Mixture of HyperGenerators for Learning Distributions Over Functions Journal Article
In: CoRR, vol. abs/2302.06223, 2023.
@article{DBLP:journals/corr/abs-2302-06223,
title = {Variational Mixture of {HyperGenerators} for Learning Distributions Over Functions},
author = {Batuhan Koyuncu and Pablo Sánchez-Martín and Ignacio Peis and Pablo M. Olmos and Isabel Valera},
url = {https://doi.org/10.48550/arXiv.2302.06223},
doi = {10.48550/arXiv.2302.06223},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {CoRR},
volume = {abs/2302.06223},
abstract = {Recent approaches build on implicit neural representations (INRs) to propose generative models over function spaces. However, they are computationally costly when dealing with inference tasks, such as missing data imputation, or directly cannot tackle them. In this work, we propose a novel deep generative model, named VAMoH. VAMoH combines the capabilities of modeling continuous functions using INRs and the inference capabilities of Variational Autoencoders (VAEs). In addition, VAMoH relies on a normalizing flow to define the prior, and a mixture of hypernetworks to parametrize the data log-likelihood. This gives VAMoH a high expressive capability and interpretability. Through experiments on a diverse range of data types, such as images, voxels, and climate data, we show that VAMoH can effectively learn rich distributions over continuous functions. Furthermore, it can perform inference-related tasks, such as conditional super-resolution generation and in-painting, as well or better than previous approaches, while being less computationally demanding.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Parlatan, Ugur; Ozen, Mehmet Ozgun; Kecoglu, Ibrahim; Koyuncu, Batuhan; Torun, Hulya; Khalafkhany, Davod; Loc, Irem; Ogut, Mehmet Giray; Inci, Fatih; Akin, Demir; Solaroglu, Ihsan; Ozoren, Nesrin; Unlu, Mehmet Burcin; Demirci, Utkan
Label-Free Identification of Exosomes using Raman Spectroscopy and Machine Learning Journal Article
In: Small, vol. 19, no. 9, pp. 2205519, 2023.
@article{parlatan2023exosomes,
title = {Label-Free Identification of Exosomes using {Raman} Spectroscopy and Machine Learning},
author = {Ugur Parlatan and Mehmet Ozgun Ozen and Ibrahim Kecoglu and Batuhan Koyuncu and Hulya Torun and Davod Khalafkhany and Irem Loc and Mehmet Giray Ogut and Fatih Inci and Demir Akin and Ihsan Solaroglu and Nesrin Ozoren and Mehmet Burcin Unlu and Utkan Demirci},
url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/smll.202205519},
doi = {10.1002/smll.202205519},
year = {2023},
date = {2023-01-01},
urldate = {2023-01-01},
journal = {Small},
volume = {19},
number = {9},
pages = {2205519},
abstract = {Exosomes, nano-sized extracellular vesicles (EVs) secreted from cells, carry various cargo molecules reflecting their cells of origin. As EV content, structure, and size are highly heterogeneous, their classification via cargo molecules by determining their origin is challenging. Here, a method is presented combining surface-enhanced Raman spectroscopy (SERS) with machine learning algorithms to employ the classification of EVs derived from five different cell lines to reveal their cellular origins. Using an artificial neural network algorithm, it is shown that the label-free Raman spectroscopy method's prediction ratio correlates with the ratio of HT-1080 exosomes in the mixture. This machine learning-assisted SERS method enables a new direction through label-free investigation of EV preparations by differentiating cancer cell-derived exosomes from those of healthy. This approach will potentially open up new avenues of research for early detection and monitoring of various diseases, including cancer.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2022
Koyuncu, Batuhan; Melek, Ahmet; Yilmaz, Defne; Tuzer, Mert; Unlu, Mehmet Burcin
Chemotherapy Response Prediction with Diffuser Elapser Network Journal Article
In: Scientific Reports, vol. 12, iss. 1, pp. 1-13, 2022.
@article{koyuncu2022chemotherapy,
title = {Chemotherapy Response Prediction with Diffuser Elapser Network},
author = {Batuhan Koyuncu and Ahmet Melek and Defne Yilmaz and Mert Tuzer and Mehmet Burcin Unlu},
url = {https://www.nature.com/articles/s41598-022-05460-z.pdf},
doi = {10.1038/s41598-022-05460-z},
year = {2022},
date = {2022-01-31},
urldate = {2022-01-31},
journal = {Scientific Reports},
volume = {12},
issue = {1},
pages = {1--13},
abstract = {In solid tumors, elevated fluid pressure and inadequate blood perfusion resulting from unbalanced angiogenesis are the prominent reasons for the ineffective drug delivery inside tumors. To normalize the heterogeneous and tortuous tumor vessel structure, antiangiogenic treatment is an effective approach. Additionally, the combined therapy of antiangiogenic agents and chemotherapy drugs has shown promising effects on enhanced drug delivery. However, the need to find the appropriate scheduling and dosages of the combination therapy is one of the main problems in anticancer therapy. Our study aims to generate a realistic response to the treatment schedule, making it possible for future works to use these patient-specific responses to decide on the optimal starting time and dosages of cytotoxic drug treatment. Our dataset is based on our previous in-silico model with a framework for the tumor microenvironment, consisting of a tumor layer, vasculature network, interstitial fluid pressure, and drug diffusion maps. In this regard, the chemotherapy response prediction problem is discussed in the study, putting forth a proof of concept for deep learning models to capture the tumor growth and drug response behaviors simultaneously. The proposed model utilizes multiple convolutional neural network submodels to predict future tumor microenvironment maps considering the effects of ongoing treatment. Since the model has the task of predicting future tumor microenvironment maps, we use two image quality evaluation metrics, which are structural similarity and peak signal-to-noise ratio, to evaluate model performance. We track tumor cell density values of ground truth and predicted tumor microenvironments. The model predicts tumor microenvironment maps seven days ahead with the average structural similarity score of 0.973 and the average peak signal ratio of 35.41 in the test set. It also predicts tumor cell density at the end day of 7 with the mean absolute percentage error of $2.292\pm1.820$.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2021
Koyuncu, Batuhan
Analysis of ODE2VAE with Examples Journal Article
In: CoRR, vol. abs/2108.04899, 2021.
@article{DBLP:journals/corr/abs-2108-04899,
title = {Analysis of {ODE2VAE} with Examples},
author = {Batuhan Koyuncu},
url = {https://arxiv.org/abs/2108.04899},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
journal = {CoRR},
volume = {abs/2108.04899},
abstract = {Deep generative models aim to learn underlying distributions that generate the observed data. Given the fact that the generative distribution may be complex and intractable, deep latent variable models use probabilistic frameworks to learn more expressive joint probability distributions over the data and their low-dimensional hidden variables. Learning complex probability distributions over sequential data without any supervision is a difficult task for deep generative models. Ordinary Differential Equation Variational Auto-Encoder (ODE2VAE) is a deep latent variable model that aims to learn complex distributions over high-dimensional sequential data and their low-dimensional representations. ODE2VAE infers continuous latent dynamics of the high-dimensional input in a low-dimensional hierarchical latent space. The hierarchical organization of the continuous latent space embeds a physics-guided inductive bias in the model. In this paper, we analyze the latent representations inferred by the ODE2VAE model over three different physical motion datasets: bouncing balls, projectile motion, and simple pendulum. Through our experiments, we explore the effects of the physics-guided inductive bias of the ODE2VAE model over the learned dynamical latent representations. We show that the model is able to learn meaningful latent representations to an extent without any supervision.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
