@incollection{ecb58d89b3cd4e7981415aaa15984080,
  title     = {Randomness, exchangeability, and conformal prediction},
  abstract  = {This paper argues for a wider use of the functional theory of randomness, a modification of the algorithmic theory of randomness getting rid of unspecified additive constants. Both theories are useful for understanding relations between the assumptions of IID data and data exchangeability. While the assumption of IID data is standard in machine learning, conformal prediction relies on the weaker assumption of data exchangeability. Nouretdinov, V'yugin, and Gammerman showed, using the language of the algorithmic theory of randomness, that conformal prediction is a universal method under the assumption of IID data. In this paper, I will selectively review connections between exchangeability and the property of being IID, early history of conformal prediction, my encounters and collaboration with Alex and other interesting people, and a translation of Nouretdinov et al.'s results into the language of the functional theory of randomness, which moves it closer to practice. Namely, the translation says that every confidence predictor that is valid for IID data can be converted into a conformal predictor without losing much in predictive efficiency.},
  keywords  = {conformal prediction, universality of conformal prediction, fundamental limitation of conformal prediction, functional theory of randomness, IID, exchangeability, p-values, e-values},
  author    = {Vovk, Vladimir},
  year      = {2026},
  language  = {English},
  isbn      = {978-3-032-15119-3},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer},
  pages     = {87--117},
  editor    = {Nguyen, Khuong and Luo, Zhiyuan},
  booktitle = {The Importance of Being Learnable},
}