This paper presents the third edition of the LongEval Lab, part of the CLEF 2025 conference, which continues to explore the challenges of temporal persistence in Information Retrieval (IR). The lab features two tasks designed to provide researchers with test data that reflect the evolving nature of user queries and document relevance over time. By evaluating how model performance degrades as test data diverge temporally from training data, LongEval seeks to advance the understanding of temporal dynamics in IR systems. The 2025 edition aims to engage the IR and NLP communities in developing adaptive models that can maintain retrieval quality over time in the domains of web search and scientific retrieval.
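The evaluation idea described above can be illustrated with a minimal sketch: score a system on a test snapshot from the training period and on later snapshots, then report the relative change in effectiveness. The snippet below assumes nDCG@10 as the effectiveness measure; the snapshot names, scores, and the relative_drop helper are purely illustrative and are not taken from LongEval data or its official evaluation code.

# Hedged sketch: measuring how retrieval effectiveness changes as the test
# snapshot moves away in time from the training snapshot.
# All snapshot labels and nDCG values below are illustrative placeholders.

from typing import Dict

def relative_drop(train_score: float, test_score: float) -> float:
    """Relative change in effectiveness w.r.t. the training-time snapshot."""
    return (test_score - train_score) / train_score

# Illustrative nDCG@10 values for one system on temporally shifted test sets.
ndcg_by_snapshot: Dict[str, float] = {
    "within_time": 0.42,   # test queries from the training period
    "short_term": 0.39,    # a few months later
    "long_term": 0.35,     # further in the future
}

baseline = ndcg_by_snapshot["within_time"]
for snapshot, score in ndcg_by_snapshot.items():
    print(f"{snapshot}: nDCG@10={score:.2f}, "
          f"relative drop={relative_drop(baseline, score):+.1%}")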
@inproceedings{Keller2025,
  author    = {J{\"{u}}ri Keller and
               Maik Fr{\"{o}}be and
               Gijs Hendriksen and
               Daria Alexander and
               Martin Potthast and
               Matthias Hagen and
               Philipp Schaer},
  title     = {Counterfactual Query Rewriting to Use Historical Relevance Feedback},
  booktitle = {Advances in Information Retrieval - 47th European Conference on
               Information Retrieval, {ECIR} 2025, Lucca, Italy, April 6-10, 2025,
               Proceedings, Part {III}},
  series    = {Lecture Notes in Computer Science},
  volume    = {15574},
  pages     = {138--147},
  publisher = {Springer},
  year      = {2025},
  doi       = {10.1007/978-3-031-88714-7\_11},
}