@inproceedings{a9eb8a282f2343dfaa15185e437749a4,
  title     = {{AutoEval}: An Evaluation Methodology for Evaluating Query Suggestions Using Query Logs},
  abstract  = {User evaluations of search engines are expensive and not easy to replicate. The problem is even more pronounced when assessing adaptive search systems, for example system-generated query modification suggestions that can be derived from past user interactions with a search engine. Automatically predicting the performance of different modification suggestion models before getting the users involved is therefore highly desirable. AutoEval is an evaluation methodology that assesses the quality of query modifications generated by a model using the query logs of past user interactions with the system. We present experimental results of applying this methodology to different adaptive algorithms which suggest that the predicted quality of different algorithms is in line with user assessments. This makes AutoEval a suitable evaluation framework for adaptive interactive search engines.},
  author    = {Albakour, M. Dyaa and Kruschwitz, Udo and Nanas, Nikolaos and Kim, Yunhyong and Song, Dawei and Fasli, Maria and {De Roeck}, Anne},
  note      = {Publisher Copyright: {\textcopyright} Springer-Verlag Berlin Heidelberg 2011.; 33rd European Conference on Information Retrieval, ECIR 2011 ; Conference date: 18-04-2011 Through 21-04-2011},
  year      = {2011},
  doi       = {10.1007/978-3-642-20161-5_60},
  language  = {English},
  isbn      = {9783642201608},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  volume    = {6611},
  publisher = {Springer},
  pages     = {605--610},
  editor    = {Clough, Paul and Foley, Colum and Gurrin, Cathal and Lee, Hyowon and Jones, {Gareth J. F.} and Kraaij, Wessel and Murdock, Vanessa},
  booktitle = {Advances in Information Retrieval - 33rd European Conference on IR Research, ECIR 2011, Proceedings},
  address   = {Berlin, Heidelberg},
}