@inbook{527d24835d8c4924b5f19ae2fc7bc5b6,
title = "Observations on boosting feature selection",
abstract = "This paper presents a study of the Boosting Feature Selection (BFS) algorithm [1], a method which incorporates feature selection into Adaboost. Such an algorithm is interesting as it combines the methods studied by Boosting and ensemble feature selection researchers. Observations are made on generalisation, weighted error and error diversity to compare the algorithms performance to Adaboost while using a nearest mean base learner. Ensemble feature prominence is proposed as a stop criterion for ensemble construction. Its quality assessed using the former performance measures. BFS is found to compete with Adaboost in terms of performance, despite the reduced feature description for each base classifer. This is explained using weighted error and error diversity. Results show the proposed stop criterion to be useful for trading ensemble performance and complexity. {\textcopyright} Springer-Verlag Berlin Heidelberg 2005.",
author = "Redpath, {D. B.} and K. Lebart",
year = "2005",
doi = "10.1007/11494683_4",
language = "English",
isbn = "978-3-540-26306-7",
volume = "3541",
series = "Lecture Notes in Computer Science",
pages = "32--41",
booktitle = "Multiple Classifier Systems",
note = "6th International Workshop on Multiple Classifier Systems, MCS 2005 ; Conference date: 13-06-2005 Through 15-06-2005",
}