@inbook{dd7b8ce5041d48838a92862688bcd28c,
title = "Boosting feature selection",
abstract = "It is possible to reduce the error rate of a single classifier using a classifier ensemble. However, any gain in performance is undermined by the increased computation of performing classification several times. Here the AdaboostFS algorithm is proposed which builds on two popular areas of ensemble research: Adaboost and Ensemble Feature Selection (EFS). The aim of AdaboostFSs is to reduce the number of features used by each base classifer and hence the overall computation required by the ensemble. To do this the algorithm combines a regularised version of Boosting AdaboostReg [1] with a floating feature search for each base classifier. Adaboost FS is compared using four benchmark data sets to Adaboost All, which uses all features and to AdaboostRSM, which uses a random selection of features. Performance is assessed based on error rate, ensemble error and diversity, and the total number of features used for classification. Results show that AdaboostFS achieves a lower error rate and higher diversity than AdaboostAll, and achieves a lower error rate and comparable diversity to AdaboostRSM However, over the other methods AdaboostFS produces a significant reduction in the number of features required for classification in each base classifier and the entire ensemble. {\textcopyright} Springer-Verlag Berlin Heidelberg 2005.",
author = "Redpath, {D. B.} and K. Lebart",
year = "2005",
doi = "10.1007/11551188_33",
language = "English",
isbn = "978-3-540-28757-5",
volume = "3686",
series = "Lecture Notes in Computer Science",
pages = "305--314",
booktitle = "Pattern Recognition and Data Mining",
note = "Third International Conference on Advances in Patten Recognition, ICAPR 2005 ; Conference date: 22-08-2005 Through 25-08-2005",
}