@inbook{ddfa6373268248d4b5e840994f29e80d,
title = "One-Class Classification with Subgaussians",
abstract = "If a simple and fast solution for one-class classification is required, the most common approach is to assume a Gaussian distribution for the patterns of the single class. Bayesian classification then leads to a simple template matching. In this paper we show for two very different applications that the classification performance can be improved significantly if a more uniform subgaussian instead of a Gaussian class distribution is assumed. One application is face detection, the other is the detection of transcription factor binding sites on a genome. As for the Gaussian, the distance from a template, i.e., the distribution center, determines a pattern{\textquoteright}s class assignment. However, depending on the distribution assumed, maximum likelihood learning leads to different templates from the training data. These new templates lead to significant improvements of the classification performance.",
author = "{Madany Mamlouk}, Amir and Kim, {Jan T.} and Erhardt Barth and Michael Brauckmann and Thomas Martinetz",
year = "2003",
doi = "10.1007/978-3-540-45243-0_45",
language = "English",
isbn = "978-3-540-40861-1",
volume = "2781",
series = "Lecture Notes in Computer Science",
publisher = "Springer",
pages = "346--353",
booktitle = "Pattern Recognition",
address = "Germany",
note = "25th DAGM Symposium ; Conference date: 10-09-2003 Through 12-09-2003",
}