We propose a new prior for ultra-sparse signal detection that we term the
"horseshoe+ prior." The horseshoe+ prior is a natural extension of the
horseshoe prior that has achieved success in the estimation and detection of
sparse signals and has been shown to possess a number of desirable theoretical
properties while enjoying computational feasibility in high dimensions. The
horseshoe+ prior builds upon these advantages. Our work proves that the
horseshoe+ posterior concentrates at a rate faster than that of the horseshoe
in the Kullback-Leibler (K-L) sense. We also establish theoretically that the
proposed estimator has lower posterior mean squared error in estimating signals
compared to the horseshoe and achieves the optimal Bayes risk in testing up to
a constant. For global-local scale mixture priors, we develop a new technique
for analyzing the marginal sparse prior densities using the class of Meijer-G
functions. In simulations, the horseshoe+ estimator demonstrates superior
performance in a standard design setting against competing methods, including
the horseshoe and Dirichlet-Laplace estimators. We conclude with an
illustration on a prostate cancer data set and by pointing out some directions
for future research.
%0 Generic
%1 bhadra2015horseshoe
%A Bhadra, Anindya
%A Datta, Jyotishka
%A Polson, Nicholas G.
%A Willard, Brandon
%D 2015
%K Bayesian methods regularization sparsification statistics
%T The Horseshoe+ Estimator of Ultra-Sparse Signals
%U http://arxiv.org/abs/1502.00560
%X We propose a new prior for ultra-sparse signal detection that we term the
"horseshoe+ prior." The horseshoe+ prior is a natural extension of the
horseshoe prior that has achieved success in the estimation and detection of
sparse signals and has been shown to possess a number of desirable theoretical
properties while enjoying computational feasibility in high dimensions. The
horseshoe+ prior builds upon these advantages. Our work proves that the
horseshoe+ posterior concentrates at a rate faster than that of the horseshoe
in the Kullback-Leibler (K-L) sense. We also establish theoretically that the
proposed estimator has lower posterior mean squared error in estimating signals
compared to the horseshoe and achieves the optimal Bayes risk in testing up to
a constant. For global-local scale mixture priors, we develop a new technique
for analyzing the marginal sparse prior densities using the class of Meijer-G
functions. In simulations, the horseshoe+ estimator demonstrates superior
performance in a standard design setting against competing methods, including
the horseshoe and Dirichlet-Laplace estimators. We conclude with an
illustration on a prostate cancer data set and by pointing out some directions
for future research.
@misc{bhadra2015horseshoe,
  abstract      = {We propose a new prior for ultra-sparse signal detection that we term the
"horseshoe+ prior." The horseshoe+ prior is a natural extension of the
horseshoe prior that has achieved success in the estimation and detection of
sparse signals and has been shown to possess a number of desirable theoretical
properties while enjoying computational feasibility in high dimensions. The
horseshoe+ prior builds upon these advantages. Our work proves that the
horseshoe+ posterior concentrates at a rate faster than that of the horseshoe
in the Kullback-Leibler (K-L) sense. We also establish theoretically that the
proposed estimator has lower posterior mean squared error in estimating signals
compared to the horseshoe and achieves the optimal Bayes risk in testing up to
a constant. For global-local scale mixture priors, we develop a new technique
for analyzing the marginal sparse prior densities using the class of Meijer-G
functions. In simulations, the horseshoe+ estimator demonstrates superior
performance in a standard design setting against competing methods, including
the horseshoe and Dirichlet-Laplace estimators. We conclude with an
illustration on a prostate cancer data set and by pointing out some directions
for future research.},
  added-at      = {2016-08-18T17:51:34.000+0200},
  author        = {Bhadra, Anindya and Datta, Jyotishka and Polson, Nicholas G. and Willard, Brandon},
  biburl        = {https://www.bibsonomy.org/bibtex/277ba810c2133464001278550fe96c011/peter.ralph},
  eprint        = {1502.00560},
  archiveprefix = {arXiv},
  interhash     = {45bb85fd9ed327e58c4201f9d6dbe019},
  intrahash     = {77ba810c2133464001278550fe96c011},
  keywords      = {Bayesian methods regularization sparsification statistics},
  timestamp     = {2016-08-18T17:51:34.000+0200},
  title         = {The {Horseshoe+} Estimator of Ultra-Sparse Signals},
  url           = {http://arxiv.org/abs/1502.00560},
  year          = {2015},
}