How to cite this item

Can the Adaptive Metropolis Algorithm Collapse Without the Covariance Lower Bound?

@article{EJP840,
	author = {Matti Vihola},
	title = {Can the Adaptive Metropolis Algorithm Collapse Without the Covariance Lower Bound?},
	journal = {Electron. J. Probab.},
	fjournal = {Electronic Journal of Probability},
	volume = {16},
	year = {2011},
	keywords = {adaptive Markov chain Monte Carlo; Metropolis algorithm; stability; stochastic approximation},
	abstract = {The Adaptive Metropolis (AM) algorithm is based on the symmetric random-walk Metropolis algorithm. The proposal distribution has the following time-dependent covariance matrix, at step $n+1$, $S_n=\mathrm{Cov}(X_1,\ldots,X_n)+\varepsilon I$, that is, the sample covariance matrix of the history of the chain plus a (small) constant $\varepsilon>0$ multiple of the identity matrix $I$. The lower bound on the eigenvalues of $S_n$ induced by the factor $\varepsilon I$ is theoretically convenient, but practically cumbersome, as a good value for the parameter $\varepsilon$ may not always be easy to choose. This article considers variants of the AM algorithm that do not explicitly bound the eigenvalues of $S_n$ away from zero. The behaviour of $S_n$ is studied in detail, indicating that the eigenvalues of $S_n$ do not tend to collapse to zero in general. In dimension one, it is shown that $S_n$ is bounded away from zero if the logarithmic target density is uniformly continuous. For a modification of the AM algorithm including an additional fixed component in the proposal distribution, the eigenvalues of $S_n$ are shown to stay away from zero with a practically non-restrictive condition. This result implies a strong law of large numbers for super-exponentially decaying target distributions with regular contours.},
	pages = {no. 2, 45-75},
	issn = {1083-6489},
	doi = {10.1214/EJP.v16-840},
	url = {http://ejp.ejpecp.org/article/view/840}
}
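For readers who want to see the proposal covariance described in the abstract in executable form, the following is a minimal Python sketch of the symmetric random-walk Metropolis algorithm with the adaptive covariance $S_n=\mathrm{Cov}(X_1,\ldots,X_n)+\varepsilon I$. It is not the paper's code: the function name adaptive_metropolis, the target log_pi, and the default values are illustrative assumptions, and practical implementations often also scale the sample covariance by a dimension-dependent factor (e.g. $2.4^2/d$), which the abstract's formula omits.

import numpy as np

def adaptive_metropolis(log_pi, x0, n_iter, eps=1e-6, rng=None):
    """Random-walk Metropolis with proposal covariance S_n = Cov(history) + eps*I (sketch)."""
    rng = np.random.default_rng() if rng is None else rng
    x = np.asarray(x0, dtype=float)
    d = x.size
    lp = log_pi(x)
    mean = x.copy()                      # running mean of the chain history
    cov = np.zeros((d, d))               # running (biased) sample covariance of the history
    chain = np.empty((n_iter, d))
    for n in range(n_iter):
        # Proposal covariance at step n+1: sample covariance of the history plus
        # the eps*I lower bound whose necessity the paper investigates.
        S = cov + eps * np.eye(d) if n > 0 else np.eye(d)
        y = rng.multivariate_normal(x, S)
        lp_y = log_pi(y)
        if np.log(rng.uniform()) < lp_y - lp:    # symmetric-proposal acceptance rule
            x, lp = y, lp_y
        chain[n] = x
        # Recursive update of the history moments (now n + 2 samples, including x0).
        k = n + 2
        delta = x - mean
        mean = mean + delta / k
        cov = cov + (np.outer(delta, x - mean) - cov) / k
    return chain

# Illustrative use: 2-dimensional standard normal target.
samples = adaptive_metropolis(lambda z: -0.5 * float(z @ z), np.zeros(2), 5000)

Setting eps = 0 in this sketch corresponds to the unbounded variants whose stability the article analyses; the code above keeps a small eps so that the proposal covariance remains positive definite in the early iterations.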