@article{EJP2031,
author = {Sébastien Gadat and Fabien Panloup and Clément Pellegrini},
title = {Large deviation principle for invariant distributions of memory gradient diffusions},
journal = {Electron. J. Probab.},
fjournal = {Electronic Journal of Probability},
volume = {18},
year = {2013},
keywords = {Large Deviation Principle; Hamilton-Jacobi Equations; Freidlin and Wentzell theory; small stochastic perturbations; hypoelliptic diffusions},
abstract = {In this paper, we consider a class of diffusion processes based on a memory gradient descent, i.e. whose drift term is built as the average, along the trajectory, of the gradient of a coercive function U. Under some classical assumptions on U, this type of diffusion is ergodic and admits a unique invariant distribution. In view of applications to optimization, we want to understand the behaviour of the invariant distribution when the diffusion coefficient goes to 0. In the non-memory case, the invariant distribution is explicit, and the so-called Laplace method shows that a Large Deviation Principle (LDP) holds with an explicit rate function, which leads to a concentration of the invariant distribution around the global minima of U. Here, except in the linear case, we have no closed formula for the invariant distribution, but we show that an LDP can still be obtained. Then, in the one-dimensional case, we derive bounds on the rate function that lead to concentration around the global minimum under some assumptions on the second derivative of U.
},
pages = {no. 81, 1--34},
issn = {1083-6489},
doi = {10.1214/EJP.v18-2031},
url = {http://ejp.ejpecp.org/article/view/2031}}