@comment{mcmc.bib}

@comment{{This file has been generated by bib2bib 1.96}}
@comment{{Command line: bib2bib -ob mcmc.bib -s year -c select:"mcmc" Omiros_refs.bib}}
@article{pap:rob:sk,
  author   = {Papaspiliopoulos, Omiros and Roberts, Gareth O. and Sk{\"o}ld,
              Martin},
  title    = {A general framework for the parametrization of hierarchical
              models},
  journal  = {Statist. Sci.},
  fjournal = {Statistical Science. A Review Journal of the Institute of
              Mathematical Statistics},
  volume   = {22},
  year     = {2007},
  number   = {1},
  pages    = {59--73},
  issn     = {0883-4237},
  mrclass  = {Database Expansion Item},
  mrnumber = {2408661},
  doi      = {10.1214/088342307000000014},
  select   = {mcmc},
  abstract = {In this paper, we describe centering and noncentering methodology as complementary techniques for use in parametrization of broad classes of hierarchical models, with a view to the construction of effective MCMC algorithms for exploring posterior distributions from these models. We give a clear qualitative understanding as to when centering and noncentering work well, and introduce theory concerning the convergence time complexity of Gibbs samplers using centered and noncentered parametrizations. We give general recipes for the construction of noncentered parametrizations, including an auxiliary variable technique called the state-space expansion technique. We also describe partially noncentered methods, and demonstrate their use in constructing robust Gibbs sampler algorithms whose convergence properties are not overly sensitive to the data.},
  keywords = {Parametrization; hierarchical models; latent stochastic processes; MCMC}
}
@article{retro,
  author   = {Papaspiliopoulos, Omiros and Roberts, Gareth O.},
  title    = {Retrospective {M}arkov chain {M}onte {C}arlo methods for
              {D}irichlet process hierarchical models},
  journal  = {Biometrika},
  fjournal = {Biometrika},
  volume   = {95},
  year     = {2008},
  number   = {1},
  pages    = {169--186},
  issn     = {0006-3444},
  coden    = {BIOKAX},
  mrclass  = {Database Expansion Item},
  mrnumber = {2409721},
  doi      = {10.1093/biomet/asm086},
  select   = {mcmc},
  abstract = {Inference for Dirichlet process hierarchical models is typically performed using Markov chain Monte Carlo methods, which can be roughly categorized into marginal and conditional methods. The former integrate out analytically the infinite-dimensional component of the hierarchical model and sample from the marginal distribution of the remaining variables using the Gibbs sampler. Conditional methods impute the Dirichlet process and update it as a component of the Gibbs sampler. Since this requires imputation of an infinite-dimensional process, implementation of the conditional method has relied on finite approximations. In this paper, we show how to avoid such approximations by designing two novel Markov chain Monte Carlo algorithms which sample from the exact posterior distribution of quantities of interest. The approximations are avoided by the new technique of retrospective sampling. We also show how the algorithms can obtain samples from functionals of the Dirichlet process. The marginal and the conditional methods are compared and a careful simulation study is included, which involves a non-conjugate model, different datasets and prior specifications.},
  keywords = {Exact simulation; Label switching; Mixture model; Retrospective sampling; Stick-breaking prior}
}
@article{Chopin2013397,
  author   = {Chopin, N. and Jacob, P. E. and Papaspiliopoulos, O.},
  title    = {{SMC}$^2$: An efficient algorithm for sequential analysis of state space models},
  journal  = {Journal of the Royal Statistical Society. Series B: Statistical Methodology},
  year     = {2013},
  volume   = {75},
  number   = {3},
  pages    = {397--426},
  url      = {https://www.scopus.com/inward/record.url?eid=2-s2.0-84878260706&partnerID=40&md5=537ed2b8f2322b448706c4401572f2d4},
  document_type = {Article},
  source   = {Scopus},
  select   = {mcmc},
  abstract = {We consider the generic problem of performing sequential Bayesian inference in a state space model with observation process y, state process x and fixed parameter $\theta$. An idealized approach would be to apply the iterated batch importance sampling algorithm of Chopin. This is a sequential Monte Carlo algorithm in the $\theta$-dimension, that samples values of $\theta$, reweights iteratively these values by using the likelihood increments $p(y_t | y_{1:t-1},\theta)$ and rejuvenates the $\theta$-particles through a resampling step and a Markov chain Monte Carlo update step. In state space models these likelihood increments are intractable in most cases, but they may be unbiasedly estimated by a particle filter in the x-dimension, for any fixed $\theta$. This motivates the SMC$^2$ algorithm that is proposed in the paper: a sequential Monte Carlo algorithm, defined in the $\theta$-dimension, which propagates and resamples many particle filters in the $x$-dimension. The filters in the $x$-dimension are an example of the random weight particle filter. In contrast, the particle Markov chain Monte Carlo framework that has been developed by Andrieu and colleagues allows us to design appropriate Markov chain Monte Carlo rejuvenation steps. Thus, the $\theta$-particles target the correct posterior distribution at each iteration $t$, despite the intractability of the likelihood increments. We explore the applicability of our algorithm in both sequential and non-sequential applications and consider various degrees of freedom, as for example increasing dynamically the number of $x$-particles. We contrast our approach with various competing methods, both conceptually and empirically through a detailed simulation study, and based on particularly challenging examples.},
  keywords = {Iterated batch importance sampling; Particle filtering; Particle Markov chain Monte Carlo methods; Sequential Monte Carlo sampling; State space models}
}

@comment{This file was generated by bibtex2html 1.96.}