@comment{
  bibliography.bib -- BibTeX/biblatex bibliography database.
  (GitHub page-scrape residue removed; BibTeX ignores text outside entries,
  but the gutter of bare line numbers was not part of the bibliography.)
}
@article{Meila2007Comparing,
  author    = {Meil{\u a}, Marina},
  title     = {Comparing clusterings---an information based distance},
  journal   = {Journal of Multivariate Analysis},
  year      = {2007},
  volume    = {98},
  number    = {5},
  pages     = {873--895},
  publisher = {Elsevier BV},
}
@inproceedings{Mller2006FDRAB,
author = {M{\"u}ller, Peter and Parmigiani, Giovanni and Rice, Kenneth M.},
year = {2007},
title = {{FDR} and {Bayesian} Multiple Comparisons Rules},
booktitle = {Bayesian Statistics 8: Proceedings of the Eighth Valencia International Meeting}
}
@article{Barbieri2004Optimal,
author = {Barbieri, M. M. and Berger, J. O.},
journal = {The Annals of Statistics},
number = {3},
year = {2004},
pages = {870--897},
publisher = {Institute of Mathematical Statistics},
title = {Optimal predictive model selection},
volume = {32},
}
@article{atay-kayisMonteCarloMethod2005,
  title = {A {Monte Carlo} Method for Computing the Marginal Likelihood in Nondecomposable {Gaussian} Graphical Models},
  author = {Atay-Kayis, Aliye and Massam, Hélène},
  year = {2005},
  journaltitle = {Biometrika},
  volume = {92},
  number = {2},
  pages = {317--335},
  issn = {1464-3510, 0006-3444},
  doi = {10.1093/biomet/92.2.317},
  url = {http://academic.oup.com/biomet/article/92/2/317/233106/A-Monte-Carlo-method-for-computing-the-marginal},
  urldate = {2022-10-24},
  abstract = {A centred Gaussian model that is Markov with respect to an undirected graph G is characterised by the parameter set of its precision matrices which is the cone M+(G) of positive definite matrices with entries corresponding to the missing edges of G constrained to be equal to zero. In a Bayesian framework, the conjugate family for the precision parameter is the distribution with Wishart density with respect to the Lebesgue measure restricted to M+(G). We call this distribution the G-Wishart. When G is nondecomposable, the normalising constant of the G-Wishart cannot be computed in closed form. In this paper, we give a simple Monte Carlo method for computing this normalising constant. The main feature of our method is that the sampling distribution is exact and consists of a product of independent univariate standard normal and chi-squared distributions that can be read off the graph G. Computing this normalising constant is necessary for obtaining the posterior distribution of G or the marginal likelihood of the corresponding graphical Gaussian model. Our method also gives a way of sampling from the posterior distribution of the precision matrix.},
  langid = {english},
  file = {/Users/teo/Zotero/storage/A4F3K8RY/Atay-Kayis and Massam - 2005 - A Monte Carlo method for computing the marginal li.pdf}
}
@article{bensonAdaptiveMCMCMultiple2018,
  title = {Adaptive {MCMC} for Multiple Changepoint Analysis with Applications to Large Datasets},
  author = {Benson, Alan and Friel, Nial},
  year = {2018},
  journaltitle = {Electronic Journal of Statistics},
  shortjournal = {Electron. J. Statist.},
  volume = {12},
  number = {2},
  issn = {1935-7524},
  doi = {10.1214/18-EJS1418},
  url = {https://projecteuclid.org/journals/electronic-journal-of-statistics/volume-12/issue-2/Adaptive-MCMC-for-multiple-changepoint-analysis-with-applications-to-large/10.1214/18-EJS1418.full},
  urldate = {2022-10-23},
  abstract = {We consider the problem of Bayesian inference for changepoints where the number and position of the changepoints are both unknown. In particular, we consider product partition models where it is possible to integrate out model parameters for the regime between each changepoint, leaving a posterior distribution over a latent vector indicating the presence or not of a changepoint at each observation. The same problem setting has been considered by Fearnhead (2006) where one can use filtering recursions to make exact inference. However, the complexity of this filtering recursions algorithm is quadratic in the number of observations. Our approach relies on an adaptive Markov Chain Monte Carlo (MCMC) method for finite discrete state spaces. We develop an adaptive algorithm which can learn from the past states of the Markov chain in order to build proposal distributions which can quickly discover where changepoint are likely to be located. We prove that our algorithm leaves the posterior distribution ergodic. Crucially, we demonstrate that our adaptive MCMC algorithm is viable for large datasets for which the filtering recursions approach is not. Moreover, we show that inference is possible in a reasonable time thus making Bayesian changepoint detection computationally efficient.},
  langid = {english},
  file = {/Users/teo/Zotero/storage/U9TIMGXJ/Benson and Friel - 2018 - Adaptive MCMC for multiple changepoint analysis wi.pdf}
}
@article{boomBayesianLearningGraph2022a,
  title = {{Bayesian} Learning of Graph Substructures},
  author = {van den Boom, Willem and De Iorio, Maria and Beskos, Alexandros},
  year = {2022},
  journaltitle = {Bayesian Analysis},
  shortjournal = {Bayesian Anal.},
  eprint = {2203.11664},
  eprinttype = {arXiv},
  primaryclass = {stat},
  issn = {1936-0975},
  doi = {10.1214/22-BA1338},
  url = {http://arxiv.org/abs/2203.11664},
  urldate = {2022-10-24},
  abstract = {Graphical models provide a powerful methodology for learning the conditional independence structure in multivariate data. Inference is often focused on estimating individual edges in the latent graph. Nonetheless, there is increasing interest in inferring more complex structures, such as communities, for multiple reasons, including more effective information retrieval and better interpretability. Stochastic blockmodels offer a powerful tool to detect such structure in a network. We thus propose to exploit advances in random graph theory and embed them within the graphical models framework. A consequence of this approach is the propagation of the uncertainty in graph estimation to large-scale structure learning. We consider Bayesian nonparametric stochastic blockmodels as priors on the graph. We extend such models to consider clique-based blocks and to multiple graph settings introducing a novel prior process based on a Dependent Dirichlet process. Moreover, we devise a tailored computation strategy of Bayes factors for block structure based on the Savage-Dickey ratio to test for presence of larger structure in a graph. We demonstrate our approach in simulations as well as on real data applications in finance and transcriptomics.},
  pubstate = {inpress},
  langid = {english},
  keywords = {Statistics - Methodology},
  file = {/Users/teo/Zotero/storage/HUMQ2PZV/Boom et al. - 2022 - Bayesian Learning of Graph Substructures.pdf}
}
@article{boomGWishartWeightedProposal2022,
  title = {The {G-Wishart} Weighted Proposal Algorithm: Efficient Posterior Computation for {Gaussian} Graphical Models},
  shorttitle = {The {G-Wishart} Weighted Proposal Algorithm},
  author = {van den Boom, Willem and Beskos, Alexandros and De Iorio, Maria},
  year = {2022},
  journaltitle = {Journal of Computational and Graphical Statistics},
  shortjournal = {Journal of Computational and Graphical Statistics},
  eprint = {2108.01308},
  eprinttype = {arXiv},
  primaryclass = {stat},
  pages = {1--10},
  issn = {1061-8600, 1537-2715},
  doi = {10.1080/10618600.2022.2050250},
  url = {http://arxiv.org/abs/2108.01308},
  urldate = {2022-10-24},
  abstract = {Gaussian graphical models can capture complex dependency structures amongst variables. For such models, Bayesian inference is attractive as it provides principled ways to incorporate prior information and to quantify uncertainty through the posterior distribution. However, posterior computation under the conjugate G-Wishart prior distribution on the precision matrix is expensive for general non-decomposable graphs. We therefore propose a new Markov chain Monte Carlo (MCMC) method named the G-Wishart weighted proposal algorithm (WWA). WWA's distinctive features include delayed acceptance MCMC, Gibbs updates for the precision matrix and an informed proposal distribution on the graph space that enables embarrassingly parallel computations. Compared to existing approaches, WWA reduces the frequency of the relatively expensive sampling from the G-Wishart distribution. This results in faster MCMC convergence, improved MCMC mixing and reduced computation time. Numerical studies on simulated and real data show that WWA provides a more efficient tool for posterior inference than competing state-of-the-art MCMC algorithms.},
  keywords = {Statistics - Computation},
  file = {/Users/teo/Zotero/storage/FBPXGPMG/Boom et al_2022_The G-Wishart Weighted Proposal Algorithm.pdf;/Users/teo/Zotero/storage/GGPUMX95/2108.html}
}
@misc{colombiLearningBlockStructured2022a,
  title = {Learning Block Structured Graphs in {Gaussian} Graphical Models},
  author = {Colombi, Alessandro and Argiento, Raffaele and Paci, Lucia and Pini, Alessia},
  year = {2022},
  number = {arXiv:2206.14274},
  eprint = {2206.14274},
  eprinttype = {arXiv},
  publisher = {arXiv},
  url = {http://arxiv.org/abs/2206.14274}
}
@misc{gengProbabilisticCommunityDetection2018,
  title = {Probabilistic Community Detection with Unknown Number of Communities},
  author = {Geng, Junxian and Bhattacharya, Anirban and Pati, Debdeep},
  year = {2018},
  number = {arXiv:1602.08062},
  eprint = {1602.08062},
  eprinttype = {arXiv},
  primaryclass = {math, stat},
  publisher = {arXiv},
  url = {http://arxiv.org/abs/1602.08062},
  urldate = {2022-10-23},
  abstract = {A fundamental problem in network analysis is clustering the nodes into groups which share a similar connectivity pattern. Existing algorithms for community detection assume the knowledge of the number of clusters or estimate it a priori using various selection criteria and subsequently estimate the community structure. Ignoring the uncertainty in the first stage may lead to erroneous clustering, particularly when the community structure is vague. We instead propose a coherent probabilistic framework for simultaneous estimation of the number of communities and the community structure, adapting recently developed Bayesian nonparametric techniques to network models. An efficient Markov chain Monte Carlo (MCMC) algorithm is proposed which obviates the need to perform reversible jump MCMC on the number of clusters. The methodology is shown to outperform recently developed community detection algorithms in a variety of synthetic data examples and in benchmark real-datasets. Using an appropriate metric on the space of all configurations, we develop non-asymptotic Bayes risk bounds even when the number of clusters is unknown. Enroute, we develop concentration properties of non-linear functions of Bernoulli random variables, which may be of independent interest.},
  langid = {english},
  keywords = {Mathematics - Statistics Theory,Statistics - Methodology},
  file = {/Users/teo/Zotero/storage/ZE2W677N/Geng et al. - 2018 - Probabilistic community detection with unknown num.pdf}
}
@article{legramantiExtendedStochasticBlock2022,
  author    = {Legramanti, Sirio and Rigon, Tommaso and Durante, Daniele and Dunson, David B.},
  title     = {Extended stochastic block models with application to criminal networks},
  journal   = {The Annals of Applied Statistics},
  year      = {2022},
  volume    = {16},
  number    = {4},
  pages     = {2369--2395},
  publisher = {Institute of Mathematical Statistics},
}
@article{martinezNonparametricChangePoint2014,
  title = {On a Nonparametric Change Point Detection Model in {Markovian} Regimes},
  author = {Martínez, Asael Fabian and Mena, Ramsés H.},
  year = {2014},
  journaltitle = {Bayesian Analysis},
  shortjournal = {Bayesian Anal.},
  volume = {9},
  number = {4},
  issn = {1936-0975},
  doi = {10.1214/14-BA878},
  url = {https://projecteuclid.org/journals/bayesian-analysis/volume-9/issue-4/On-a-Nonparametric-Change-Point-Detection-Model-in-Markovian-Regimes/10.1214/14-BA878.full},
  urldate = {2022-10-23},
  abstract = {Change point detection models aim to determine the most probable grouping for a given sample indexed on an ordered set. For this purpose, we propose a methodology based on exchangeable partition probability functions, specifically on Pitman’s sampling formula. Emphasis will be given to the Markovian case, in particular for discretely observed Ornstein-Uhlenbeck diffusion processes. Some properties of the resulting model are explained and posterior results are obtained via a novel Markov chain Monte Carlo algorithm.},
  langid = {english},
  file = {/Users/teo/Zotero/storage/Q5ZJEWIZ/Martínez and Mena - 2014 - On a Nonparametric Change Point Detection Model in.pdf}
}
@article{mohammadiBayesianStructureLearning2015a,
  title = {{Bayesian} Structure Learning in Sparse {Gaussian} Graphical Models},
  author = {Mohammadi, A. and Wit, E. C.},
  year = {2015},
  journaltitle = {Bayesian Analysis},
  shortjournal = {Bayesian Anal.},
  volume = {10},
  number = {1},
  pages = {109--138},
  issn = {1936-0975},
  doi = {10.1214/14-BA889},
  url = {https://projecteuclid.org/journals/bayesian-analysis/volume-10/issue-1/Bayesian-Structure-Learning-in-Sparse-Gaussian-Graphical-Models/10.1214/14-BA889.full},
  urldate = {2022-10-24},
  file = {/Users/teo/Zotero/storage/4P532AMX/Mohammadi_Wit_2015_Bayesian Structure Learning in Sparse Gaussian Graphical Models.pdf}
}
@article{schmidtNonparametricBayesianModeling2013,
  title = {Non-Parametric {Bayesian} Modeling of Complex Networks},
  author = {Schmidt, Mikkel N. and Mørup, Morten},
  year = {2013},
  journaltitle = {IEEE Signal Processing Magazine},
  shortjournal = {IEEE Signal Process. Mag.},
  volume = {30},
  number = {3},
  eprint = {1312.5889},
  eprinttype = {arXiv},
  primaryclass = {stat},
  pages = {110--128},
  issn = {1053-5888},
  doi = {10.1109/MSP.2012.2235191},
  url = {http://arxiv.org/abs/1312.5889},
  urldate = {2022-10-23},
  abstract = {Modeling structure in complex networks using Bayesian non-parametrics makes it possible to specify flexible model structures and infer the adequate model complexity from the observed data. This paper provides a gentle introduction to non-parametric Bayesian modeling of complex networks: Using an infinite mixture model as running example we go through the steps of deriving the model as an infinite limit of a finite parametric model, inferring the model parameters by Markov chain Monte Carlo, and checking the model’s fit and predictive performance. We explain how advanced non-parametric models for complex networks can be derived and point out relevant literature.},
  langid = {english},
  keywords = {Statistics - Machine Learning},
  file = {/Users/teo/Zotero/storage/U7QDBPRY/Schmidt and Mørup - 2013 - Non-parametric Bayesian modeling of complex networ.pdf}
}
@article{wangEfficientGaussianGraphical2012a,
  title = {Efficient {Gaussian} Graphical Model Determination under {G-Wishart} Prior Distributions},
  author = {Wang, Hao and Li, Sophia Zhengzi},
  year = {2012},
  journaltitle = {Electronic Journal of Statistics},
  shortjournal = {Electron. J. Statist.},
  volume = {6},
  pages = {168--198},
  issn = {1935-7524},
  doi = {10.1214/12-EJS669},
  url = {https://projecteuclid.org/journals/electronic-journal-of-statistics/volume-6/issue-none/Efficient-Gaussian-graphical-model-determination-under-G-Wishart-prior-distributions/10.1214/12-EJS669.full},
  urldate = {2022-10-24},
  issue = {none},
  file = {/Users/teo/Zotero/storage/V9VLGWTW/Wang_Li_2012_Efficient Gaussian graphical model determination under G-Wishart prior.pdf}
}