Bayesian neural network priors for edge-preserving inversion
Bibtex:
@article{LDS22,
author = {Li, Chen and Dunlop, Matthew M. and Stadler, Georg},
title = {{Bayesian} neural network priors for edge-preserving inversion},
year = {2022},
journal = {Inverse Problems and Imaging},
doi = {10.3934/ipi.2022022},
}
A gradient-free subspace-adjusting ensemble sampler for infinite-dimensional Bayesian inverse problems
Bibtex:
@unpublished{DS22,
author = {Dunlop, Matthew M. and Stadler, Georg},
title = {A gradient-free subspace-adjusting ensemble sampler for infinite-dimensional {Bayesian} inverse problems},
year = {2022},
note = {submitted},
}
Stability of Gibbs posteriors from the Wasserstein loss for Bayesian Full Waveform Inversion
Bibtex:
@article{DY21,
author = {Dunlop, Matthew M. and Yang, Yunan},
title = {Stability of {Gibbs} posteriors from the {Wasserstein} loss for {Bayesian Full Waveform Inversion}},
year = {2021},
journal = {SIAM/ASA Journal on Uncertainty Quantification},
volume = {9},
number = {4},
pages = {1499--1526},
doi = {10.1137/20M1334218},
}
New likelihood functions and level-set prior for Bayesian full-waveform inversion
Bibtex:
@inproceedings{DY20,
author = {Dunlop, Matthew M. and Yang, Yunan},
title = {New likelihood functions and level-set prior for {Bayesian} full-waveform inversion},
booktitle = {SEG Technical Program Expanded Abstracts 2020},
year = {2020},
pages = {825--829},
doi = {10.1190/segam2020-3428223.1},
internal-note = {booktitle inferred from the segam2020 DOI prefix -- verify against the published abstract},
}
Large data and zero noise limits of graph-based semi-supervised learning algorithms
Bibtex:
@article{DSST20,
author = {Dunlop, Matthew M. and Slep{\v{c}}ev, Dejan and Stuart, Andrew M. and Thorpe, Matthew},
title = {Large data and zero noise limits of graph-based semi-supervised learning algorithms},
year = {2020},
journal = {Applied and Computational Harmonic Analysis},
volume = {49},
number = {2},
pages = {655--697},
doi = {10.1016/j.acha.2019.03.005},
}
Hyperparameter estimation in Bayesian MAP estimation: parameterizations and consistency
Bibtex:
@article{DHS20,
author = {Dunlop, Matthew M. and Helin, Tapio and Stuart, Andrew M.},
title = {Hyperparameter estimation in {Bayesian} {MAP} estimation: parameterizations and consistency},
year = {2020},
journal = {SMAI Journal of Computational Mathematics},
volume = {6},
pages = {69--100},
doi = {10.5802/smai-jcm.62},
}
Reconciling Bayesian and perimeter regularization for binary inversion
Bibtex:
@article{DDEHS20,
author = {Dunbar, Oliver R. A. and Dunlop, Matthew M. and Elliott, Charles M. and Hoang, Viet Ha and Stuart, Andrew M.},
title = {Reconciling {Bayesian} and perimeter regularization for binary inversion},
year = {2020},
journal = {SIAM Journal on Scientific Computing},
volume = {42},
number = {4},
pages = {A1984--A2013},
doi = {10.1137/18M1179559},
}
Multiplicative noise in Bayesian inverse problems: well-posedness and consistency of MAP estimators
Bibtex:
@unpublished{Dunlop19,
author = {Dunlop, Matthew M.},
title = {Multiplicative noise in {Bayesian} inverse problems: well-posedness and consistency of {MAP} estimators},
year = {2019},
note = {submitted},
}
Dimension-robust MCMC in Bayesian inverse problems
Bibtex:
@unpublished{CDPS18,
author = {Chen, Victor and Dunlop, Matthew M. and Papaspiliopoulos, Omiros and Stuart, Andrew M.},
title = {Dimension-robust {MCMC} in {Bayesian} inverse problems},
year = {2018},
note = {submitted},
}
How deep are Deep Gaussian Processes?
Bibtex:
@article{DGST18,
author = {Dunlop, Matthew M. and Girolami, Mark and Stuart, Andrew M. and Teckentrup, Aretha L.},
title = {How deep are {Deep Gaussian Processes}?},
year = {2018},
journal = {Journal of Machine Learning Research},
volume = {19},
number = {54},
pages = {1--46},
}
Iterative updating of model error for Bayesian inversion
Bibtex:
@article{CDSS18,
author = {Calvetti, Daniela and Dunlop, Matthew M. and Somersalo, Erkki and Stuart, Andrew M.},
title = {Iterative updating of model error for {Bayesian} inversion},
year = {2018},
journal = {Inverse Problems},
volume = {34},
number = {2},
pages = {025008},
doi = {10.1088/1361-6420/aaa34d},
}
Hierarchical Bayesian level set inversion
Bibtex:
@article{DIS17,
author = {Dunlop, Matthew M. and Iglesias, Marco A. and Stuart, Andrew M.},
title = {Hierarchical {Bayesian} level set inversion},
year = {2017},
journal = {Statistics and Computing},
volume = {27},
number = {6},
pages = {1555--1584},
doi = {10.1007/s11222-016-9704-8},
}
The Bayesian formulation of EIT: analysis and algorithms
Bibtex:
@article{DS16a,
author = {Dunlop, Matthew M. and Stuart, Andrew M.},
title = {The {Bayesian} formulation of {EIT}: analysis and algorithms},
year = {2016},
journal = {Inverse Problems and Imaging},
volume = {10},
number = {4},
pages = {1007--1036},
doi = {10.3934/ipi.2016030},
}
MAP estimators for piecewise continuous inversion
Bibtex:
@article{DS16,
author = {Dunlop, Matthew M. and Stuart, Andrew M.},
title = {{MAP} estimators for piecewise continuous inversion},
year = {2016},
journal = {Inverse Problems},
volume = {32},
number = {10},
pages = {105003},
doi = {10.1088/0266-5611/32/10/105003},
}
Analysis and Computation for Bayesian Inverse Problems
Bibtex:
@phdthesis{Dunlop16,
author = {Dunlop, Matthew M.},
title = {Analysis and Computation for {Bayesian} Inverse Problems},
year = {2016},
school = {University of Warwick},
}
RÉSUMÉ
EDUCATION
2013-2016
PhD Mathematics and Statistics University of Warwick
2012-2013
MSc Mathematics and Statistics University of Warwick
2008-2012
MMath Mathematics University of Warwick
EMPLOYMENT
2019-
Postdoctoral Associate Courant Institute of Mathematical Sciences
2018-2019
Postdoctoral Researcher University of Helsinki
2016-2018
Postdoctoral Scholar California Institute of Technology
Read section 3.8 of the notes up to and including Corollary 3.34.
Exercises 3.13, 3.21.
CMS/ACM 107 Week 8 Problem Set
3.18, 3.19, 3.20.
CMS/ACM 107 Week 9 Problem Set
To be submitted: 3.26, 3.30 and 3.31.
CMS/ACM 107 Week 10 Problem Set
To be submitted: 3.32 and 3.33.
CMS/ACM 107 Late Submission Policy
Assignments may be handed in late, but each late day will incur a 20% penalty, in order to be fair to other students who hand in on time.
CMS/ACM 107 Problem Set Instructions
Exercises are given in the lecture notes at the end of each chapter. The exercise numbers given correspond to those in the lecture notes.
A number of exercises will be assigned each week, of which one will be marked at random and count for 1% of the course.
CMS/ACM 107 Week 1 Problem Set
To be submitted: 1.3, 1.5, 1.9 and 1.11. Accessible by end of week: 1.1-1.11.
CMS/ACM 107 Week 2 Problem Set
To be submitted: 1.14, 1.17, 1.22 and 1.25. To read: Section 1.3 - Eigenvalues and Eigenvectors. Please also read the statements of the Schur Factorization, the Jordan Canonical Form
and the SVD; you do not need to read the proofs and, in fact, the proof of the
SVD will be covered in lectures anyway. Accessible by end of week: 1.13-1.14, 1.17-1.18, 1.20, 1.22-1.26.
CMS/ACM 107 Week 3 Problem Set
To be submitted: 1.16. To read: Exercise 1.15 and its solution at the back of the lecture notes. Accessible by end of week: All chapter 1 exercises.
CMS/ACM 107 Week 4 Problem Set
To be submitted: 2.1, 2.2, 2.3 and 2.4.
CMS/ACM 107 Week 5 Problem Set
To be submitted: 2.9, 2.37 and 2.39. Accessible by end of week: 2.11, 2.36.
CMS/ACM 107 Week 6 Problem Set
To be submitted: 2.29(a), 2.29(c), 2.40 and 2.41. Note: You may use the result of 2.29(b) to prove 2.29(c).
CMS/ACM 107 Week 7 Problem Set
To be submitted: 3.1, 3.3, 3.6 and 3.13. To read: Exercise 3.2 and its solution at the back of the lecture notes.
CMS/ACM 107 Week 8 Problem Set
To be submitted: 3.7, 3.8, 3.9, 3.10 and 3.17.
CMS/ACM 107 Week 9 Problem Set
To be submitted: 3.26, 3.30 and 3.31.
CMS/ACM 107 Week 10 Problem Set
To be submitted: 3.32 and 3.33.
CMS/ACM 107 Late Submission Policy
Assignments may be handed in late, but each late day will incur a 20% penalty, in order to be fair to other students who hand in on time. Problem sets handed in after solutions are released will not be graded.