@misc{ho2020denoising,
title={Denoising Diffusion Probabilistic Models},
author={Jonathan Ho and Ajay Jain and Pieter Abbeel},
year={2020},
eprint={2006.11239},
archivePrefix={arXiv},
primaryClass={cs.LG},
url={https://arxiv.org/abs/2006.11239},
}
@misc{kingma2013autoencodingvariationalbayes,
title={Auto-Encoding Variational Bayes},
author={Diederik P Kingma and Max Welling},
year={2013},
eprint={1312.6114},
archivePrefix={arXiv},
primaryClass={stat.ML},
url={https://arxiv.org/abs/1312.6114},
}
@inproceedings{pmlr-v37-sohl-dickstein15,
title = {Deep Unsupervised Learning using Nonequilibrium Thermodynamics},
author = {Sohl-Dickstein, Jascha and Weiss, Eric and Maheswaranathan, Niru and Ganguli, Surya},
booktitle = {Proceedings of the 32nd International Conference on Machine Learning},
pages = {2256--2265},
year = {2015},
editor = {Bach, Francis and Blei, David},
volume = {37},
series = {Proceedings of Machine Learning Research},
address = {Lille, France},
month = {07--09 Jul},
publisher = {PMLR},
pdf = {http://proceedings.mlr.press/v37/sohl-dickstein15.pdf},
url = {https://proceedings.mlr.press/v37/sohl-dickstein15.html},
abstract = {A central problem in machine learning involves modeling complex data-sets using highly flexible families of probability distributions in which learning, sampling, inference, and evaluation are still analytically or computationally tractable. Here, we develop an approach that simultaneously achieves both flexibility and tractability. The essential idea, inspired by non-equilibrium statistical physics, is to systematically and slowly destroy structure in a data distribution through an iterative forward diffusion process. We then learn a reverse diffusion process that restores structure in data, yielding a highly flexible and tractable generative model of the data. This approach allows us to rapidly learn, sample from, and evaluate probabilities in deep generative models with thousands of layers or time steps, as well as to compute conditional and posterior probabilities under the learned model. We additionally release an open source reference implementation of the algorithm.}
}
@inproceedings{Feller1949OnTT,
title={On the Theory of Stochastic Processes, with Particular Reference to Applications},
author={William Feller},
booktitle={Proceedings of the First Berkeley Symposium on Mathematical Statistics and Probability},
year={1949},
url={https://api.semanticscholar.org/CorpusID:121027442}
}
@misc{luo2022understandingdiffusionmodelsunified,
title={Understanding Diffusion Models: A Unified Perspective},
author={Calvin Luo},
year={2022},
eprint={2208.11970},
archivePrefix={arXiv},
primaryClass={cs.LG},
url={https://arxiv.org/abs/2208.11970},
}
@inproceedings{choi2020starganv2,
title={StarGAN v2: Diverse Image Synthesis for Multiple Domains},
author={Yunjey Choi and Youngjung Uh and Jaejun Yoo and Jung-Woo Ha},
booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
year={2020}
}
@misc{dieleman2023geometry,
author = {Dieleman, Sander},
title = {The geometry of diffusion guidance},
url = {https://sander.ai/2023/08/28/geometry.html},
year = {2023}
}
@misc{song2020denoising,
title={Denoising Diffusion Implicit Models},
author={Song, Jiaming and Meng, Chenlin and Ermon, Stefano},
year={2020},
eprint={2010.02502},
archivePrefix={arXiv},
primaryClass={cs.LG},
url={https://arxiv.org/abs/2010.02502},
}
@misc{nichol2021improveddenoisingdiffusionprobabilistic,
title={Improved Denoising Diffusion Probabilistic Models},
author={Alex Nichol and Prafulla Dhariwal},
year={2021},
eprint={2102.09672},
archivePrefix={arXiv},
primaryClass={cs.LG},
url={https://arxiv.org/abs/2102.09672},
}