@comment{references.bib -- tensor-network and quantum-modeling bibliography.}
@misc{bradley_matrices_2019,
	title = {Matrices as {Tensor} {Network} {Diagrams}},
	url = {https://www.math3ma.com/blog/matrices-as-tensor-network-diagrams},
	howpublished = {Math3ma blog post},
	author = {Bradley, Tai-Danae},
	month = may,
	year = {2019}
}
@misc{bradley_modeling_2019,
	title = {Modeling {Sequences} with {Quantum} {States}: {A} {Look} {Under} the {Hood}},
	shorttitle = {Modeling {Sequences} with {Quantum} {States}},
	url = {http://arxiv.org/abs/1910.07425},
	abstract = {Classical probability distributions on sets of sequences can be modeled using quantum states. Here, we do so with a quantum state that is pure and entangled. Because it is entangled, the reduced densities that describe subsystems also carry information about the complementary subsystem. This is in contrast to the classical marginal distributions on a subsystem in which information about the complementary system has been integrated out and lost. A training algorithm based on the density matrix renormalization group (DMRG) procedure uses the extra information contained in the reduced densities and organizes it into a tensor network model. An understanding of the extra information contained in the reduced densities allow us to examine the mechanics of this DMRG algorithm and study the generalization error of the resulting model. As an illustration, we work with the even-parity dataset and produce an estimate for the generalization error as a function of the fraction of the dataset used in training.},
	urldate = {2020-05-06},
	author = {Bradley, Tai-Danae and Stoudenmire, E. Miles and Terilla, John},
	month = oct,
	year = {2019},
	eprint = {1910.07425},
	archiveprefix = {arXiv},
	primaryclass = {quant-ph},
	keywords = {Computer Science - Machine Learning, Quantum Physics, Statistics - Machine Learning}
}
@misc{biamonte_lectures_2020,
	title = {Lectures on {Quantum} {Tensor} {Networks}},
	url = {http://arxiv.org/abs/1912.10049},
	abstract = {Situated as a language between computer science, quantum physics and mathematics, tensor network theory has steadily grown in popularity and can now be found in applications ranging across the entire field of quantum information processing. This book aims to present the best contemporary practices in the use of tensor networks as a reasoning tool, placing quantum states, operators and processes on the same compositional footing. The book has 7 parts and over 40 subsections which took shape in over a decade of teaching. In addition to covering the foundations, the book covers important applications such as matrix product states, open quantum systems and entanglement \$-\$ all cast into the diagrammatic tensor network language. The intended audience includes those in quantum information science wishing to learn about tensor networks. It includes scientists who have employed tensor networks in their modeling codes who have interest in the tools graphical reasoning capacity. The audience further includes the graduate student researcher, whom with some effort, should find this book accessible. I would appreciate it if you emailed me about any mistakes or typos you find.},
	urldate = {2020-05-06},
	author = {Biamonte, Jacob},
	month = jan,
	year = {2020},
	eprint = {1912.10049},
	archiveprefix = {arXiv},
	primaryclass = {quant-ph},
	keywords = {Condensed Matter - Strongly Correlated Electrons, Mathematical Physics, Mathematics - Category Theory, Quantum Physics}
}
@misc{biamonte_tensor_2017,
	title = {Tensor {Networks} in a {Nutshell}},
	url = {http://arxiv.org/abs/1708.00006},
	abstract = {Tensor network methods are taking a central role in modern quantum physics and beyond. They can provide an efficient approximation to certain classes of quantum states, and the associated graphical language makes it easy to describe and pictorially reason about quantum circuits, channels, protocols, open systems and more. Our goal is to explain tensor networks and some associated methods as quickly and as painlessly as possible. Beginning with the key definitions, the graphical tensor network language is presented through examples. We then provide an introduction to matrix product states. We conclude the tutorial with tensor contractions evaluating combinatorial counting problems. The first one counts the number of solutions for Boolean formulae, whereas the second is Penrose's tensor contraction algorithm, returning the number of \$3\$-edge-colorings of \$3\$-regular planar graphs.},
	urldate = {2020-05-06},
	author = {Biamonte, Jacob and Bergholm, Ville},
	month = jul,
	year = {2017},
	eprint = {1708.00006},
	archiveprefix = {arXiv},
	primaryclass = {quant-ph},
	keywords = {Condensed Matter - Disordered Systems and Neural Networks, General Relativity and Quantum Cosmology, High Energy Physics - Theory, Mathematical Physics, Quantum Physics}
}
@book{coecke_picturing_2017,
	address = {Cambridge},
	title = {Picturing quantum processes: a first course in quantum theory and diagrammatic reasoning},
	isbn = {9781107104228},
	shorttitle = {Picturing quantum processes},
	publisher = {Cambridge University Press},
	doi = {10.1017/9781316219317},
	author = {Coecke, Bob and Kissinger, Aleks},
	year = {2017},
	keywords = {Logic, Symbolic and mathematical, Quantum computing, Quantum theory}
}
@article{verstraete_matrix_2008,
	title = {Matrix product states, projected entangled pair states, and variational renormalization group methods for quantum spin systems},
	volume = {57},
	issn = {0001-8732, 1460-6976},
	url = {http://www.tandfonline.com/doi/abs/10.1080/14789940801912366},
	doi = {10.1080/14789940801912366},
	language = {en},
	number = {2},
	urldate = {2020-05-06},
	journal = {Advances in Physics},
	author = {Verstraete, Frank and Murg, Valentin and Cirac, J. Ignacio},
	month = mar,
	year = {2008},
	pages = {143--224}
}
@article{orus_practical_2014,
	title = {A practical introduction to tensor networks: {Matrix} product states and projected entangled pair states},
	volume = {349},
	issn = {0003-4916},
	shorttitle = {A practical introduction to tensor networks},
	url = {https://linkinghub.elsevier.com/retrieve/pii/S0003491614001596},
	doi = {10.1016/j.aop.2014.06.013},
	language = {en},
	urldate = {2020-05-06},
	journal = {Annals of Physics},
	author = {Or{\'u}s, Rom{\'a}n},
	month = oct,
	year = {2014},
	eprint = {1306.2164},
	archiveprefix = {arXiv},
	pages = {117--158}
}
@article{kaiser_physics_2005,
	title = {Physics and {Feynman}'s {Diagrams}},
	volume = {93},
	issn = {0003-0996, 1545-2786},
	url = {http://www.americanscientist.org/issues/feature/2005/2/physics-and-feynmans-diagrams},
	doi = {10.1511/2005.52.957},
	language = {en},
	number = {2},
	urldate = {2020-05-06},
	journal = {American Scientist},
	author = {Kaiser, David},
	year = {2005},
	pages = {156}
}
@incollection{penrose_applications_1971,
	title = {Applications of negative dimensional tensors},
	url = {https://www.mscs.dal.ca/~selinger/papers/graphical-bib/public/Penrose-applications-of-negative-dimensional-tensors.pdf},
	booktitle = {Combinatorial {Mathematics} and its {Applications}},
	editor = {Welsh, Dominic J. A.},
	publisher = {Academic Press},
	address = {London},
	author = {Penrose, Roger},
	year = {1971},
	pages = {221--244}
}
@article{bridgeman_hand-waving_2017,
	title = {Hand-waving and interpretive dance: an introductory course on tensor networks},
	volume = {50},
	issn = {1751-8113, 1751-8121},
	shorttitle = {Hand-waving and interpretive dance},
	url = {https://iopscience.iop.org/article/10.1088/1751-8121/aa6dc3},
	doi = {10.1088/1751-8121/aa6dc3},
	number = {22},
	urldate = {2020-05-06},
	journal = {Journal of Physics A: Mathematical and Theoretical},
	author = {Bridgeman, Jacob C. and Chubb, Christopher T.},
	month = jun,
	year = {2017},
	eprint = {1603.03039},
	archiveprefix = {arXiv},
	pages = {223001}
}
@article{rabiner_tutorial_1989,
	title = {A tutorial on hidden {Markov} models and selected applications in speech recognition},
	volume = {77},
	issn = {0018-9219},
	url = {http://ieeexplore.ieee.org/document/18626/},
	doi = {10.1109/5.18626},
	number = {2},
	urldate = {2020-05-05},
	journal = {Proceedings of the IEEE},
	author = {Rabiner, Lawrence R.},
	month = feb,
	year = {1989},
	pages = {257--286}
}