% main.bib -- bibliography database (scraped GitHub page header and line-number gutter removed)
@article{mehta_sparsity-based_nodate,
  title    = {Sparsity-{Based} {Generalization} {Bounds} for {Predictive} {Sparse} {Coding}},
  author   = {Mehta, Nishant A. and Gray, Alexander G.},
  abstract = {The goal of predictive sparse coding is to learn a representation of examples as sparse linear combinations of elements from a dictionary, such that a learned hypothesis linear in the new representation performs well on a predictive task. Predictive sparse coding has demonstrated impressive performance on a variety of supervised tasks, but its generalization properties have not been studied. We establish the first generalization error bounds for predictive sparse coding, in the overcomplete setting, where the number of features k exceeds the original dimensionality d. The learning bound decays as {$\tilde{O}(\sqrt{dk/m})$} with respect to d, k, and the size m of the training sample. It depends intimately on stability properties of the learned sparse encoder, as measured on the training sample. Consequently, we also present a fundamental stability result for the LASSO, a result that characterizes the stability of the sparse codes with respect to dictionary perturbations.},
  language = {en},
  internal-note = {Auto-export is missing year and venue (key says "nodate") -- locate the published version and add year/booktitle. Repaired garbled rate "O~( dk/m)" to $\tilde{O}(\sqrt{dk/m})$ -- confirm against the paper.},
}
@article{horn1962eigenvalues,
  title     = {Eigenvalues of sums of {Hermitian} matrices},
  author    = {Horn, Alfred},
  year      = {1962},
  journal   = {Pacific Journal of Mathematics},
  publisher = {Mathematical Sciences Publishers},
  volume    = {12},
  number    = {1},
}
@article{hubel_receptive_1962,
  title   = {Receptive fields, binocular interaction and functional architecture in the cat's visual cortex},
  author  = {Hubel, D. H. and Wiesel, T. N.},
  year    = {1962},
  month   = jan,
  journal = {The Journal of Physiology},
  volume  = {160},
  number  = {1},
  issn    = {0022-3751},
  url     = {https://www.ncbi.nlm.nih.gov/pmc/articles/PMC1359523/},
  urldate = {2022-09-21},
  pmid    = {14449617},
  pmcid   = {PMC1359523},
  internal-note = {Removed auto-export placeholder abstract ("Images null"); pages missing -- add from the PubMed/PMC record},
}
@article{matheron1963principles,
  title     = {Principles of geostatistics},
  author    = {Matheron, Georges},
  year      = {1963},
  journal   = {Economic Geology},
  publisher = {Society of Economic Geologists},
  volume    = {58},
  number    = {8},
}
@article{marvcenko1967distribution,
  title     = {Distribution of eigenvalues for some sets of random matrices},
  author    = {Mar{\v{c}}enko, Vladimir A. and Pastur, Leonid Andreevich},
  year      = {1967},
  journal   = {Mathematics of the USSR-Sbornik},
  publisher = {IOP Publishing},
  volume    = {1},
  number    = {4},
}
@article{thompson1971eigenvalues,
  title     = {On the eigenvalues of sums of {Hermitian} matrices},
  author    = {Thompson, Robert C. and Freede, Linda J.},
  year      = {1971},
  journal   = {Linear Algebra and Its Applications},
  publisher = {Elsevier},
  volume    = {4},
  number    = {4},
}
@book{crank1979mathematics,
  title     = {The Mathematics of Diffusion},
  author    = {Crank, John},
  year      = {1979},
  publisher = {Oxford University Press},
}
@article{marr1979computational,
  author    = {Marr, David and Poggio, Tomaso},
  title     = {A computational theory of human stereo vision},
  journal   = {Proceedings of the Royal Society of London. Series B. Biological Sciences},
  publisher = {The Royal Society London},
  year      = {1979},
  volume    = {204},
  number    = {1156},
}
@book{Phillips81,
  title     = {Amorphous Solids: Low Temperature Properties},
  author    = {Anderson, A. C.},
  editor    = {Phillips, W. A.},
  year      = {1981},
  publisher = {Springer},
  address   = {Berlin},
  series    = {Topics in Current Physics},
  volume    = {24},
  internal-note = {Split city out of publisher field; this is an edited volume -- verify whether Anderson is a chapter author rather than the book author},
}
@book{Mezard87,
  title     = {Spin Glass Theory and Beyond: An Introduction to the Replica Method and Its Applications},
  author    = {M{\'e}zard, Marc and Parisi, Giorgio and Virasoro, Miguel},
  year      = {1987},
  publisher = {World Scientific Publishing Company},
  volume    = {9},
}
@article{Baum88,
  title     = {On the capabilities of multilayer perceptrons},
  author    = {Baum, Eric B.},
  year      = {1988},
  journal   = {Journal of Complexity},
  publisher = {Academic Press},
  volume    = {4},
  number    = {3},
}
@article{Gardner88,
  title     = {The space of interactions in neural network models},
  author    = {Gardner, Elizabeth},
  year      = {1988},
  journal   = {Journal of Physics A: Mathematical and General},
  publisher = {IOP Publishing},
  volume    = {21},
  number    = {1},
}
@article{bourrely1989parallelization,
  title     = {Parallelization of a neural network learning algorithm on a hypercube},
  author    = {Bourrely, J.},
  year      = {1989},
  journal   = {Hypercube and Distributed Computers},
  publisher = {Elsevier Science Publishing},
  internal-note = {Fixed "Elsiever" typo and moved publisher out of the journal field; this may be a proceedings volume -- verify entry type},
}
@article{lecun_backpropagation_1989,
  author   = {LeCun, Y. and Boser, B. and Denker, J. S. and Henderson, D. and Howard, R. E. and Hubbard, W. and Jackel, L. D.},
  title    = {Backpropagation {Applied} to {Handwritten} {Zip} {Code} {Recognition}},
  journal  = {Neural Computation},
  year     = {1989},
  month    = dec,
  volume   = {1},
  number   = {4},
  issn     = {0899-7667},
  doi      = {10.1162/neco.1989.1.4.541},
  url      = {https://doi.org/10.1162/neco.1989.1.4.541},
  urldate  = {2021-03-24},
  abstract = {The ability of learning networks to generalize can be greatly enhanced by providing constraints from the task domain. This paper demonstrates how such constraints can be integrated into a backpropagation network through the architecture of the network. This approach has been successfully applied to the recognition of handwritten zip code digits provided by the U.S. Postal Service. A single network learns the entire recognition operation, going from the normalized image of the character to the final classification.},
}
@article{piela1989multiple,
  author    = {Piela, Lucjan and Kostrowicki, Jaroslaw and Scheraga, Harold A},
  title     = {On the multiple-minima problem in the conformational analysis of molecules: deformation of the potential energy hypersurface by the diffusion equation method},
  journal   = {The Journal of Physical Chemistry},
  publisher = {ACS Publications},
  year      = {1989},
  volume    = {93},
  number    = {8},
}
@inproceedings{lecun1990optimal,
  author    = {LeCun, Yann and Denker, John S and Solla, Sara A},
  title     = {Optimal brain damage},
  booktitle = {Advances in neural information processing systems},
  year      = {1990},
}
@incollection{lecun90,
  title     = {Optimal Brain Damage},
  author    = {LeCun, Yann and Denker, John S. and Solla, Sara A.},
  editor    = {Touretzky, D. S.},
  year      = {1990},
  booktitle = {Advances in Neural Information Processing Systems 2},
  publisher = {Morgan-Kaufmann},
  url       = {http://papers.nips.cc/paper/250-optimal-brain-damage.pdf},
  internal-note = {Duplicate of entry lecun1990optimal (same work); consolidate citations to a single key},
}
@article{bottou1991stochastic,
  title   = {Stochastic gradient learning in neural networks},
  author  = {Bottou, L{\'e}on},
  year    = {1991},
  journal = {Proceedings of Neuro-N{\^\i}mes},
  volume  = {91},
  number  = {8},
  internal-note = {Fixed accent: original {N{\i}mes} rendered a dotless i with no circumflex},
}
@article{le1991eigenvalues,
  author    = {Le Cun, Yann and Kanter, Ido and Solla, Sara A},
  title     = {Eigenvalues of covariance matrices: Application to neural-network learning},
  journal   = {Physical Review Letters},
  publisher = {APS},
  year      = {1991},
  volume    = {66},
  number    = {18},
}
@article{amari1992four,
  author    = {Amari, Shun-ichi and Fujita, Naotake and Shinomoto, Shigeru},
  title     = {Four types of learning curves},
  journal   = {Neural Computation},
  publisher = {MIT Press},
  year      = {1992},
  volume    = {4},
  number    = {4},
}
@inproceedings{krogh1992simple,
  author    = {Krogh, Anders and Hertz, John A},
  title     = {A simple weight decay can improve generalization},
  booktitle = {Advances in neural information processing systems},
  year      = {1992},
}
@article{moller1993exact,
  title   = {Exact Calculation of the Product of the {Hessian} Matrix of Feed-Forward Network Error Functions and a Vector in {$O(N)$} Time},
  author  = {M{\o}ller, Martin F.},
  year    = {1993},
  journal = {DAIMI Report Series},
  volume  = {22},
  number  = {432},
  internal-note = {Repaired OCR garble "0 (N)" to $O(N)$},
}
@article{amari1993universal,
  title     = {A universal theorem on learning curves},
  author    = {Amari, Shun-Ichi},
  year      = {1993},
  journal   = {Neural Networks},
  publisher = {Elsevier},
  volume    = {6},
  number    = {2},
}
@article{cuku,
  author    = {Cugliandolo, Leticia F and Kurchan, Jorge},
  title     = {Analytical solution of the off-equilibrium dynamics of a long-range spin-glass model},
  journal   = {Physical Review Letters},
  publisher = {APS},
  year      = {1993},
  volume    = {71},
  number    = {1},
}
@article{hassibi1993second,
  author    = {Hassibi, Babak and Stork, David G and others},
  title     = {Second order derivatives for network pruning: Optimal brain surgeon},
  journal   = {Advances in neural information processing systems},
  publisher = {Morgan Kaufmann Publishers},
  year      = {1993},
}
@article{pardalos1994optimization,
  author    = {Pardalos, Panos M and Shalloway, David and Xue, Guoliang},
  title     = {Optimization methods for computing global minima of nonconvex potential energy functions},
  journal   = {Journal of Global Optimization},
  publisher = {Springer},
  year      = {1994},
  volume    = {4},
  number    = {2},
}
@article{pearlmutter1994fast,
  title     = {Fast exact multiplication by the {Hessian}},
  author    = {Pearlmutter, Barak A.},
  year      = {1994},
  journal   = {Neural Computation},
  publisher = {MIT Press},
  volume    = {6},
  number    = {1},
}
@article{Lecun95,
  author  = {LeCun, Yann and Bengio, Yoshua and others},
  title   = {Convolutional networks for images, speech, and time series},
  journal = {The handbook of brain theory and neural networks},
  year    = {1995},
  volume  = {3361},
  number  = {10},
}
@string{epje = {Eur.\ Phys.\ J.\ E}}
@string{epl  = {Europhys.\ Lett.}}
@string{pnas = {Proc.\ Natl.\ Acad.\ Sci.\ U.S.A.}}
@string{prb  = {Phys.\ Rev.\ B}}
@string{pre  = {Phys.\ Rev.\ E}}
@string{prl  = {Phys.\ Rev.\ Lett.}}
@string{rmp  = {Rev.\ Mod.\ Phys.}}
@article{Monasson95,
  author    = {Monasson, {R{\'e}mi} and Zecchina, Riccardo},
  title     = {Weight space structure and internal representations: a direct approach to learning and generalization in multilayer neural networks},
  journal   = {Physical review letters},
  publisher = {APS},
  year      = {1995},
  volume    = {75},
  number    = {12},
}
@article{monasson1995weight,
  title     = {Weight space structure and internal representations: a direct approach to learning and generalization in multilayer neural networks},
  author    = {Monasson, R{\'e}mi and Zecchina, Riccardo},
  year      = {1995},
  journal   = {Physical review letters},
  publisher = {APS},
  volume    = {75},
  number    = {12},
  internal-note = {Duplicate of entry Monasson95 (same work); consolidate citations to a single key},
}
@article{saad1995line,
  author    = {Saad, David and Solla, Sara A},
  title     = {On-line learning in soft committee machines},
  journal   = {Physical Review E},
  publisher = {APS},
  year      = {1995},
  volume    = {52},
  number    = {4},
}
@book{Statistical_Mechanics,
  title     = {Statistical Mechanics},
  author    = {Beale, P. D.},
  year      = {1996},
  publisher = {Elsevier Science},
  isbn      = {9780080541716},
  url       = {https://books.google.ch/books?id=PIk9sF9j2oUC},
  internal-note = {Author list looks incomplete -- this ISBN appears to be Pathria and Beale's Statistical Mechanics; verify and add the first author},
}
@book{Risken1996,
  title     = {The {Fokker-Planck} Equation},
  author    = {Risken, Hannes},
  year      = {1996},
  publisher = {Springer},
  series    = {Springer Series in Synergetics},
  internal-note = {Cleaned auto-export junk: series name was crammed into the title, a bogus journal field ("Dynamical Systems") was present, and the abstract was an unrelated text about offshore wind farms -- all removed. Verify edition and publisher details.},
}
@article{kurchanlaloux,
  author    = {Kurchan, Jorge and Laloux, Laurent},
  title     = {Phase space geometry and slow dynamics},
  journal   = {Journal of Physics A: Mathematical and General},
  publisher = {IOP Publishing},
  year      = {1996},
  volume    = {29},
  number    = {9},
}
@book{Neal1996,
  author    = {Neal, Radford M.},
  title     = {Bayesian Learning for Neural Networks},
  publisher = {Springer-Verlag New York, Inc.},
  address   = {Secaucus, NJ, USA},
  year      = {1996},
  isbn      = {0387947248},
}
@article{olshausen_emergence_1996,
  title     = {Emergence of simple-cell receptive field properties by learning a sparse code for natural images},
  author    = {Olshausen, Bruno A. and Field, David J.},
  year      = {1996},
  month     = jun,
  journal   = {Nature},
  volume    = {381},
  number    = {6583},
  doi       = {10.1038/381607a0},
  issn      = {1476-4687},
  url       = {https://www.nature.com/articles/381607a0},
  urldate   = {2022-09-29},
  copyright = {1996 Nature Publishing Group},
  abstract  = {THE receptive fields of simple cells in mammalian primary visual cortex can be characterized as being spatially localized, oriented and bandpass (selective to structure at different spatial scales), comparable to the basis functions of wavelet transforms. One approach to understanding such response properties of visual neurons has been to consider their relationship to the statistical structure of natural images in terms of efficient coding. Along these lines, a number of studies have attempted to train unsupervised learning algorithms on natural images in the hope of developing receptive fields with similar properties, but none has succeeded in producing a full set that spans the image space and contains all three of the above properties. Here we investigate the proposal that a coding strategy that maximizes sparseness is sufficient to account for these properties. We show that a learning algorithm that attempts to find sparse linear codes for natural scenes will develop a complete family of localized, oriented, bandpass receptive fields, similar to those found in the primary visual cortex. The resulting sparse image code provides a more efficient representation for later stages of processing because it possesses a higher degree of statistical independence among its outputs.},
  language  = {en},
  keywords  = {Humanities and Social Sciences, multidisciplinary, Science},
  internal-note = {Stripped citation-superscript digits that the export fused into the abstract text; removed a note field that merely duplicated the number and publisher},
}
@inproceedings{bos1997dynamics,
  author    = {{B{\"o}s}, Siegfried and Opper, Manfred},
  title     = {Dynamics of training},
  booktitle = {Advances in Neural Information Processing Systems},
  year      = {1997},
}
@article{hochreiter1997flat,
  author    = {Hochreiter, Sepp and Schmidhuber, {J{\"u}rgen}},
  title     = {Flat minima},
  journal   = {Neural Computation},
  publisher = {MIT Press},
  year      = {1997},
  volume    = {9},
  number    = {1},
}
@article{Hochreiter97,
  title     = {Flat minima},
  author    = {Hochreiter, Sepp and Schmidhuber, {J{\"u}rgen}},
  year      = {1997},
  journal   = {Neural Computation},
  publisher = {MIT Press},
  volume    = {9},
  number    = {1},
  internal-note = {Exact duplicate of entry hochreiter1997flat; consolidate citations to a single key},
}
@inproceedings{williams1997computing,
  title     = {Computing with infinite networks},
  author    = {Williams, Christopher K. I.},
  year      = {1997},
  booktitle = {Advances in neural information processing systems},
}
@article{reviewBCKM,
  title     = {Out of equilibrium dynamics in spin-glasses and other glassy systems},
  author    = {Bouchaud, Jean-Philippe and Cugliandolo, Leticia F and Kurchan, Jorge and M{\'e}zard, Marc},
  year      = {1998},
  journal   = {Spin glasses and random fields},
  publisher = {World Scientific, Singapore},
  internal-note = {Restored accent in M{\'e}zard for consistency with entries Mezard87 and mezard2002analytic},
}
@article{chen1998atomic,
  author  = {Chen, Scott Shaobing and Donoho, David L. and Saunders, Michael A.},
  title   = {Atomic Decomposition by Basis Pursuit},
  journal = {SIAM Journal on Scientific Computing},
  year    = {1998},
  volume  = {20},
  number  = {1},
  doi     = {10.1137/S1064827596304010},
}
@article{lecun_gradient-based_1998,
  title    = {Gradient-based learning applied to document recognition},
  author   = {LeCun, Yann and Bottou, L{\'e}on and Bengio, Yoshua and Haffner, Patrick},
  year     = {1998},
  month    = nov,
  journal  = {Proceedings of the IEEE},
  volume   = {86},
  number   = {11},
  doi      = {10.1109/5.726791},
  issn     = {1558-2256},
  abstract = {Multilayer neural networks trained with the back-propagation algorithm constitute the best example of a successful gradient based learning technique. Given an appropriate network architecture, gradient-based learning algorithms can be used to synthesize a complex decision surface that can classify high-dimensional patterns, such as handwritten characters, with minimal preprocessing. This paper reviews various methods applied to handwritten character recognition and compares them on a standard handwritten digit recognition task. Convolutional neural networks, which are specifically designed to deal with the variability of 2D shapes, are shown to outperform all other techniques. Real-life document recognition systems are composed of multiple modules including field extraction, segmentation recognition, and language modeling. A new learning paradigm, called graph transformer networks (GTN), allows such multimodule systems to be trained globally using gradient-based methods so as to minimize an overall performance measure. Two systems for online handwriting recognition are described. Experiments demonstrate the advantage of global training, and the flexibility of graph transformer networks. A graph transformer network for reading a bank cheque is also described. It uses convolutional neural network character recognizers combined with global training techniques to provide record accuracy on business and personal cheques. It is deployed commercially and reads several million cheques per day.},
  keywords = {2D shape variability, back-propagation, backpropagation, Character recognition, cheque reading, complex decision surface synthesis, convolution, convolutional neural network character recognizers, document recognition, document recognition systems, Feature extraction, field extraction, gradient based learning technique, gradient-based learning, graph transformer networks, GTN, handwritten character recognition, handwritten digit recognition task, Hidden Markov models, high-dimensional patterns, language modeling, Machine learning, Multi-layer neural network, multilayer neural networks, multilayer perceptrons, multimodule systems, Neural networks, optical character recognition, Optical character recognition software, Optical computing, Pattern recognition, performance measure minimization, Principal component analysis, segmentation recognition},
  internal-note = {Duplicate of entry lecun1998gradient; author names expanded to match it ("Lecun, Y." etc. were export artifacts). Removed note field duplicating the journal name.},
}
@article{lecun1998gradient,
  author    = {LeCun, Yann and Bottou, {L{\'e}on} and Bengio, Yoshua and Haffner, Patrick},
  title     = {Gradient-based learning applied to document recognition},
  journal   = {Proceedings of the IEEE},
  publisher = {IEEE},
  year      = {1998},
  volume    = {86},
  number    = {11},
}
@article{lecun1998efficient,
  title     = {Efficient backprop},
  author    = {LeCun, Yann and Bottou, {L\'eon} and Orr, G. B. and M{\"u}ller, K.-R.},
  year      = {1998},
  journal   = {Lecture notes in computer science},
  publisher = {Springer},
}
@article{niyogi_incorporating_1998,
  author   = {Niyogi, P. and Girosi, F. and Poggio, T.},
  title    = {Incorporating prior information in machine learning by creating virtual examples},
  journal  = {Proceedings of the IEEE},
  year     = {1998},
  month    = nov,
  volume   = {86},
  number   = {11},
  doi      = {10.1109/5.726787},
  issn     = {1558-2256},
  note     = {Conference Name: Proceedings of the IEEE},
  abstract = {One of the key problems in supervised learning is the insufficient size of the training set. The natural way for an intelligent learner to counter this problem and successfully generalize is to exploit prior information that may be available about the domain or that can be learned from prototypical examples. We discuss the notion of using prior knowledge by creating virtual examples and thereby expanding the effective training-set size. We show that in some contexts this idea is mathematically equivalent to incorporating the prior knowledge as a regularizer, suggesting that the strategy is well motivated. The process of creating virtual examples in real-world pattern recognition tasks is highly nontrivial. We provide demonstrative examples from object recognition and speech recognition to illustrate the idea.},
  keywords = {Associate members, Counting circuits, Function approximation, Knowledge acquisition, Learning systems, Machine learning, Pattern recognition, Prototypes, Speech, Supervised learning},
}
@incollection{prechelt1998early,
  title     = {Early stopping-but when?},
  author    = {Prechelt, Lutz},
  year      = {1998},
  booktitle = {Neural Networks: Tricks of the Trade},
  publisher = {Springer},
}
@article{smola1998connection,
  title     = {The connection between regularization operators and support vector kernels},
  author    = {Smola, Alex J and Sch{\"o}lkopf, Bernhard and M{\"u}ller, Klaus-Robert},
  year      = {1998},
  journal   = {Neural Networks},
  publisher = {Elsevier},
  volume    = {11},
  number    = {4},
}
@article{monasson1999determining,
  author    = {Monasson, {R{\'e}mi} and Zecchina, Riccardo and Kirkpatrick, Scott and Selman, Bart and Troyansky, Lidror},
  title     = {Determining computational complexity from characteristic phase transitions},
  journal   = {Nature},
  publisher = {Nature Publishing Group},
  year      = {1999},
  volume    = {400},
  number    = {6740},
}
@inproceedings{scholkopf_kernel_1999,
  title     = {Kernel principal component analysis},
  author    = {Sch{\"o}lkopf, Bernhard and Smola, Alexander and M{\"u}ller, Klaus-Robert},
  year      = {1999},
  booktitle = {Advances in {Kernel} {Methods} - {Support} {Vector} {Learning}},
  publisher = {MIT Press},
  abstract  = {A new method for performing a nonlinear form of Principal Component Analysis is proposed. By the use of integral operator kernel functions, one can efficiently compute principal components in high-dimensional feature spaces, related to input space by some nonlinear map; for instance the space of all possible d-pixel products in images. We give the derivation of the method and present experimental results on polynomial feature extraction for pattern recognition.},
  internal-note = {Replaced raw non-ASCII umlaut with the BibTeX escape and restored Sch{\"o}lkopf's umlaut, matching the escape convention used throughout this file},
}
@article{stein1999predicting,
  author    = {Stein, Michael L and others},
  title     = {Predicting random fields with increasing dense observations},
  journal   = {The Annals of Applied Probability},
  publisher = {Institute of Mathematical Statistics},
  year      = {1999},
  volume    = {9},
  number    = {1},
}
@article{Tkachenko99,
  title      = {Stress propagation through frictionless granular material},
  author     = {Tkachenko, Alexei V. and Witten, Thomas A.},
  year       = {1999},
  month      = jul,
  journal    = pre,
  publisher  = {American Physical Society},
  volume     = {60},
  number     = {1},
  pages      = {687--696},
  doi        = {10.1103/PhysRevE.60.687},
  internal-note = {month normalized to the jan..dec macro; journal switched to the pre @string declared in this file; pages inferred from DOI start page plus numpages=9 -- verify end page},
}
@article{dacey2000center,
  title     = {Center surround receptive field structure of cone bipolar cells in primate retina},
  author    = {Dacey, Dennis and Packer, Orin S and Diller, Lisa and Brainard, David and Peterson, Beth and Lee, Barry},
  year      = {2000},
  journal   = {Vision Research},
  publisher = {Elsevier},
  volume    = {40},
  number    = {14},
}
@inproceedings{domingos00,
  author    = {Domingos, Pedro},
  title     = {A unified bias-variance decomposition},
  booktitle = {Proceedings of 17th International Conference on Machine Learning},
  year      = {2000},
}
@article{Bremaud2000,
  title    = {Markov Chains: {Gibbs} Fields, {Monte} {Carlo} Simulation, and Queues},
  author   = {Saloff-Coste, Laurent and Bremaud, Pierre},
  year     = {2000},
  journal  = {Journal of the American Statistical Association},
  volume   = {95},
  number   = {452},
  doi      = {10.2307/2669802},
  issn     = {01621459},
  abstract = {This book discusses both the theory and applications of Markov chains. The author studies both discrete-time and continuous-time chains and connected topics such as finite Gibbs fields, non-homogeneous Markov chains, discrete time regenerative processes, Monte Carlo simulation, simulated annealing, and queueing networks are also developed in this accessible and self-contained text. The text is firstly an introduction to the theory of stochastic processes at the undergraduate or beginning graduate level. Its primary objective is to initiate the student to the art of stochastic modelling. The treatment is mathematical, with definitions, theorems, proofs and a number of classroom examples which help the student to fully grasp the content of the main results. Problems of varying difficulty are proposed at the close of each chapter. The text is motivated by significant applications and progressively brings the student to the borders of contemporary research. Students and researchers in operations research and electrical engineering as well as in physics, biology and the social sciences will find this book of interest.},
  internal-note = {Normalized author names to comma form and moved the issue number into the number field; this appears to be a JASA book review of Bremaud's book -- verify authorship/entry type},
}
@article{smola2000regularization,
  author  = {Smola, Alex and Ov{\'a}ri, Zolt{\'a}n and Williamson, Robert C.},
  title   = {Regularization with dot-product kernels},
  journal = {Advances in neural information processing systems},
  year    = {2000},
  volume  = {13},
}
@inproceedings{caruana2001overfitting,
  author    = {Caruana, Rich and Lawrence, Steve and Giles, C Lee},
  title     = {Overfitting in neural nets: Backpropagation, conjugate gradient, and early stopping},
  booktitle = {Advances in neural information processing systems},
  year      = {2001},
}
@book{engel2001statistical,
  author    = {Engel, Andreas and Van den Broeck, Christian},
  title     = {Statistical mechanics of learning},
  publisher = {Cambridge University Press},
  year      = {2001},
}
@article{knutson2001honeycombs,
  title   = {Honeycombs and sums of {Hermitian} matrices},
  author  = {Knutson, Allen and Tao, Terence},
  year    = {2001},
  journal = {Notices of the American Mathematical Society},
  volume  = {48},
  number  = {2},
}
@book{opper2001advanced,
  author    = {Opper, Manfred and Saad, David},
  title     = {Advanced mean field methods: Theory and practice},
  publisher = {MIT press},
  year      = {2001},
}
@book{scholkopf2001learning,
  title     = {Learning with kernels: support vector machines, regularization, optimization, and beyond},
  author    = {Sch{\"o}lkopf, Bernhard and Smola, Alexander J},
  year      = {2001},
  publisher = {MIT press},
}
@article{reviewbray,
  author    = {Bray, Alan J},
  title     = {Theory of phase-ordering kinetics},
  journal   = {Advances in Physics},
  publisher = {Taylor {\&} Francis},
  year      = {2002},
  volume    = {51},
  number    = {2},
}
@article{mezard2002analytic,
  author    = {M{\'e}zard, Marc and Parisi, Giorgio and Zecchina, Riccardo},
  title     = {Analytic and algorithmic solution of random satisfiability problems},
  journal   = {Science},
  publisher = {American Association for the Advancement of Science},
  year      = {2002},
  volume    = {297},
  number    = {5582},
}
@incollection{cugliandololeshouches,
  author    = {Cugliandolo, Leticia F},
  title     = {Course 7: Dynamics of glassy systems},
  booktitle = {Slow Relaxations and nonequilibrium dynamics in condensed matter},
  publisher = {Springer},
  year      = {2003},
}
@article{Ohern03,
  title      = {Jamming at zero temperature and zero applied stress: The epitome of disorder},
  author     = {O'Hern, Corey S. and Silbert, Leonardo E. and Liu, Andrea J. and Nagel, Sidney R.},
  year       = {2003},
  month      = jul,
  journal    = pre,
  publisher  = {American Physical Society},
  volume     = {68},
  number     = {1},
  pages      = {011306},
  doi        = {10.1103/PhysRevE.68.011306},
  internal-note = {month normalized to the jan..dec macro; journal switched to the pre @string declared in this file; article number taken from the DOI},
}
@article{Donev04a,
  title    = {Improving the Density of Jammed Disordered Packings Using Ellipsoids},
  author   = {Donev, Aleksandar and Cisse, Ibrahim and Sachs, David and Variano, Evan A. and Stillinger, Frank H. and Connelly, Robert and Torquato, Salvatore and Chaikin, P. M.},
  year     = {2004},
  journal  = {Science},
  volume   = {303},
  number   = {5660},
  doi      = {10.1126/science.1093010},
  abstract = {Packing problems, such as how densely objects can fill a volume, are among the most ancient and persistent problems in mathematics and science. For equal spheres, it has only recently been proved that the face-centered cubic lattice has the highest possible packing fraction $\phi = \pi/\sqrt{18} \approx 0.74$. It is also well known that certain random (amorphous) jammed packings have $\phi \approx 0.64$. Here, we show experimentally and with a new simulation algorithm that ellipsoids can randomly pack more densely---up to $\phi = 0.68$ to 0.71 for spheroids with an aspect ratio close to that of M\&M's Candies---and even approach $\phi \approx 0.74$ for ellipsoids with other aspect ratios. We suggest that the higher density is directly related to the higher number of degrees of freedom per particle and thus the larger number of particle contacts required to mechanically stabilize the packing. We measured the number of contacts per particle $Z \approx 10$ for our spheroids, as compared to $Z \approx 6$ for spheres. Our results have implications for a broad range of scientific disciplines, including the properties of granular media and ceramics, glass formation, and discrete geometry.},
  internal-note = {Abstract was mojibake (e.g. {{\oe}{\"U}} for phi, ?{{\^a}{\`a}} for approx, ?{{\"A}{\^\i}} for em-dash); reconstructed from context -- verify against the published abstract},
}
@article{lowe2004distinctive,
title = {Distinctive image features from scale-invariant keypoints},
author = {Lowe, David G.},
year = 2004,
journal = {International Journal of Computer Vision},
publisher = {Springer},
volume = 60,
number = 2,
}
@article{luxburg2004distance,
title = {Distance-based classification with {Lipschitz} functions},
author = {von Luxburg, Ulrike and Bousquet, Olivier},
year = 2004,
journal = {Journal of Machine Learning Research},
volume = 5,
number = {Jun},
}
@article{baik2005phase,
title = {Phase transition of the largest eigenvalue for nonnull complex sample covariance matrices},
author = {Baik, Jinho and Ben Arous, G{\'e}rard and P{\'e}ch{\'e}, Sandrine},
year = 2005,
journal = {The Annals of Probability},
publisher = {Institute of Mathematical Statistics},
volume = 33,
number = 5,
}
@article{cavagnaSGpedestrians,
  author    = {Castellani, Tommaso and Cavagna, Andrea},
  title     = {Spin-glass theory for pedestrians},
  journal   = {Journal of Statistical Mechanics: Theory and Experiment},
  year      = {2005},
  volume    = {2005},
  number    = {05},
  publisher = {IOP Publishing},
}
@article{ikeda2005asymptotic,
  author    = {Ikeda, Kazushi and Aoishi, Tsutomu},
  title     = {An asymptotic statistical analysis of support vector machines with soft margins},
  journal   = {Neural Networks},
  year      = {2005},
  volume    = {18},
  number    = {3},
  publisher = {Elsevier},
}
@article{Silbert05,
title = {Vibrations and Diverging Length Scales Near the Unjamming Transition},
author = {Silbert, Leonardo E. and Liu, Andrea J. and Nagel, Sidney R.},
year = 2005,
journal = {Physical Review Letters},
volume = 95,
}
@article{Wyart05b,
title = {On the Rigidity of Amorphous Solids},
author = {Wyart, Matthieu},
year = 2005,
journal = {Annales de Physique},
volume = 30,
number = 3,
}
@article{Wyart05a,
title = {Effects of compression on the vibrational modes of marginally jammed solids},
author = {Wyart, Matthieu and Silbert, Leonardo E and Nagel, Sidney R and Witten, Thomas A},
year = 2005,
journal = {Physical Review E},
publisher = {APS},
volume = 72,
number = 5,
}
@article{eqBAetal,
title = {{Cugliandolo-Kurchan} equations for dynamics of spin-glasses},
author = {Ben Arous, G{\'e}rard and Dembo, Amir and Guionnet, Alice},
year = 2006,
journal = {Probability Theory and Related Fields},
publisher = {Springer},
volume = 136,
number = 4,
}
@article{montanarisemerjian,
title = {Rigorous inequalities between length and time scales in glassy systems},
author = {Montanari, Andrea and Semerjian, Guilhem},
year = 2006,
journal = {Journal of Statistical Physics},
publisher = {Springer},
volume = 125,
number = 1,
}
@book{nocedal2006numerical,
title = {Numerical Optimization},
author = {Nocedal, Jorge and Wright, Stephen J},
year = 2006,
edition = {Second},
publisher = {Springer New York},
}
@book{williams2006gaussian,
title = {Gaussian Processes for Machine Learning},
author = {Rasmussen, Carl Edward and Williams, Christopher K. I.},
year = 2006,
publisher = {MIT Press},
address = {Cambridge, MA},
}
@article{pnasmontanariksat,
title = {Gibbs states and the set of solutions of random constraint satisfaction problems},
author = {Krzaka{\l}a, Florent and Montanari, Andrea and Ricci-Tersenghi, Federico and Semerjian, Guilhem and Zdeborov{\'a}, Lenka},
year = 2007,
journal = {Proceedings of the National Academy of Sciences},
publisher = {National Academy of Sciences},
volume = 104,
number = 25,
doi = {10.1073/pnas.0703685104},
issn = {0027-8424},
abstract = {An instance of a random constraint satisfaction problem defines a random subset $\mathcal{S}$ (the set of solutions) of a large product space $\mathcal{X}^N$ (the set of assignments). We consider two prototypical problem ensembles (random k-satisfiability and q-coloring of random regular graphs) and study the uniform measure with support on $\mathcal{S}$. As the number of constraints per variable increases, this measure first decomposes into an exponential number of pure states ({\textquotedblleft}clusters{\textquotedblright}) and subsequently condensates over the largest such states. Above the condensation point, the mass carried by the n largest states follows a Poisson-Dirichlet process. For typical large instances, the two transitions are sharp. We determine their precise location. Further, we provide a formal definition of each phase transition in terms of different notions of correlation between distinct variables in the problem. The degree of correlation naturally affects the performances of many search/sampling algorithms. Empirical evidence suggests that local Monte Carlo Markov chain strategies are effective up to the clustering phase transition and belief propagation up to the condensation point. Finally, refined message passing techniques (such as survey propagation) may also beat this threshold.},
}
@article{Zdeborova07,
title = {Phase transitions in the coloring of random graphs},
author = {Zdeborov{\'a}, Lenka and Krzaka{\l}a, Florent},
year = 2007,
journal = {Physical Review E},
publisher = {APS},
volume = 76,
number = 3,
}
@book{kardar2007statistical,
  author    = {Kardar, Mehran},
  title     = {Statistical physics of fields},
  publisher = {Cambridge University Press},
  year      = {2007},
}
@article{Krzakala07,
title = {Landscape analysis of constraint satisfaction problems},
author = {Krzaka{\l}a, Florent and Kurchan, Jorge},
year = 2007,
journal = {Physical Review E},
publisher = {APS},
volume = 76,
number = 2,
}
@inproceedings{watanabe2007almost,
title = {Almost all learning machines are singular},
author = {Watanabe, Sumio},
year = 2007,
booktitle = {2007 {IEEE} Symposium on Foundations of Computational Intelligence ({FOCI} 2007)},
organization = {IEEE},
}
@inproceedings{achlioptas2008algorithmic,
title = {Algorithmic barriers from phase transitions},
author = {Achlioptas, Dimitris and Coja-Oghlan, Amin},
year = 2008,
booktitle = {2008 49th Annual {IEEE} Symposium on Foundations of Computer Science ({FOCS}'08)},
organization = {IEEE},
}
@inproceedings{mairal_supervised_2008,
  author    = {Mairal, Julien and Ponce, Jean and Sapiro, Guillermo and Zisserman, Andrew and Bach, Francis},
  title     = {Supervised {Dictionary} {Learning}},
  booktitle = {Advances in {Neural} {Information} {Processing} {Systems}},
  publisher = {Curran Associates, Inc.},
  year      = {2008},
  volume    = {21},
  url       = {https://papers.nips.cc/paper/2008/hash/c0f168ce8900fa56e57789e2a2f2c9d0-Abstract.html},
  urldate   = {2022-09-29},
  abstract  = {It is now well established that sparse signal models are well suited to restoration tasks and can effectively be learned from audio, image, and video data. Recent research has been aimed at learning discriminative sparse models instead of purely reconstructive ones. This paper proposes a new step in that direction with a novel sparse representation for signals belonging to different classes in terms of a shared dictionary and multiple decision functions. It is shown that the linear variant of the model admits a simple probabilistic interpretation, and that its most general variant also admits a simple interpretation in terms of kernels. An optimization framework for learning all the components of the proposed model is presented, along with experiments on standard handwritten digit and texture classification tasks.},
}
@article{bengio2009learning,
title = {Learning deep architectures for {AI}},
author = {Bengio, Yoshua and others},
year = 2009,
journal = {Foundations and Trends{\textregistered} in Machine Learning},
publisher = {Now Publishers, Inc.},
volume = 2,
number = 1,
}
@inproceedings{deng_imagenet_2009,
title = {{ImageNet}: {A} large-scale hierarchical image database},
author = {Deng, Jia and Dong, Wei and Socher, Richard and Li, Li-Jia and Li, Kai and Fei-Fei, Li},
year = 2009,
month = jun,
booktitle = {2009 {IEEE} {Conference} on {Computer} {Vision} and {Pattern} {Recognition}},
doi = {10.1109/CVPR.2009.5206848},
note = {ISSN: 1063-6919},
abstract = {The explosion of image data on the Internet has the potential to foster more sophisticated and robust models and algorithms to index, retrieve, organize and interact with images and multimedia data. But exactly how such data can be harnessed and organized remains a critical problem. We introduce here a new database called “ImageNet”, a large-scale ontology of images built upon the backbone of the WordNet structure. ImageNet aims to populate the majority of the 80,000 synsets of WordNet with an average of 500-1000 clean and full resolution images. This will result in tens of millions of annotated images organized by the semantic hierarchy of WordNet. This paper offers a detailed analysis of ImageNet in its current state: 12 subtrees with 5247 synsets and 3.2 million images in total. We show that ImageNet is much larger in scale and diversity and much more accurate than the current image datasets. Constructing such a large-scale database is a challenging task. We describe the data collection scheme with Amazon Mechanical Turk. Lastly, we illustrate the usefulness of ImageNet through three simple applications in object recognition, image classification and automatic object clustering. We hope that the scale, accuracy, diversity and hierarchical structure of ImageNet can offer unparalleled opportunities to researchers in the computer vision community and beyond.},
keywords = {computer vision, Explosions, Image databases, image resolution, image retrieval, Image retrieval, ImageNet database, Information retrieval, Internet, large-scale hierarchical image database, large-scale ontology, Large-scale systems, multimedia computing, multimedia data, Multimedia databases, Ontologies, ontologies (artificial intelligence), Robustness, Spine, subtree, trees (mathematics), very large databases, visual databases, wordNet structure},
}
@techreport{krizhevsky_learning_2009,
title = {Learning multiple layers of features from tiny images},
author = {Krizhevsky, Alex},
year = 2009,
institution = {University of Toronto},
abstract = {Groups at MIT and NYU have collected a dataset of millions of tiny colour images from the web. It is, in principle, an excellent dataset for unsupervised training of deep generative models, but previous researchers who have tried this have found it difficult to learn a good set of filters from the images. We show how to train a multi-layer generative model that learns to extract meaningful features which resemble those found in the human visual cortex. Using a novel parallelization algorithm to distribute the work among multiple machines connected on a network, we show how training such a model can be done in reasonable time. A second problematic aspect of the tiny images dataset is that there are no reliable class labels which makes it hard to use for object recognition experiments. We created two sets of reliable labels. The CIFAR-10 set has 6000 examples of each of 10 classes and the CIFAR-100 set has 600 examples of each of 100 non-overlapping classes. Using these labels, we show that object recognition is significantly improved by pre-training a layer of features on a large set of unlabeled tiny images.},
}
@article{Mailman09,
title = {Jamming in Systems Composed of Frictionless Ellipse-Shaped Particles},
author = {Mailman, Mitch and Schreck, Carl F. and O'Hern, Corey S. and Chakraborty, Bulbul},
year = 2009,
month = jun,
journal = {Phys. Rev. Lett.},
publisher = {American Physical Society},
volume = 102,
number = 25,
doi = {10.1103/PhysRevLett.102.255501},
numpages = 4,
bdsk-url-1 = {http://dx.doi.org/10.1103/PhysRevLett.102.255501},
}
@incollection{Cho2009,
title = {Kernel Methods for Deep Learning},
author = {Cho, Youngmin and Saul, Lawrence K.},
year = 2009,
booktitle = {Advances in Neural Information Processing Systems 22},
publisher = {Curran Associates, Inc.},
url = {http://papers.nips.cc/paper/3628-kernel-methods-for-deep-learning.pdf},
}
@article{Zeravcic09,
title = {Excitations of ellipsoid packings near jamming},
author = {Zeravcic, Zorana and Xu, Ning and Liu, Andrea J. and Nagel, Sidney R. and van Saarloos, Wim},
year = 2009,
journal = {EPL (Europhysics Letters)},
volume = 87,
number = 2,
abstract = {We study the vibrational modes of three-dimensional jammed packings of soft ellipsoids of revolution as a function of particle aspect ratio $\epsilon$ and packing fraction. At the jamming transition for ellipsoids, as distinct from the idealized case using spheres where $\epsilon = 1$, there are many unconstrained and nontrivial rotational degrees of freedom. These constitute a set of zero-frequency modes that are gradually mobilized into a new rotational band as $|\epsilon - 1|$ increases. Quite surprisingly, as this new band is separated from zero frequency by a gap, and lies below the onset frequency for translational vibrations, $\omega^{*}$, the presence of these new degrees of freedom leaves unaltered the basic scenario that the translational spectrum is determined only by the average contact number. Indeed, $\omega^{*}$ depends solely on coordination as it does for compressed packings of spheres. We also discuss the regime of large $|\epsilon - 1|$, where the two bands merge.},
}
@incollection{bottou2010large,
  author    = {Bottou, L{\'e}on},
  title     = {Large-scale machine learning with stochastic gradient descent},
  booktitle = {Proceedings of COMPSTAT'2010},
  publisher = {Physica-Verlag HD},
  year      = {2010},
}
@incollection{Liu10,
title = {The jamming scenario - an introduction and outlook},
author = {Liu, Andrea J. and Nagel, Sidney R. and van Saarloos, Wim and Wyart, Matthieu},
year = 2010,
month = jun,
booktitle = {Dynamical Heterogeneities in Glasses, Colloids, and Granular Media},
publisher = {OUP Oxford},
}
@misc{lecun-mnist,
title = {{MNIST} handwritten digit database},
author = {LeCun, Yann and Cortes, Corinna},
year = 2010,
url = {http://yann.lecun.com/exdb/mnist/},
keywords = {MSc _checked character_recognition mnist network neural},
}
@inproceedings{martens2010deep,
  author    = {Martens, James},
  title     = {Deep learning via Hessian-free optimization},
  booktitle = {Proceedings of the 27th International Conference on Machine Learning (ICML-10)},
  year      = {2010},
}
@article{wyart2010scaling,
  author    = {Wyart, Matthieu},
  title     = {Scaling of phononic transport with connectivity in amorphous solids},
  journal   = {EPL (Europhysics Letters)},
  year      = {2010},
  volume    = {89},
  number    = {6},
  publisher = {IOP Publishing},
}
@article{reviewBB,
  author    = {Berthier, Ludovic and Biroli, Giulio},
  title     = {Theoretical perspective on the glass transition and amorphous materials},
  journal   = {Reviews of Modern Physics},
  year      = {2011},
  volume    = {83},
  number    = {2},
  publisher = {APS},
}
@incollection{alexnet,
title = {{ImageNet} Classification with Deep Convolutional Neural Networks},
author = {Krizhevsky, Alex and Sutskever, Ilya and Hinton, Geoffrey E},
year = 2012,
booktitle = {Advances in Neural Information Processing Systems 25},
publisher = {Curran Associates, Inc.},
url = {http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks.pdf},
editor = {F. Pereira and C. J. C. Burges and L. Bottou and K. Q. Weinberger},
}
@book{atkinson2012spherical,
  author    = {Atkinson, Kendall and Han, Weimin},
  title     = {Spherical harmonics and approximations on the unit sphere: an introduction},
  publisher = {Springer Science \& Business Media},
  year      = {2012},
  volume    = {2044},
}
@article{Charbonneau12,
title = {Universal Microstructure and Mechanical Stability of Jammed Packings},
author = {Charbonneau, Patrick and Corwin, Eric I. and Parisi, Giorgio and Zamponi, Francesco},
year = 2012,
month = nov,
journal = {Physical Review Letters},
publisher = {American Physical Society},
volume = 109,
number = 20,
doi = {10.1103/PhysRevLett.109.205501},
url = {http://link.aps.org/doi/10.1103/PhysRevLett.109.205501},
bdsk-url-1 = {http://link.aps.org/doi/10.1103/PhysRevLett.109.205501},
}
@article{Lerner12,
title = {Toward a microscopic description of flow near the jamming threshold},
author = {Lerner, Edan and D{\"u}ring, Gustavo and Wyart, Matthieu},
year = 2012,
journal = {EPL (Europhysics Letters)},
volume = 99,
number = 5,
bdsk-url-1 = {http://stacks.iop.org/0295-5075/99/i=5/a=58003},
}
@article{Hinton12,
title = {Deep neural networks for acoustic modeling in speech recognition: The shared views of four research groups},
author = {Hinton, Geoffrey and Deng, Li and Yu, Dong and Dahl, George E and Mohamed, Abdel-rahman and Jaitly, Navdeep and Senior, Andrew and Vanhoucke, Vincent and Nguyen, Patrick and Sainath, Tara N and others},
year = 2012,
journal = {IEEE Signal Processing Magazine},
publisher = {IEEE},
volume = 29,
number = 6,
}
@inproceedings{Krizhevsky12,
title = {{ImageNet} classification with deep convolutional neural networks},
author = {Krizhevsky, Alex and Sutskever, Ilya and Hinton, Geoffrey E},
year = 2012,
booktitle = {Advances in Neural Information Processing Systems},
}
@article{lerner2012unified,
title = {A unified framework for non-Brownian suspension flows and soft amorphous solids},
author = {Lerner, Edan and D{\"u}ring, Gustavo and Wyart, Matthieu},
year = 2012,
journal = {Proceedings of the National Academy of Sciences},
publisher = {National Academy of Sciences},
volume = 109,
number = 13,
}
@book{stein2012interpolation,
  author    = {Stein, Michael L},
  title     = {Interpolation of spatial data: some theory for kriging},
  publisher = {Springer Science \& Business Media},
  year      = {2012},
}
@article{wyart2012marginal,
title = {Marginal stability constrains force and pair distributions at random close packing},
author = {Wyart, Matthieu},
year = 2012,
journal = {Physical Review Letters},
publisher = {APS},
volume = 109,
number = 12,
}
@article{Wyart12,
title = {Marginal Stability Constrains Force and Pair Distributions at Random Close Packing},
author = {Wyart, Matthieu},
year = 2012,
month = sep,
journal = {Phys. Rev. Lett.},
publisher = {American Physical Society},
volume = 109,
number = 12,
doi = {10.1103/PhysRevLett.109.125502},
numpages = 5,
bdsk-url-1 = {http://link.aps.org/doi/10.1103/PhysRevLett.109.125502},
bdsk-url-2 = {http://dx.doi.org/10.1103/PhysRevLett.109.125502},
}
@article{During13,
title = {Phonon gap and localization lengths in floppy materials},
author = {D{\"u}ring, Gustavo and Lerner, Edan and Wyart, Matthieu},
year = 2013,
journal = {Soft Matter},
publisher = {Royal Society of Chemistry},
volume = 9,
number = 1,
}
@article{auffinger2013random,
  author    = {Auffinger, Antonio and Ben Arous, {G{\'e}rard} and {\v{C}}ern{\`y}, Ji{\v{r}}{\'\i}},
  title     = {Random matrices and complexity of spin glasses},
  journal   = {Communications on Pure and Applied Mathematics},
  year      = {2013},
  volume    = {66},
  number    = {2},
  publisher = {Wiley Online Library},
}
@article{bruna2013invariant,
title = {Invariant scattering convolution networks},
author = {Bruna, Joan and Mallat, St{\'e}phane},
year = 2013,
journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
publisher = {IEEE},
volume = 35,
number = 8,
}
@inproceedings{le2013building,
title = {Building high-level features using large scale unsupervised learning},
author = {Le, Quoc V},
year = 2013,
booktitle = {2013 {IEEE} International Conference on Acoustics, Speech and Signal Processing},
organization = {IEEE},
}
@article{Lerner13a,
title = {Low-energy non-linear excitations in sphere packings},
author = {Lerner, Edan and During, Gustavo and Wyart, Matthieu},
year = 2013,
journal = {Soft Matter},
publisher = {The Royal Society of Chemistry},
volume = 9,
doi = {10.1039/C3SM50515D},
issue = 34,