@ARTICLE{Aach-Church-2001,
AUTHOR = {John Aach and George M. Church},
DOI = {10.1093/bioinformatics},
JOURNALTITLE = {Bioinformatics},
LOCALFILE = {article/Aach-Church-2001.pdf},
PAGES = {495–508},
TITLE = {Aligning gene expression time series with time warping algorithms},
VOLUME = {17},
YEAR = {2001},
}
@ARTICLE{Aitchison-1964-tolerance,
ABSTRACT = {In the theory of statistical tolerance regions, as usually presented in frequentist terms, there are inherent difficulties of formulation, development and interpretation. The present paper re-examines the basic problem from a Bayesian point of view and suggests that such an approach provides a set of widely applicable, mathematically tractable tools, often more tailored to the requirements of users than the corresponding frequentist tools. For the one-dimensional case, Bayesian intervals are quoted for a number of standard distributions and prior densities, and the customary feature of a Bayesian analysis–that special prior densities give rise to standard frequentist results–is briefly demonstrated. A problem which seems to be of greater practical significance, namely the selection of an optimum tolerance region from a set of possible tolerance regions, is also investigated and the overwhelming advantages of the Bayesian approach are indicated.},
AUTHOR = {J. Aitchison},
JOURNALTITLE = {Journal of the Royal Statistical Society. Series B (Methodological)},
LOCALFILE = {article/Aitchison-1964-tolerance.pdf},
NUMBER = {2},
PAGES = {161–175},
TITLE = {Two Papers on the Comparison of Bayesian and Frequentist Approaches to Statistical Problems of Prediction: Bayesian Tolerance Regions},
URL = {http://links.jstor.org/stable/2984416},
VOLUME = {26},
YEAR = {1964},
}
@REPORT{VandenAkker-etal-2010,
ABSTRACT = {In electrical power networks nowadays more and more customers are becoming power-producers, mainly because of the development of novel components for decentral power generation (solar panels, small wind turbines and heat pumps). This gives rise to the question how many units of each type (solar panel, small wind turbine or central heating power units) can be inserted into any transmission line in the network, such that under given distributions on the typical production and consumption over time, the maximum loads on the lines and components will not be exceeded. In this paper, we present a linear programming model for maximizing the amount of decentral power generation while respecting the load limitations of the network. We describe a prototype showing that for an example network the maximization problem can be solved efficiently. We also modeled the case where the power consumption and decentral power generation are considered as stochastic variables, which is inherently more complex.},
AUTHOR = {Marjan van den Akker and Gabriël Bloemhof and Joost Bosman and Daan Crommelin and Jason Frank and Ghuangyuan Yang},
INSTITUTION = {KEMA},
LOCALFILE = {techreport/VandenAkker-etal-2010.pdf},
MONTH = {6},
TITLE = {Optimal distributed power generation under network load constraints},
TYPE = {techreport},
YEAR = {2010},
}
@ARTICLE{Alves-Costa-2009,
ABSTRACT = {In this paper we propose a new method to determine the exact nadir (minimum) criterion values over the efficient set in multiple objective linear programming (MOLP). The basic idea of the method is to determine, for each criterion, the region of the weight space associated with the efficient solutions that have a value in that criterion below the minimum already known (by default, the minimum in the payoff table). If this region is empty, the nadir value has been found. Otherwise, a new efficient solution is computed using a weight vector picked from the delimited region and a new iteration is performed. The method is able to find the nadir values in MOLP problems with any number of objective functions, although the computational effort increases significantly with the number of objectives. Computational experiments are described and discussed, comparing two slightly different versions of the method.},
AUTHOR = {Maria João Alves and João Paulo Costa},
DOI = {10.1016/j.ejor.2008.10.003},
ISSN = {0377-2217},
JOURNALTITLE = {European Journal of Operational Research},
KEYWORDS = {Multiple criteria analysis; Multiple objective programming; Nadir point},
LOCALFILE = {article/Alves-Costa-2009.pdf},
NUMBER = {2},
PAGES = {637–646},
TITLE = {An exact method for computing the nadir values in multiple objective linear programming},
VOLUME = {198},
YEAR = {2009},
}
@INPROCEEDINGS{Antonucci-deRosa-Giusti-2011,
ABSTRACT = {Hidden Markov models (HMMs) are powerful tools to capture the dynamics of a human action by providing a sufficient level of abstraction to recognise what two video sequences, depicting the same kind of action, have in common. If the sequence is short and hence only few data are available, the EM algorithm, which is generally employed to learn HMMs, might return unreliable estimates. As a possible solution to this problem, a robust version of the EM algorithm, which provides an interval-valued quantification of the HMM probabilities is provided. This takes place in an imprecise-probabilistic framework, where action recognition can be based on the (bounds of the) likelihood assigned by an imprecise HMM to the considered video sequence. Experiments show that this approach is quite effective in discriminating the hard-to-recognise sequences from the easy ones. In practice, either the recognition algorithm returns a set of action labels, which typically includes the right one, either a single answer, which is very likely to be correct, is provided.},
AUTHOR = {Alessandro Antonucci and Rocco de Rosa and Alessandro Giusti},
BOOKTITLE = {IPCV 2011: Proceedings of the 2011 International Conference on Image Processing, Computer Vision and Pattern Recognition},
PAGES = {474–478},
PUBLISHER = {CSREA Press},
TITLE = {Action Recognition by Imprecise Hidden Markov Models},
YEAR = {2011},
}
@INCOLLECTION{Antonucci-Salvetti-Zaffalon-2007,
AUTHOR = {Alessandro Antonucci and Andrea Salvetti and Marco Zaffalon},
BOOKTITLE = {Advanced Methods for Decision Making and Risk Management in Sustainability Science},
CHAPTER = {10},
EDITOR = {Jürgen Kropp and J. Scheffran},
ISBN = {1-60021-427-4},
PAGES = {237–256},
PUBLISHER = {Nova Publishers},
TITLE = {Credal Networks for Hazard Assessment of Debris Flows},
YEAR = {2007},
}
@ARTICLE{Applegate-et-al-2007,
ABSTRACT = {The use of floating-point calculations limits the accuracy of solutions obtained by standard LP software. We present a simplex-based algorithm that returns exact rational solutions, taking advantage of the speed of floating-point calculations and attempting to minimize the operations performed in rational arithmetic. Extensive computational results are presented.},
AUTHOR = {David L. Applegate and William Cook and Sanjeeb Dash and Daniel G. Espinoza},
DOI = {10.1016/j.orl.2006.12.010},
ISSN = {0167-6377},
JOURNALTITLE = {Operations Research Letters},
KEYWORDS = {Linear programming; Simplex algorithm; Rational arithmetic},
LOCALFILE = {article/Applegate-et-al-2007.pdf},
NUMBER = {6},
PAGES = {693–699},
TITLE = {Exact solutions to linear programming problems},
VOLUME = {35},
YEAR = {2007},
}
@ARTICLE{Arnold-Castillo-Sarabia-1993,
ANNOTATION = {also on paper},
AUTHOR = {Barry C. Arnold and Enrique Castillo and Jose María Sarabia},
DOI = {10.1080/02331889308802432},
JOURNALTITLE = {Statistics},
LOCALFILE = {article/Arnold-Castillo-Sarabia-1993.pdf},
PAGES = {71–77},
PUBLISHER = {Taylor \& Francis},
TITLE = {Conjugate exponential family priors for exponential family likelihoods},
VOLUME = {25},
YEAR = {1993},
}
@ARTICLE{Aughenbaugh-Herrmann-2009,
ABSTRACT = {This paper considers the problem of choosing between an existing component whose reliability is well established and a new component that has an unknown reliability. In some scenarios, the designer may have some initial beliefs about the new component's reliability. The designer may also have the opportunity to obtain more information and to update these beliefs. Then, based on these updated beliefs, the designer must make a decision between the two components. This paper examines the statistical approaches for updating reliability assessments and the decision policy that the designer uses. We consider four statistical approaches for modeling the uncertainty about the new component and updating assessments of its reliability: A classical approach, a precise Bayesian approach, a robust Bayesian approach, and an imprecise probability approach. The paper investigates the impact of different approaches on the decision between the components and compares them. In particular, given that the test results are random, the paper considers the likelihood of making a correct decision with each statistical approach under different scenarios of available information and true reliability. In this way, the emphasis is on practical comparisons of the policies rather than on philosophical arguments.},
AUTHOR = {J. M. Aughenbaugh and J. W. Herrmann},
DOI = {10.1080/15598608.2009.10411926},
ISSN = {1559-8608},
JOURNALTITLE = {Journal of Statistical Theory and Practice},
KEYWORDS = {Bayesian statistics; Imprecise probabilities; Reliability assessment},
LOCALFILE = {article/Aughenbaugh-Herrmann-2009.pdf},
MONTH = {03},
NUMBER = {1},
PAGES = {289–303},
PUBLISHER = {Taylor \& Francis},
TITLE = {Reliability-Based Decision Making: A Comparison of Statistical Approaches},
VOLUME = {3},
YEAR = {2009},
}
@ARTICLE{Augustin-2005,
ABSTRACT = {Dempster-Shafer theory allows to construct belief functions from (precise) basic probability assignments. The present paper extends this idea substantially. By considering sets of basic probability assignments, an appealing constructive approach to general interval probability is achieved, which allows for a very flexible modelling of uncertain knowledge.},
AUTHOR = {Thomas Augustin},
DOI = {10.1080/03081070500190839},
JOURNALTITLE = {International Journal of General Systems},
KEYWORDS = {Basic probability assignment; Belief function; Dempster–Shafer theory; Imprecise probabilities; Interval probability; Linear partial information},
LOCALFILE = {article/Augustin-2005.pdf},
NUMBER = {4},
PAGES = {451–463},
TITLE = {Generalized basic probability assignments},
VOLUME = {34},
YEAR = {2005},
}
@REPORT{Augustin-2003-note,
AUTHOR = {Thomas Augustin},
INSTITUTION = {LMU München},
TITLE = {A Note on Lower Envelopes},
TYPE = {techreport},
YEAR = {2003},
}
@THESIS{Augustin-1998-phdthesis-parts,
ANNOTATION = {Extracts},
AUTHOR = {Thomas Augustin},
INSTITUTION = {LMU München},
ISBN = {978-3-52511411-7},
TITLE = {Optimale Tests bei Intervallwahrscheinlichkeit},
TYPE = {phdthesis},
YEAR = {1998},
}
@ARTICLE{Augustin-Coolen-2004,
ABSTRACT = {The assumption A(n), proposed by Hill (J. Amer. Statist. Assoc. 63 (1968) 677), provides a natural basis for low structure non-parametric predictive inference, and has been justified in the Bayesian framework. This paper embeds A(n)-based inference into the theory of interval probability, by showing that the corresponding bounds are totally monotone F-probability and coherent. Similar attractive internal consistency results are proven to hold for conditioning and updating.},
AUTHOR = {Thomas Augustin and Frank P. A. Coolen},
DOI = {10.1016/j.jspi.2003.07.003},
JOURNALTITLE = {Journal of Statistical Planning and Inference},
KEYWORDS = {A(n); Capacities; Conditioning; Consistency; Imprecise probabilities; Interval probability; Low structure inference; Non- parametrics; Predictive inference; Updating},
LOCALFILE = {article/Augustin-Coolen-2004.pdf},
NUMBER = {2},
PAGES = {251–272},
TITLE = {Nonparametric predictive inference and interval probability},
VOLUME = {124},
YEAR = {2004},
}
@ARTICLE{Aumann-1964,
ABSTRACT = {Theorem B of [1] is false as it stands (Section 1). It is true if the preference order is defined by a finite number of linear functions (Section 2) or alternatively, if the archimedean assumption [1, (1.2)] is replaced by either of the stronger forms [1, (4.1)] or [1, (4.2)] (Section 3). A corresponding correction must be made for Theorem C of [1] (Section 4).},
AUTHOR = {Robert J. Aumann},
ISSN = {0012-9682},
JOURNALTITLE = {Econometrica},
LOCALFILE = {article/Aumann-1964.pdf},
NUMBER = {1/2},
PAGES = {210–212},
PUBLISHER = {The Econometric Society},
TITLE = {Utility Theory without the Completeness Axiom: A Correction},
URL = {http://www.jstor.org/stable/1913746},
VOLUME = {32},
YEAR = {1964},
}
@ARTICLE{Aumann-1962,
ABSTRACT = {A utility theory is developed that parallels the von Neumann-Morgenstern utility theory, but makes no use of the assumption that preferences are complete (i.e., that any two alternatives are comparable).},
AUTHOR = {Robert J. Aumann},
ISSN = {0012-9682},
JOURNALTITLE = {Econometrica},
LOCALFILE = {article/Aumann-1962.pdf},
NUMBER = {3},
PAGES = {445–462},
PUBLISHER = {The Econometric Society},
TITLE = {Utility Theory without the Completeness Axiom},
URL = {http://www.jstor.org/stable/1909888},
VOLUME = {30},
YEAR = {1962},
}
@INBOOK{Avis-2000-lrs,
AUTHOR = {David Avis},
BOOKTITLE = {Polytopes - Combinatorics and Computation},
EDITOR = {Gil Kalai and Günther Ziegler},
PAGES = {177–198},
PUBLISHER = {Birkhäuser},
SERIES = {DMV Seminar},
TITLE = {lrs: A Revised Implementation of the Reverse Search Vertex Enumeration Algorithm},
URL = {http://cgm.cs.mcgill.ca/~avis/C/lrs.html},
VOLUME = {29},
YEAR = {2000},
}
@ARTICLE{Avis-Bremner-Seidel-1997,
ABSTRACT = {A convex polytope P can be specified in two ways: as the convex hull of the vertex set V of P, or as the intersection of the set H of its facet-inducing halfspaces. The vertex enumeration problem is to compute V from H. The facet enumeration problem is to compute H from V. These two problems are essentially equivalent under point/hyperplane duality. They are among the central computational problems in the theory of polytopes. It is open whether they can be solved in time polynomial in |H| + |V| and the dimension. In this paper we consider the main known classes of algorithms for solving these problems. We argue that they all have at least one of two weaknesses: inability to deal well with "degeneracies", or, inability to control the sizes of intermediate results. We then introduce families of polytopes that exercise those weaknesses. Roughly speaking, fat-lattice or intricate polytopes cause algorithms with bad degeneracy handling to perform badly; dwarfed polytopes cause algorithms with bad intermediate size control to perform badly. We also present computational experience with trying to solve these problem on these hard polytopes, using various implementations of the main algorithms.},
AUTHOR = {David Avis and David Bremner and Raimund Seidel},
DOI = {10.1016/S0925-7721(96)00023-5},
JOURNALTITLE = {Computational Geometry},
KEYWORDS = {Convex hulls; Convex polytopes; Lattice complexity; Triangulation complexity; Vertex enumeration},
LOCALFILE = {article/Avis-Bremner-Seidel-1997.pdf},
PAGES = {265–301},
TITLE = {How good are convex hull algorithms?},
VOLUME = {7},
YEAR = {1997},
}
@ARTICLE{Avis-Fukuda-1992,
AUTHOR = {David Avis and Komei Fukuda},
DOI = {10.1007/BF02293050},
JOURNALTITLE = {Discrete \& Computational Geometry},
LOCALFILE = {article/Avis-Fukuda-1992.pdf},
NUMBER = {1},
PAGES = {295–313},
PUBLISHER = {Springer},
TITLE = {A pivoting algorithm for convex hulls and vertex enumeration of arrangements and polyhedra},
URL = {http://www.digizeitschriften.de/dms/img/?PPN=GDZPPN000365548},
VOLUME = {8},
YEAR = {1992},
}
@INPROCEEDINGS{Bagnara-etal-2002,
AUTHOR = {Roberto Bagnara and Elisa Ricci and Enea Zaffanella and Patricia M. Hill},
BOOKTITLE = {Static Analysis: Proceedings of the 9th International Symposium},
DOI = {10.1007/3-540-45789-5_17},
EDITOR = {Manuel V. Hermenegildo and Germán Puebla},
ISBN = {3-540-44235-9},
LOCALFILE = {inproceedings/Bagnara-etal-2002.pdf},
PAGES = {213–229},
PUBLISHER = {Springer},
SERIES = {Lecture Notes in Computer Science},
TITLE = {Possibly Not Closed Convex Polyhedra and the Parma Polyhedra Library},
URL = {http://bugseng.com/products/ppl/documentation/BagnaraRZH02.pdf},
VOLUME = {2477},
YEAR = {2002},
}
@BOOK{BangJensen-Gutin-2008-digraphs,
AUTHOR = {Jørgen Bang-Jensen and Gregory Gutin},
EDITION = {2},
PUBLISHER = {Springer},
TITLE = {Digraphs: Theory, Algorithms and Applications},
URL = {http://books.google.com/books?id=4UY-ucucWucC},
YEAR = {2008},
}
@ARTICLE{Barabasi-Oltvai-2004,
ABSTRACT = {A key aim of postgenomic biomedical research is to systematically catalogue all molecules and their interactions within a living cell. There is a clear need to understand how these molecules and the interactions between them determine the function of this enormously complex machinery, both in isolation and when surrounded by other cells. Rapid advances in network biology indicate that cellular networks are governed by universal laws and offer a new conceptual framework that could potentially revolutionize our view of biology and disease pathologies in the twenty-first century.},
AUTHOR = {Albert-László Barabási and Zoltán N. Oltvai},
DOI = {10.1038/nrg1272},
JOURNALTITLE = {Nature Reviews Genetics},
LOCALFILE = {article/Barabasi-Oltvai-2004.pdf},
NUMBER = {2},
PAGES = {101–113},
PUBLISHER = {Nature Publishing Group},
TITLE = {Network biology: understanding the cell's functional organization},
VOLUME = {5},
YEAR = {2004},
}
@BOOK{BarndorffNielsen-1978,
ANNOTATION = {Copies of selected parts},
AUTHOR = {Ole Barndorff-Nielsen},
PUBLISHER = {Wiley},
TITLE = {Information and Exponential Families In Statistical Theory},
YEAR = {1978},
}
@INPROCEEDINGS{Baroni-Vicig-2000-interchange,
AUTHOR = {Pietro Baroni and Paolo Vicig},
BOOKTITLE = {Proceedings of IPMU 2000},
PAGES = {1027–1034},
TITLE = {An uncertainty interchange format for multi-agent systems based on imprecise probabilities},
YEAR = {2000},
}
@ARTICLE{Basu-Pereira-1983a,
ABSTRACT = {A Skibinsky (1970) characterization of the family of hypergeometric distributions is re-examined from the point of view of sufficient experiments and a number of other distributions similarly characterized.},
AUTHOR = {D. Basu and Carlos A. B. Pereira},
JOURNALTITLE = {Sankhya Series A},
LOCALFILE = {article/Basu-Pereira-1983a.pdf},
NUMBER = {1},
PAGES = {99–104},
PUBLISHER = {Springer},
TITLE = {A Note on Blackwell Sufficiency and a Skibinsky Characterization of Distributions},
URL = {http://www.jstor.org/stable/25050417},
VOLUME = {45},
YEAR = {1983},
}
@ARTICLE{Basu-Pereira-1983b,
ABSTRACT = {The theory of conditional independence is explained and the relations between ancillarity, sufficiency and statistical independence are discussed in depth. Some related concepts like specific sufficiency, bounded completeness, and splitting sets are also studied in some details by using the language of conditional independence.},
AUTHOR = {D. Basu and Carlos A. B. Pereira},
JOURNALTITLE = {Sankhya Series A},
KEYWORDS = {(strong) identification; Conditional independence; Markov property; ancillarity; measurable separability; specific sufficiency; splitting sets; sufficiency; variation independence},
LOCALFILE = {article/Basu-Pereira-1983b.pdf},
NUMBER = {3},
PAGES = {324–337},
TITLE = {Conditional independence in statistics},
URL = {http://www.jstor.org/stable/25050444},
VOLUME = {45},
YEAR = {1983},
}
@ARTICLE{Bellman-Zadeh-1970,
ABSTRACT = {By decision-making in a fuzzy environment is meant a decision process in which the goals and/or the constraints, but not necessarily the system under control, are fuzzy in nature. This means that the goals and/or the constraints constitute classes of alternatives whose boundaries are not sharply defined. An example of a fuzzy constraint is: "The cost of A should not be substantially higher than $\alpha$," where $\alpha$ is a specified constant. Similarly, an example of a fuzzy goal is: "$\chi$ should be in the vicinity of $\chi$0," where $\chi$0 is a constant. The italicized words are the sources of fuzziness in these examples. Fuzzy goals and fuzzy constraints can be defined precisely as fuzzy sets in the space of alternatives. A fuzzy decision, then, may be viewed as an intersection of the given goals and constraints. A maximizing decision is defined as a point in the space of alternatives at which the membership function of a fuzzy decision attains its maximum value. The use of these conc},
AUTHOR = {R. E. Bellman and L. A. Zadeh},
DOI = {10.1287/mnsc.17.4.B141},
ISSN = {0025-1909},
JOURNALTITLE = {Management Science},
LOCALFILE = {article/Bellman-Zadeh-1970.pdf},
NUMBER = {4},
PAGES = {141–164},
PUBLISHER = {INFORMS},
TITLE = {Decision-making in a fuzzy environment},
VOLUME = {17},
YEAR = {1970},
}
@ARTICLE{Benabou-Tirole-2003,
ABSTRACT = {A central tenet of economics is that individuals respond to incentives. For psychologists and sociologists, in contrast, rewards and punishments are often counterproductive, because they undermine "intrinsic motivation". We reconcile these two views, showing how performance incentives offered by an informed principal (manager, teacher, parent) can adversely impact an agent's (worker, child) perception of the task, or of his own abilities. Incentives are then only weak reinforcers in the short run, and negative reinforcers in the long run. We also study the effects of empowerment, help and excuses on motivation, as well as situations of ego bashing reflecting a battle for dominance within a relationship.},
AUTHOR = {Roland Bénabou and Jean Tirole},
ISSN = {00346527},
JOURNALTITLE = {The Review of Economic Studies},
LOCALFILE = {article/Benabou-Tirole-2003.pdf},
NUMBER = {3},
PAGES = {489–520},
PUBLISHER = {Oxford University Press},
TITLE = {Intrinsic and Extrinsic Motivation},
URL = {http://www.jstor.org/stable/3648598},
VOLUME = {70},
YEAR = {2003},
}
@ARTICLE{Benaim-Hirsch-1999,
ABSTRACT = {Fictitious play in infinitely repeated, randomly perturbed games is investigated. Dynamical systems theory is used to study the Markov process {x\_k}, whose state vector x\_k lists the empirical frequencies of player's actions in the first k games. For 2 × 2 games with countably many Nash distribution equilibria, we prove that sample paths converge almost surely. But for Jordan's 3 × 2 matching game, there are robust parameter values giving probability 0 of convergence. Applications are made to coordination and anticoordination games and to general theory. Proofs rely on results in stochastic approximation and dynamical systems.},
AUTHOR = {Michael Benaïm and Morris W. Hirsch},
DOI = {10.1006/game.1999.0717},
ISSN = {0899-8256},
JOURNALTITLE = {Games and Economic Behavior},
MONTH = {10},
NUMBER = {1-2},
PAGES = {36–72},
TITLE = {Mixed Equilibria and Dynamical Systems Arising from Fictitious Play in Perturbed Games},
VOLUME = {29},
YEAR = {1999},
}
@INPROCEEDINGS{Alessio-Zaffalon-Miranda-2009-filtering,
ABSTRACT = {We extend hidden Markov models for continuous variables taking into account imprecision in our knowledge about the probabilistic relationships involved. To achieve that, we consider sets of probabilities, also called coherent lower previsions. In addition to the general formulation, we study in detail a particular case of interest: linear-vacuous mixtures. We also show, in a practical case, that our extension outperforms the Kalman filter when modelling errors are present in the system.},
AUTHOR = {Alessio Benavoli and Marco Zaffalon and Enrique Miranda},
BOOKTITLE = {FUSION 2009: Proceedings of the 12th International Conference on Information Fusion},
ORGANIZATION = {IEEE},
PAGES = {1743–1750},
TITLE = {Reliable hidden Markov model filtering through coherent lower previsions},
VENUE = {Seattle, Washington},
YEAR = {2009},
}
@ARTICLE{Benavoli-et-al-2010,
ABSTRACT = {We extend hidden Markov models for continuous variables taking into account imprecision in our knowledge about the probabilistic relationships involved. To achieve that, we consider sets of probabilities, also called coherent lower previsions. In addition to the general formulation, we study in detail a particular case of interest: linear-vacuous mixtures. We also show, in a practical case, that our extension outperforms the Kalman filter when modelling errors are present in the system.},
ANNOTATION = {also on paper},
AUTHOR = {Alessio Benavoli and Marco Zaffalon and Enrique Miranda},
JOURNALTITLE = {IEEE Transactions on Automatic Control},
LOCALFILE = {article/Benavoli-et-al-2010.pdf},
ORGANIZATION = {IEEE},
PAGES = {1743–1750},
TITLE = {A new robust approach to filtering based on coherent lower previsions},
VENUE = {Seattle, Washington},
YEAR = {2009},
}
@BOOK{BenHaim-2006-info-gap,
AUTHOR = {Yakov Ben-Haim},
EDITION = {2},
PUBLISHER = {Academic Press},
TITLE = {Info-Gap Decision Theory: Decisions Under Severe Uncertainty},
YEAR = {2006},
}
@ARTICLE{Benson-1998,
AUTHOR = {Harold P. Benson},
DOI = {10.1023/A:1008215702611},
ISSN = {0925-5001},
JOURNALTITLE = {Journal of Global Optimization},
KEYWORDS = {Efficient set; Global optimization; Multiple objective linear programming; Outer approximation; Vector maximization},
LOCALFILE = {article/Benson-1998.pdf},
NUMBER = {1},
PAGES = {1–24},
PUBLISHER = {Kluwer Academic Publishers},
TITLE = {An outer approximation algorithm for generating all efficient extreme points in the outcome set of a multiple objective linear programming problem},
VOLUME = {13},
YEAR = {1998},
}
@ARTICLE{Benson-1981,
ABSTRACT = {This note describes a class of linear multiple objective programs for which Isermann's method for finding an initial efficient extreme point is valid. The note also proposes a new method for generating an initial efficient point which enjoys advantages of both the Isermann method and of the modified Ecker-Kouada method.},
AUTHOR = {Harold P. Benson},
ISSN = {01605682},
JOURNALTITLE = {The Journal of the Operational Research Society},
LOCALFILE = {article/Benson-1981.pdf},
NUMBER = {6},
PAGES = {495–498},
PUBLISHER = {Palgrave Macmillan Journals on behalf of the Operational Research Society},
TITLE = {Finding an initial efficient extreme point for a linear multiple objective program},
URL = {http://www.jstor.org/stable/2581537},
VOLUME = {32},
YEAR = {1981},
}
@ARTICLE{BenTal-Nemirovski-2002,
ABSTRACT = {Robust Optimization (RO) is a modeling methodology, combined with computational tools, to process optimization problems in which the data are uncertain and is only known to belong to some uncertainty set. The paper surveys the main results of RO as applied to uncertain linear, conic quadratic and semidefinite programming. For these cases, computationally tractable robust counterparts of uncertain problems are explicitly obtained, or good approximations of these counterparts are proposed, making RO a useful tool for real-world applications. We discuss some of these applications, specifically: antenna design, truss topology design and stability analysis/synthesis in uncertain dynamic systems. We also describe a case study of 90 LPs from the NETLIB collection. The study reveals that the feasibility properties of the usual solutions of real world LPs can be severely affected by small perturbations of the data and that the RO methodology can be successfully used to overcome this phenomenon.},
AUTHOR = {Aharon Ben-Tal and Arkadi Nemirovski},
DOI = {10.1007/s101070100286},
ISSN = {0025-5610},
JOURNALTITLE = {Mathematical Programming},
LOCALFILE = {article/BenTal-Nemirovski-2002.pdf},
NUMBER = {3},
PAGES = {453–480},
PUBLISHER = {Springer},
TITLE = {Robust optimization – methodology and applications},
VOLUME = {92},
YEAR = {2002},
}
@REPORT{Berger-1993,
ANNOTATION = {also on paper},
AUTHOR = {James Berger},
LOCALFILE = {techreport/Berger-1993.pdf},
INSTITUTION = {Purdue University, Department of Statistics},
NUMBER = {93-53C},
TITLE = {An Overview of Robust Bayesian Analysis},
TYPE = {techreport},
YEAR = {1993},
}
@ARTICLE{Berger-1994-robust-overview,
ABSTRACT = {Robust Bayesian analysis is the study of the sensitivity of Bayesian answers to uncertain inputs. This paper seeks to provide an overview of the subject, one that is accessible to statisticians outside the field. Recent developments in the area are also reviewed, though with very uneven emphasis.},
AUTHOR = {James Berger and Elías Moreno and Luis Raúl Pericchi and M. Bayarri and José M. Bernardo and Juan Cano and Julián {De la Horra} and Jacinto Martín and David Ríos-Insúa and Bruno Betrò and A. Dasgupta and Paul Gustafson and Larry Wasserman and Joseph B. Kadane and Srinivasan Cid and Michael Lavine and Anthony O'Hagan and Wolfgang Polasek and Christian Robert and Constantinos Goutis and Fabrizio Ruggeri and Gabriella Salinetti and Siva Sivaganesan},
DOI = {10.1007/BF02562676},
JOURNALTITLE = {Test},
NUMBER = {1},
PAGES = {5–124},
TITLE = {An overview of Robust Bayesian analysis},
VOLUME = {3},
YEAR = {1994},
}
@MISC{Bernard-2003,
ANNOTATION = {Tutorial for ISIPTA '03},
AUTHOR = {Jean-Marc Bernard},
TITLE = {An Introduction to the Imprecise Dirichlet Model for Multinomial Data},
YEAR = {2003},
}
@ARTICLE{Bernard-1997-specificity,
AUTHOR = {Jean-Marc Bernard},
JOURNALTITLE = {Revue Internationale de Systémique},
LOCALFILE = {article/Bernard-1997-specificity.pdf},
NUMBER = {1},
PAGES = {11–29},
TITLE = {Bayesian analysis of tree-structured data},
VOLUME = {11},
YEAR = {1997},
}
@BOOK{Bernardo-Smith-1994,
AUTHOR = {José M. Bernardo and Adrian F. M. Smith},
PUBLISHER = {Wiley},
SERIES = {Wiley Series in Probability and Mathematical Statistics},
TITLE = {Bayesian theory},
YEAR = {1994},
}
@INPROCEEDINGS{Bertens-VanderGaag-Renooij-2012,
ABSTRACT = {Naive Bayesian networks are often used for classification problems that involve variables of a continuous nature. Upon capturing such variables, their value ranges are modelled as finite sets of discrete values. While the output probabilities and conclusions established from a Bayesian network are dependent of the actual discretisations used for its variables, the effects of choosing alternative discretisations are largely unknown as yet. In this paper, we study the effects of changing discretisations on the probability distributions computed from a naive Bayesian network. We demonstrate how recent insights from the research area of sensitivity analysis can be exploited for this purpose.},
AUTHOR = {Roel Bertens and Linda C. van der Gaag and Silja Renooij},
BOOKTITLE = {Advances in Computational Intelligence},
DOI = {10.1007/978-3-642-31718-7_17},
EDITOR = {Salvatore Greco and Bernadette Bouchon-Meunier and Giulianella Coletti and Mario Fedrizzi and Benedetto Matarazzo and Ronald R. Yager},
ISBN = {978-3-642-31718-7},
PAGES = {161–170},
PUBLISHER = {Springer Berlin Heidelberg},
SERIES = {Communications in Computer and Information Science},
TITLE = {Discretisation effects in naive Bayesian networks},
VOLUME = {299},
YEAR = {2012},
}
@BOOK{Bertsimas-Tsitsiklis-1997,
AUTHOR = {Dimitris Bertsimas and John N. Tsitsiklis},
ISBN = {1-886529-19-1},
KEYWORDS = {Linear programming; integer programming; mathematical optimization},
PUBLISHER = {Athena Scientific},
TITLE = {Introduction to linear optimization},
URL = {http://athenasc.com/linoptbook.html},
YEAR = {1997},
}
@INPROCEEDINGS{Biazzo-Gilio-Sanfilippo-2008,
ABSTRACT = {In this paper we consider imprecise conditional prevision assessments on random quantities with finite set of possible values. We use a notion of generalized coherence which is based on the coherence principle of de Finetti. We consider the checking of g-coherence, by extending some previous results obtained for imprecise conditional probability assessments. Then, we study a connection property of interval-valued gcoherent prevision assessments, by extending a result given in a previous paper for precise assessments.},
AUTHOR = {Veronica Biazzo and Angelo Gilio and Giuseppe Sanfilippo},
BOOKTITLE = {Proceedings of the IPMU '08},
PAGES = {907–914},
TITLE = {Generalized coherence and connection property of imprecise conditional previsions},
URL = {http://www.gimac.uma.es/ipmu08/proceedings/html/120.html},
VOLUME = {8},
YEAR = {2008},
}
@ARTICLE{Bickis-2009,
ABSTRACT = {Given data on inter-arrival times, the imprecise Dirichlet model can be used to determine upper and lower values on the survival function. Similar bounds on the hazard function can be quite irregular without some structural assumptions. To address this problem, a family of prior distributions for a binomial success probability is contructed by assuming that the logit of the probability has a normal distribution. Posterior distributions so defined form a three-dimensional exponential family of which the beta family is a limiting case. This family is extended to the multivariate case, which provides for the inclusion of prior information about autocorrelation in the parameters. By restricting the hyperparameters to a suitably chosen subset, this model is proposed as an alternative to the usual imprecise Dirichlet model of Walley, having the advantage of providing smoother estimates of the hazard function. The methods are applied to data on inter-occurrence times of pandemic influenza.},
ANNOTATION = {doi: 10.1080/15598608.2009.10411919},
AUTHOR = {Miķelis Bickis},
DOI = {10.1080/15598608.2009.10411919},
ISSN = {1559-8608},
JOURNALTITLE = {Journal of Statistical Theory and Practice},
KEYWORDS = {Autocorrelation; Hazard function; Imprecise inference},
LOCALFILE = {article/Bickis-2009.pdf},
MONTH = {03},
NUMBER = {1},
PAGES = {183–195},
PUBLISHER = {Taylor \& Francis},
TITLE = {The Imprecise Logit-Normal Model and its Application to Estimating Hazard Functions},
VOLUME = {3},
YEAR = {2009},
}
@ARTICLE{Bildikar-Patil-1968,
ABSTRACT = {Let \mathbf{x} and \mathbf{θ} denote s-dimensional column vectors. The components x\_1, x\_2,⋯ x\_s of \mathbf{x} are random variables jointly following an s-variate distribution and components θ\_1, θ\_2,⋯, θ\_s of \mathbf{θ} are real numbers. The random vector \mathbf{x} is said to follow an s-variate Exponential-type distribution with the parameter vector (pv) \mathbf{θ}, if its probability function (pf) is given by \begin{equation*}\tag{1.1} f(\mathbf{x}, \mathbf{θ}) = h(\mathbf{x}) \exp {\mathbf{x'θ} - q(\mathbf{θ})},\end{equation*} \mathbf{x} \varepsilon R\_s and \mathbf{θ} \varepsilon (\mathbf{a}, \mathbf{b}) \subset R\_s. R\_s denotes the s-dimensional Euclidean space. The s-dimensional open interval (\mathbf{a}, \mathbf{b}) may or may not be finite. h(\mathbf{x}) is a function of \mathbf{x}, independent of \mathbf{θ}, and q(\mathbf{θ}) is a bounded analytic function of θ\_1, θ\_2,⋯ θ\_s, independent of \mathbf{x}. We note that f(\mathbf{x}, \mathbf{θ}), given by (1.1), defines the class of multivariate exponential-type distributions which includes distributions like multivariate normal, multinomial, multivariate negative binomial, multivariate logarithmic series, etc. This paper presents a theoretical study of the structural properties of the class of multivariate exponential-type distributions. For example, different distributions connected with a multivariate exponential-type distribution are derived. Statistical independence of the components x\_1, x\_2,⋯, x\_s is discussed. The problem of characterization of different distributions in the class is studied under suitable restrictions on the cumulants. A canonical representation of the characteristic function of an infinitely divisible (id), purely discrete random vector, whose moments of second order are all finite, is also obtained. $\phi$(\mathbf{t}), m(\mathbf{t}), k(\mathbf{t}) denote, throughout this paper, the characteristic function (ch. f.), the moment generating function (mgf), and the cumulant generating function (cgf), respectively, of a random vector \mathbf{x}. The components t\_i of the s-dimensional column vector \mathbf{t} are all real.},
ANNOTATION = {also on paper},
AUTHOR = {Sheela Bildikar and G. P. Patil},
DOI = {10.1214/aoms/1177698257},
JOURNALTITLE = {The Annals of Mathematical Statistics},
LOCALFILE = {article/Bildikar-Patil-1968.pdf},
NUMBER = {4},
PAGES = {1316–1326},
PUBLISHER = {Institute of Mathematical Statistics},
TITLE = {Multivariate exponential-type distributions},
VOLUME = {39},
YEAR = {1968},
}
@ARTICLE{Billingsley-1961,
ABSTRACT = {This paper is an expository survey of the mathematical aspects of statistical inference as it applies to finite Markov chains, the problem being to draw inferences about the transition probabilities from one long, unbroken observation {x\_1, x\_2, ⋯, x\_n} on the chain. The topics covered include Whittle's formula, chi-square and maximum-likelihood methods, estimation of parameters, and multiple Markov chains. At the end of the paper it is briefly indicated how these methods can be applied to a process with an arbitrary state space or a continuous time parameter. Section 2 contains a simple proof of Whittle's formula; Section 3 provides an elementary and self-contained development of the limit theory required for the application of chi-square methods to finite chains. In the remainder of the paper, the results are accompanied by references to the literature, rather than by complete proofs. As is usual in a review paper, the emphasis reflects the author's interests. Other general accounts of statistical inference on Markov processes will be found in Grenander [53], Bartlett [9] and [10], Fortet [35], and in my monograph [18]. I would like to thank Paul Meier for a number of very helpful discussions on the topics treated in this paper, particularly those of Section 3.},
ANNOTATION = {annotated copy},
AUTHOR = {Patrick Billingsley},
JOURNALTITLE = {The Annals of Mathematical Statistics},
LOCALFILE = {article/Billingsley-1961.pdf},
NUMBER = {1},
PAGES = {12–40},
TITLE = {Statistical methods in Markov chains},
URL = {http://www.jstor.org/stable/2237603},
VOLUME = {32},
YEAR = {1961},
}
@INPROCEEDINGS{Blanco-etal-2004,
AUTHOR = {Rosa Blanco and Linda C. van der Gaag and Iñaki Inza and Pedro Larrañaga},
BOOKTITLE = {ISBMDA 2004: Proceedings of the 5th International Symposium on Biological and Medical Data Analysis},
EDITOR = {José María Barreiro and Fernando Martín-Sánchez and Victor Maojo and Ferran Sanz},
ISBN = {3-540-23964-2},
PAGES = {212–223},
PUBLISHER = {Springer},
SERIES = {Lecture Notes in Computer Science},
TITLE = {Selective classifiers can be too restrictive: a case-study in oesophageal cancer},
VOLUME = {3337},
YEAR = {2004},
}
@ARTICLE{Bloch-Watson-1967,
ABSTRACT = {Lindley [6] studies the topic in our title. By using Fisher's conditional-Poisson approach to the multinomial and the logarithmic transformation of gamma variables to normality, he showed that linear contrasts in the logarithms of the cell probabilities θ\_i are asymptotically jointly normal and suggested that the approximation can be improved by applying a "correction" to the sample. By studying the asymptotic series for the joint distribution in Section 2 an improved correction procedure is found below. A more detailed expansion is given in Section 3 for the distribution of a single contrast in the \log θ\_i. In many problems a linear function of the θ\_i is of interest. The exact distribution is obtained and is of a form familiar in the theory of serial correlation coefficients. A beta approximation is given. For three cells, a numerical example is given to show the merit of this approximation. A genetic linkage example is considered which requires the joint distribution of two linear functions of the θ\_i. The exact joint distribution is found but is too involved for practical use. A normal approximation leads to Lindley's results [7].},
AUTHOR = {Daniel A. Bloch and Geoffrey S. Watson},
JOURNALTITLE = {The Annals of Mathematical Statistics},
LOCALFILE = {article/Bloch-Watson-1967.pdf},
NUMBER = {5},
PAGES = {1423–1435},
TITLE = {A Bayesian study of the multinomial distribution},
URL = {http://www.jstor.org/stable/2238958},
VOLUME = {38},
YEAR = {1967},
}
@INCOLLECTION{Bolt+Van_der_Gaag-2010,
AUTHOR = {Janneke H. Bolt and Linda C. van der Gaag},
BOOKTITLE = {Information Processing and Management of Uncertainty in Knowledge-Based Systems. Theory and Methods},
DOI = {10.1007/978-3-642-14055-6_2},
EDITOR = {Eyke Hüllermeier and Rudolf Kruse and Frank Hoffmann},
ISBN = {978-3-642-14054-9},
LOCALFILE = {inproceedings/Bolt+Van_der_Gaag-2010.pdf},
PAGES = {11–20},
PUBLISHER = {Springer Berlin Heidelberg},
SERIES = {Communications in Computer and Information Science},
TITLE = {An empirical study of the use of the Noisy-OR model in a real-life Bayesian network},
VOLUME = {80},
YEAR = {2010},
}
@INCOLLECTION{Bolt+Van_der_Gaag-2007,
AUTHOR = {Janneke H. Bolt and Linda C. van der Gaag},
BOOKTITLE = {Advances in Probabilistic Graphical Models},
DOI = {10.1007/978-3-540-68996-6_7},
EDITOR = {Peter Lucas and José A. Gámez and Antonio Salmerón},
ISBN = {978-3-540-68994-2},
LOCALFILE = {incollection/Bolt+Van_der_Gaag-2007.pdf},
PAGES = {153–173},
PUBLISHER = {Springer Berlin Heidelberg},
SERIES = {Studies in Fuzziness and Soft Computing},
TITLE = {Decisiveness in loopy propagation},
VOLUME = {214},
YEAR = {2007},
}
@BOOK{Boole-1854,
AUTHOR = {George Boole},
PUBLISHER = {Macmillan},
TITLE = {The laws of thought},
URL = {http://www.gutenberg.org/ebooks/15114},
YEAR = {1854},
}
@UNPUBLISHED{DeBoor-1995-BBForm,
AUTHOR = {C. de Boor},
KEYWORDS = {Bernstein polynomials},
TITLE = {B-form basics},
YEAR = {1995},
}
@ARTICLE{Boratynska-1997,
ABSTRACT = {The problem of estimating the unknown parameter of a one-parameter exponential family with the conjugate prior is considered. Some uncertainty about the prior is assumed by introducing a class of priors Gamma. The most robust and conditional Gamma-minimax estimators are constructed. The situations when those estimators coincide are presented. The paper is a generalization of the result for the Poisson distribution obtained in Mezarski and Zielinski (1991).},
ANNOTATION = {also on paper},
AUTHOR = {Agata Boratyńska},
DOI = {10.1016/S0167-7152(97)00060-6},
JOURNALTITLE = {Statistics \& Probability Letters},
KEYWORDS = {Bayes estimators; classes of priors; one-parameter exponential family; robust Bayesian estimation},
LOCALFILE = {article/Boratynska-1997.pdf},
NUMBER = {2},
PAGES = {173–178},
PUBLISHER = {Elsevier},
TITLE = {Stability of Bayesian inference in exponential families},
VOLUME = {36},
YEAR = {1997},
}
@ARTICLE{Bose-2009-imposition,
ABSTRACT = {We consider the problem of imposing shape constraints on a neighborhood class – the density ratio class (DeRobertis and Hartigan, 1981). Bose (1994) used mixture distributions to impose shape and smoothness constraints simultaneously. We discuss how one may impose either or both unimodality and symmetry without requiring simultaneous imposition of a smoothness constraint.},
AUTHOR = {Sudip Bose},
DOI = {10.1080/15598608.2009.10411910},
ISSN = {1559-8608},
JOURNALTITLE = {Journal of Statistical Theory and Practice},
KEYWORDS = {Bayesian robustness; Convexity; Density bounded; Density ratio; Likelihood; Minimax; Neighborhood class; Posterior regret; Smoothness; Symmetry; Unimodality; $\Gamma$-minimax},
LOCALFILE = {article/Bose-2009-imposition.pdf},
MONTH = {03},
NUMBER = {1},
PAGES = {39–55},
PUBLISHER = {Taylor \& Francis},
TITLE = {On the imposition of shape constraints in a robust Bayesian analysis},
VOLUME = {3},
YEAR = {2009},
}
@ARTICLE{Bose-2009-smoothness,
ABSTRACT = {We examine the role of the likelihood in Bayesian robustness with the density ratio class (DeRobertis and Hartigan, 1981. Ann. Stat.) and show how to impose smoothness on the density ratio class after imposing shape constraints. We discuss how to impose shape constraints on the density bounded class (Lavine, 1991. JASA)},
ANNOTATION = {doi: 10.1080/15598608.2009.10411911},
AUTHOR = {Sudip Bose},
DOI = {10.1080/15598608.2009.10411911},
ISSN = {1559-8608},
JOURNALTITLE = {Journal of Statistical Theory and Practice},
KEYWORDS = {Bayesian robustness; Density bounded; Density ratio; Likelihood; Neighborhood class; Smoothness; Symmetry; Unimodality; Convexity; Minimax; Posterior regret; $\Gamma$-minimax},
LOCALFILE = {article/Bose-2009-smoothness.pdf},
MONTH = {03},
NUMBER = {1},
PAGES = {57–67},
PUBLISHER = {Taylor \& Francis},
TITLE = {On smoothness constraints with shape constraints in a robust Bayesian analysis},
VOLUME = {3},
YEAR = {2009},
}
@ARTICLE{Bot-Lorenz-Wanka-2010,
AUTHOR = {Radu Ioan Bot and Nicole Lorenz and Gert Wanka},
DOI = {10.4134/JKMS.2010.47.1.017},
JOURNALTITLE = {Journal of The Korean Mathematical Society},
LOCALFILE = {article/Bot-Lorenz-Wanka-2010.pdf},
PAGES = {17–28},
TITLE = {Duality for linear chance-constrained optimization problems},
VOLUME = {47},
YEAR = {2010},
}
@INPROCEEDINGS{Bouckaert-2004,
AUTHOR = {Remco R. Bouckaert},
BOOKTITLE = {AI 2004: Advances in Artificial Intelligence: 17th Australian Joint Conference on Artificial Intelligence},
EDITOR = {Geoffrey I. Webb and Xinghuo Yu},
PAGES = {1089–1094},
PUBLISHER = {Springer},
SERIES = {Lecture Notes in AI},
TITLE = {Naive Bayes Classifiers that Perform Well with Continuous Variables},
YEAR = {2004},
URL = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.127.5372&rep=rep1&type=pdf},
}
@ARTICLE{Boute-2005,
AUTHOR = {Raymond T. Boute},
DOI = {10.1145/1086642.1086647},
ISSN = {0164-0925},
JOURNALTITLE = {ACM Transactions on Programming Languages and Systems},
LOCALFILE = {article/Boute-2005.pdf},
MONTH = {09},
NUMBER = {5},
PAGES = {988–1047},
TITLE = {Functional declarative language design and predicate calculus},
URL = {http://portal.acm.org/citation.cfm?doid=1086642.1086647},
VOLUME = {27},
YEAR = {2005},
}
@BOOK{Boyd-Vandenberghe-2004,
AUTHOR = {Stephen Boyd and Lieven Vandenberghe},
LOCALFILE = {book/Boyd-Vandenberghe-2004.pdf},
PUBLISHER = {Cambridge University Press},
TITLE = {Convex Optimization},
URL = {http://www.stanford.edu/~boyd/cvxbook},
YEAR = {2004},
}
@INCOLLECTION{Brams-Fishburn-1991-altvote,
AUTHOR = {Steven J. Brams and Peter C. Fishburn},
BOOKTITLE = {Political Parties and Elections in the United States: An Encyclopedia},
EDITOR = {Sandy L Maisel},
PAGES = {23–31},
PUBLISHER = {Garland},
TITLE = {Alternative voting systems},
URL = {http://bcn.boulder.co.us/government/approvalvote/altvote.html},
VOLUME = {1},
YEAR = {1991},
}
@ARTICLE{1998-Bremner++-pd,
AUTHOR = {David Bremner and Komei Fukuda and Ambros Marzetta},
DOI = {10.1007/PL00009389},
JOURNALTITLE = {Discrete \& Computational Geometry},
LOCALFILE = {article/1998-Bremner++-pd.pdf},
PAGES = {333–357},
TITLE = {Primal-Dual Methods for Vertex and Facet Enumeration},
URL = {http://www.cs.unb.ca/profs/bremner/pd},
VOLUME = {20},
YEAR = {1998},
}
@BOOK{Brokken-2006,
AUTHOR = {Frank B. Brokken},
MONTH = {09},
PUBLISHER = {University of Groningen},
TITLE = {C++ Annotations},
YEAR = {2006},
}
@INPROCEEDINGS{Brown-1951,
AUTHOR = {George W. Brown},
BOOKTITLE = {Activity analysis of production and allocation},
EDITOR = {Tjalling C. Koopmans},
NUMBER = {13},
ORGANIZATION = {Cowles Commission for Research in Economics},
PAGES = {374–376},
SERIES = {Cowles Commission Monographs},
TITLE = {Iterative solution of games by fictitious play},
YEAR = {1951},
}
@BOOK{Brown-1986,
ANNOTATION = {Copies of selected parts},
AUTHOR = {Lawrence D. Brown},
EDITOR = {Shanti S. Gupta},
LOCATION = {Hayward, California},
PUBLISHER = {Institute of Mathematical Statistics},
SERIES = {Institute of Mathematical Statistics: Lecture Notes—Monograph Series},
TITLE = {Fundamentals of Statistical Exponential Families (with Applications in Statistical Decision Theory)},
VOLUME = {9},
YEAR = {1986},
}
@REPORT{Bruening-Dennenberg-2003-belELP,
ANNOTATION = {Direct proof (of Choquet's implicit proof) that {0,1}-valued belief measures are the extreme points of the set of belief measures.},
AUTHOR = {Martin Brüning and Dieter Denneberg},
INSTITUTION = {Universität Bremen},
TITLE = {The $\sigma$-additive Möbius Transform of Belief Measures via Choquet's Theorem},
TYPE = {techreport},
YEAR = {2003},
}
@ARTICLE{Bryant-Webster-1977-cvxIII,
AUTHOR = {V. W. Bryant and R. J. Webster},
DOI = {10.1016/0022-247X(77)90267-0},
ISSN = {0022-247X},
JOURNALTITLE = {Journal of Mathematical Analysis and Applications},
LOCALFILE = {article/Bryant-Webster-1972-cvxIII.pdf},
NUMBER = {2},
PAGES = {382–392},
TITLE = {Convexity spaces. III. Dimension},
VOLUME = {57},
YEAR = {1977},
}
@ARTICLE{Bryant-Webster-1972-cvxII,
ABSTRACT = {This is the second of a series of three papers dealing with convexity spaces. In the first paper [1] we defined a convexity space and investigated some of its basic properties. Here we consider the separation and support of convex sets. Throughout the paper we will be dealing with a convexity space (X, ·) and the terminology and notation used will be those of [1]. In particular Ac denotes the complement of the set A in X and \ is used to denote set-theoretic difference.},
AUTHOR = {V. W. Bryant and R. J. Webster},
DOI = {10.1016/0022-247X(73)90076-0},
ISSN = {0022-247X},
JOURNALTITLE = {Journal of Mathematical Analysis and Applications},
LOCALFILE = {article/Bryant-Webster-1972-cvxII.pdf},
NUMBER = {2},
PAGES = {321–327},
TITLE = {Convexity spaces. II. Separation},
VOLUME = {43},
YEAR = {1973},
}
@ARTICLE{Bryant-Webster-1972-cvxI,
AUTHOR = {V. W. Bryant and R. J. Webster},
DOI = {10.1016/0022-247X(72)90268-5},
ISSN = {0022-247X},
JOURNALTITLE = {Journal of Mathematical Analysis and Applications},
LOCALFILE = {article/Bryant-Webster-1972-cvxI.pdf},
NUMBER = {1},
PAGES = {206–213},
TITLE = {Convexity spaces. I. The basic properties},
VOLUME = {37},
YEAR = {1972},
}
@ARTICLE{Buehler-1976,
ABSTRACT = {De Finetti has defined coherent previsions and coherent probabilities, and others have described concepts of coherent actions or coherent decisions. Here we consider a related concept of coherent preferences. Willingness to accept one side of a bet is an example of a preference. A set of preferences is called incoherent if reversal of some subset yields a uniform increase in utility, as with a sure win for a collection of bets. In both probability and statistical models (where preferences are conditional on data) separating hyperplane theorems show that coherence implies existence of a probability measure from which the preferences could have been inferred. Relationships to confidence intervals and to decision theory are indicated. No single definition of coherence is given which covers all cases of interest. The various cases distinguish between probability and statistical models and between finite and infinite spaces. No satisfactory theory is given for continuous statistical models.},
AUTHOR = {Robert J. Buehler},
DOI = {10.1214/aos/1176343641},
ISSN = {0090-5364},
JOURNALTITLE = {The Annals of Statistics},
LOCALFILE = {article/Buehler-1976.pdf},
NUMBER = {6},
PAGES = {1051–1064},
PUBLISHER = {Institute of Mathematical Statistics},
TITLE = {Coherent preferences},
URL = {http://www.jstor.org/stable/2958578},
VOLUME = {4},
YEAR = {1976},
}
@ARTICLE{Burge-karlin-1997,
AUTHOR = {Chris Burge and Samuel Karlin},
JOURNALTITLE = {Journal of Molecular Biology},
PAGES = {78–94},
TITLE = {Prediction of Complete Gene Structures in Human Genomic DNA},
VOLUME = {268},
YEAR = {1997},
DOI = {10.1006/jmbi.1997.0951},
}
@BOOK{Burrill-1972-measure,
AUTHOR = {Claude W. Burrill},
PUBLISHER = {McGraw-Hill},
TITLE = {Measure, Integration, and Probability},
YEAR = {1972},
}
@ARTICLE{Bushell-1986-Hilbert-metric,
ABSTRACT = {The Cayley-Hilbert metric is defined for a real Banach space containing a closed cone. By restricting the domain of a particular type of positive nonlinear operator, the Banach contraction-mapping theorem is used to prove the existence of a unique fixed point of the operator with explicit upper and lower bounds. Applications to quasilinear elliptic partial differential equations and to matrix theory are considered.},
AUTHOR = {P. J. Bushell},
DOI = {10.1016/0024-3795(86)90319-8},
JOURNALTITLE = {Linear Algebra and its Applications},
LOCALFILE = {article/Bushell-1986-Hilbert-metric.pdf},
PAGES = {271–280},
PUBLISHER = {Elsevier},
TITLE = {The Cayley-Hilbert metric and positive operators},
VOLUME = {84},
YEAR = {1986},
}
@ARTICLE{Capotorti-Galli-Vantaggi-2003,
ABSTRACT = {We introduce an operational way to reduce the spatial complexity in inference processes based on conditional lower–upper probabilities assessments. To reach such goal we must suitably exploit zero probabilities taking account of logical conditions characterizing locally strong coherence. We actually re-formulate for conditional lower–upper probabilities the notion of locally strong coherence already introduced for conditional precise probabilities. Thanks to the characterization, we avoid to build all atoms, so that several real problems become feasible. In fact, the real complexity problem is connected to the number of atoms. Since for an inferential process with lower–upper probabilities several sequences of constraints must be fulfilled, our simplification can have either a “global” or a “partial” effect, being applicable to all or just to some sequences. The whole procedure has been implemented by XLisp-Stat language. A comparison with other approaches will be done by an example.},
AUTHOR = {Andrea Capotorti and L. Galli and Barbara Vantaggi},
DOI = {10.1007/s00500-002-0214-6},
ISSN = {1432-7643},
JOURNALTITLE = {Soft Computing},
LOCALFILE = {article/Capotorti-Galli-Vantaggi-2003.pdf},
NUMBER = {5},
PAGES = {280–287},
PUBLISHER = {Springer},
TITLE = {Locally strong coherence and inference with lower-upper probabilities},
VOLUME = {7},
YEAR = {2003},
}
@INPROCEEDINGS{Capotorti-Zagoraiou-2006,
AUTHOR = {Andrea Capotorti and Maroussa Zagoraiou},
BOOKTITLE = {Proceedings of the Eleventh International Conference on Information Processing and Management of Uncertainty in Knowledge-based Systems},
LOCATION = {Paris},
TITLE = {Implicit Degree of Support for Finite Lower-Upper Conditional Probabilities Extensions},
YEAR = {2006},
}
@BOOK{Carnap-1952,
AUTHOR = {Rudolf Carnap},
LOCATION = {Chicago},
PUBLISHER = {The University of Chicago Press},
TITLE = {The Continuum of Inductive Methods},
YEAR = {1952},
}
@ARTICLE{Casalis-1996,
ABSTRACT = {The present paper describes all the natural exponential families on $\mathbb{R}^d$ whose variance function is of the form $V(m) = a\, m \otimes m + B(m) + C$, with $(m \otimes m)(\theta) = \langle \theta, m \rangle m$ and $B$ linear in $m$. There are $2d + 4$ types of such families, which are built from particular mixtures of families of Normal, Poisson, gamma, hyperbolic on $\mathbb{R}^d$ and negative-multinomial distributions. The proof of this result relies mainly on techniques used in the elementary theory of Lie algebras.},
AUTHOR = {M. Casalis},
DOI = {10.1214/aos/1032298298},
ISSN = {0090-5364},
JOURNALTITLE = {The Annals of Statistics},
KEYWORDS = {Morris class; Variance functions},
LOCALFILE = {article/Casalis-1996.pdf},
NUMBER = {4},
PAGES = {1828–1854},
PUBLISHER = {Institute of Mathematical Statistics},
TITLE = {The $2d+4$ simple quadratic natural exponential families on $\mathbb{R}^d$},
URL = {http://www.jstor.org/stable/2242752},
VOLUME = {24},
YEAR = {1996},
}
@ARTICLE{Castillo-Gutierrez-Hadi-1997,
ABSTRACT = {This paper presents an efficient computational method for performing sensitivity analysis in discrete Bayesian networks. The method exploits the structure of conditional probabilities of a target node given the evidence. First, the set of parameters which is relevant to the calculation of the conditional probabilities of the target node is identified. Next, this set is reduced by removing those combinations of the parameters which either contradict the available evidence or are incompatible. Finally, using the canonical components associated with the resulting subset of parameters, the desired conditional probabilities are obtained. In this way, an important saving in the calculations is achieved. The proposed method can also be used to compute exact upper and lower bounds for the conditional probabilities, hence a sensitivity analysis can be easily performed. Examples are used to illustrate the proposed methodology.},
AUTHOR = {Enrique Castillo and J. M. Gutierrez and A. S. Hadi},
DOI = {10.1109/3468.594909},
ISSN = {1083-4427},
JOURNALTITLE = {IEEE Transactions on Systems, Man, and Cybernetics, Part A: Systems and Humans},
KEYWORDS = {conditional probabilities; discrete Bayesian networks; efficient computational method; lower bounds; sensitivity analysis; upper bounds; Bayes methods; case-based reasoning; directed graphs; probability; sensitivity analysis},
MONTH = {7},
NUMBER = {4},
PAGES = {412–423},
TITLE = {Sensitivity analysis in discrete Bayesian networks},
VOLUME = {27},
YEAR = {1997},
}
@INPROCEEDINGS{Chan-Darwiche-2004,
ABSTRACT = {Previous work on sensitivity analysis in Bayesian networks has focused on single parameters, where the goal is to understand the sensitivity of queries to single parameter changes, and to identify single parameter changes that would enforce a certain query constraint. In this paper, we expand the work to multiple parameters which may be in the CPT of a single variable, or the CPTs of multiple variables. Not only do we identify the solution space of multiple parameter changes that would be needed to enforce a query constraint, but we also show how to find the optimal solution, that is, the one which disturbs the current probability distribution the least (with respect to a specific measure of disturbance). We characterize the computational complexity of our new techniques and discuss their applications to developing and debugging Bayesian networks, and to the problem of reasoning about the value (reliability) of new information.},
AUTHOR = {Hei Chan and Adnan Darwiche},
BOOKTITLE = {UAI-04: Proceedings of the Twentieth Conference on Uncertainty in Artificial Intelligence},
LOCATION = {Arlington, Virginia},
PAGES = {67–75},
PUBLISHER = {AUAI Press},
TITLE = {Sensitivity analysis in Bayesian networks: from single to multiple parameters},
YEAR = {2004},
}
@INPROCEEDINGS{2006-Charitos+Gaag,
AUTHOR = {Theodore Charitos and Linda C. van der Gaag},
BOOKTITLE = {FLAIRS Conference},
EDITOR = {Geoff Sutcliffe and Randy Goebel},
LOCALFILE = {inproceedings/2006-Charitos+Gaag.pdf},
PAGES = {806–811},
PUBLISHER = {AAAI Press},
TITLE = {Sensitivity analysis of Markovian models},
YEAR = {2006},
}
@ARTICLE{Charitos+al-2009,
ABSTRACT = {Diagnosing ventilator-associated pneumonia in mechanically ventilated patients in intensive care units is seen as a clinical challenge. The difficulty in diagnosing ventilator-associated pneumonia stems from the lack of a simple yet accurate diagnostic test. To assist clinicians in diagnosing and treating patients with pneumonia, a decision-theoretic network had been designed with the help of domain experts. A major limitation of this network is that it does not represent pneumonia as a dynamic process that evolves over time. In this paper, we construct a dynamic Bayesian network that explicitly captures the development of the disease over time. We discuss how probability elicitation from domain experts served to quantify the dynamics involved and how the nature of the patient data helps reduce the computational burden of inference. We evaluate the diagnostic performance of our dynamic model for a number of real patients and report promising results.},
AUTHOR = {Theodore Charitos and Linda C. van der Gaag and Stefan Visscher and Karin A. M. Schurink and Peter J. F. Lucas},
DOI = {10.1016/j.eswa.2007.11.065},
ISSN = {0957-4174},
JOURNALTITLE = {Expert Systems with Applications},
KEYWORDS = {Ventilator-associated pneumonia; Diagnosis; Dynamic Bayesian networks; Stochastic processes; Inference},
LOCALFILE = {article/Charitos+al-2009.pdf},
NUMBER = {2, Part 1},
PAGES = {1249–1258},
TITLE = {A dynamic Bayesian network for diagnosing ventilator-associated pneumonia in ICU patients},
VOLUME = {36},
YEAR = {2009},
}
@ARTICLE{Charitos+De_Waal+Van_der_Gaag-2008,
ABSTRACT = {Markov chains constitute a common way of modelling the progression of a chronic disease through various severity states. For these models, a transition matrix with the probabilities of moving from one state to another for a specific time interval is usually estimated from cohort data. Quite often, however, the cohort is observed at specific times with intervals that may be greater than the interval of interest. The transition matrix computed then needs to be decomposed in order to estimate the desired interval transition matrix suited to the model. Although simple to implement, this method of matrix decomposition can yet result in an invalid short-interval transition matrix with negative or complex entries. In this paper, we present a method for computing short-interval transition matrices that is based on regularization techniques. Our method operates separately on each row of the invalid short-interval transition matrix aiming to minimize an appropriate distance measure. We test our method on various matrix structures and sizes, and evaluate its performance on a real-life transition model for HIV-infected individuals.},
AUTHOR = {Theodore Charitos and Peter R. de Waal and Linda C. van der Gaag},
DOI = {10.1002/sim.2970},
ISSN = {1097-0258},
JOURNALTITLE = {Statistics in Medicine},
KEYWORDS = {Markov chain; transition matrix; regularization techniques},
LOCALFILE = {article/Charitos+De_Waal+Van_der_Gaag-2008.pdf},
NUMBER = {6},
PAGES = {905–921},
PUBLISHER = {John Wiley \& Sons, Ltd.},