@string{AAP = "Advances in Applied Probability"}
@string{RSA = "Random Structures and Algorithms"}
@STRING{ASSP = "IEEE Transactions on Acoustics, Speech and Signal Processing"}
@string{CABIOS = "Comput. Applic. Biosci."}
@string{NAR = "Nucleic Acids Res."}
@string{JBSD = "J. Biomol. Struct. Dyn."}
@string{ProcIEEE = "Proceedings of the IEEE"}
@string{JMolBiol = "Journal of Molecular Biology"}
@string{JCB = "Journal of Computational Biology"}
@string{PNAS = "Proceedings of the National Academy of Sciences of the USA"}
@BOOK{latex,
author = "Leslie Lamport",
title = "{\LaTeX \rm:} {A} Document Preparation System",
publisher = "Addison-Wesley",
year = 1986 }
%%%%%%% MacKay publications %%%%%%%%%%%%%%%%%%%
% online papers that need adding to this list and putting in their abstracts
% delve
% distance
% cpi4
% advance
% gbm
% gene
@ARTICLE{MacKay92a,
AUTHOR ="MacKay, D. J. C.",
TITLE ="{B}ayesian Interpolation",
JOURNAL ="Neural Computation",
YEAR ="1992",
VOLUME ="4",
NUMBER ="3",
PAGES ="415--447",
URL={http://www.inference.phy.cam.ac.uk/mackay/PhD.html},
ANNOTE ="Date submitted: 21 May 1991; Date accepted: 29 Oct 1991;
Collaborating institutes: California Institute of
Technology"
}
@ARTICLE{MacKay92b,
KEY ="MacKay",
AUTHOR ="MacKay, D. J. C.",
TITLE ="A Practical {B}ayesian Framework for Backpropagation Networks",
JOURNAL ="Neural Computation",
YEAR ="1992",
VOLUME ="4",
NUMBER ="3",
PAGES ="448--472",
ANNOTE ="Date submitted: 21 May 1991; Date accepted: 29 Oct 1991; Collaborating institutes: California Institute of Technology"}
@ARTICLE{MacKay92c,
KEY ="MacKay",
AUTHOR ="MacKay, D. J. C.",
TITLE ="Information Based Objective Functions for Active
Data Selection",
JOURNAL ="Neural Computation",
YEAR ="1992",
VOLUME ="4",
NUMBER ="4",
PAGES ="589--603",
ANNOTE ="Date submitted: 17 July 1991; Date accepted: 15 Nov 1991; Collaborating institutes: California Institute of Technology"}
@ARTICLE{MacKay92d,
KEY ="MacKay",
AUTHOR ="MacKay, D. J. C.",
TITLE ="The Evidence Framework Applied to Classification Networks",
JOURNAL ="Neural Computation",
YEAR ="1992",
VOLUME ="4",
NUMBER ="5",
PAGES ="698--714",
ANNOTE ="Date submitted: 20 Nov 1991; Date accepted: 18 Feb 1992; Collaborating institutes: California Institute of Technology"}
@ARTICLE{MacKay95_kuipers_review,
KEY ="MacKay",
AUTHOR ="MacKay, D. J. C.",
TITLE ="Book review: Qualitative Reasoning: Modeling and
Simulation with Incomplete Knowledge",
JOURNAL ="Computers in Physics",
YEAR =1995,
VOLUME =9,
NUMBER =2,
PAGES =3}
@TECHREPORT{MacKay86,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="Statistical Testing of High Precision Digitisers",
YEAR ="1986",
NUMBER ="3971",
INSTITUTION ="Royal Signals and Radar Establishment,
Malvern, Worcester. WR14 3PS"
}
@TECHREPORT{MacKay87,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="A Method of Increasing the Contextual Input to
Adaptive Pattern Recognition Systems",
YEAR ="1987",
NUMBER ="RIPR 1000/14/87",
INSTITUTION ="Research Initiative in Pattern Recognition,
Royal Signals and Radar Establishment,
Malvern, Worcester. WR14 3PS"
}
% vfe
@INPROCEEDINGS{MacKay94:fe,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="A Free Energy Minimization Framework for
Inference Problems in Modulo 2 Arithmetic",
BOOKTITLE ="Fast Software Encryption (Proceedings of 1994 K.U. Leuven Workshop on
Cryptographic Algorithms)",
editor = "B. Preneel",
series = "Lecture Notes in Computer Science",
number = "1008",
YEAR =1995,
publisher = "Springer-Verlag",
PAGES ="179--195",
ANNOTE ="Date submitted: 15 Jan 1995; Date accepted: 22 March 1995; Collaborating institutes:
Cambridge University Computer Laboratory"}
@ARTICLE{MacKay94:fes,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="Free Energy Minimization Algorithm for Decoding
and Cryptanalysis",
YEAR =1995,
JOURNAL="Electronics Letters",
VOLUME =31,
NUMBER=6,
PAGES ="446--447",
ANNOTE ="Date submitted: Jan 1995; Date accepted: 24 Feb 1995; Date
published: 16 March 1995;
Collaborating institutes: Cambridge University Computer Laboratory"}
% ISSN: 0013-5194
@UNPUBLISHED{mnc4pfoutdated,
KEY ="MacKay and Neal",
AUTHOR ="D. J. C. MacKay and R. M. Neal",
TITLE ="Good Codes based on Very Sparse Matrices",
YEAR =1995,
NOTE="Available from {\tt http://www.inference.phy.cam.ac.uk/}",
PAGES ="",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
University of Toronto"}
@Article{mncEL,
author = "D. J. C. MacKay and R. M. Neal",
title = "Near {S}hannon Limit Performance of Low Density Parity
Check Codes",
key = "MacKay and Neal",
journal = "Electronics Letters",
ISSN={0013-5194},
year = 1996,
volume = 32,
number = 18,
pages = "1645--1646",
month = aug,
note = {Reprinted {\em Electronics Letters},
{\bf 33}(6):457--458, March 1997},
annoteb = {Reprinted {\em Electronics Letters},
vol 33, no 6, 13th March 1997, pp.457--458},
annote = "Date submitted: Jul 12 1996; Date accepted: Aug 12
1996; printed 29 Aug. Collaborating institutes:
University of Toronto"
}
@Article{mncEL1,
author = "D. J. C. MacKay and R. M. Neal",
title = "Near {S}hannon Limit Performance of Low Density Parity
Check Codes",
key = "MacKay and Neal",
journal = "Electronics Letters",
year = 1996,
volume = 32,
number = 18,
pages = "1645--1646",
month = aug,
internal-note = "apparent duplicate of entry mncEL -- consider removing one key",
annote = "Date submitted: Jul 12 1996; Date accepted: Aug 12
1996; printed 29 Aug. Collaborating institutes:
University of Toronto"
}
@Article{mncEL2,
author = "D. J. C. MacKay and R. M. Neal",
title = "Near {S}hannon Limit Performance of Low Density Parity
Check Codes",
key = "MacKay and Neal",
journal = "Electronics Letters",
year = 1997,
volume = 33,
number = 6,
pages = "457--458",
month = mar,
note = {Reprinted because of printing errors in 1996},
annote = "Date submitted: Jul 12 1996; Date accepted: Aug 12
1996; printed 29 Aug. Collaborating institutes:
University of Toronto"
}
% ",; Available from {\tt http://www.inference.phy.cam.ac.uk/}",
%
% KEY ="MacKay and Neal",
% AUTHOR ="D. J. C. MacKay and R. M. Neal",
% TITLE ="Good Error Correcting Codes based on Very Sparse Matrices",
% YEAR =1996,
% NOTE="To be submitted to IEEE transactions on Information Theory. Available from {\tt http://www.inference.phy.cam.ac.uk/}",
% PAGES ="",
% ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
% University of Toronto"}
@article{mncN,
AUTHOR ="D. J. C. MacKay",
TITLE ="Good Error Correcting Codes based on Very Sparse Matrices",
YEAR =1999,
journal={IEEE Transactions on Information Theory},
issn={0018-9448},
volume={45},number={2},
PAGES ="399--431",
ANNOTE ="Date submitted: June 9th 1997; Date accepted: July 27th 1998;
Collaborating institutes:
University of Toronto"}
@inproceedings{mncisit,
AUTHOR ="D. J. C. MacKay",
TITLE ="Good Error-Correcting Codes based on Very Sparse Matrices",
YEAR =1997,
booktitle={Proceedings of 1997 IEEE International Symposium on Information Theory. Ulm, Germany.},
PAGES ="113",
ANNOTE ="Date submitted: Sep 24 96; Date accepted: Feb 97; Collaborating institutes:
University of Toronto. MRAO number: "}
% mnc
@incollection{MacKay_Neal_Codes:95,
KEY ="MacKay and Neal",
AUTHOR ="D. J. C. MacKay and R. M. Neal",
TITLE ="Good Codes based on Very Sparse Matrices",
booktitle = "Cryptography and Coding. 5th {IMA} Conference",
publisher = "Springer",
address = "Berlin",
year = 1995,
editor = "Colin Boyd",
series = "Lecture Notes in Computer Science",
number = "1025",
pages="100--111",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
University of Toronto. ISBN 3-540-60693-9"}
@article{McElieceMacKay96,
AUTHOR ="R. J. McEliece and D. J. C. MacKay and J.-F. Cheng",
TITLE ="Turbo Decoding as an Instance of {P}earl's `Belief Propagation' Algorithm",
YEAR =1998,
journal={IEEE Journal on Selected Areas in Communications},
ISSN={0733-8716},
volume={16}, number={2},
PAGES ="140--152",
month=feb,
ANNOTE ="Date submitted: sep 27, 96; Date accepted: may 3, 97;
Collaborating institutes:
Caltech; Communications Society Leonard G. Abraham Prize Paper Award"}
% aka Daveyphd
@PhdThesis{mcdthesis,
author = {M. C. Davey},
title = {Error-correction using Low-Density Parity-Check Codes},
school = {University of Cambridge},
year = {1999},
}
@Unpublished{DM_LDPC_MonteCarlo,
author = {M. C. Davey and D. J. C. MacKay},
title = {Monte {C}arlo simulations of infinite low density parity check codes over {$GF(q)$}},
note = {Available from {\tt http://www.inference.phy.cam.ac.uk/is/papers/}},
key = {Davey and MacKay},
year = {1997}
}
%DM_LDPC_CLGFq,
@Article{DaveyMacKay96,
author = {M. C. Davey and D. J. C. MacKay},
title = {Low Density Parity Check Codes over {GF}(q)},
journal = {IEEE Communications Letters},
issn={1089-7798},
year = {1998},
key = {Davey and MacKay},
volume = {2},
number = {6},
pages = {165--167},
month = jun,
annote={M. C. Davey was my supervised research student}
}
@InProceedings{cbj_97b0,
Author = {Bailer-Jones, C.A.L. and MacKay, D.J.C. and Sabin, T.J. and Withers, P.J.},
title = {Static and Dynamic Modelling of Materials Forging},
booktitle = {Ninth Australian Conference on Neural Networks},
year = {1998},
note = {identical to cbj-97b I think},
}
@article{cbj_97b,
Author = {Bailer-Jones, C.A.L. and MacKay, D.J.C. and Sabin, T.J. and Withers, P.J.},
title = {Static and Dynamic Modelling of Materials Forging},
volume = {5},
number = {1},
journal={Australian Journal of Intelligent Information Processing Systems},
year = {1998},
pages = {10--17},
}
@InProceedings{sabin_97a,
Author = {Sabin, T.J. and Bailer-Jones, C.A.L. and Roberts, S.M. and MacKay, D.J.C. and Withers, P.J.},
title = {Modelling the Evolution of Microstructures in Cold-Worked and Annealed Aluminium Alloy},
booktitle = {International Conference
on Thermomechanical Processing},
year = {1997},
OPTorganization = {},
OPTpublisher = {},
OPTaddress = {},
OPTpages = {}
}
@InProceedings{cbj_97bb,
Author = {Bailer-Jones, C.A.L. and Sabin, T.J. and MacKay, D.J.C. and Withers, P.J.},
title = {Prediction of Deformed and Annealed Microstructures
Using {B}ayesian Neural Networks and {G}aussian
Processes},
booktitle = {Australasia Pacific Forum on Intelligent Processing and
Manufacturing of Materials},
year = {1997},
OPTorganization = {},
OPTpublisher = {},
OPTaddress = {},
OPTpages = {}
}
@article{cbj_98a,
Author = {Bailer-Jones, C.A.L. and Bhadeshia, H.K.D.H. and
MacKay, D.J.C.},
title = {{G}aussian Process Modelling of Austenite Formation in Steel},
year = {1999},
volume={15},
pages = {287--294},
journal = {Materials Science and Technology}
}
@article{Cole_etal99,
Author = {Cole, D. and Martin-Moran, C. and Sheard, A.G. and Bhadeshia, H.K.D.H. and
MacKay, D.J.C.},
title = {Modelling Creep Rupture Strength of Ferritic Steel Welds},
year = {2000},
volume={5},
number={2},
pages = {81--89},
journal={Science and Technology of Welding and Joining},
annote= {Submitted 3 May 1999, accepted 17 June 1999}
}
@article{Tancret_etal99,
Author = {Tancret, F. and Bhadeshia, H.K.D.H. and
MacKay, D.J.C.},
title = {Comparison of Artificial Neural Networks with {G}aussian Processes to Model the Yield Strength of {N}ickel-base Superalloys},
year = {1999},
volume={39},
number={10},
pages = {1020--1026},
journal = {ISIJ International}
}
@article{Tancret_etal00,
title={Design of new creep-resistant nickel-base superalloys for power-plant applications},
author={Tancret, F. and Bhadeshia, H.K.D.H. and MacKay, D.J.C.},
journal={Creep and Fracture of Engineering Materials and Structures},
volume={171},
number={1},
pages={529--536},
year={2000}
}
@article{cbj_98b,
Author = {Bailer-Jones, C.A.L. and
MacKay, D.J.C. and Withers, P.J.},
title = {A Recurrent Neural Network for Modelling Dynamical Systems},
year = {1998},
journal = {Network: Computation in Neural Systems},
volume={9},
number={4},
pages = {531--547},
}
@UNPUBLISHED{DaveyMacKay96old,
KEY ="",
AUTHOR ="M. C. Davey and D. J. C. MacKay",
TITLE ="Good Codes over {$GF(q)$} based
on Very Sparse Matrices",
YEAR =1997,
NOTE="In preparation",
PAGES ="",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes: none"}
@UNPUBLISHED{HeskethMacKay97,
AUTHOR ="D. J. C. MacKay and C. P. Hesketh",
TITLE ="Performance of Low Density Parity Check Codes
as a Function of Actual and Assumed Noise Levels",
YEAR =1997,
note={{\tt http://www.inference.phy.cam.ac.uk/mackay/abstracts/sensit.html}. To appear, IEEE Trans. Communications 1999},
ANNOTE ="Date submitted: 1997; Date accepted: 1998; Collaborating institutes: none"}
@UNPUBLISHED{WilsonMacKay96,
KEY ="",
AUTHOR ="S. T. Wilson and D. J. C. MacKay",
TITLE ="Decoding Shortened Cyclic Codes by Belief Propagation",
YEAR =1996,
NOTE="In preparation",
PAGES ="",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
none"}
@INPROCEEDINGS{Renals_MacKay,
KEY ="Renals and MacKay",
AUTHOR ="S. J. Renals and D. J. C. MacKay",
TITLE ="{B}ayesian regularisation methods in a hybrid
{MLP}-{HMM} system",
BOOKTITLE ="Proceedings of {E}urospeech 93, {B}erlin",
YEAR ="1993",
pages = {1719--1722},
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
Cambridge University Engineering Department. MRAO 1820"}
% PAGES ="",
% September 1993.
@INPROCEEDINGS{MM89:nips,
KEY ="MacKay and Miller",
AUTHOR ="D. J. C. MacKay and K. D. Miller",
TITLE ="Analysis of {L}insker's simulations of {H}ebbian rules",
BOOKTITLE ="Advances in Neural Information Processing Systems II",
EDITOR ="D. Touretzky",
PAGES ="694--701",
YEAR ="1989"}
@ARTICLE{MM90:nc,
KEY ="MacKay and Miller",
AUTHOR ="D. J. C. MacKay and K. D. Miller",
TITLE ="Analysis of {L}insker's simulations of {H}ebbian rules",
JOURNAL ="Neural Computation",
VOLUME ="2",
NUMBER ="2",
PAGES ="173--187",
YEAR ="1990"}
@ARTICLE{MM90:network,
KEY ="MacKay and Miller",
AUTHOR ="D. J. C. MacKay and K. D. Miller",
TITLE ="Analysis of {L}insker's application of
{H}ebbian rules to linear networks",
JOURNAL ="Network",
VOLUME ="1",
NUMBER ="3",
PAGES ="257--297",
YEAR ="1990"}
@ARTICLE{MM94:nc,
AUTHOR ="K. D. Miller and D. J. C. MacKay",
TITLE ="The role of constraints in {H}ebbian learning",
JOURNAL ="Neural Computation",
VOLUME ="6",
NUMBER ="1",
PAGES ="98--124",
YEAR ="1994",
ANNOTE ="Date submitted: 9 Oct 1992; Date accepted: 13 May 1993;
Collaborating institutes:
California Institute of Technology"}
@INPROCEEDINGS{phnips,
KEY ="Bridle, Heading and MacKay",
AUTHOR ="J. S. Bridle and A. J. R. Heading and D. J. C. MacKay",
TITLE ="Unsupervised Classifiers, Mutual Information and `Phantom targets'",
BOOKTITLE ="Advances in Neural Information Processing Systems 4",
EDITOR ="J. E. Moody and S. J. Hanson and R. P. Lippmann",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1992",
PAGES ="1096--1101"}
@INPROCEEDINGS{MacKay.nips4,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="{B}ayesian Model Comparison and Backprop Nets",
BOOKTITLE ="Advances in Neural Information Processing Systems 4",
EDITOR ="J. E. Moody and S. J. Hanson and R. P. Lippmann",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1992",
PAGES ="839--846",
url={http://www.inference.phy.cam.ac.uk/mackay/nips91.ps.gz}
}
@ARTICLE{Bhadeshia_etal95,
AUTHOR ="Bhadeshia, H. K. D. H. and MacKay, D. J. C.
and Svensson, L. E.",
TITLE ="Impact toughness of {C-MN} Steel Arc Welds ---
{B}ayesian neural network analysis",
YEAR ="1995",
JOURNAL = "Materials Science and Technology",
VOLUME ="11",
number=10,
pages="1046--1051",
ANNOTE ="Date submitted: Jul 1994; Date accepted: Sep 1994; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
@inproceedings{Jones_etal95,
AUTHOR ="J. Jones and D. J. C. MacKay and
H. K. D. H. Bhadeshia",
TITLE ="The Strength of {N}ickel Base Superalloys --- a
{B}ayesian Neural Network Analysis",
YEAR = 1995,
booktitle={Proceedings of the 5th International Symposium on Advanced
Materials, Pakistan},
pages={659--666},
ANNOTE ="Date submitted: 1995; Date accepted: 1995; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
@UNPUBLISHED{Jones_etal95old,
AUTHOR ="J. Jones and J. King and H. K. D. H. Bhadeshia and
D. J. C. MacKay",
TITLE ="Modelling the Strength of {N}ickel Base
Superalloys",
YEAR = 1995,
NOTE = "3rd International Parsons Turbine Conference",
ANNOTE ="Date submitted: 1995; Date accepted: 1995; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
@article{Gavard_etal95,
AUTHOR ={L. Gavard and H. K. D. H. Bhadeshia and
D. J. C. MacKay and S. Suzuki},
TITLE ={Bayesian Neural Network Model for Austenite
Formation in Steels},
journal ={Materials Science and Technology},
volume =12,
pages ={453--463},
YEAR = 1996,
ANNOTE ="Date submitted: May 1995; Date accepted: n/k 1995;
Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science. MRAO reprint number 1941."}
@ARTICLE{Fujii_etal96,
AUTHOR ="H. Fujii and D. J. C. MacKay and H. K. D. H. Bhadeshia",
TITLE ="{B}ayesian neural network analysis of Fatigue Crack
Growth Rate in {N}ickel Base Superalloys",
YEAR ="1996",
JOURNAL = "ISIJ International",
VOLUME ="36",
number=11,
pages="1373--1382",
ANNOTE ="Date submitted: May 96; Date accepted: Sep 1996; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
@InCollection{Cool.B.M.97,
author = "T. Cool and H. K. D. H. Bhadeshia and D. J. C. MacKay",
title = "Modelling the Mechanical
Properties in the {HAZ} of Power Plant Steels {I}:
{B}ayesian Neural Network Analysis of Proof Strength",
booktitle = "Mathematical Modelling of Weld Phenomena 3",
publisher = "Institute of Materials",
editor = "H. Cerjak",
series = "Materials Modelling Series",
address = "London",
year={1997},
pages={403--442},
annote = "Date submitted: 96; Date accepted: 1996; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"
}
@Article{Cool.B.M.97b,
author = "T. Cool and H. K. D. H. Bhadeshia and D. J. C. MacKay",
title = "The Yield and Ultimate Tensile Strength of Steel Welds",
journal = "Materials Science and Engineering A",
year = 1997,
volume = "A223",
pages = "186--200"
}
@InCollection{MacKay.weld.review.97,
author = "D. J. C. MacKay",
title = "{B}ayesian Non-linear Modelling with Neural Networks",
booktitle = "Mathematical Modelling of Weld Phenomena 3",
publisher = "Institute of Materials",
editor = "H. Cerjak",
series = "Materials Modelling Series",
address = "London",
year={1997},
pages={359--389},
annote = "Date submitted: 96; Date accepted: 1996; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"
}
@article{Takeuchi_etal94:Jap,
AUTHOR ="R. Takeuchi and D. J. C. MacKay
and T. Matsumoto",
TITLE ="A {B}ayesian Inference of Hyperparameters and Regularizers
for Standard Regularization Problems",
YEAR =1994,
journal = "Technical report of IEICE",
note="In Japanese",
ANNOTE ="Date submitted: 1994; Date accepted: 1994;
Collaborating institutes:
Waseda University, Tokyo"}
@inproceedings{Takeuchi_etal94,
AUTHOR ="R. Takeuchi and D. J. C. MacKay
and T. Matsumoto",
TITLE ="Determining Optimal Hyperparameters and Regularizers
for Standard Regularization Problems",
YEAR =1994,
booktitle = "Proceedings of 1994 International Symposium on Artificial
Neural Networks (ISANN-94), Tainan, Taiwan",
pages="419--428",
ANNOTE ="Date submitted: 1994; Date accepted: 1994;
Collaborating institutes:
Waseda University, Tokyo"}
@article{MacKay_Peto,
AUTHOR ="D. J. C. MacKay and L. Peto",
TITLE ="A Hierarchical {D}irichlet Language Model",
YEAR ="1995",
journal ="Natural Language Engineering",
volume=1,
number=3,
pages="1--19",
ANNOTE ="Date submitted: 31 March 1995; Date accepted: ; Collaborating institutes:
University of Toronto"}
@UNPUBLISHED{MacKay94:amino,
AUTHOR ="D. J. C. MacKay",
TITLE ="Models for Dice Factories and Amino Acid Probability Vectors",
YEAR ="1995",
NOTE ="Unpublished",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
MRC Laboratory of Molecular Biology, Cambridge"}
@INPROCEEDINGS{MaxentCons,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="Maximum Entropy Connections{:} Neural Networks",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie, 1990",
YEAR ="1991",
EDITOR ="W. T. Grandy and L. Schick",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
PAGES ="237--244"
}
% modelling input dependent noise level
@INPROCEEDINGS{MacKay95:icann,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="Probabilistic Networks: New Models and New Methods",
BOOKTITLE ="ICANN '95",
YEAR ="1995",
PUBLISHER ="EC2 and Cie",
ADDRESS ="Paris",
PAGES ="331--337", annote={this is where I use BUGS to model
input-dependent noise. MRAO 1927}
}
@INPROCEEDINGS{MacKay95:snn,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="Developments in Probabilistic Modelling with Neural
Networks -- Ensemble Learning",
BOOKTITLE ="Neural Networks: Artificial Intelligence and
Industrial Applications. Proceedings of the 3rd
Annual Symposium on Neural Networks, Nijmegen,
Netherlands, 14-15 September 1995",
YEAR ="1995",
PUBLISHER ="Springer",
editor="Kappen, B. and Gielen, S.",
ADDRESS ="Berlin",
PAGES ="191--198", annote={MRAO 1926}
}
@INPROCEEDINGS{MacKay92am,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="{B}ayesian interpolation",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {S}eattle 1991",
EDITOR ="C.R. Smith and G.J. Erickson and P.O. Neudorfer",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1992",
PAGES ="39--66"}
@INPROCEEDINGS{MacKay92bm,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="The evidence for neural networks",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {S}eattle 1991",
EDITOR ="C.R. Smith and G.J. Erickson and P.O. Neudorfer",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1992",
PAGES ="165--183"}
@Unpublished{MacKay95:nips,
author = "D. J. C. MacKay",
title = "Ensemble Learning and Evidence Maximization",
note = "submitted to NIPS*95",
year = 1995,
annote = "Date submitted: May 1995"
}
@Unpublished{Macphee_MacKay,
author = "K. Macphee and D. J. C. MacKay",
title = "Ensemble Learning for Hidden {M}arkov Models",
note = "in preparation",
year = 1996,
annote = ""
}
@Unpublished{MacKay_hmm,
author = "D. J. C. MacKay",
title = "Ensemble Learning for Hidden {M}arkov Models",
note = "{\tt http://www.inference.phy.cam.ac.uk/mackay/abstracts/ensemblePaper.html}",
year = 1997,
annote = ""
}
@Unpublished{MacKay_vfeprob,
author = "D. J. C. MacKay",
title = "A Problem with Variational Free Energy Minimization",
note = "{\tt http://www.inference.phy.cam.ac.uk/mackay/abstracts/minima.html}",
year = 2001,
annote = ""
}
%submitted to NIPS 97",
% Available from {\tt http://www.inference.phy.cam.ac.uk/}",
@Unpublished{Gibbs_MacKay97a,
author = "M. N. Gibbs and D. J. C. MacKay",
title = "Efficient implementation of {G}aussian Processes for Interpolation",
note = "{\tt http://www.inference.phy.cam.ac.uk/mackay/abstracts/gpros.html}",
year = 1996,
annote = ""
}
@article{Gibbs_MacKay97b,
author = "M. N. Gibbs and D. J. C. MacKay",
title = "Variational {G}aussian Process Classifiers",
journal={IEEE Transactions on Neural Networks},
volume={11},
number={6},
year =2000,
month=nov,
pages={1458--1464},
annote = "submitted 9 sep 1998; {\tt http://www.inference.phy.cam.ac.uk/mackay/abstracts/vgc.html}"
}
@Unpublished{MacKay96:ica,
author = "D. J. C. MacKay",
title = "Maximum Likelihood and Covariant Algorithms for
Independent Component Analysis",
note = "{\tt http://www.inference.phy.cam.ac.uk/mackay/abstracts/ica.html}",
year = 1996,
annote = ""
}
@article{MacKay96:laplace,
author = "D. J. C. MacKay",
title = "Choice of Basis for {Laplace} Approximation",
journal = "Machine Learning",
year = 1998,
volume={33},
number={1},
pages={77--86}
}
@Misc{MacKay97:ipd,
author = "D. J. C. MacKay",
title = "Iterative Probabilistic Decoding of Low Density Parity Check Codes",
howpublished = "Animations available on world wide web",
year = 1997,
note = "{\tt http://www.inference.phy.cam.ac.uk/mackay/codes/gifs/}"
}
@inproceedings{WaterhouseMacKayRobinson95-nips8,
author="S. R. Waterhouse and D. J. C. MacKay and A. J. Robinson",
title="Bayesian Methods for Mixtures of Experts",
booktitle="Neural Information Processing Systems",
editor="D. S. Touretzky and M. C. Mozer and M. E. Hasselmo",
publisher="MIT Press",
pages={351--357},
year ="1996",
internal-note="apparent duplicate of entry Waterhouse_etal95:nips -- consider removing one key",
}
@inproceedings{Waterhouse_etal95:nips,
author="S. R. Waterhouse and D. J. C. MacKay and A. J. Robinson",
title="Bayesian Methods for Mixtures of Experts",
booktitle="Neural Information Processing Systems",
editor="D. S. Touretzky and M. C. Mozer and M. E. Hasselmo",
publisher="MIT Press",
year ="1996",
pages={351--357},
annote={MRAO 1928}
}
@INPROCEEDINGS{MacKay94:alpha,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="Hyperparameters: Optimize, or Integrate out?",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {S}anta {B}arbara 1993",
EDITOR ="G. Heidbreder",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1996",
pages="43--60",
abstractURL="ftp://www.inference.phy.cam.ac.uk/pub/mackay/abstracts/alpha.html",
postscriptURL="ftp://www.inference.phy.cam.ac.uk/pub/mackay/alpha.ps.Z",
keywords="alpha",
ANNOTE ="Date submitted: 1993; Date accepted: 1993; Collaborating institutes: none"}
@article{MacKay94:alpha_nc,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="Comparison of Approximate Methods for Handling Hyperparameters",
YEAR ="1999",
volume={11},
number={5},
pages={1035--1068},
journal={Neural Computation},
abstractURL="http://www.inference.phy.cam.ac.uk/mackay/abstracts/alpha.html",
postscriptURL="ftp://www.inference.phy.cam.ac.uk/pub/mackay/alpha.ps.Z",
ANNOTE ="Date submitted: 1994/1996/1997; Date accepted: October 1998; Collaborating institutes: none"
}
@InCollection{MacKay95:arbib,
author = "D. J. C. MacKay",
title = "{B}ayesian Methods for Supervised Neural Networks",
booktitle= "The Handbook of Brain Theory and Neural Networks",
publisher = "MIT Press",
year = 1995,
editor = "M. A. Arbib",
pages = "144--149"
}
%MacKay93:review,
@INCOLLECTION{MacKay94:springer,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="{B}ayesian Methods for Backpropagation Networks",
BOOKTITLE ="Models of Neural Networks III",
EDITOR ="E. Domany and van Hemmen, J. L. and K. Schulten",
PUBLISHER ="Springer-Verlag",
ADDRESS ="New York",
YEAR ="1994",
CHAPTER ="6",
pages = {211--254},
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes: none"}
@article{MacKay95:network,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="Probable Networks and Plausible Predictions --- A
Review of Practical {B}ayesian Methods for Supervised
Neural Networks",
journal ="Network: Computation in Neural Systems",
volume = 6,
YEAR =1995,
pages = "469--505",
ANNOTE ="Date submitted: 1994; Date accepted: 1994; Collaborating
institutes: none"}
% (August
% ISSN 0954-898X
@INPROCEEDINGS{MacKay94:pred_ashrae,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="{B}ayesian non-linear modelling for the prediction
competition",
BOOKTITLE ="ASHRAE Transactions, V.100, Pt.2",
PUBLISHER ="ASHRAE",
ADDRESS ="Atlanta Georgia",
YEAR ="1994",
PAGES ="1053--1062",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes: none"}
@INPROCEEDINGS{MacKay94:pred,
KEY ="MacKay",
AUTHOR ="D. J. C. MacKay",
TITLE ="{B}ayesian non-linear modelling for the 1993 energy prediction
competition",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {S}anta {B}arbara 1993",
EDITOR ="G. Heidbreder",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1996",
pages="221--234",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes: none"}
@INPROCEEDINGS{Green_MacKay94,
AUTHOR ="A. G. Green and D. J. C. MacKay",
TITLE ="{B}ayesian analysis of linear phased-array radar",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods,
{S}anta {B}arbara 1993",
EDITOR ="G. Heidbreder",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1996",
pages="309--318",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes: none"}
@INPROCEEDINGS{Tansley_etal94,
AUTHOR ="J. E. Tansley and M. J. Oldfield and D. J. C. MacKay",
TITLE ="Neural network image reconstruction",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods,
{S}anta {B}arbara 1993",
EDITOR ="G. Heidbreder",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1996",
pages="319--326",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes: none"}
@UNPUBLISHED{MacKay_Oldfield95,
KEY ="",
AUTHOR ="D. J. C. MacKay and M. J. Oldfield",
TITLE ="Generalization Error
and the Number of Hidden units in a Multilayer Perceptron",
YEAR ="1995",
note = "In preparation",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes: none"}
@Article{MacKay_Takeuchi95,
AUTHOR ="D. J. C. MacKay and R. Takeuchi",
TITLE ="Interpolation Models with Multiple Hyperparameters",
journal = {Statistics and Computing},
year = {1998},
volume = {8},
pages = {15--23},
ANNOTE ="Date submitted: Sep 1995; Date accepted: Jan 1997; Collaborating institutes:
Waseda University, Tokyo"}
@UNPUBLISHED{Evans_MacKay,
AUTHOR ="E. F. Evans and D. J. C. MacKay",
TITLE ="A convenient method for generating constrained
randomized sequences on-line for physiological and
psychophysical stimulus selection",
YEAR =1995,
NOTE ="In preparation",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
Department of Communication and Neuroscience, Keele
University. MRAO 1834"}
@INPROCEEDINGS{MacKay_Takeuchi95_maxent,
AUTHOR ="D. J. C. MacKay and R. Takeuchi",
TITLE ="Interpolation Models with Multiple Hyperparameters",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods,
{C}ambridge 1994",
EDITOR ="J. Skilling and S. Sibisi",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1995",
PAGES ="249--257",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
Waseda University, Tokyo. MRAO 1835"}
@INPROCEEDINGS{Barnett_MacKay95,
AUTHOR ="A. H. Barnett and D. J. C. MacKay",
TITLE ="{B}ayesian Comparison of Models for Images",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods,
{C}ambridge 1994",
EDITOR ="J. Skilling and S. Sibisi",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1995",
PAGES ="239--248",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes: none"}
@INPROCEEDINGS{MacKay95:density_nets,
AUTHOR ="D. J. C. MacKay",
TITLE ="Density Networks and their Application to Protein Modelling",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods,
{C}ambridge 1994",
EDITOR ="J. Skilling and S. Sibisi",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1996",
PAGES ="259--268",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
MRC Laboratory of Molecular Biology, Cambridge. MRAO 1837"}
@ARTICLE{MacKay95:wonsda,
AUTHOR ="D. J. C. MacKay",
TITLE ="{B}ayesian Neural Networks and Density Networks",
JOURNAL ="Nuclear Instruments and Methods in Physics
Research, Section A",
volume = 354,
number = 1,
YEAR =1995,
PAGES="73--80",
ANNOTE ="Date submitted: 1994; Date accepted: 1994; Collaborating institutes: none"}
% (Nucl. Inst. and Meth.)
% Jan 15 1995
@Incollection{MacKay97:dn,
author = "D. J. C. MacKay and M. N. Gibbs",
title = "Density Networks",
publisher = "O.U.P.",
booktitle={Statistics and Neural Networks},
annote={subtitle: Advances at the Interface. Proceedings of meeting on Statistics and Neural Nets, Edinburgh, 1997},
year = "1998",
editor = "J. W. Kay and D. M. Titterington",
pages = "129--146"
}
% , note = "in press"
@article{MacKay94:BC,
author = "D. J. C. MacKay",
title = "Equivalence of {B}oltzmann Chains and Hidden {M}arkov Models",
journal = "Neural Computation",
year = 1996,
volume=8,
number=1,
pages="178--181",
ANNOTE ="Date submitted: Nov 1994;
modified 31 March 1995; Date accepted: April 10 1995; Collaborating institutes: none"
}
% better to cite Radford_book,MacKay94:pred_ashrae,MacKay95:network
@TECHREPORT{ARD,
KEY ="MacKay and Neal",
AUTHOR ="D. J. C. MacKay and R. M. Neal",
TITLE ="Automatic relevance determination for Neural Networks",
YEAR ="1994",
NUMBER ="In preparation",
INSTITUTION ="Cambridge University",
ANNOTE ="Date submitted: ; Date accepted: ; Collaborating institutes:
University of Toronto"}
@PHDTHESIS{MacKay91,
AUTHOR ="D. J. C. MacKay",
TITLE ="Bayesian Methods for Adaptive Models",
YEAR =1991,
SCHOOL ="California Institute of Technology"}
@Article{Eddy_MacKay,
author = "S. R. Eddy and D. J. C. MacKay",
title = "Is the {P}ope the {P}ope? (Correspondence in {\em {N}ature\/} 382, p.\ 490)",
journal="Nature",
volume=382,
year = 1996,
month = aug,
pages = 490,
annote = "8 Aug 96"
}
@InCollection{MullerMacKayHerz96,
author = {M{\"u}ller, R. and MacKay, D. J. C. and Herz, A. V. M.},
title = "Associative memory using action potential timing",
booktitle = "Proceedings of BioNet'96: Third Workshop
`Bio-informatics and Pulsepropagating networks'",
editor ="G.K. Heinz",
publisher = "",
year = 1996,
pages = "",
ANNOTE ="Date submitted: 15 Nov 96; Date accepted: 15 Nov 96; Collaborating institutes:
University of Bremen"}
@Unpublished{MacKay:itp,
author = "D. J. C. MacKay",
title = "Information Theory, Inference and Learning Algorithms",
note = "Textbook in preparation, currently 600 pages long,
available from http://www.inference.phy.cam.ac.uk/mackay/itprnn/,
to be published by C.U.P.",
url="http://www.inference.phy.cam.ac.uk/mackay/itprnn/",
year = 1999
}
@article{Badmos_etal97,
AUTHOR ="A. Y. Badmos and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="Tensile Properties of
Mechanically Alloyed Oxide Dispersion Strengthened
Iron Alloys. Part 1 --- Neural network Models",
YEAR ="1998",
JOURNAL = "Materials Science and Technology",
VOLUME ="14",
number={},
pages="793-809",
ANNOTE ="Date submitted: Mar 97; accepted May 97; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
% mrao number?
@article{Brun_etal97,
AUTHOR ="F. Brun and T. Yoshida and J. D. Robson and V.
Narayan and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="Theoretical Design of Ferritic Creep Resistant Steels using Neural Network, Kinetic and Thermodynamic Models",
YEAR ="1999",
JOURNAL = "Materials Science and Technology",
VOLUME ="15",
number={},
pages="547-554",
ANNOTE ="Date submitted: Sep 97; accepted July 98. Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science.
contains the new steels A and B"}
% mrao number?
@article{Lalam_etal97,
AUTHOR ="S. H. Lalam and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="Estimation of the Mechanical Properties of Ferritic Steel Welds: Part {I}: Yield and Tensile Strength",
YEAR ="2000",
JOURNAL = "Science and Technology of Welding and Joining",
VOLUME ="5",
number={3},
pages="135-147",
ANNOTE ="Date submitted: May 99; accepted . Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science."}
% mrao number?
@INPROCEEDINGS{FreyMacKay97,
KEY ="",
AUTHOR ="B. J. Frey and D. J. C. MacKay",
TITLE ="Trellis-constrained Codes",
BOOKTITLE ="Proceedings of the 35th Allerton Conference on Communication, Control, and Computing, Sept.\ 1997",
place = "Champaign-Urbana, Illinois",
annote = "Available at {\tt http://www.cs.utoronto.ca/$\sim$frey}",
EDITOR ="",
PUBLISHER ="",
ADDRESS ="",
YEAR ="1998",
PAGES ="",
ANNOTE ="Date submitted: jun 97; Date accepted: jun 97; Collaborating institutes:
Univ of Toronto. MRAO "}
@Unpublished{frey-mackay-98b,
key = "Frey",
author = "B.~J. Frey and D.~J.~C. MacKay",
title = "Trellis-constrained codes",
note = "Submitted to \emph{IEEE Transactions on Communications}",
year = "1998"
}
@INPROCEEDINGS{FreyMacKay98,
KEY ="",
AUTHOR ="B. J. Frey and D. J. C. MacKay",
TITLE ={A Revolution: Belief Propagation in Graphs with Cycles},
BOOKTITLE ={Advances in Neural Information Processing Systems 10},
EDITOR ="M. I. Jordan and M. J. Kearns and S. A. Solla",
PUBLISHER ="M.I.T. Press",
ADDRESS ="Cambridge MA.",
YEAR ="1998",
PAGES ="479--485",
ANNOTE ="Available at {\tt http://www.cs.utoronto.ca/$\sim$frey}.
Date submitted: jun 97; Date accepted: ; Collaborating institutes:
Univ of Toronto. MRAO "}
% Same as:
@InCollection{frey-mackay-98c,
key = "Frey",
author = "B.~J. Frey and D.~J.~C. MacKay",
title = "A revolution: {B}elief propagation in graphs with cycles",
booktitle = "Advances in Neural Information Processing Systems
1997, Volume 10",
editor = "M.~I. Jordan and M.~I. Kearns and S.~A. Solla",
year = "1998",
publisher = "MIT Press",
place = "Cambridge MA.",
pages = "479--485",
note = "Available at {\tt http://www.cs.utoronto.ca/$\sim$frey}"
}
@article{Takeuchi_etal97:Jap,
AUTHOR ="R. Takeuchi and D. J. C. MacKay
and S. Nakazawa and T. Matsumoto",
TITLE ="Inferring Hyperparameters and Regularizers
for Standard Regularization Problems",
YEAR =1997,
journal = "Transactions of the Institute of Electronics, Information
and Communication Engineers",
volume={J80-D-II},
number={9},
pages="2502-2511",
note="In Japanese",
ANNOTE ="Date submitted: ; Date accepted: ;
Collaborating institutes:
Waseda University, Tokyo"}
@Incollection{MacKay97:erice,
author = "D. J. C. MacKay",
title = "Introduction to {M}onte {C}arlo Methods",
publisher = "Kluwer Academic Press",
booktitle={Learning in Graphical Models},
year = "1998",
editor = "M. I. Jordan",
pages = "175-204",
series={NATO Science Series}
}
@article{Fujii_etal99,
AUTHOR ="H. Fujii and D. J. C. MacKay and H. K. D. H. Bhadeshia
and H. Harada and K. Nogi",
TITLE ="Prediction of Creep Rupture Life in {N}ickel-Base Superalloys using {B}ayesian Neural Network",
YEAR ="1999",
journal={J. Japan Inst. Metals},
volume={63},
number={7},
pages={905-911},
note={In Japanese},
ANNOTE ="Submitted Nov 2 98; Accepted Apr 19 1999. Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
@inproceedings{Fujii_etal98,
AUTHOR ="H. Fujii and D. J. C. MacKay and H. K. D. H. Bhadeshia
and H. Harada and K. Nogi",
TITLE ="Estimation of Creep Rupture Strength
in {N}ickel Base Superalloys",
YEAR ="1998",
booktitle={Proceedings 6th Liege Conference on Materials for Advanced Power Engineering},
ANNOTE ="May 1998. conference 5-7 October 1998; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
@article{Ichikawa96,
KEY ="K. Ichikawa",
AUTHOR ="K. Ichikawa and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="Model for Hot Cracking in Low--Alloy Steel Weld Metals",
YEAR =1996,
JOURNAL = "Science and Technology of Welding and Joining",
VOLUME = "1",
PAGES ="43-50"
}
@Incollection{MacKay98:gp,
author = "D. J. C. MacKay",
title = "Introduction to {G}aussian Processes",
publisher = "Kluwer Academic Press",
booktitle={Neural Networks and Machine Learning},
series={NATO ASI Series},
year = "1998",
editor = "C. M. Bishop",
pages = "133-166"
}
@InProceedings{DM_LDPC_ITW98,
author = {M. C. Davey and D. J. C. MacKay},
title = {Low Density Parity Check Codes over {GF}(q)},
booktitle = {Proceedings of the 1998 IEEE Information Theory Workshop},
year = {1998},
month = jun,
organization = {IEEE},
pages={70--71},
key = {Davey and MacKay}
}
%MD_LDPC_IRREG,
@INPROCEEDINGS{MacKayAllerton98,
KEY ="",
AUTHOR ="D. J. C. MacKay and S. T. Wilson and M. C. Davey",
TITLE ="Comparison of Constructions of Irregular {G}allager Codes",
BOOKTITLE ="Proceedings of the 36th Allerton Conference on Communication, Control, and Computing, Sept.\ 1998",
EDITOR ="",
PUBLISHER ="Allerton House",
ADDRESS ="Monticello, Illinois",
YEAR ="1998",
PAGES ="220-229",
ANNOTE ="Date submitted: jun 98; Date accepted: jun 98; MRAO "}
@article{MacKayWilsonDavey98,
KEY ="",
AUTHOR ="D. J. C. MacKay and S. T. Wilson and M. C. Davey",
TITLE ="Comparison of Constructions of Irregular {G}allager Codes",
journal={IEEE Transactions on Communications},
issn={0018-9332},
YEAR ="1999",
PAGES ="1449--1454",
volume={47},number={10},month=oct,
annote={Both co-authors were supervised research students.
Date submitted: jun 98; Date accepted: apr 21 1999; MRAO }}
% To appear. Also available from {\tt{http://www.inference.phy.cam.ac.uk/mackay/abstracts/ldpc-irreg.html}}},
@article{Narayan_etal98,
AUTHOR ="V. Narayan and R. Abad and B. Lopez
and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="Estimation of Hot Torsion Stress Strain Curves in Iron
Alloys Using a Neural Network Analysis",
YEAR ="1998",
JOURNAL = "ISIJ International",
volume={39},
number={10},
pages = {999-1005},
ANNOTE ="Date submitted: Sep 98; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
@article{Yoshitake_etal98,
AUTHOR ="S. Yoshitake and V. Narayan and H. Harada
and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="Estimation of the $\gamma$ and $\gamma'$ Lattice Parameters
in {N}ickel-base Superalloys Using Neural Network Analysis",
YEAR ="1998",
JOURNAL = "ISIJ International",
VOLUME ="38",
number={5},
pages="495-502",
ANNOTE ="Date submitted: Sep 97; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
@article{Singh_etal98,
AUTHOR ="S. B. Singh and H. K. D. H. Bhadeshia and D. J. C. MacKay
and H. Carey and I. Martin",
TITLE ={Neural Network Analysis of Steel Plate Processing},
YEAR ="1998",
JOURNAL = "Ironmaking and Steelmaking",
VOLUME ="25",
number={5},
pages="355--365",
abstract={The process of rolling is very complicated and the number of parameters which determines the final properties
can be very large. It is extremely difficult therefore to develop a physical model for predicting various properties
like yield and tensile strengths. In the present work, a neural network technique which can recognise complex
relationships was employed to develop a quantitative method for estimating the yield and tensile strengths as a
function of steel composition and rolling parameters. The model was tested extensively to confirm that the
predictions are reasonable in the context of metallurgical principles and other data published in the literature.
},
url={http://www.msm.cam.ac.uk/phase-trans/abstracts/processing.html},
ANNOTE ="Date submitted: Jan 98;
Accepted: July 1998; Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science"}
%%% up to here (search ahead for ADD NEW)
% 22 Feb 2001, print issue march 2001
@article{PattersonChildsMacKay00,
title={Exact sampling from non-attractive distributions using summary states},
author={Andrew M. Childs and Ryan B. Patterson and David J. C. MacKay},
year={2001},
journal={Physical Review E},
volume={63},
pages={036113},
annote={036113 (5 pages)}
}
@unpublished{MacKayEncyclopedia98,
KEY ="",
AUTHOR ="D. J. C. MacKay",
TITLE ="Encyclopedia of Sparse Graph Codes",
ADDRESS ="",
YEAR ="1998",
PAGES ="",
note={{\verb|http://www.inference.phy.cam.ac.uk/mackay/|}},
ANNOTE ="Date submitted: ; Date accepted: ; MRAO "}
@unpublished{MacKay99ENC,
KEY ="",
AUTHOR ="D. J. C. MacKay",
TITLE ="Encyclopedia of Sparse Graph Codes (hypertext archive)",
ADDRESS ="",
YEAR ="1999",
PAGES ="",
note={{\verb|http://www.inference.phy.cam.ac.uk/mackay/codes/data.html|}},
ANNOTE ="Date submitted: ; Date accepted: ; MRAO "}
@article{Lalam_etal99a,
AUTHOR ="S. H. Lalam and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="Estimation of the Mechanical Properties of Ferritic Steel Welds: Part {II}: Elongation and {C}harpy Toughness",
YEAR ="2000",
JOURNAL = "Science and Technology of Welding and Joining",
VOLUME ="5",
number={3},
pages="149-160",
ANNOTE ="Date submitted: Dec 99; accepted . Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science."}
@article{Lalam_etal99b,
AUTHOR ="S. H. Lalam and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="The {C}harpy Impact Transition Temperature
for some Ferritic Steel Welds",
YEAR ="2000",
JOURNAL = "Australasian Welding Journal",
VOLUME ="45",
number={},
pages="33--37",
ANNOTE ="Date submitted: Dec 99; accepted . Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science."}
% Science and Technology of Welding and Joining",
@article{Lalam_etal99c,
AUTHOR ="S. H. Lalam and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="{B}ruscato factor in the Temper Embrittlement of Welds",
YEAR ="2000",
JOURNAL = "Science and Technology of Welding and Joining",
VOLUME ="5",
number={5},
pages="338-340",
ANNOTE ="Date submitted: Dec 99; accepted . Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science."}
% mrao number?
% de Sa, V.R., & MacKay, D.J.C. (2001). Model fitting as an Aid to Bridge Balancing in Neuronal Recording. {\it Neurocomputing} (special issue devoted to Proceedings of the CNS 2000 meeting) Vol38-40, 1651--1656.
@article{deSaMacKay2001,
author={de Sa, V. R. and MacKay, D. J. C.},
year={2001},
title={Model fitting as an Aid to Bridge Balancing in Neuronal Recording},
journal={Neurocomputing},
annote={special issue devoted to Proceedings of the CNS 2000 meeting},
volume={38-40},
pages={1651--1656}
}
@article{Yescas_etal01,
AUTHOR ="M. A. Yescas and H. K. D. H. Bhadeshia and D. J. C. MacKay",
TITLE ="Estimation of the amount of retained austenite in austempered ductile irons using neural networks",
YEAR ="2001",
JOURNAL = "Materials Science and Technology",
VOLUME ="A311",
pages="162-173",
ANNOTE ="Date submitted: 21 Aug 2000; accepted . Collaborating institutes:
Cambridge University Department of Metallurgy and
Materials Science."}
% mrao number?
% ADD NEW MacKay papers here (search back for 'up to here')
%
% DJCM marker 1 (another below)
@unpublished{MacKay-irreg-its,
title={Decoding Times of Irregular {G}allager Codes},
author={D. J. C. MacKay},
note={Unpublished},
year={1998}
}
@unpublished{MacKay-ra-its,
title={Decoding Times of Repeat-Accumulate Codes},
author={D. J. C. MacKay},
note={Unpublished},
year={1998}
}
@unpublished{MacKay-SGC-AmblesideI,
title={Sparse Graph Codes},
author={D. J. C. MacKay},
note={Extended Abstract, Ambleside meeting},
year={1999}
}
@inproceedings{MacKay-SGC-AmblesideII,
title={{G}allager Codes -- Recent Results},
author={D. J. C. MacKay},
booktitle={Coding, Communications and Broadcasting},
annote={Proceedings of International Symposium on Communication Theory and
Applications, Ambleside, 1999},
publisher={Research Studies Press},
address={Baldock, Hertfordshire, England},
editor={P. Farrell and M. Darnell and B. Honary},
pages={139-150},
year={2000}
}
% MacKay Kschischang and Frey
% compound codes
% Frey and MacKay
% shortened turbo codes and irregular turbo codes
@INPROCEEDINGS{FreyMacKay00c,
author={B. J. Frey and David J. C. MacKay},
TITLE =" Knowing when to stop",
BOOKTITLE ="Proceedings of the 38th Allerton Conference on Communication, Control, and Computing, Sept.\ 2000",
EDITOR ="",
PUBLISHER ="Allerton House",
ADDRESS ="Monticello, Illinois",
YEAR ="2000",
PAGES ="",
ANNOTE ="Date submitted: ; Date accepted: ; MRAO "}
@inproceedings{FreyMacKay00b,
title={Irregular Turbocodes},
author={B. J. Frey and David J. C. MacKay},
year={2000},
booktitle={Proceedings 2000 IEEE International Symposium on Information Theory},
pages={121},
annote={ISIT 2000}
}
@inproceedings{DaveyMacKay99a,
title={Watermark Codes: Reliable communication over Insertion/Deletion channels.},
author={Matthew C. Davey and David J. C. MacKay},
year={2000},
booktitle={Proceedings 2000 IEEE International Symposium on Information Theory},
pages={477},
annote={ISIT 2000}
}
@article{DaveyMacKay99b,
title={Reliable communication over channels with insertions, deletions and substitutions.},
author={Matthew C. Davey and David J. C. MacKay},
year={2001},
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={687-698},
annote={Submitted 1999}
}
@inproceedings{RatzerMacKay00,
title={Codes for Channels with Insertions, Deletions and Substitutions},
author={Edward A. Ratzer and David J. C. MacKay},
pages={149-156},
booktitle={Proceedings of 2nd International Symposium on Turbo Codes and Related Topics,
Brest, France, 2000},
year={2000}
}
@inproceedings{FreyMacKay00,
title={Irregular Turbo-like Codes},
author={B. J. Frey and David J. C. MacKay},
pages={67-72},
booktitle={Proceedings of 2nd International Symposium on Turbo Codes and Related Topics,
Brest, France, 2000},
year={2000}
}
@unpublished{MacKay00RLLT,
title={An Alternative to Runlength-limiting Codes:
Turn Timing Errors into Substitution Errors},
author={David J. C. MacKay},
year={2000},
abstractURL="http://www.inference.phy.cam.ac.uk/mackay/abstracts/rllt.html",
postscriptURL="ftp://www.inference.phy.cam.ac.uk/pub/mackay/rllt.ps.Z",
note={available from {\tt{http://www.inference.phy.cam.ac.uk/mackay/}}}
}
@inproceedings{MacKay99RLL,
title={Almost-certainly Runlength-limiting Codes},
author={David J. C. MacKay},
booktitle={Proceedings of the
IMA Cryptography and Coding Conference 2001},
publisher={Springer Verlag},
year={2001},
note={Also available from {\tt www.inference.phy.cam.ac.uk/mackay}}
}
@InCollection{McElieceMacKay00,
AUTHOR ="S. Aji and H. Jin and A. Khandekar and R. J. McEliece and D. J. C. MacKay",
title={{BSC} Thresholds for Code Ensembles based on `Typical Pairs' Decoding},
booktitle = {Codes, Systems and Graphical Models},
volume={123},
series = {IMA Volumes in Mathematics and its Applications},
publisher = {Springer-Verlag},
year = 2000,
editor = {B. Marcus and J. Rosenthal},
address = {New York}
}
@InCollection{MacKayDaveyR3,
AUTHOR ="D. J. C. MacKay and M. C. Davey",
title={Two Small {G}allager codes},
booktitle = {Codes, Systems and Graphical Models},
volume={123},
series = {IMA Volumes in Mathematics and its Applications},
publisher = {Springer-Verlag},
year = 2000,
editor = {B. Marcus and J. Rosenthal},
address = {New York}
}
% seagate
@InCollection{MacKayHighRate98,
KEY ="",
AUTHOR ="D. J. C. MacKay and M. C. Davey",
TITLE ="Evaluation of {G}allager Codes for Short Block Length and High Rate
Applications",
booktitle = {Codes, Systems and Graphical Models},
volume={123},
series = {IMA Volumes in Mathematics and its Applications},
publisher = {Springer-Verlag},
year = 2000,
pages={113-130},
editor = {B. Marcus and J. Rosenthal},
address = {New York},
url={http://www.inference.phy.cam.ac.uk/mackay/CodesRegular.html}
}
@inproceedings{ward2000,
author="D. J. Ward and A. F. Blackwell and D. J. C. MacKay",
year="2000",
booktitle="Proceedings of {User Interface Software and Technology} 2000",
title="Dasher -- {A} Data Entry Interface Using Continuous Gestures and Language Models",
pages="129-137",
annote={The 13th Annual ACM Symposium on User Interface Software and Technology}
}
@article{ward2002,
author="D. J. Ward and A. F. Blackwell and D. J. C. MacKay",
title="Dasher -- {A} Data Entry Interface Using Continuous Gestures and Language Models",
year="2002",
journal={Human-Computer Interaction},
volume={17},
number={2-3},
pages={}
}
@article{wardmackay2002,
author="D. J. Ward and D. J. C. MacKay",
title="Fast Hands-free writing by Gaze Direction",
year="2002",
journal={Nature},
volume={418},
number={6900},
pages={838},
url={http://www.inference.phy.cam.ac.uk/mackay/abstracts/eyeshortpaper.html}
}
@unpublished{Dasher00,
title={Dasher -- a Data Entry Interface Using Continuous Gestures and
Language Models},
author={David J. Ward and Alan F. Blackwell and David J. C. MacKay},
year={2000},
note={submitted to ACM conference}
}
@unpublished{MacKay00Thresholds,
title={On Thresholds of Codes},
author={David J. C. MacKay},
year={2000},
note={{\tt www.inference.phy.cam.ac.uk/mackay/abstracts/theorems.html}}
}
@unpublished{MacKay00Puncture,
title={Punctured and Irregular High-Rate {G}allager Codes},
author={David J. C. MacKay},
year={2000},
note={{\tt www.inference.phy.cam.ac.uk/mackay/abstracts/puncture.html}}
}
% kji
@article{Inskip2001,
title={{K-Z} diagrams for {3CR} and {6C} Galaxies
in a selection of Different Cosmologies},
author={K. J. Inskip and P. N. Best and M. S. Longair and D. J. C. MacKay},
journal={M.N.R.A.S.},
year={2002},
pages={277}, volume={329}, number={2},
note={astro-ph/0110054}
}
@techreport{MYFW2001,
author={D. J. C. MacKay and J. S. Yedidia and W. T. Freeman and Y. Weiss},
title={A Conversation about the {B}ethe Free Energy and Sum-Product},
institution={Cambridge University / Mitsubishi},
note={MERL TR-2001-18},
year={2001},
url={http://www.merl.com/reports/TR2001-18/index.html or http://www.inference.phy.cam.ac.uk/mackay/abstracts/bethe.html}
}
% non-negative
@incollection{ obd00nonnegative,
author = "Downs, O. B. and MacKay, D. J. C. and Lee, D. D. ",
title = "The Nonnegative Boltzmann Machine",
booktitle = "Advances in Neural Information Processing Systems 12",
editor={Solla, S. A. and Leen, T. K. and M{\"u}ller, K.-R.},
publisher={MIT Press},
year = "2000",
url = "citeseer.nj.nec.com/downs00nonnegative.html",
abstract={
The nonnegative Boltzmann machine (NNBM) is a recurrent neural network
model that can describe multimodal nonnegative data. Application of maximum likelihood
estimation to this model gives a learning rule that is analogous to the binary Boltzmann
machine. We examine the utility of the mean field approximation for the NNBM, and
describe how Monte Carlo sampling techniques can be used to learn its parameters. Reflective slice sampling is particularly
well-suited for this distribution, and can efficiently be implemented to sample the distribution. We illustrate learning of the
NNBM on a translationally invariant distribution, as well as on a generative model for images of human.}
}
@PhdThesis{MiskinPHD,
author = {James W. Miskin},
title = {Ensemble Learning for Independent Component Analysis},
school = {Department of Physics, University of Cambridge},
year = 2001,
url={http://www.inference.phy.cam.ac.uk/jwm1003/}
}
@InProceedings{miskin1,
author = {Miskin, J. W. and MacKay, D. J. C.},
title = {Ensemble Learning for Blind Image Separation and Deconvolution},
booktitle = {Advances in Independent Component Analysis},
editor={M. Girolami},
publisher = {Springer-Verlag Scientific Publishers},
year = {2000},
url={http://www.inference.phy.cam.ac.uk/jwm1003/}
}
@InProceedings{miskin2,
author = {Miskin, J. W. and MacKay, D. J. C.},
title = {Application of Ensemble Learning to Infra-Red Imaging},
booktitle = {Proceedings of the Second International Workshop on Independent Component Analysis and Blind Signal Separation},
pages = {399-404},
year = {2000}
}
@InCollection{miskin3,
author = {Miskin, J. W. and MacKay, D. J. C.},
title = {Ensemble Learning for Blind Source Separation},
booktitle={ICA: Principles and Practice},
editor={S.J. Roberts and ?????????},
publisher={Cambridge University Press}
}
@techreport{mackaymiskin01,
title={Latent Variable Models for Gene Expression Data},
author={David J. C. MacKay and James W. Miskin},
year=2001,
url={http://www.inference.phy.cam.ac.uk/mackay/abstracts/icagenes.html}
}
@techreport{MacKayDecision2001,
author={D. J. C. MacKay},
title={A Conversation about the {B}ethe Free Energy and Sum-Product},
institution={Cambridge University / Mitsubishi},
note={MERL TR-2001-??},
year={2000}
}
@article{SkillingMacKay2002,
title={Slice Sampling -- a Binary Implementation},
author={John Skilling and David J.C. MacKay},
year={2001},
url={http://www.inference.phy.cam.ac.uk/mackay/abstracts/slice.html},
journal={Annals of Statistics},
note={to appear}
}
@unpublished{Bloj2001,
author="Roger F. Sewell and David J. C. MacKay and Iain McLean", year=2001, title={A maximum entropy approach to fair elections}, note={In preparation}}
% MISKIN PAPERS?????????????
@inproceedings{ChristelleEtAl2002,
author={Christelle Royer Crotaz and Hugh Shercliffe and David J. C. MacKay},
title={},
booktitle={ICAA8},
year={2002}
}
@techreport{MacKayEuro2002,
author={David J. C. MacKay},
title={Belgian euro coins: 140 heads in 250 tosses - suspicious?},
institution={University of Cambridge, Department of Physics},
note={Available online from
http://www.inference.phy.cam.ac.uk/mackay/abstracts/euro.html},
url={http://www.inference.phy.cam.ac.uk/mackay/abstracts/euro.html},
year={2002}
}
% ADD NEW MacKay papers here (search back for 'up to here') HERE
%
% DJCM marker 2
% used my software:
@article{Grylls1997,
title={Mechanical properties of a high-strength cupronickel alloy --
{B}ayesian neural network analysis},
author={Grylls, R. J.},
journal={Materials Science and Engineering A-Structural Materials Properties
Microstructure and Processing},
year={1997},
volume={234},
pages={267--270},
abstract={In this work the mechanical properties of a highly alloyed
cupronickel have been analyzed using a neural network technique
within a Bayesian framework. In this way the mechanical properties
can be represented as an empirical function of the compositional
variables. This method has been used to analyze the relative
contributions of the various elements to the mechanical properties.
Whilst the method is entirely empirical, it will be shown that the
predictions made are of metallurgical significance. (C) 1997 Elsevier
Science S.A.}
}
% -----------------------------------
% INDEX:
% -----------------------------------
% SPIN GLASS PAPERS
% STATISTICS AND NEURAL NETS
%
% I wonder if I can still find Hodge and Seed
%
% LUTTRELL
% BM'S, MEAN FIELD THEORY
% TSP
% BASIC NEURAL NET REFS
% HEBBIAN, LINSKER
% NUMERICAL
% GULL, SKILLING, OCCAM, MAXENT, MDL
% NEURAL NETS OPTIMISATION OF number parameters, regularisers, etc.
% OTHER PAPERS ON OCCAM
%%%%%%% QECC References %%%%%%%%%%%%%%%%%%%%%%%
@Book{NC,
author = "M.~Nielsen and I.~Chuang",
title = "Quantum Computation and Quantum Information",
publisher = "Cambridge University Press",
address = "Cambridge",
year = "2000",
}
@InCollection{Steane,
author = {A.~Steane},
title = {Quantum Error Correction},
booktitle = {Introduction to Quantum Computation and Information},
publisher = {World Scientific},
year = 1998,
editor = {H. K. Lo and S. Popescu and T. Spiller}
}
@Misc{Preskill,
author = {J.~Preskill},
title = {Lecture Notes for Physics 219: Quantum Information and Computation},
howpublished = {Available from http://www.theory.caltech.edu/people/preskill/ph219},
year={2001}
}
@Article{shor95,
author = {P. W. Shor},
title = {Quantum Error-Correction},
journal = {Phys. Rev. A},
year = 1995,
volume = 52,
number = {R2493}
}
@Misc{GF(4)codes,
author = {A. R. Calderbank and E. M. Rains and P. W. Shor and N. J. A. Sloane},
title = {Quantum Error Correction via Codes over {GF(4)}},
howpublished = {quant-ph/9608006},
year = 1997
}
@Article{Steane96,
author = {A. Steane},
title = {Quantum Error Correcting Codes},
journal = {Phys. Rev. Lett.},
year = 1996,
volume = 77,
pages = 793
}
@Article{gottesman,
author = {D. Gottesman},
title = {A Class of Quantum Error-Correcting Codes saturating the
Quantum Hamming Bound},
journal = {Phys. Rev. A},
year = 1996,
volume = 54,
pages = 1862
}
@Article{Orthog_geometry,
author = {A. R. Calderbank and E. M. Rains and P. W. Shor and N. J. A. Sloane},
title = {Quantum Error-Correction and Orthogonal Geometry},
journal = {Phys. Rev. Lett.},
year = 1997,
volume = 78,
pages = 405
}
@Article{SteaneCSS,
author = {A. Steane},
title = {Multiple Particle Interference and Quantum Error Correction},
journal = {Proc. Roy. Soc. Lond. A},
year = 1996,
volume = 452,
pages = 2551
}
@Article{Landauer,
author = {R. Landauer},
title = {Irreversibility and heat generation in the computing process},
journal = {IBM J. Res. Dev.},
year = 1961,
volume = 5,
pages = 183
}
@article{CaldShor96,
title={Good Quantum Error-Correcting Codes Exist},
author={A. R. Calderbank and Peter W. Shor},
address={AT\&T Research},
journal={Phys. Rev. A}, Volume=54, Number=2, pages={1098-1106}, year=1996,
note={quant-ph/9512032}
}
@Article{ShorCSS,
author = {A. R. Calderbank and P. W. Shor},
title = {Good Quantum Error-Correcting Codes Exist},
journal = {Phys. Rev. A},
year = 1996,
volume = 54,
pages = 1098,
note={quant-ph/9512032}
}
@Misc{Ashikhmin,
author = {A. Ashikhmin and S. Litsyn and M. A. Tsfasman},
title = {Asymptotically Good Quantum Codes},
howpublished = {quant-ph/0006061},
year = 2000
}
% -----------------------------------
@UNPUBLISHED{WWWturbo,
key = {jpl},
author = {JPL},
title = {Turbo Codes Performance},
year = 1996,
month = aug,
note = {Available from {\tt http://www331.jpl.nasa.gov/public/TurboPerf.html}}
}
@UNPUBLISHED{WWWcodes,
key = {jpl},
author = {JPL},
title = {Code Imperfectness},
year = 1999,
note = {{\tt www331.jpl.nasa.gov/public/imperfectness.html}},
annote = {{\tt http://www331.jpl.nasa.gov/public/imperfectness.html}}
}
@Book{frey-97c,
key = "Frey",
author = "B.~J. Frey",
title = "Bayesian Networks for Pattern Classification, Data
Compression and Channel Coding",
publisher = "Department of Electrical and Computer Engineering,
University of Toronto",
address = "Toronto Canada",
year = "1997",
note = "Doctoral dissertation available at
{\tt http://www.cs.utoronto.ca/$\sim$frey}"
}
@Book{frey-98,
key = "Frey",
author = "B.~J. Frey",
title = "Graphical Models for Machine Learning and Digital
Communication",
publisher = "MIT Press",
address = "Cambridge MA.",
annote = {See {\tt http://www.cs.utoronto.ca/$\sim$frey}},
year = "1998"
}
% see also Yang and Ryan Globecom march 20 2001.
% burst noise for ldpcc
@INPROCEEDINGS{Worthen,
KEY ="",
AUTHOR ="Worthen, A.P. and Stark, W.E.",
TITLE ="Low-Density Parity Check Codes for Fading Channels with Memory",
BOOKTITLE ="Proceedings of the 36th Allerton Conference on Communication, Control, and Computing, Sept.\ 1998",
EDITOR ="",
PUBLISHER ="",
ADDRESS ="",
YEAR ="1998",
PAGES ="117-125" }
@article{Worthen2001,
AUTHOR ="Worthen, A.P. and Stark, W.E.",
TITLE ="Unified Design of Iterative Receivers Using Factor Graphs",
year={2001},
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={843-849}
}
@techreport{bwt,
author={Michael Burrows and D. J. Wheeler},
title={A block-sorting lossless data compression algorithm},
year={1994},
institution={Digital SRC},
note={Research Report 124. 10th May 1994},
url={ftp://ftp.digital.com/pub/DEC/SRC/research-reports/SRC-124.ps.gz},
annote={If you have trouble finding it, try searching at the
New Zealand Digital Library, http://www.nzdl.org.}
}
% SPIN GLASS PAPERS
@TECHREPORT{Yau.tr,
KEY ="Yau and Wallace",
AUTHOR ="H. W. Yau and D. J. Wallace",
TITLE ="Basins of
Attraction in Sparse Neural Network Models with Persistent Inputs,",
YEAR ="1990",
NUMBER ="In preparation",
INSTITUTION ="Edinburgh University"}
@ARTICLE{Yau,
AUTHOR = "H W Yau and D J Wallace",
TITLE = "Enlarging the Attractor Basins of Neural Networks with Noisy
External Fields",
JOURNAL = "Journal of Physics A: Maths and General",
YEAR = "1991",
VOLUME = "24",
PAGES = "5639--5650"}
@ARTICLE{BDS,
KEY ="Buhmann et. al.",
AUTHOR ="J. Buhmann and R. Divko and K. Schulten",
TITLE ="Associative memory with high information content",
JOURNAL ="preprint",
YEAR ="1988",
VOLUME ="",
NUMBER ="",
PAGES =""}
% Papers on hop
@ARTICLE{Hopfield82,
KEY ="Hopfield",
AUTHOR ="J. J. Hopfield",
TITLE ="Neural Networks and physical
systems with emergent collective computational abilities",
JOURNAL ="Proc. Natl. Acad.
Sci. USA",
YEAR ="1982",
VOLUME ="79",
NUMBER ="",
PAGES ="2554--8"}
@ARTICLE{Hopfield84,
KEY ="Hopfield",
AUTHOR ="J. J. Hopfield",
TITLE ="Neurons with
graded response properties have collective computational properties like those of
two--state Neurons",
JOURNAL ="Proc. Natl. Acad. Sci. USA",
YEAR ="1984",
VOLUME ="81",
NUMBER ="",
PAGES ="3088--92"}
@ARTICLE{Hopfield_Tank,
KEY ="Hopfield and Tank",
AUTHOR ="J. J. Hopfield and D. W. Tank",
TITLE ="Neural Computation of Decisions in Optimization Problems",
JOURNAL ="Biological Cybernetics",
YEAR ="1985",
VOLUME ="52",
NUMBER ="",
PAGES ="1-25"}
@ARTICLE{Hopfield87,
KEY ="Hopfield",
AUTHOR ="J. J. Hopfield",
TITLE ="Learning algorithms
and probability distributions in feed--forward and feed--back
networks",
JOURNAL ="Proc. Natl. Acad. Sci. USA",
YEAR ="1987",
VOLUME ="84",
NUMBER ="",
PAGES ="8429--33"}
% Discussion of introduction of biases or low levels of activity: see Amit in Network1
% Best ref:
@article{RS89,
  key     = {Rubin and Sompolinsky},
  author  = {N. Rubin and H. Sompolinsky},
  title   = {Neural Networks with low local firing rates},
  journal = {Europhys. Lett.},
  year    = {1989},
  volume  = {8},
  pages   = {465}}
% they study thetas that look linear in average background but may not be..
% Applied field references: see 0.14 below. But the first good paper is:
@article{EES89,
  key     = {Engel et al.},
  author  = {A. Engel and H. English and A. Schutte},
  title   = {Improved retrieval in Neural Networks with external fields},
  journal = {Europhys. Lett.},
  year    = {1989},
  volume  = {8},
  pages   = {393}}
@article{Amit87b,
  key     = {Amit et. al.},
  author  = {D. J. Amit and H. Gutfreund and H. Sompolinsky},
  title   = {Information storage in Neural Networks with low levels of activity},
  journal = {Phys. Rev. A},
  year    = {1987},
  volume  = {35},
  pages   = {2293}}
% Hop capacity:
@article{Amit85,
  key     = {Amit et. al.},
  author  = {D. J. Amit and H. Gutfreund and H. Sompolinsky},
  title   = {Spin glass models of Neural Networks},
  journal = {Phys. Rev. A},
  year    = {1985},
  volume  = {32},
  pages   = {1007}}
% the above only discusses the case alpha -> 0, constant P, N-> infty.
% It derives T = 0.46 Tc for hopfield prescription
% p=0.14N derived in
@article{Amit85b,
  key     = {Amit et. al.},
  author  = {D. J. Amit and H. Gutfreund and H. Sompolinsky},
  title   = {Storing infinite numbers of patterns in a spin glass model of Neural Networks},
  journal = {Phys. Rev. Lett.},
  year    = {1985},
  volume  = {55},
  pages   = {1530--1533}}
@article{Amit87,
  key     = {Amit et. al.},
  author  = {D. J. Amit and H. Gutfreund and H. Sompolinsky},
  title   = {Statistical mechanics of Neural Networks near saturation},
  journal = {Ann. Phys. (New York)},
  year    = {1987},
  volume  = {173},
  pages   = {30--67}}
% ^^This one is probably the best 0.14 ref, and
% has a lot more in it too. It even discusses applied fields and dismisses
% them because they imagine the field being fixed, regardless of the cue vector.
% Blackout is discussed in
% J-P Nadal, G. Toulouse, J.P Changeux, and S. Dehaene, 1986, Networks of formal
% Neurons and memory palimpsests. Europhys Lett 1 535
% Blackout = loss of memories due to overload. Their paper suggests weight decay [?]
% So as to not go above capacity.
% Pseudoinverse refs can be found referred to in Gardner 1987 below. They get cap =1.
% alpha =2 is derived in
@article{Gardner,
  key     = {Gardner},
  author  = {E. J. Gardner},
  title   = {Maximum storage capacity of Neural Networks},
  journal = {Europhys. Lett.},
  year    = {1987},
  volume  = {4},
  pages   = {481}}
% Other Gardner refs:
% B. Derrida and E. J. Gardner and A. Zippelius, Europhys Lett 4 1987 167
% E.J. Gardner B. Derrida and Mottishaw, J. Phys (paris) 48 1987 441
% E.J. Gardner J. Phys A 19 1986 L 1047
% A. Bruce, E.J. Gardner and D.J. Wallace, J. Phys A 20 1987 A 2909
% `Dynamics and Statistical Mechanics of the {H}opfield Model'
% The latter two are meant to include derivation of 0.14-like results.
%
% Duplicate entry disabled: this record reused the citation key `Amit85'
% (already defined above for Phys. Rev. A 32, 1007) and duplicated the
% content of `Amit87' (Ann. Phys. 173, 30) with an incorrect year of 1985.
% BibTeX warns on repeated keys and uses the first occurrence; cite
% `Amit87' for the Ann. Phys. saturation paper.
@article{Amit90,
  key     = {Amit et. al.},
  author  = {D. J. Amit and G. Parisi and S. Nicolis},
  title   = {Neural Potentials as stimuli for attractor Neural Networks},
  journal = {Network},
  year    = {1990},
  volume  = {1},
  number  = {1},
  pages   = {75--88}}
@article{Galland93,
  author  = {C. C. Galland},
  title   = {The Limitations of Deterministic {B}oltzmann Machine Learning},
  journal = {Network},
  year    = {1993},
  volume  = {4},
  number  = {3},
  pages   = {355--379}}
%
% Boosting
%
@article{boosting97,
  author  = {Freund, Y. and Schapire, R. E.},
  title   = {A decision-theoretic generalization of on-line learning and an application to boosting},
  year    = {1997},
  journal = {Journal of Computer and System Sciences},
  volume  = {55},
  number  = {1},
  pages   = {119--139},
}
%
@inproceedings{boosting95,
  author    = {Freund, Y. and Schapire, R. E.},
  title     = {A decision-theoretic generalization of on-line learning and an application to boosting},
  year      = {1995},
  booktitle = {Proceedings of the Second European Conference on Computational Learning Theory},
  pages     = {23--37},
}
%
@inproceedings{boosting96,
  author    = {Freund, Y. and Schapire, R. E.},
  title     = {Experiments with a New Boosting Algorithm},
  year      = {1996},
  booktitle = {Proceedings of the Thirteenth International Conference on Machine Learning},
}
%
@article{Levenshtein66,
  author  = {V. I. Levenshtein},
  title   = {Binary Codes capable of correcting deletions, insertions, and reversals},
  journal = {Soviet Physics - Doklady},
  year    = {1966},
  volume  = {10},
  number  = {8},
  pages   = {707--710},
  month   = feb
}
@article{Ferreira97,
  author  = {H. C. Ferreira and W. A. Clarke and A. S. J. Helberg and
             Abdel-Ghaffar, K. A. S. and Han Vinck, A. J.},
  title   = {Insertion/Deletion Correction with Spectral Nulls},
  journal = {IEEE Trans. Info. Theory},
  year    = {1997},
  volume  = {43},
  number  = {2},
  pages   = {722--732},
  month   = mar,
}
@inproceedings{boosting98,
  author    = {Schapire, R. E. and Singer, Y.},
  title     = {Improved Boosting Algorithms using Confidence-rated Predictions},
  year      = {1998},
  booktitle = {Proceedings of the Eleventh Annual Conference on Computational Learning Theory},
}
%
@unpublished{boostingFHT98,
  author = {Friedman, J. and Hastie, T. and Tibshirani, R.},
  title  = {Additive Logistic Regression: a Statistical View of Boosting},
  note   = {Tech. report available from
            \texttt{http://www-stat.stanford.edu/\textasciitilde tibs/research.html}},
  year   = {1998},
}
%
@article{boosting90,
  author  = {R. Schapire},
  title   = {The Strength of Weak Learnability},
  journal = {Machine Learning},
  year    = {1990},
  volume  = {5},
  number  = {2},
  pages   = {197--227},
}
@incollection{mihaljevic_and_golic92,
  key       = {Mihaljevic, M.J. and Golic, J.D.},
  author    = {M. J. Mihaljevi\'c and J. D. Goli\'c},
  title     = {A Fast Iterative Algorithm for a Shift Register Initial
               State Reconstruction given the Noisy Output Sequence},
  booktitle = {Advances in Cryptology - AUSCRYPT'90},
  series    = {Lecture Notes in Computer Science},
  year      = 1990,
  volume    = 453,
  publisher = {Springer-Verlag},
  pages     = {165--175}}
% should this date be 1990?
@incollection{mihaljevic_and_golic93,
  key       = {Mihaljevic, M.J. and Golic, J.D.},
  author    = {M. J. Mihaljevi\'c and J. D. Goli\'c},
  title     = {Convergence of a {B}ayesian iterative
               error-correction procedure on a noisy shift register sequence},
  booktitle = {Advances in Cryptology - EUROCRYPT 92},
  series    = {Lecture Notes in Computer Science},
  volume    = 658,
  publisher = {Springer-Verlag},
  pages     = {124--137},
  year      = 1993}
% check year
@article{Anderson93,
  key     = {Anderson},
  author  = {R. J. Anderson},
  title   = {Faster Attack on Certain Stream Ciphers},
  journal = {Electronics Letters},
  year    = {1993},
  volume  = {29},
  number  = {15},
  pages   = {1322--1323}}
@inproceedings{Anderson94,
  key       = {Anderson},
  author    = {R. J. Anderson},
  title     = {Searching for the optimum correlation attack},
  booktitle = {Fast Software Encryption (Proceedings of 1994 K.U. Leuven Workshop on
               Cryptographic Algorithms)},
  editor    = {B. Preneel},
  series    = {Lecture Notes in Computer Science},
  year      = 1995,
  publisher = {Springer-Verlag},
  pages     = {179--195}}
% `` Searching for the optimum correlation attack", in Preproceedings of
% the KU Leuven Workshop on Cryptographic Algorithms, pp 56 - 62; and in
% full proceedings to be published in Springer LNCS series
%
% Incidentally, the similar reference for your paper is pp 86 - 98. The
% preproceedings will be posted tomorrow,
@inproceedings{JPLcode,
  author    = {L. Swanson},
  title     = {A New Code for {G}alileo},
  pages     = {94--95},
  booktitle = {Proc. 1988 IEEE International Symposium on Information Theory},
  year      = {1988}
}
% (3) Unfortunately the JPL guys never published anything
% external about the Galileo Code. I could look up an internal JPL
% report, but there was at least an announcement of the code at
% the 1988 Information Theory Symposium
@article{McEliece1987,
  title   = {The Capacity of the {H}opfield Associative Memory},
  author  = {McEliece, R. J. and Posner, E. C. and Rodemich, E. R. and Venkatesh, S. S.},
  journal = {IEEE Transactions on Information Theory},
  year    = {1987},
  volume  = {33},
  number  = {4},
  pages   = {461--482}
}
@book{McEliece77,
  author    = {R. J. McEliece},
  title     = {The Theory of Information and Coding: A Mathematical
               Framework for Communication},
  year      = 1977,
  publisher = {Addison-Wesley},
  address   = {Reading, Mass.},
  annote    = {Cambridge: Cambridge University Press, 1984,
               [Univ. Lib.] 351:5.c.95.153
               South Front 4}}
@article{Gallager62,
  author  = {Gallager, R. G.},
  title   = {Low Density Parity Check Codes},
  journal = {IRE Trans. Info. Theory},
  year    = 1962,
  volume  = {IT-8},
  pages   = {21--28},
  month   = jan,
  annote  = {cited by Mihaljevic}
}
@book{Guggenheim52,
  author    = {E. A. Guggenheim},
  title     = {Mixtures},
  publisher = {Oxford University Press},
  year      = 1952}
@article{FowlerGuggenheim40,
  author  = {R. H. Fowler and E. A. Guggenheim},
  title   = {Statistical Thermodynamics of Super-lattices},
  journal = {Proc. Roy. Soc. A},
  year    = {1940},
  volume  = {174},
  pages   = {189--206}
}
@book{Hinch,
  author    = {Hinch, E. J.},
  title     = {Perturbation Methods},
  publisher = {Cambridge University Press},
  year      = {1991}}
% Author: Hinch, E. J.
% Title: Perturbation methods/ E.J. Hinch
% Cambridge: Cambridge University Press, 1991
% xi,160p; 24cm
% Series title: Cambridge texts in applied mathematics
% Subjects: Perturbation (Mathematics)
%
% Location: [Univ. Lib.] 349:5.c.95.496
% Location: [Univ. Lib.] 1994.9.1478 (paperback issue)
% 349:5.c.95.496 South Front 4
% Not on loan
% 1994.9.1478 (paperback issue)
% Order in West Room (Central Desk)
% Not ordinarily borrowable
@book{ORuanaidh,
  author    = {{\'O} Ruanaidh, J. J. K. and Fitzgerald, W. J.},
  title     = {Numerical {Bayesian} Methods Applied to Signal Processing},
  address   = {New York},
  publisher = {Springer},
  year      = {1996},
  series    = {Statistics and Computing Series}
}
% Location: [Trinity College] RR 335 O 3
% recommended for rayleigh
@book{Khinchin,
  author    = {A. I. Khinchin},
  title     = {Mathematical Foundations of Information Theory},
  publisher = {Dover},
  year      = 1957,
  address   = {New York},
  annote    = {Cavendish 39 K 5.}}
@book{Schwartz,
  author    = {L. S. Schwartz},
  title     = {Principles of Coding, Filtering and Information Theory},
  publisher = {Spartan Books},
  year      = 1963,
  address   = {Baltimore},
  annote    = {Cav: 39 S 1}}
@article{Shannon48,
  author  = {Shannon, C. E.},
  title   = {A Mathematical Theory of Communication},
  journal = {Bell Sys. Tech. J.},
  volume  = 27,
  pages   = {379--423, 623--656},
  year    = 1948
}
@book{Shannon&Weaver,
  author    = {Shannon, C. E. and Weaver, W.},
  title     = {The Mathematical Theory of Communication},
  publisher = {Univ. of Illinois Press},
  year      = 1949,
  address   = {Urbana},
  annote    = {Cav: 39 S 2}}
@book{shannon93,
  author    = {Shannon, C. E.},
  title     = {Collected Papers},
  publisher = {IEEE Press},
  address   = {New York},
  year      = {1993},
  note      = {Edited by N. J. A. Sloane and A. D. Wyner}}
% editor={N. J. A. Sloane and A. D. Wyner},
% 84 S 101
@incollection{shannon44,
  author    = {Shannon, C. E.},
  title     = {The Best Detection of Pulses},
  booktitle = {Collected Papers of Claude Shannon},
  editor    = {N. J. A. Sloane and A. D. Wyner},
  publisher = {IEEE Press},
  address   = {New York},
  year      = {1993},
  pages     = {148--150}}
@book{Pierce,
  author    = {Pierce, J. R.},
  title     = {An Introduction to Information Theory},
  publisher = {Dover},
  year      = 1980,
  edition   = {Second},
  address   = {New York},
  annote    = {Subtitle: Symbols, Signals and Noise. Cav: 39 P 5.}
}
@book{Peretto,
  author    = {Peretto, P.},
  title     = {An Introduction to the Modeling of Neural Networks},
  publisher = {Cambridge University Press},
  year      = 1992,
  annote    = {Cav: 39 P 8}
}
% Has replica theory then temporal nets.
% linear separability, the Cover limit
% Perceptrons
% Backprop (but with J_ij as the weights !
% Kohonen nets
% nets for optimization
% ------ looks a reasonable book.
@book{kohonen-84,
  key       = {Kohonen},
  author    = {Kohonen, T.},
  title     = {Self-Organization and Associative Memory},
  edition   = {Second},
  publisher = {Springer-Verlag},
  address   = {Berlin},
  year      = {1984},
  annote    = {Edition/year mismatch in the original record: the 2nd edn.
               appeared in 1988, the 1st in 1984 -- verify which is cited.}
}
@techreport{Honkela96tr,
  author      = {Timo Honkela and Samuel Kaski and Krista Lagus and Teuvo Kohonen},
  title       = {Newsgroup exploration with {WEBSOM} method and browsing interface},
  institution = {Helsinki University of Technology, Laboratory of
                 Computer and Information Science},
  year        = 1996,
  number      = {A32},
  address     = {Espoo, Finland}}
@article{Honkela96alma,
  author  = {Timo Honkela and Samuel Kaski and Krista Lagus and Teuvo Kohonen},
  title   = {Self-organizing maps of document collections},
  journal = {ALMA},
  year    = 1996,
  volume  = 1,
  number  = 2,
  note    = {Electronic Journal, address http://www.diemme.it/~luigi/alma.html}}
@incollection{Lagus96,
  author    = {Krista Lagus and Samuel Kaski and Timo Honkela and Teuvo Kohonen},
  title     = {Browsing digital libraries with the aid of self-organizing maps},
  booktitle = {Proceedings of the Fifth International World Wide
               Web Conference WWW5, May 6-10, Paris, France},
  publisher = {EPGL},
  year      = 1996,
  volume    = {Poster Proceedings},
  pages     = {71--79}}
@incollection{Honkela96,
  author    = {Timo Honkela and Samuel Kaski and Krista Lagus and Teuvo Kohonen},
  title     = {Exploration of full-text databases with self-organizing maps},
  booktitle = {Proceedings of the ICNN96, International Conference
               on Neural Networks},
  publisher = {IEEE Service Center},
  year      = 1996,
  volume    = {I},
  address   = {Piscataway, NJ},
  pages     = {56--61}}
@incollection{Kohonen96icann,
  author    = {Teuvo Kohonen and Samuel Kaski and Krista Lagus and Timo Honkela},
  title     = {Very large two-level {SOM} for the browsing of newsgroups},
  booktitle = {Proceedings of ICANN96, International Conference on
               Artificial Neural Networks, Bochum, Germany, July
               16-19, 1996},
  publisher = {Springer},
  year      = 1996,
  editor    = {C. von der Malsburg and W. von Seelen and
               J. C. Vorbr{\"u}ggen and B. Sendhoff},
  series    = {Lecture Notes in Computer Science},
  volume    = {1112},
  address   = {Berlin},
  pages     = {269--274}}
@incollection{Lagus96step,
  author    = {Krista Lagus and Timo Honkela and Samuel Kaski and Teuvo Kohonen},
  title     = {{WEBSOM} -- A Status Report},
  booktitle = {Proceedings of STeP'96, Finnish Artificial
               Intelligence Conference},
  publisher = {Finnish Artificial Intelligence Society},
  year      = 1996,
  editor    = {Jarmo Alander and Timo Honkela and Matti Jakobsson},
  address   = {Vaasa, Finland},
  pages     = {73--78}}
@incollection{Lagus96kdd,
  author    = {Krista Lagus and Timo Honkela and Samuel Kaski and Teuvo Kohonen},
  title     = {Self-organizing maps of document collections: {A}
               new approach to interactive exploration},
  booktitle = {Proceedings of the Second International Conference
               on Knowledge Discovery and Data Mining},
  publisher = {AAAI Press},
  year      = 1996,
  editor    = {Evangelios Simoudis and Jiawei Han and Usama Fayyad},
  address   = {Menlo Park, California},
  pages     = {238--243}}
@incollection{Kaski96wcnn,
  author    = {Samuel Kaski and Timo Honkela and Krista Lagus and Teuvo Kohonen},
  title     = {Creating an order in digital libraries with self-organizing maps},
  booktitle = {Proceedings of WCNN'96, World Congress on Neural
               Networks, September 15-18, San Diego, California},
  publisher = {Lawrence Erlbaum and INNS Press},
  year      = 1996,
  address   = {Mahwah, NJ},
  pages     = {814--817}}
@book{Polya,
  title     = {Induction and Analogy in Mathematics},
  author    = {P{\'o}lya, G.},
  publisher = {Princeton University Press},
  address   = {New Jersey},
  year      = {1954},
  note      = {Volume 1 of Mathematics and Plausible Reasoning}
}
@article{Kaski97thesis,
  author  = {Samuel Kaski},
  title   = {Data Exploration Using Self-Organizing Maps},
  journal = {Acta Polytechnica Scandinavica, Mathematics,
             Computing and Management in Engineering Series No.~82},
  year    = 1997,
  month   = mar,
  note    = {DTech Thesis, Helsinki University of Technology, Finland}}
@article{Kaski97npl,
  author  = {Samuel Kaski},
  title   = {Computationally Efficient Approximation of a
             Probabilistic Model for Document Representation in
             the {WEBSOM} Full-Text Analysis Method},
  journal = {Neural Processing Letters},
  year    = 1997,
  volume  = 5,
  pages   = {139--151}}
@incollection{Honkela97wsom,
  author    = {Timo Honkela and Samuel Kaski and Krista Lagus and Teuvo Kohonen},
  title     = {{WEBSOM}---Self-Organizing Maps of Document Collections},
  booktitle = {Proceedings of WSOM'97, Workshop on Self-Organizing
               Maps, Espoo, Finland, June 4-6},
  publisher = {Helsinki University of Technology, Neural Networks
               Research Centre},
  year      = 1997,
  address   = {Espoo, Finland},
  pages     = {310--315},
}
@incollection{Lagus97,
  author    = {Krista Lagus},
  title     = {Map of {WSOM'97} Abstracts---Alternative Index},
  booktitle = {Proceedings of WSOM'97, Workshop on Self-Organizing
               Maps, Espoo, Finland, June 4-6},
  publisher = {Helsinki University of Technology, Neural Networks
               Research Centre},
  year      = 1997,
  address   = {Espoo, Finland},
  pages     = {368--372}}
@incollection{Kohonen97icnn,
  author    = {Teuvo Kohonen},
  title     = {Exploration of Very Large Databases by Self-Organizing Maps},
  booktitle = {Proceedings of ICNN'97, International Conference on
               Neural Networks},
  publisher = {IEEE Service Center},
  year      = 1997,
  address   = {Piscataway, NJ},
  pages     = {PL1--PL6}}
@incollection{Honkela98klass,
  author    = {T. Honkela and S. Kaski and T. Kohonen and K. Lagus},
  title     = {Self-Organizing Maps of Very Large Document
               Collections: Justification for the {WEBSOM} method},
  booktitle = {Classification, Data Analysis, and Data Highways},
  publisher = {Springer},
  year      = 1998,
  editor    = {I. Balderjahn and R. Mathar and M. Schader},
  pages     = {245--252},
  address   = {Berlin}}
@book{Maxwells_demon,
  editor    = {H. S. Leff and A. F. Rex},
  title     = {Maxwell's Demon: Entropy, Information, Computing},
  publisher = {Adam Hilger},
  year      = 1990,
  address   = {Bristol},
  annote    = {Cav: 39 L 8. A magnificent collection of papers on
               this beast. Szilard (1929) made the connection of
               entropy and information. Then the `light emission'
               solution came along. Then the erasure of information
               finally. The paper by Bennet at the end is very
               good. Discusses Landauer's proof of entropy increase
               during certain computational operations.}
}
@book{Hamming,
  author    = {Hamming, R. W.},
  title     = {Coding and Information Theory},
  publisher = {Prentice-Hall},
  year      = 1986,
  address   = {Englewood Cliffs, NJ},
  edition   = {Second},
  annote    = {Cav: 39 H 4}
}
@book{HammingP,
  author    = {Hamming, R. W.},
  title     = {The Art of Probability},
  publisher = {Addison Wesley},
  year      = 1991,
  address   = {Redwood City, California},
  annote    = {13 H 16}}
@book{Ash,
  author    = {Ash, R.},
  title     = {Information Theory},
  publisher = {Interscience publishers},
  year      = 1965,
  address   = {New York},
  annote    = {Cav: 39 A 1. Emphasizes the complementary Shannon
               and Wiener approaches. Studies Shannon.}}
% ref on ``R0"
% international zurich seminar on communications 1974
% referenced in G Ungerbock
% `IEEE trans info theory 1982' trellis coded modulation
% CHANNEL CODING WITH MULTILEVEL PHASE SIGNALS
% AU: UNGERBOECK_G
% NA: IBM,ZURICH RES LAB,CH-8803 RUSCHLIKON,SWITZERLAND
% JN: IEEE TRANSACTIONS ON INFORMATION THEORY 1982 Vol.28 No.1 pp.55-67
% CR: ANDERSON_JB, 1976 Vol.12 p.587, ELECTRON LETT
% ANDERSON_JB, 1978 Vol.24 p.703, IEEE T INFORM THEORY
% AULIN_T, 1980 A2, 1980 P INT ZUR SEM D
% DIGEON_A, 1977 Vol.25 p.1238, IEEE T COMMUN
% FORNEY_GD, 1970 Vol.16 p.720, IEEE T INFORMATION T
% FORNEY_GD, 1973 Vol.61 p.268, P IEEE
% GALLAGER_RG, 1968 p.74, INFORMATION THEORY R
% LARSEN_KJ, 1972 Vol.18 p.437, IEEE T INFORM THEORY
%***% MASSEY_JL, 1974 E2, 1974 P INT ZUR SEM D
% ODENWALDER_JP, 1973, NASA CR114561 LINK C
% PAASKE_E, 1974 Vol.20 p.683, IEEE T INFORM THEORY
% TAYLOR_DP, 1979, CRL68 MCMAST U REP
% UNGERBOECK_G, 1976, 1976 INT S INF THEOR
% WOZENCRAFT_JM, 1965 p.318, PRINCIPLES COMMUNICA
%
% see also
% viterbi and omura
@article{Forney2001,
  author  = {Forney, Jr., G. D.},
  title   = {Codes on Graphs: Normal Realizations},
  year    = {2001},
  journal = {IEEE Transactions on Information Theory},
  volume  = {47},
  number  = {2},
  pages   = {520--548}
}
@phdthesis{Forney63,
  author = {Forney, Jr., G. D.},
  title  = {Concatenated Codes},
  school = {M.I.T.},
  year   = 1963}
@book{Gallager63,
  author    = {Gallager, R. G.},
  title     = {Low Density Parity Check Codes},
  publisher = {MIT Press},
  year      = 1963,
  address   = {Cambridge, Mass.},
  series    = {Research monograph series},
  number    = {21}}
% CL: [Computer Laboratory] Y256
@book{Forney66,
  author    = {Forney, Jr., G. D.},
  title     = {Concatenated Codes},
  publisher = {MIT Press},
  address   = {Cambridge, Mass.},
  year      = {1966}}
% MIT research monograph 37
@techreport{Massey63,
  author      = {J. L. Massey},
  title       = {Threshold decoding},
  institution = {MIT},
  year        = 1963,
  address     = {Cambridge, Mass.},
  number      = 410,
  annote      = {[Computer Laboratory] V5 135}}
@incollection{vanlint71,
  author    = {van Lint, J. H.},
  title     = {Nonexistence theorems for perfect error-correcting codes},
  booktitle = {Computers in Algebra and Number Theory, volume IV, SIAM--AMS Proceedings},
  year      = {1971},
}
% Computers in algebra and number theory
% Symposium in applied mathematics of the American Mathematical Society and the Society for Industrial and
% Applied Mathematics
% March 1970
% New York, U.S.A.
% Proceedings: Birkhoff G., Hall M., A.M.S., Providence, 1971
% ISBN: 0-8218-1323-4 [Pure Maths] QA150.S9 1970
@article{tietavainen73,
  author  = {Tiet\"av\"ainen, A.},
  title   = {On the nonexistence of perfect codes over finite fields},
  journal = {SIAM J. Appl. Math.},
  volume  = {24},
  pages   = {88--96},
  year    = {1973},
}
@article{Etzion1994,
  title    = {Perfect Binary Codes: Constructions, Properties, and Enumeration},
  author   = {Etzion, T. and Vardy, A.},
  journal  = {IEEE Transactions on Information Theory},
  year     = {1994},
  volume   = {40},
  number   = {3},
  pages    = {754--763},
  abstract = {Properties of nonlinear perfect binary codes are investigated and
several new constructions of perfect codes are derived from these
properties. An upper bound on the cardinality of the intersection of
two perfect codes of length n is presented, and perfect codes whose
intersection attains the upper bound are constructed for all n. As an
immediate consequence of the proof of the upper bound we obtain a
simple closed-form expression for the weight distribution of a
perfect code. Furthermore, we prove that the characters of a perfect
code satisfy certain constraints, and provide a sufficient condition
for a binary code to be perfect. The latter result is employed to
derive a generalization of the construction of Phelps, which is shown
to give rise to some perfect codes that are nonequivalent to the
perfect codes obtained from the known constructions. Moreover, for
any $m \geq 4$ we construct full-rank perfect
binary codes of length $2^m - 1$. These codes are obviously
nonequivalent to any of the previously known perfect codes.
Furthermore the latter construction exhibits the existence of full-
rank perfect tilings. Finally, we construct a set of $2^{2^{cn}}$
nonequivalent perfect codes of length n, for sufficiently large n and
a constant $c = 0.5 - \epsilon$. Precise enumeration of the number of
codes in this set provides a slight improvement over the results
previously reported by Phelps.}
}
@book{macwilliams&sloane,
  author    = {MacWilliams, F. J. and Sloane, N. J. A.},
  title     = {The theory of error-correcting codes},
  publisher = {North-Holland},
  year      = 1977,
  address   = {Amsterdam},
  annote    = {Cav: 39 M 2; [Univ. Lib.] 349:1.c.95.356;
               South Front 4}}
@incollection{Massey77,
  author    = {J. L. Massey},
  title     = {Coding and Complexity},
  booktitle = {CISM Courses and Lectures},
  publisher = {Springer},
  year      = 1977,
  annote    = {booktitle reconstructed to satisfy the @incollection
               requirement -- verify against the original lecture notes.}
}
@book{lin&costello,
  author    = {S. Lin and Costello, Jr., D. J.},
  title     = {Error control coding: fundamentals and applications},
  publisher = {Prentice-Hall},
  year      = 1983,
  address   = {Englewood Cliffs, N.J.},
  annote    = {[Univ. Lib.] 431.c.98.288
               South Front 6
               [King's College] BUF Lin}}
% On loan, issued on 30 Nov 1994 16:40
% Due back on 25 Jan 1995
@techreport{McEliece78,
  author      = {R. J. McEliece},
  title       = {A Public-key Cryptosystem Based on Algebraic Coding Theory},
  institution = {JPL},
  year        = 1978,
  number      = {DSN 42-44},
  address     = {Pasadena}}
% The McEliece Public-Key Cryptosystem
%
% Define a set of correctable error vectors Z = { z : low weight }
% Encryption:
% E = S G P (left to right)
% where S is random invertible, G is a code with an efficient decoding
% algm, P is a permutation matrix.
% Decryption: apply PT, decode, apply S^.
% Public key is the specification of Z and the matrix E. Secret key =
% S, P, decoder.
@article{BMT78,
  author  = {Berlekamp, E. R. and McEliece, R. J. and van Tilborg, H. C. A.},
  title   = {On the intractability of certain coding problems},
  journal = {IEEE Transactions on Information Theory},
  year    = 1978,
  volume  = 24,
  number  = 3,
  pages   = {384--386}
}
% shows that the general decoding problem for linear codes is NP-complete.
@book{Blahut,
  author    = {R. E. Blahut},
  title     = {Principles and Practice of Information Theory},
  year      = 1987,
  publisher = {Addison-Wesley},
  address   = {Reading, Mass.}}
@book{gellmann,
  author    = {M. Gell-Mann},
  title     = {The Quark and the Jaguar},
  publisher = {W. H. Freeman},
  year      = 1994,
  address   = {New York}
}
@book{Deco96,
  author    = {G. Deco and D. Obradovic},
  title     = {An Information--Theoretic Approach to Neural Computation},
  publisher = {Springer},
  year      = 1996,
  annote    = {50 dollars}}
@article{Cover65,
  author  = {T. M. Cover},
  title   = {Geometrical and Statistical Properties of Systems of
             Linear Inequalities with Applications in Pattern
             Recognition},
  journal = {IEEE Transactions on Electronic Computers},
  year    = 1965,
  volume  = 14,
  pages   = {326--334}
}
@book{Cover&Thomas,
  author    = {T. M. Cover and J. A. Thomas},
  title     = {Elements of Information Theory},
  year      = 1991,
  publisher = {Wiley},
  address   = {New York},
  annote    = {68 pounds}}
@book{vanTilburg,
  author    = {van Tilburg, J.},
  title     = {Security-Analysis of a Class of Cryptosystems Based
               on Linear Error-Correcting Codes},
  year      = 1994,
  publisher = {Royal PTT Nederland NV},
  address   = {Leidschendam}}
% Ross lent me this. It is quite a good terse book.
@article{Berlekamp80,
  author  = {Berlekamp, E. R.},
  title   = {The Technology of Error--Correcting Codes},
  journal = ProcIEEE,
  volume  = {68},
  number  = {5},
  pages   = {564--593},
  year    = 1980,
}
@book{Berlekamp,
  author    = {Berlekamp, E. R.},
  title     = {Algebraic Coding Theory},
  year      = 1968,
  publisher = {McGraw-Hill},
  address   = {New York},
  annote    = {Y432 in CL library}}
% p 231-240 discusses more than t errors with BCH code.
% BCH: each digit in the code corresponds to one element in a GF
@book{Peterson&Weldon,
  author    = {W. W. Peterson and Weldon, Jr., E. J.},
  title     = {Error-Correcting Codes},
  edition   = {Second},
  year      = 1972,
  publisher = {MIT Press},
  address   = {Cambridge, Massachusetts},
  annote    = {Y 179-2 in CL library}}
@article{Meier_Staffelbach,
  author  = {W. Meier and O. Staffelbach},
  title   = {Fast Correlation Attacks on Certain Stream Ciphers},
  journal = {J. Cryptology},
  year    = {1989},
  volume  = {1},
  pages   = {159--176}}
@book{Feynman:SM,
  author    = {R. P. Feynman},
  title     = {Statistical Mechanics},
  year      = 1972,
  address   = {New York},
  publisher = {Addison--Wesley}}
% PUBLISHER ="W. A. Benjamin, Inc.",
@book{Stryer,
  author    = {L. Stryer},
  title     = {Biochemistry},
  year      = 1981,
  publisher = {W.H. Freeman}}
% see page 631 (chapter 26 'the genetic code') for statement
% `nearly all aa substitutions can be accounted for by a change of a
% single base'.
% Derives GCV? Proves the discrepancy principle is no good.
% Not the earliest ref for GCV though? Or is 1979 better?
% Maybe this ref just deals with CV, not GCV.
@article{Wahba:75,
  author  = {G. Wahba},
  title   = {Smoothing Noisy Data with Spline Functions},
  journal = {Numer. Math.},
  volume  = {24},
  year    = {1975},
  pages   = {383-393}
}
% The ref that Wahba often uses for GCV -- maybe this is the first ref
% where the formula V(lambda) for use where sigma not known
% and points not equispaced appears.
@article{Craven_Wahba,
  author  = {P. Craven and G. Wahba},
  title   = {Smoothing Noisy Data with Spline Functions: Estimating
             the Correct Degree of Smoothing by the Method of
             Generalized Cross-validation},
  journal = {Numer. Math.},
  volume  = {31},
  year    = {1979},
  pages   = {377--403}
}
% Gamma and its use in estimating sigma appears in Wahba 1983:
@article{Wahba:83,
  author  = {G. Wahba},
  title   = {{B}ayesian `Confidence Intervals' for the
             Cross-validated Smoothing Spline},
  journal = {J. R. Statist. Soc. B},
  volume  = {45},
  number  = {1},
  year    = {1983},
  pages   = {133-150}
}
% Derives GML for the first time, according to Wahba.
% Also does comparisons and claims that GML is worse than GCV even when
% the true function is smooth. Maybe not surprising since no care
% is taken over the priors.
% manages to derive GML without ever writing down the likelihood
% that is discussed.
@article{Wahba_GML,
  author  = {G. Wahba},
  title   = {A Comparison of {GCV} and {GML} for Choosing the
             Smoothing Parameter in the Generalized Spline
             Smoothing Problem},
  journal = {The Annals of Statistics},
  volume  = {13},
  number  = {4},
  year    = {1985},
  pages   = {1378--1402}
}
% Proves that splines are the Bayesian MAP for given priors
% Wahba also cites Wahba 1978 for discussion of Bayes connection
@article{Kimeldorf_Wahba,
  author  = {G. S. Kimeldorf and G. Wahba},
  title   = {A Correspondence between {B}ayesian
             Estimation of Stochastic Processes
             and Smoothing by Splines},
  journal = {Annals of Mathematical Statistics},
  volume  = {41},
  number  = {2},
  year    = {1970},
  pages   = {495-502}
}
% Multiple alphas
% claims to do both GCV and GML...
% also discusses inference of some model covariance matrix components,
% I think.
% AUTHOR ="C. Gu and G. Wahba",
% TITLE ="Minimizing {GCV}/{GML} Scores with
% Multiple Smoothing Parameters via the
% {N}ewton Method",
% JOURNAL ="SIAM J. Sci. Stat. Comput.",
% VOLUME ="12",
% YEAR ="1991",
% PAGES ="383-398"
%}
@article{Gu_Wahba,
  title    = {Minimizing {GCV}/{GML} Scores with Multiple Smoothing
              Parameters via the {N}ewton Method},
  author   = {Gu, C. and Wahba, G.},
  journal  = {{SIAM} Journal on Scientific and Statistical Computing},
  year     = {1991},
  volume   = {12},
  number   = {2},
  pages    = {383--398},
  abstract = {The (modified) Newton method is adapted to optimize generalized cross
validation (GCV) and generalized maximum likelihood (GML) scores
with multiple smoothing parameters. The main concerns in solving the
optimization problem are the speed and the reliability of the
algorithm, as well as the invariance of the algorithm under
transformations under which the problem itself is invariant. The
proposed algorithm is believed to be highly efficient for the
problem, though it is still rather expensive for large data sets,
since its operational counts are $(2/3)kn^3 + O(n^2)$, with k the number
of smoothing parameters and n the number of observations. Sensible
procedures for computing good starting values are also proposed,
which should help in keeping the execution load to the minimum
possible. The algorithm is implemented in Rkpack [RKPACK and its
applications: Fitting smoothing spline models, Tech. Report 857,
Department of Statistics, University of Wisconsin, Madison, WI, 1989]
and illustrated by examples of fitting additive and interaction
spline models. It is noted that the algorithm can also be applied to
the maximum likelihood (ML) and the restricted maximum likelihood
(REML) estimation of the variance component models.}
}
@book{Wahba90,
  author    = {G. Wahba},
  title     = {Spline Models for Observational Data},
  publisher = {Society for Industrial and Applied Mathematics},
  address   = {Philadelphia},
  series    = {CBMS-NSF Regional Conference Series in Applied Mathematics},
  year      = {1990},
}
% Other refs given by Wahba for inference of covariance matrix.
@book{Rao,
  author    = {C. R. Rao},
  title     = {Linear Statistical Inference and its Applications},
  publisher = {Wiley},
  address   = {New York},
  year      = {1973}
}
% above available in college libraries, below not in them or in UL.
@book{Rao_Kleffe,
  author    = {C. R. Rao and J. Kleffe},
  title     = {Estimation of Variance Components and Applications},
  publisher = {North-Holland},
  address   = {Amsterdam},
  year      = {1988}
}
@article{Harville,
  author  = {Harville, D. A.},
  title   = {Maximum Likelihood Approaches to Variance Component
             Estimation and to Related Problems},
  journal = {J. Amer. Statist. Assoc.},
  volume  = {72},
  year    = 1977,
  pages   = {320--340},
  note    = {(with discussion)}}
@article{Lindstrom_Baies,
  author  = {Lindstrom, M. J. and Bates, D. M.},
  title   = {{N}ewton-{R}aphson and {EM}
             Algorithms for Linear Mixed-effects
             Models for Repeated-measures Data},
  journal = {J. Amer. Statist. Assoc.},
  volume  = {83},
  year    = 1988,
  pages   = {1014--1022}}
@INCOLLECTION{Bridle,
KEY ="Bridle",
AUTHOR ="J. S. Bridle",
TITLE ="Probabilistic interpretation of
feedforward classification Network outputs,
with relationships to statistical
pattern recognition",
BOOKTITLE ="Neuro-computing: algorithms, architectures and applications",
YEAR ="1989",
EDITOR ="F. Fogelman-Souli\'e and J. H\'erault",
PAGES ="",
PUBLISHER ="Springer--Verlag"}
@ARTICLE{alphanets,
KEY ="Bridle",
AUTHOR ="J. S. Bridle",
TITLE ="Alpha-Nets: A recurrent `neural' network
architecture with a hidden {M}arkov
model interpretation",
JOURNAL ="Speech Communication",
VOLUME ="9",
NUMBER ="1",
YEAR ="1990",
PAGES ="83-92",
}
% John S Bridle
% Speech Communication 9 (1990) 83-92.
% That's Volume 9, No.1, February 1990.
% ISSN 0167-6393
% Publisher: North Holland.
% A more recent version of the AlphaNet stuff, with CSR and better notation,
% is
% An AlphaNet approach to optimising input transformations
% for continuous speech recognition
% J S Bridle and L Dodd,
% Proc ICASSP91 (Toronto)
@TECHREPORT{Fantargs1,
KEY ="Bridle",
AUTHOR ="J. S. Bridle",
TITLE ="The phantom target
cluster Network: a peculiar relative of (unsupervised)
maximum likelihood stochastic modelling and (supervised)
error backpropagation",
YEAR ="1988",
NUMBER ="SP4: 66",
INSTITUTION ="RSRE"}
@INPROCEEDINGS{Moody,
KEY ="",
AUTHOR ="J. E. Moody",
TITLE ="Note on generalization,
regularization and architecture selection in nonlinear learning
systems",
BOOKTITLE ="First IEEE--SP Workshop on neural networks for signal
processing",
PUBLISHER ="IEEE Computer society press",
YEAR ="1991",
PAGES ="847-854"
}
@INPROCEEDINGS{Moody.nips4,
KEY ="",
AUTHOR ="J. E. Moody",
TITLE ="The {\it Effective} Number of Parameters: An
Analysis of Generalization and Regularization in Nonlinear Learning
Systems",
BOOKTITLE ="Advances in Neural Information Processing Systems 4",
EDITOR ="J. E. Moody and S. J. Hanson and R. P. Lippmann",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1992",
PAGES ="847-854"}
@INPROCEEDINGS{Keeler_Rumelhart.nips4,
KEY ="",
AUTHOR ="Keeler, J. and Rumelhart, D. E.",
TITLE ="A Self-Organizing Integrated Segmentation and
Recognition Neural Net",
BOOKTITLE ="Advances in Neural Information Processing Systems 4",
EDITOR ="J. E. Moody and S. J. Hanson and R. P. Lippmann",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1992",
PAGES ="496-503"}
@inproceedings{ bottou98coder,
author = "L. Bottou and Howard, P. G. and Y. Bengio",
title = "The {Z}-Coder Adaptive Binary Coder",
booktitle = "Proceedings
of the Data Compression Conference, Snowbird, Utah, March 1998",
pages={13-22},
year = "1998"
}
@INPROCEEDINGS{Guyon.nips4,
KEY ="",
AUTHOR ="I. Guyon and V. N. Vapnik and B. E. Boser
and L. Y. Bottou and S. A. Solla",
TITLE ="Structural risk minimization for character recognition",
BOOKTITLE ="Advances in Neural Information Processing Systems 4",
EDITOR ="J. E. Moody and S. J. Hanson and R. P. Lippmann",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1992",
PAGES ="471-479"}
@BOOK{Bayes.Kalman,
KEY ="",
AUTHOR ="Bar-Shalom, Y. and T.E. Fortmann",
TITLE ="Tracking and Data Association",
PUBLISHER ="Academic Press",
YEAR ="1988"}
% Bayesian model comparison for Kalman filter models
@BOOK{Blake_Zisserman,
KEY ="",
AUTHOR ="Blake, A. and Zisserman, A.",
TITLE ="Visual Reconstruction",
PUBLISHER ="MIT Press",
YEAR ="1987",
ADDRESS ="Cambridge Mass."}
@misc{ fox01particle,
author = "D. Fox and S. Thrun and W. Burgard and F. Dellaert",
title = "Particle filters for mobile robot localization",
text = "Fox D., Thrun S., Burgard W. & Dellaert F. (2001). Particle filters for
mobile robot localization. In Sequential Monte Carlo Methods in Practice
(eds A. Doucet, J.F.G. de Freitas and N.J. Gordon). New York: Springer-Verlag.",
year = "2001",
url = "citeseer.nj.nec.com/fox01particle.html" }
@book{particlefilters01,
title={Sequential Monte Carlo Methods in Practice},
editor={A. Doucet and J.F.G. de Freitas and N.J. Gordon},
address={New York},
publisher={Springer-Verlag},
year = "2001",
ISBN={ 0-387-95146-6},
url={http://www-sigproc.eng.cam.ac.uk/~ad2/book.html}
}
@misc{ blake98learning,
author = "A. Blake and B. North and M. Isard",
title = "Learning multi-class dynamics",
text = "A. Blake, B. North, and M. Isard, Learning multi-class dynamics, in NIPS
'98, 1998.",
year = "1998"}
@article{ isard98condensation,
author = "M. Isard and A. Blake",
title = "Condensation -- conditional density propagation for visual tracking",
journal = "International Journal of Computer Vision",
volume={29},
number={1},
pages={ 5-28},
year = "1998"
}
@inproceedings{ isard98smoothing,
author = "Michael Isard and Andrew Blake",
title = "A Smoothing Filter for {CONDENSATION}",
booktitle = "{ECCV} (1)",
pages = "767--781",
year = "1998",
url = "citeseer.nj.nec.com/isard98smoothing.html" }
@misc{ isard96visual,
author = "M. Isard and A. Blake",
title = "Visual tracking by stochastic propagation of conditional density",
text = "Proc. Fourth European Conf. Computer Vision, pp. 343--356",
year = "1996" }
@Article{Terzopoulos,
author = "D. Terzopoulos",
title = "Regularization of inverse problems involving
discontinuities",
journal = "IEEE PAMI",
year = 1986,
volume = 8,
number = 4,
pages = "417-438"
}
% STATISTICS AND NEURAL NETS
@ARTICLE{Solla,
KEY ="Solla",
AUTHOR ="S. A. Solla and E. Levin and M. Fleisher",
TITLE ="Accelerated learning in layered Neural Networks",
JOURNAL ="Complex systems",
YEAR ="1988",
VOLUME ="2",
NUMBER ="",
PAGES ="625--640"}
@misc{ hinton00training,
author = "G. Hinton",
title = "Training products of experts by minimizing contrastive divergence",
text = "G. E. Hinton. Training products of experts by minimizing contrastive divergence.
Technical Report GCNU TR 2000-004, Gatsby Computational Neuroscience Unit,
University College London, 2000.",
year = "2000",
url = "citeseer.nj.nec.com/hinton00training.html" }
@INPROCEEDINGS{HintonSej,
KEY ="Hinton and Sejnowski",
AUTHOR ="G. E. Hinton and T. J. Sejnowski",
TITLE ="Optimal Perceptual Inference",
BOOKTITLE ="Proc. IEEE Conference on Computer Vision and Pattern Recognition",
YEAR ="1983",
PAGES ="448--453"}
@INCOLLECTION{Brain_Damage,
KEY ="LeCun \etal",
AUTHOR ="LeCun, Y. and J.S. Denker and S. A. Solla",
TITLE ="Optimal Brain Damage",
BOOKTITLE ="Advances in Neural Information Processing Systems 2",
YEAR ="1990",
EDITOR ="D.S. Touretzky",
PAGES ="598--605",
PUBLISHER ="Morgan Kaufmann"}
@INPROCEEDINGS{Luttrell,
KEY ="Luttrell",
AUTHOR ="S. P. Luttrell",
TITLE ="Hierarchical Self-organising Networks",
BOOKTITLE ="Proc. 1st {IEE} Conf on Artificial Neural Networks, {L}ondon",
YEAR ="1989",
PAGES ="2--6"}
% Luttrell 1989c, `Self-organisation: a derivation from first principles
% of a class of learning algorithms' presented at IJCNN 1989, Washington
@INPROCEEDINGS{Luttrell_Maxent,
KEY ="Luttrell",
AUTHOR ="S. P. Luttrell",
TITLE ="The use of
{B}ayesian and entropic methods in Neural Network theory",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {C}ambridge 1988",
EDITOR ="J. Skilling",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1989",
PAGES ="363--370"}
@ARTICLE{SL.transinformation,
KEY ="Luttrell",
AUTHOR ="S. P. Luttrell",
TITLE ="The use of transinformation in the design of data sampling
schemes for inverse problems",
JOURNAL ="Inverse Problems",
VOLUME ="1",
PAGES ="199-218",
YEAR ="1985"}
@book {kanerva-88,
key = "Kanerva",
author = "Kanerva, P.",
year = "1988",
title = "Sparse Distributed Memory",
address = "Cambridge, MA",
publisher = "MIT Press"
}
@article{deerwester-dumais-landauer-furnas-harshman-90,
author = "S. Deerwester and S. T. Dumais and T. K. Landauer and
G. W.
Furnas and R. A. Harshman",
year = "1990",
title = "Indexing by latent semantic analysis",
journal = "Journal of the Society for Information Science",
volume = "41",
number = "6",
pages = "391-407",
annote = "first technical LSI paper; good background."
}
@inproceedings{landauer-laham-foltz-98,
author = "T. K. Landauer and D. Laham and P. W. Foltz",
title = "Learning Human-like Knowledge with Singular Value
Decomposition: A Progress Report",
booktitle = "Neural Information Processing Systems (NIPS*97)",
year = "1998"
}
@article{landauer-dumais-97,
author = "T. K. Landauer and S. T. Dumais",
year = "1997",
title = "Solution to Plato's Problem: The Latent Semantic
Analysis
Theory of Acquisition, Induction and Representation of
Knowledge",
journal = "Psychological Review",
pages = "211-240",
volume = "104",
number = "2"
}
@inproceedings{bartell-cottrell-belew-92,
author = "B.T. Bartell and G.W. Cottrell and R.K. Belew",
year = "1992",
title = "{Latent Semantic Indexing} is an optimal special case
of
multidimensional scaling",
booktitle = "Proc SIGIR-92",
publisher = "ACM Press",
address = "New York"
}
@article{hummel-holyoak-97,
author = "J. E. Hummel and K. J. Holyoak",
title = "Distributed representations of structure: {A} theory
of
analogical access and mapping",
journal = "Psychological Review",
year = 1997,
volume = 104,
number = 3,
pages = "427--466",
annote = "LISA paper"
}
@inproceedings{kanerva-96,
author = "P. Kanerva",
year = 1996,
title = "Binary spatter-coding of ordered K-tuples",
volume = 1112,
pages = "869-873",
publisher = "Springer",
editor = "C. von der Malsburg and W. von Seelen and
J.C. Vorbruggen and B. Sendhoff",
booktitle = "Artificial Neural Networks--ICANN Proceedings",
series = "Lecture Notes in Computer Science",
address = "Berlin",
keywords = "HRRs, distributed representations"
}
@unpublished{halford-wilson-phillips-bbs98,
author = "Halford, Graeme and Wilson, William H. and Phillips,
Steven",
title = "Processing Capacity Defined by Relational
Complexity: Implications for Comparative,
Developmental, and Cognitive Psychology",
note = "Behavioral and Brain Sciences",
year = "to appear"
}
@InBook{plate-97c,
author = "Tony A. Plate",
chapter = "A Common Framework for Distributed
Representation Schemes for Compositional
Structure",
title = "Connectionist Systems for Knowledge
Representation and Deduction",
publisher = "Queensland University of Technology",
year = "1997",
editor = "Fr\'ed\'eric Maire and Ross Hayward and
Joachim Diederich",
pages = "15-34"
}
@article{plate95,
author={Tony A. Plate},
year={1995},
title={Holographic Reduced Representations},
journal={IEEE Transactions on Neural Networks},
volume={6},
number={3}, pages={623-641}
}
@incollection{plate-98,
author = "Tony Plate",
title = "Analogy retrieval and processing with distributed
representations",
year = "1998",
booktitle = "Advances in Analogy Research: Integration of Theory
and
Data from the Cognitive, Computational, and Neural
Sciences",
pages = "154--163",
editor = "Keith Holyoak and Dedre Gentner and Boicho Kokinov",
publisher = "NBU Series in Cognitive Science, New Bulgarian
University, Sofia."
}
@incollection ( willshaw-81,
key = "Willshaw" ,
author = "Willshaw, D." ,
year = "1981" ,
title = "Holography, associative memory, and inductive
generalization" ,
editor = "G.~E. Hinton and J.~A. Anderson" ,
booktitle= "Parallel models of associative memory" ,
address = "Hillsdale, NJ" ,
publisher= "Erlbaum"
)
@inproceedings (hinton-88,
key = "Hinton",
author = "G. E. Hinton",
title = "Representing Part-whole Hierarchies in Connectionist Networks",
pages = "48-54",
booktitle = COGSCI-88,
year = "1988"
)
@article (hinton-90,
key = "Hinton",
author = "Hinton, G.~E.",
title = "Mapping part--whole hierarchies into connectionist networks",
journal = "Artificial Intelligence",
volume = "46",
number = "1-2",
pages = "47-76",
year = "1990"
)
@ARTICLE{WillshawDayan,
KEY ="Willshaw and Dayan",
AUTHOR ="D. Willshaw and P. Dayan",
TITLE ="Optimal Plasticity from Matrix Memories:
what goes up must come down",
JOURNAL ="Neural Computation",
YEAR ="1990",
VOLUME ="2",
NUMBER ="1",
PAGES ="85--93"}
@ARTICLE{Nadal_duality,
KEY ="Nadal",
AUTHOR ="J.-P. Nadal and N. Parga",
TITLE ="Duality between learning machines:
a bridge between supervised and
unsupervised learning",
JOURNAL ="Neural Computation",
YEAR ="1994",
VOLUME ="6",
NUMBER ="3",
PAGES ="489-506"}
@INPROCEEDINGS{Solla_generalisation,
KEY ="Tishby \etal",
AUTHOR ="N. Tishby and E. Levin and S. A. Solla",
TITLE ="Consistent inference of probabilities
in layered Networks: predictions and generalization",
BOOKTITLE ="Proc. {IJCNN}, {W}ashington",
YEAR ="1989",
PAGES =""}
@INPROCEEDINGS{LevinTishbySolla,
KEY ="Levin \etal",
AUTHOR ="E. Levin and N. Tishby and S. A. Solla",
TITLE ="A statistical approach to learning and generalization
in layered Neural Networks",
BOOKTITLE ="{COLT} '89: 2nd workshop on computational learning theory",
YEAR ="1989",
PAGES ="245--260"}
%more details in Buntine paper
@ARTICLE{Buntine_Weigend,
KEY ="Buntine and Weigend",
AUTHOR ="W.L. Buntine and A.S. Weigend",
TITLE ="{B}ayesian Back--propagation",
JOURNAL ="Complex Systems",
YEAR ="1991",
VOLUME ="5",
PAGES ="603--643"}
@TECHREPORT{Wolpert_rig,
KEY ="Wolpert",
AUTHOR ="D. H. Wolpert",
TITLE ="A rigorous investigation of
`evidence' and `{O}ccam factors' in {B}ayesian reasoning",
YEAR ="1992",
NUMBER ="T.R. 92-03-013",
INSTITUTION ="Santa Fe Inst."}
@TECHREPORT{Wolpert_Wolf,
KEY ="Wolpert and Wolf",
AUTHOR ="D. H. Wolpert and D. R. Wolf",
TITLE ="Estimating functions of probability distributions
from a finite set of samples. Part I:
{B}ayes estimators and the {S}hannon entropy",
YEAR ="1993",
NUMBER ="LA-UR-92-4369",
INSTITUTION ="Los Alamos National Laboratory"}
@TECHREPORT{Buntine2,
KEY ="Buntine",
AUTHOR ="W.L. Buntine",
TITLE ="Theory refinement on {B}ayesian Networks",
YEAR ="1991",
INSTITUTION =""}
@ARTICLE{Buntine:trees,
KEY ="Buntine",
AUTHOR ="W.L. Buntine",
TITLE ="Learning classification trees",
YEAR ="1992",
JOURNAL ="Statistics and Computing",
VOLUME ="2",
PAGES ="63-73"}
@ARTICLE{Bishop,
KEY ="Bishop",
AUTHOR ="C. M. Bishop",
TITLE ="Exact calculation
of the {H}essian matrix for the multilayer perceptron",
JOURNAL ="Neural Computation",
YEAR ="1992",
VOLUME ="4",
NUMBER ="4",
PAGES ="494--501"}
@techreport{Peto,
AUTHOR ="Peto, L. B.",
TITLE ="A Comparison of Two Smoothing Methods for Word
Bigram Models",
YEAR =1994,
institution = "Computer Systems
Research Institute, University of Toronto",
Number ="CSRI-304"}
% Amino Acid Index Database %
% %
% Please cite the following reference when making use of the database. %
% Nakai, K., Kidera, A., and Kanehisa, M.; Cluster analysis of %
% amino acid indices for prediction of protein structure and %
% function. Prot. Eng. 2, 93-100 (1988) %
@Article{amino_index,
author = "Nakai, K. and Kidera, A. and Kanehisa, M.",
title = "Cluster analysis of amino acid indices for
prediction of protein structure and function",
journal = "Prot. Eng.",
year = 1988,
volume = 2,
pages = "93-100"
}
% Protein superfamilies and domain superfolds
% C. A. Orengo and others Nature vol 372 15 Dec 1994 p.631
@UNPUBLISHED{Buntine3,
KEY ="Buntine",
AUTHOR ="W. L. Buntine and A. S. Weigend",
TITLE ="Calculating second derivatives on feed-forward Networks",
NOTE ="Submitted to IEEE Trans. on Neural Networks",
YEAR ="1991"}
@ARTICLE{Lewicki,
KEY ="Lewicki",
AUTHOR ="M.S. Lewicki",
TITLE ="Bayesian modeling and classification of neural signals",
JOURNAL ="Neural Computation",
VOLUME ="6",
NUMBER =5,
PAGES ="1005-1030",
YEAR ="1994"}
@INCOLLECTION{Denker2,
KEY ="Denker and LeCun",
AUTHOR ="J.S. Denker and LeCun, Y.",
TITLE ="Transforming Neural-net output levels
to probability distributions",
BOOKTITLE ="Advances in Neural Information Processing Systems 3",
YEAR ="1991",
EDITOR ="R. P. Lippmann",
PAGES ="853--859",
ADDRESS ="San Mateo, California",
PUBLISHER ="Morgan Kaufmann"}
@INCOLLECTION{Becker_Le_Cun,
KEY ="Becker and LeCun",
AUTHOR ="S. Becker and LeCun, Y.",
TITLE ="Improving the convergence of back-propagation learning
with second order methods",
BOOKTITLE ="Proc. of the connectionist
models Summer school",
YEAR ="1988",
EDITOR ="D. S. Touretzky and others",
PAGES ="29",
ADDRESS ="San Mateo, California",
PUBLISHER ="Morgan Kaufmann"}
% LUTTRELL
@ARTICLE{Luttrell_IEEE90,
KEY ="Luttrell",
AUTHOR ="S. P. Luttrell",
TITLE ="Derivation of a class of training algorithms",
JOURNAL ="IEEE
Trans. on Neural Networks",
YEAR ="1990",
VOLUME ="1",
NUMBER ="2",
PAGES ="229--232"}
@ARTICLE{Luttrell94:SOM,
KEY ="Luttrell",
AUTHOR ="S. P. Luttrell",
TITLE ="A {B}ayesian analysis of self-organising maps",
JOURNAL ="Neural Computation",
YEAR ="1994",
VOLUME ="6",
PAGES ="767-794"}
@TechReport{Luttrell:BC,
author = "S. P. Luttrell",
title = "The {G}ibbs Machine applied to hidden {M}arkov model
problems. Part 1: Basic theory",
institution = "SP4 division, RSRE",
year = 1989,
number = 99,
address = "Malvern, U.K."
}
@article{Luttrell94:PMD,
KEY ="Luttrell",
AUTHOR ="S. P. Luttrell",
TITLE ="The partitioned mixture distribution: an adaptive {B}ayesian
network for low-level image processing",
volume={141},
number={4},
JOURNAL ={Proc. IEE Vision, Image and Signal Processing},
YEAR ="1994",
PAGES ="251-260"}
% IEE Proceedings on Vision Image and Signal Processing",
% An adaptive Bayesian network for low-level image processing", Proceedings of the 3rd
% International IEE Conference on Artificial Neural Networks, Brighton, 1993, pp. 61-65
% this is the first PMD paper.
% BM'S, MEAN FIELD THEORY
@ARTICLE{mean-field,
KEY ="Peterson et. al.",
AUTHOR ="C. Peterson and J. R. Anderson",
TITLE ="A Mean Field Theory Learning Algorithm for Neural Networks",
JOURNAL ="Complex Systems",
YEAR ="1987",
VOLUME ="1",
NUMBER ="",
PAGES ="995-1019"}
@ARTICLE{peterson_soderberg87,
KEY ="Peterson and Soderberg",
AUTHOR ="C. Peterson and B. Soderberg",
TITLE ="A New Method for Mapping Optimization Problems onto
Neural Networks",
JOURNAL ="Int. Journal Neural Systems",
YEAR ="1989",
VOLUME ="1",
NUMBER ="1",
PAGES ="3--22"}
@INPROCEEDINGS{Sej,
KEY ="Sejnowski",
AUTHOR ="T. J. Sejnowski",
TITLE ="Higher order {B}oltzmann machines",
BOOKTITLE ="Neural networks for computing",
EDITOR ="J.S. Denker",
PAGES ="398-403",
ADDRESS ="New York",
PUBLISHER ="American Institute of Physics",
YEAR ="1986"
}
@TechReport{sejnowski-rosenberg-86,
key = "Sejnowski",
author = "T.~J. Sejnowski and C.~R. Rosenberg",
title = "{NETtalk}: A parallel network that learns to read
aloud",
type = "Technical Report 86-01",
institution = "Department of Electrical Engineering and Computer
Science, Johns Hopkins University, Baltimore, MD.",
year = "1986",
}
@Article{nettalk,
author = "T. J. Sejnowski and C. R. Rosenberg",
title = "Parallel Networks that Learn to Pronounce {E}nglish
Text",
journal = "Journal of Complex Systems",
volume = "1",
number = "1",
month = feb,
year = "1987",
pages = "145--168",
comment = "Classic paper covering the NETtalk system, which
learns to convert English text to speech.",
}
% TSP
% Other Neural Nets
@ARTICLE{DurbWill,
KEY ="Durbin and Willshaw",
AUTHOR ="R. Durbin and D. Willshaw",
TITLE ="An
analogue approach to the travelling salesman problem using an elastic Net
method",
JOURNAL ="Nature",
YEAR ="1987",
VOLUME ="326",
NUMBER ="",
PAGES ="689--91"}
@article(Eberhart&al:91,
Author = {Eberhart, S. P. and Daud, D. and Kerns, D. A. and
Brown, T. X. and Thakoor, A. P.},
Title = {Competitive Neural Architecture for Hardware Solution
to the Assignment Problem},
Journal = {Neural Networks},
Volume = {4},
Pages = {431--442},
Year = {1991})
@article(Peterson&Soderberg:89,
Author = {Peterson, C. and S\"{o}derberg, B.},
Title = {A new method for mapping optimization problems onto
neural networks},
Journal = {International Journal of Neural Systems},
Volume = {1},
Number = {1},
Pages = {3--22},
Year = {1989})
@article(Peterson&Anderson:88,
Author = {Peterson, C. and Anderson, J. R.},
Title = {Neural Networks and {NP}-complete Optimization Problems; A
Performance Study on the Graph Bisection Problem},
Journal = {Complex Systems},
Volume = {2},
Number = {1},
Pages = {59--89},
Year = {1988})
@article(Van&Miller:89,
Author = {Van den Bout, D. E. and Miller, III, T. K.},
Title = {Improving the Performance of the {Hopfield--Tank} Neural
Network Through Normalization and Annealing},
Journal = {Biological Cybernetics},
Volume = {62},
Pages = {129--139},
Year = {1989})
@article(Van&Miller:90,
Author = {Van den Bout, D. E. and Miller, III, T. K.},
Title = {Graph Partitioning using Annealed Neural Networks},
Journal = {IEEE Transactions on Neural Networks},
Volume = {1},
Number = {2},
Pages = {192--203},
Month = {June},
Year = {1990})
@ARTICLE{Aiyer,
KEY ="Aiyer et. al.",
AUTHOR ="S. V. B. Aiyer and M. Niranjan and F. Fallside",
TITLE ="
A Theoretical investigation into the performance of the {H}opfield model",
JOURNAL ="IEEE
Trans. on Neural Networks",
YEAR ="1990",
VOLUME ="1",
NUMBER ="2",
PAGES ="204--215"}
@PHDTHESIS{Aiyer_thesis,
KEY ="Aiyer",
AUTHOR ="S. V. B. Aiyer",
TITLE ={Solving Combinatorial Optimization Problems Using
Neural Networks},
YEAR ="1991",
SCHOOL={Cambridge University Engineering Department PhD},
NOTE={CUED/F-INFENG/TR 89}
}
@ARTICLE{Gee_Prager,
KEY ="Gee and Prager",
AUTHOR ="A. H. Gee and R. W. Prager",
TITLE ="Polyhedral Combinatorics and Neural Networks",
JOURNAL ="Neural Computation",
YEAR ="1994",
VOLUME ="6",
NUMBER ="",
PAGES ="161-180"}
% BASIC NEURAL NET REFS
@BOOK{PDP,
KEY ="D. E. Rumelhart and J. E. McClelland",
AUTHOR ="D. E. Rumelhart and J. E. McClelland",
TITLE ="Parallel Distributed Processing",
PUBLISHER ="MIT Press",
YEAR ="1986",
ADDRESS ="Cambridge Mass."}
@ARTICLE{backprop,
KEY ="Rumelhart \etal",
AUTHOR ="D. E. Rumelhart and G. E. Hinton and
R. J. Williams",
TITLE ="Learning representations by
back--propagating errors",
JOURNAL ="Nature",
YEAR ="1986",
VOLUME ="323",
NUMBER ="",
PAGES ="533--536"}
% in the pdp book this is 318--362
@TechReport{Williams85,
KEY ="Williams",
AUTHOR ="R. J. Williams",
TITLE ="Feature Discovery through Error-Correction Learning",
institution = "Insititute for Cognitive Science",
year = 1985,
number = "ICS 8501"
}
@ARTICLE{Pineda,
KEY ="Pineda",
AUTHOR ="F.J. Pineda",
TITLE ="Recurrent back--propagation and the dynamical approach to adaptive Neural computation",
JOURNAL ="Neural Computation",
YEAR ="1989",
VOLUME ="1",
NUMBER ="",
PAGES ="161--172"}
% initial of Heil?
@ARTICLE{Baldi,
KEY ="Baldi",
AUTHOR ="P. Baldi and W. Heiligenberg",
TITLE ="How sensory maps could enhance resolution through ordered
arrangement of broadly tuned receivers",
JOURNAL ="Biol. Cyb.",
VOLUME ="59",
PAGES ="313-318",
YEAR ="1988"}
% NUMERICAL
@BOOK{NR,
KEY ="Press \etal",
AUTHOR ="W.H. Press and B.P. Flannery and S. A. Teukolsky and W. T. Vetterling",
TITLE ="Numerical Recipes in {C}",
PUBLISHER ="Cambridge",
YEAR ="1988"}
% {B}ayes
@BOOK{Berger_Wolpert,
KEY ="Berger and Wolpert",
AUTHOR ="J.O. Berger and R. L. Wolpert",
TITLE ="The Likelihood Principle",
PUBLISHER ="Institute of Mathematical Statistics",
ADDRESS ="Hayward, California",
YEAR ="1984"}
% Nice quote: [from savage originally] `Indeed to many {B}ayesians, belief
% in the LP is the big difference between {B}ayesians and frequentists,
% not the desire to involve prior information'
@BOOK{Berger,
KEY ="Berger",
AUTHOR ="J. Berger",
TITLE ="Statistical Decision theory and {B}ayesian
Analysis",
PUBLISHER ="Springer",
YEAR ="1985"}
@BOOK{Zellner,
KEY ="Zellner",
AUTHOR ="A. Zellner",
TITLE ="Basic issues in econometrics",
PUBLISHER ="Chicago",
YEAR ="1984"}
% University of Chicago Press, Chicago
@book{Duda_Hart_Stork,
author={Duda, Richard O. and Hart, Peter E. and Stork, David G.},
title={ Pattern Classification},
note={2nd Edition},
isbn={0-471-05669-3},
publisher={Wiley},
address={New York},
year={2000},
}
@BOOK{Duda_Hart,
KEY ="Duda and Hart",
AUTHOR ="Duda, Richard O. and Hart, Peter E.",
TITLE ="Pattern Classification and Scene Analysis",
PUBLISHER ="Wiley",
YEAR ="1973"}
@misc{bombesimulator,
author={Nik Shaylor},
year={1997},
url={http://www.geocities.com/CapeCanaveral/Hangar/4040/bombe.html},
annote={Nik Shaylor's page describes the logical circuitry that Turing and G. W. Welchman devised to accomplish the
rejection of all rotor positions inconsistent with guessed plaintext. It also has a Java simulation of the process.}
}
@article{GoodEnigma,
author={I. J. Good},
title={Studies in the History of Probability and Statistics.
XXXVII. A. M. Turing's statistical work in World War II},
journal={Biometrika},
volume={66},
number={2}, pages={393-396},year={1979},
annote={reprinted also in the
Collected Works}
}
%%%%%%%%%%%%
%
%% book{turing-pure-maths,
%% author={A. M. Turing},
% title={ Collected Works of A. M. Turing (North-Holland, 1992) Pure Mathematics} (ed. J. R.
% Britton)
@BOOK{Good,
KEY ="Osteyee and Good",
AUTHOR ="D. B. Osteyee and I. J. Good",
TITLE ="Information, weight of
evidence, the singularity between probability measures and
signal detection",
PUBLISHER ="Springer",
YEAR ="1974"}
@Proceedings{Meyer_Collier,
title = "{B}ayesian statistics",
year = 1970,
key = "Meyer and Collier",
editor = "D. L. Meyer and R. O. Collier",
publisher = "Peacock publishers"
}
@INCOLLECTION{Lindley-philosophy,
KEY ="Lindley",
AUTHOR ="D.V. Lindley",
TITLE ="{B}ayesian analysis in regression problems",
BOOKTITLE ="{B}ayesian statistics",
YEAR ="1970",
EDITOR ="D.L. Meyer and R.O. Collier",
PUBLISHER ="Peacock publishers"}
% History
@ARTICLE{laplace,
KEY ="Stigler",
AUTHOR ="S.M. Stigler",
TITLE ="Laplace's 1774 memoir on inverse
probability",
JOURNAL ="Stat. Sci.",
YEAR ="1986",
VOLUME ="1",
NUMBER ="3",
PAGES ="359--378"}
@ARTICLE{cox,
KEY ="Cox",
AUTHOR ="R.T. Cox",
TITLE ="Probability, frequency, and reasonable expectation",
JOURNAL ="Am. J. Physics",
YEAR ="1946",
VOLUME ="14",
PAGES ="1-13"}
@ARTICLE{Akaike,
KEY ="Akaike",
AUTHOR ="H. Akaike",
TITLE ="Statistical predictor identification",
JOURNAL ="Ann. Inst. Statist. Math.",
YEAR ="1970",
VOLUME ="22",
NUMBER ="",
PAGES ="203--217"}
% CLT
@ARTICLE{clt,
KEY ="Walker",
AUTHOR ="A.M. Walker",
TITLE ="On the asymptotic behaviour of posterior
distributions",
JOURNAL ="J. R. Stat. Soc. B",
YEAR ="1967",
VOLUME ="31",
NUMBER ="",
PAGES ="80--88"}
% GULL, SKILLING, OCCAM, MAXENT, MDL
@ARTICLE{Smith_and_Spiegelhalter,
KEY ="Smith and Spiegelhalter",
AUTHOR ="A.F.M. Smith and D.J. Spiegelhalter",
TITLE ="{B}ayes factors and choice criteria for linear models",
JOURNAL ="Journal of the Royal Statistical Society B",
YEAR ="1980",
VOLUME ="42",
NUMBER ="2",
PAGES ="213-220"}
@ARTICLE{Smith_review,
KEY ="Smith",
AUTHOR ="A.F.M. Smith",
TITLE ="{B}ayesian Computational Methods",
JOURNAL ="Philosophical Transactions of the Royal Society of
London A",
YEAR =1991,
VOLUME =337,
PAGES ="369-386"}
@ARTICLE{Jefferys_and_Berger,
KEY ="Jefferys and Berger",
AUTHOR ="W.H. Jefferys and J.O. Berger",
TITLE ="{O}ckham's razor and {B}ayesian analysis",
JOURNAL ="American Scientist",
YEAR ="1992",
VOLUME ="80",
PAGES ="64-72"}
% Has good examples including fitting a high polynomial to data,
% detecting plagiarism, detecting that a coin has two heads,
% Newton / GR, also they give bounds on the min Occam factor that
% a model can suffer.
@ARTICLE{Mark_and_Miller,
KEY ="Mark and Miller",
AUTHOR ="K.E. Mark and M.I. Miller",
TITLE ="{B}ayesian model selection and minimum description length
estimation of auditory--nerve discharge rates",
JOURNAL ="J. Acoust. Soc. Am.",
YEAR ="1992",
VOLUME ="91 ",
NUMBER ="2",
PAGES ="989--1002"}
% {B}ayes and Regularisation
% Iversen's {B}ayes Booklet: has several useful simple results, and typical lame philosophy.
@BOOK{Iversen,
KEY ="Iversen",
AUTHOR ="G. R. Iversen",
TITLE ="{B}ayesian statistical inference",
PUBLISHER ="Sage publications, Beverly Hills",
YEAR ="1984"}
% He refers to Box and Tiao as containing inferences concerning robust models'
% parameters. Berger also discusses robustness, but I suspect not the inference
% of those params.
@BOOK{Box_and_Tiao_text,
KEY ="Box and Tiao",
AUTHOR ="G. E. P. Box and G. C. Tiao",
TITLE ="{B}ayesian inference in statistical analysis",
PUBLISHER ="Addison--Wesley",
YEAR ="1973"}
@ARTICLE{Box1,
KEY ="Box and Tiao",
AUTHOR ="G. E. P. Box and G. C. Tiao",
TITLE ="A further look at robustness via {B}ayes' theorem",
JOURNAL ="Biometrika",
YEAR ="1962",
VOLUME ="49",
NUMBER ="",
PAGES ="419--432"}
@ARTICLE{Box2a,
KEY ="Box and Tiao",
AUTHOR ="G. E. P. Box and G. C. Tiao",
TITLE ="A {B}ayesian approach
to the importance of assumptions applied to the comparison of variances",
JOURNAL ="Biometrika",
YEAR ="1964",
VOLUME ="51",
NUMBER ="",
PAGES ="153--167"}
@ARTICLE{Box2b,
KEY ="Box and Tiao",
AUTHOR ="G. E. P. Box and G. C. Tiao",
TITLE ="A note on criterion robustness and inference robustness",
JOURNAL ="Biometrika",
YEAR ="1964",
VOLUME ="51",
PAGES ="169--173"}
@ARTICLE{Box3,
KEY ="Box and Tiao",
AUTHOR ="G. E. P. Box and G. C. Tiao",
TITLE ="A {B}ayesian approach to some outlier problems",
JOURNAL ="Biometrika",
YEAR ="1968",
VOLUME ="55",
PAGES ="119--129"}
@ARTICLE{Dempster:EM,
author = {A.P. Dempster and N.M. Laird and D.B. Rubin},
title = {Maximum Likelihood from Incomplete Data via the {EM}
Algorithm},
journal = {Journal of the Royal Statistical Society {B}},
year = 1977,
volume = 39,
pages = {1-38},
source = {UL P.202.c.30}
}
% Lindley booklet: has strong detailed and simple arguments showing that
% Fisher is bullshit incoherent. He wrote this after lecturing for Dan Brunk!
@BOOK{Lindley-booklet,
KEY ="Lindley",
AUTHOR ="D. V. Lindley",
TITLE ="{B}ayesian statistics, a review",
PUBLISHER ="Society for Industrial and Applied Mathematics, Philadelphia",
YEAR ="1972"}
% p.3: Unlike common procedure of proposing a procedure
% and investigating its properties, we instead ask
% what properties are required and then find procedures that have these properties. I like it!
% Lindley reviews Ramsey's gambling scenario that proves that you have
% to have a utility function and a prob dist. Savage later did a rigorous
% version of the same.
% Assume that lotteries can be ordered. The ordering is transitive.
% Lindley also mentions Wald. He knocks Dempster-Schafer.
% He then states that any coherent inferences/decisions
% must be interpretable in terms of a prior. That prob distbn is a
% subjective prob possessed by the decision maker.
% `Objections to this attitude are numerous but none that I am aware of
% have gone to the axioms and criticised those. Indeed, it is hard to see how
% such criticism could be sustained since the requirements imposed by coherence
% are so modest.'
% If the scientific community makes decisions, it must have a prior
% and a utility. In half a line he mentions the result in game theory that
% e baum takes 50 pages to prove.
% Lindley then distinguishes inference and decision theory nicely.
% Then he attacks sampling theory for incoherence by showing counter-
% examples. Likelihood p.
% The requirement of unbiasedness violates the l.p.
% eg, If sample r/n binomial, theta = r/n.
% But if sample n for fixed r, theta = r-1/n-1 is the unbiased estimator.
% A statistic t(x) is called ancillary if its P does not depend on theta.
% [That is , for example, it is the deviations of the samples from x_bar]
% Some crap sampling theory dicks base their methods on ancill stats. They
% are wrong of course. counterexamples on p.11-12.
% Maximum likelihood counterexample: mixture model has singularity
% when sigma-> 0 with one component of the mixture on top of a particular
% data point. Similarly sigma N-1 can be generalised to give examples
% that don't converge.
% Significance tests counterexamples.
% Minimax counterexamples.
% Examples where a rejected hypothesis has probability close to 1.
% Examples are cited of confidence intervals where the larger interval
% doesn't include the smaller!
% Another example of a ludicrous unbiased estimator.
% Later on p.42 he cites Edwards et al 1963 as the definitive (but long) paper on
% the robustness of {B}ayesian inferences to the prior.
% He distinguishes [Box and Tiao 62 64a 64b] Criterion robustness and inference
% robustness. The first is robustness of a fixed procedure to the distribution
% being different from the assumptions. THe latter is the {B}ayesian attitude.
% [p.43]
% p.44 reviews the non-normal model studied by Box. He says more work is needed
% here.
%
% p. 46 -> outliers. Box 1968b uses a mixture model, same mean, two gaussians.
% The outlier problem is discussed by Hartigan by seeing how influential each
% individual datum is.
@INPROCEEDINGS{Proteins_with_Autoclass,
AUTHOR ="L. Hunter and D. States",
TITLE ="Applying {B}ayesian Classification to Protein Structure",
YEAR ="1991",
BOOKTITLE ="IEEE Conference on Applications of A.I. 1991",
PAGES =""
}
@UNPUBLISHED{Cheeseman_color,
AUTHOR ="P. Cheeseman",
TITLE ="Personal communication",
YEAR ="1991",
NOTE ="",
}
@INPROCEEDINGS{Cheeseman_hard,
AUTHOR ="P. Cheeseman",
TITLE ="Where the {\em Really} Hard Problems Are",
YEAR ="1991",
BOOKTITLE ="IJCAI-91: Proc. 12th. International
Conference on Artificial Intelligence",
PAGES ="331--337"
}
% unfortunately the above paper does not contain his probabilistic mean
% field type algm.
% it is a thorough study of the existence of phase transitions in "NP complete"
% problems.
@INCOLLECTION{Cheeseman_on_Occam,
KEY ="Cheeseman",
AUTHOR ="P. Cheeseman",
TITLE ="On finding the most probable model",
BOOKTITLE ="Computational models of
scientific discovery and theory formation",
YEAR ="1990",
EDITOR ="J. Shrager and P. Langley",
PAGES ="73--95",
PUBLISHER ="Morgan Kaufmann"}
% Quite a nice strong-worded review of Occam, but with quite a lot of
% alternative free talk as well, I think.
@ARTICLE{Titterington1,
KEY ="Titterington",
AUTHOR ="D. Titterington",
TITLE ="General structure of regularization procedures in image reconstruction",
JOURNAL ="Astron. Astrophys.",
YEAR ="1985",
VOLUME ="144",
PAGES ="381--387"}
@ARTICLE{Titterington2,
KEY ="Titterington",
AUTHOR ="D. Titterington",
TITLE ="Common structure of smoothing techniques in statistics",
JOURNAL ="Int. Statist. Rev.",
YEAR ="1985",
VOLUME ="53",
PAGES ="141--170"}
% these two papers are pretty similar, neither is that deep, or perhaps
% I just don't understand. The int stat rev one is longer .
% Both papers mention over-smoothing.
@TECHREPORT{Poggio3,
KEY ="Poggio and Girosi",
AUTHOR ="T. Poggio and F. Girosi",
TITLE ="A theory of Networks for approximation and learning",
YEAR ="1989",
INSTITUTION ="M.I.T.",
NUMBER ="A.I. 1140"}
@ARTICLE{Poggio1,
KEY ="Poggio et. al.",
AUTHOR ="T. Poggio and V. Torre and C. Koch",
TITLE ="Computational vision and regularization theory",
JOURNAL ="Nature",
YEAR ="1985",
VOLUME ="317",
NUMBER ="6035",
PAGES ="314--319"}
% arguably this is a crap ref for CV, as there is plenty of Wahba before it.
% I copied use of this ref from SFG. I guess he uses it to refer to GML
@ARTICLE{CrossVal,
KEY ="Davies and Anderssen",
AUTHOR ="A. R. Davies and R. S. Anderssen",
TITLE ="Optimization in the Regularization of Ill--posed Problems",
JOURNAL ="J. Austral. Math. Soc. Ser. B",
YEAR ="1986",
VOLUME ="28",
PAGES ="114--133"}
% Wahba says that "ML" choice of alpha first appears in this:
@ARTICLE{Anderssen_Bloomfield,
KEY ="Anderssen",
AUTHOR ="R. S. Anderssen and P. Bloomfield",
TITLE ="A Time Series Approach to Numerical Differentiation",
JOURNAL ="Technometrics",
YEAR ="1974",
VOLUME ="16",
PAGES ="69--75"}
% This paper proves properties of alternative choices of alpha,
% including I think that cross val is best. **
@BOOK{Eubank,
KEY ="Eubank",
AUTHOR ="R. L. Eubank",
TITLE ="Spline Smoothing and Non--parametric
Regression",
PUBLISHER ="Marcel Dekker",
YEAR ="1988"}
% In this book they call GCV `the method of choice' p.255
@INPROCEEDINGS{Jaynes,
KEY ="Jaynes",
AUTHOR ="E.T. Jaynes",
TITLE ="{B}ayesian Methods: General Background",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods in
applied statistics",
EDITOR ="J. H. Justice",
PUBLISHER ="C.U.P.",
YEAR ="1986",
PAGES ="1--25"}
@article{Rose1992,
title="Vector Quantization by Deterministic Annealing",
author="Rose, K. and Gurewitz, E. and Fox, G. C.",
journal="IEEE Transactions on Information Theory",
year="1992",
volume="38",
number="4",
pages="1249--1257"
}
@Book{Rosenkrantz,
author = "R. D. Rosenkrantz",
title = "{E.T. Jaynes}. Papers on Probability,
Statistics and Statistical Physics",
publisher = "Kluwer",
year = 1983,
}
% editor = "R. D. Rosenkrantz"
@INCOLLECTION{Jaynes.intervals,
KEY ="Jaynes",
AUTHOR ="E. T. Jaynes",
TITLE ="{B}ayesian Intervals versus Confidence Intervals",
BOOKTITLE ="{E.T. Jaynes}. Papers on Probability,
Statistics and Statistical Physics",
EDITOR ="R. D. Rosenkrantz",
PUBLISHER ="Kluwer",
YEAR ="1983",
PAGES ="151"}
% PUBLISHER ="Kluwer Academic Publishers",
% reprinted in paperback 1989,
% I just read utterly the best Jaynes essay ever. It is SO good; so even
% handed and confrontational; rubbing the noses of the opposition in the
% examples he gives, using the opponents of Galileo as analogy -- some of
% his opponents refused to look through his telescope to see Jupiter's
% moons, because they `already knew'. It's a very pragmatic argument he uses,
% not philosophical -- just look at the results of the two approaches
% and see where they give different answers, then magnify those differences
% and ask your common sense which answer makes sense.
@INPROCEEDINGS{Bryan,
KEY ="Bryan",
AUTHOR ="Bryan, R.K.",
TITLE ="Solving Oversampled Data Problems by {M}aximum {E}ntropy",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods,
{D}artmouth, {U.S.A.}, 1989",
EDITOR ="P. Fougere",
PUBLISHER ="Kluwer",
YEAR ="1990",
PAGES ="221--232"}
@INPROCEEDINGS{Loredo,
KEY ="Loredo",
AUTHOR ="T. J. Loredo",
TITLE ="From {L}aplace to Supernova {SN} {1987A}: {B}ayesian Inference
in Astrophysics",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {D}artmouth, {U.S.A.}, 1989",
EDITOR ="P. Fougere",
PUBLISHER ="Kluwer",
YEAR ="1990",
PAGES ="81--142"}
@INPROCEEDINGS{Gregory_Loredo,
KEY ="Gregory and Loredo",
AUTHOR ="P. C. Gregory and T. J. Loredo",
TITLE ="A New Method for the
Detection of a Periodic Signal of Unknown Shape and Period",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods,",
EDITOR ="G.J. Erickson and C.R. Smith",
PUBLISHER ="Kluwer",
YEAR ="1992",
NOTE ="also in The Astrophysical Journal, Oct 10, 1992"}
@INPROCEEDINGS{GS1,
KEY ="Skilling",
AUTHOR ="J. Skilling",
TITLE ="Classic Maximum Entropy",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {C}ambridge 1988",
EDITOR ="J. Skilling",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1989",
PAGES ="45--52"}
@ARTICLE{Gull.nature,
KEY ="Gull",
AUTHOR ="S. F. Gull and G. J. Daniell",
TITLE ="Image reconstruction from incomplete and noisy data",
JOURNAL ="Nature",
VOLUME ="272",
YEAR ="1978",
PAGES ="686--690"}
@INPROCEEDINGS{GS2,
KEY ="Gull",
AUTHOR ="S. F. Gull",
TITLE ="Developments in Maximum entropy data analysis",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {C}ambridge 1988",
EDITOR ="J. Skilling",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1989",
PAGES ="53--71"}
@INPROCEEDINGS{Skilling2,
KEY ="Skilling",
AUTHOR ="J. Skilling",
TITLE ="The eigenvalues of mega-dimensional matrices",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {C}ambridge 1988",
EDITOR ="J. Skilling",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1989",
PAGES ="455--466"}
@INPROCEEDINGS{GSdevelopments,
KEY ="Gull",
AUTHOR ="S. F. Gull and J. Skilling",
TITLE ="Developments in {C}ambridge",
BOOKTITLE ="Maximum-Entropy and {B}ayesian Spectral Analysis and
Estimation Problems",
EDITOR ="C.R. Smith and G.J. Erickson",
PUBLISHER ="Reidel",
ADDRESS ="Dordrecht",
YEAR ="1987",
PAGES ="149-160",
annote="proc of wyoming meeting 1983"}
% 13 S 17
% 21 ZM 21
@INPROCEEDINGS{G1,
KEY ="Gull",
AUTHOR ="S. F. Gull",
TITLE ="{B}ayesian inductive inference and
maximum entropy",
BOOKTITLE =" Maximum Entropy and {B}ayesian Methods in
Science and Engineering, vol. 1: Foundations",
EDITOR ="G.J. Erickson and C.R. Smith",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1988",
PAGES ="53-74"}
% Papers by Devinder Sivia on Bayesian methods for inference of
% physics models:
% The introductory tutorial one is:
%
% Sivia, David, Knight and Gull, Physica D 66 (1993) 234-242.
%
% The more detailed one, specifically to do with line-fitting, is:
%
% Sivia and Carlile, J. Chem. Phys. 96 (1992) 170-178.
@INPROCEEDINGS{Gull88,
KEY ="Gull",
AUTHOR ="S. F. Gull",
TITLE ="{B}ayesian data analysis: straight--line fitting",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {C}ambridge 1988",
EDITOR ="J. Skilling",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1989",
PAGES ="511--518"}
@INPROCEEDINGS{Sibisi1,
KEY ="Sibisi",
AUTHOR ="S. Sibisi",
TITLE ="Regularization and inverse problems",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {C}ambridge 1988",
EDITOR ="J. Skilling",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1989",
PAGES ="389--396"}
% A comparison of cross val with {B}ayes choice of alpha. Not at all
% conclusive. ** above is far more thorough.
@INPROCEEDINGS{Skilling1,
KEY ="Skilling",
AUTHOR ="J. Skilling",
TITLE ="On parameter estimation and quantified MaxEnt",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie, 1990",
EDITOR ="W. T. Grandy and L. Schick",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1991",
PAGES ="267--273"}
% see also .JMR below
@Article{Bretthorst_decays,
author = "Bretthorst, G. L.",
title = "Bayesian Analysis {II}: Model Selection",
journal = "J. Mag. Res.",
year = 1990,
volume = 88,
pages = "552-570"
}
@INPROCEEDINGS{BubblingSusie,
KEY ="Skilling et. al.",
AUTHOR ="J. Skilling and D. R. T. Robinson and
S. F. Gull",
TITLE ="Probabilistic displays",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie, 1990",
YEAR ="1991",
EDITOR ="W. T. Grandy and L. Schick",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
PAGES ="365--368"}
@INPROCEEDINGS{Charter,
KEY ="Charter",
AUTHOR ="M.K. Charter",
TITLE ="Quantifying drug absorption",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie, 1990",
EDITOR ="W. T. Grandy and L. Schick",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1991",
PAGES ="245--252"}
@INPROCEEDINGS{JaynesME90,
KEY ="Jaynes",
AUTHOR ="E.T. Jaynes",
TITLE ="",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie, 1990",
YEAR ="1991",
EDITOR ="W. T. Grandy and L. Schick",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
PAGES =""}
@Article{Jaynes57I,
author = "E.T. Jaynes",
title = "Information Theory and Statistical Mechanics {I}",
journal = "Phys Rev",
year = 1957,
volume = 106,
pages = "620--630"
}
@Article{Jaynes57II,
author = "E.T. Jaynes",
title = "Information Theory and Statistical Mechanics {II}",
journal = "Phys Rev",
year = 1957,
volume = 108,
pages = "171--190"
}
@INPROCEEDINGS{Image.contest,
KEY ="Bontekoe",
AUTHOR ="T.R. Bontekoe",
TITLE ="The image reconstruction contest",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie, 1990",
YEAR ="1991",
EDITOR ="W. T. Grandy and L. Schick",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht"
}
@ARTICLE{Rubin84,
KEY ="Rubin",
AUTHOR ="D. B. Rubin",
TITLE ="{B}ayesianly justifiable and relevant frequency
calculations for the applied statistician",
JOURNAL ="Ann. Stat.",
YEAR ="1984",
VOLUME ="12",
NUMBER ="4",
PAGES ="1151--1172"}
@INPROCEEDINGS{Sibisi2,
KEY ="Sibisi",
AUTHOR ="S. Sibisi",
TITLE ="{B}ayesian interpolation",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie, 1990",
YEAR ="1991",
EDITOR ="W. T. Grandy and L. Schick",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
PAGES ="349--355"}
% Studies non-noisy interpolation
@TECHREPORT{Skilling_and_Sibisi,
KEY ="Skilling and Sibisi",
AUTHOR ="J. Skilling and S. Sibisi",
TITLE ="Maximum Entropy Data Analysis",
YEAR ="1990",
INSTITUTION ="University of Cambridge"}
@TECHREPORT{G1.tr,
KEY ="Gull",
AUTHOR ="S. F. Gull",
TITLE ="{B}ayesian inductive inference and maximum entropy",
YEAR ="1985",
INSTITUTION ="University of Cambridge Dept. of Physics",
NUMBER ="1326"}
@MANUAL{GS3,
KEY ="Gull and Skilling",
AUTHOR ="S. F. Gull and J. Skilling",
TITLE ="Quantified Maximum Entropy. \verb+MemSys5+ User's manual",
ORGANIZATION ="M.E.D.C.",
ADDRESS ="33 North End, Royston, SG8 6NR, England",
YEAR ="1991"}
@Proceedings{Maxent90,
KEY ="Grandy and Schick",
EDITOR ="Grandy, Jr., W. T. and L.H. Schick",
TITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie 1990",
PUBLISHER ="Kluwer",
YEAR ="1991"}
@proceedings{Maxent88,
KEY ="Skilling",
EDITOR ="J. Skilling",
YEAR = 1989,
TITLE ="Maximum Entropy and {B}ayesian Methods, {C}ambridge 1988",
PUBLISHER ="Kluwer"
}
% aka Skilling93
@INPROCEEDINGS{Skilling_clouds,
KEY ="Skilling",
AUTHOR ="J. Skilling",
TITLE ="{B}ayesian numerical analysis",
BOOKTITLE ="Physics and Probability",
EDITOR ="Grandy, Jr., W. T. and P. Milonni",
PUBLISHER ="C.U.P.",
ADDRESS ="Cambridge",
YEAR ="1993"}
% Radford used to be used to denote Radford.mixture
@INPROCEEDINGS{Radford.mixture,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="{B}ayesian mixture modelling",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {S}eattle 1991",
EDITOR ="C.R. Smith and G.J. Erickson and P.O. Neudorfer",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1992",
PAGES ="197-211"}
@Article{Neal_belief_nets,
author = "R. M. Neal",
title = "Connectionist learning of belief networks",
journal = "Artificial Intelligence",
year = 1992,
volume = 56,
pages = "71-113"
}
@TECHREPORT{Radford.mixtureTR,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="{B}ayesian mixture modelling by
{M}onte {C}arlo simulation",
YEAR ="1991",
INSTITUTION ="Dept. of Computer Science, University
of Toronto",
NUMBER ="Technical Report CRG--TR--91--2"}
@article{Radford.over.b,
year=1997,
author={R. M. Neal},
title={Suppressing Random Walks in {M}arkov chain {M}onte
{C}arlo using Ordered Overrelaxation},
note={this volume}
}
@TECHREPORT{Radford.over,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="Suppressing Random Walks in {M}arkov chain {M}onte
{C}arlo using Ordered Overrelaxation",
YEAR ="1995",
INSTITUTION ="Dept. of Statistics, University
of Toronto",
NUMBER ="9508"}
@TECHREPORT{PintoNeal_01,
KEY ="Neal",
AUTHOR ="R. L. Pinto and R. M. Neal",
title={Improving {M}arkov chain {M}onte {C}arlo Estimators
by Coupling to an Approximating Chain},
YEAR ="2001",
INSTITUTION ="Dept. of Statistics, University
of Toronto",
NUMBER ="0101"}
@TECHREPORT{Neal_92,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="{B}ayesian Training of Backpropagation
Networks by the Hybrid {M}onte {C}arlo method",
YEAR ="1992",
NUMBER ="CRG--TR--92--1",
INSTITUTION ="Dept. of Computer Science, University
of Toronto"}
% better to ref Neal_nips5
@article{Green1995,
author={Green, P. J.},
year={1995},
title={Reversible Jump {M}arkov Chain {M}onte {C}arlo Computation
and {B}ayesian Model Determination},
journal={Biometrika}, volume=82,pages={711-732}
}
@TECHREPORT{Neal_dop,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="Probabilistic Inference using
{M}arkov Chain {M}onte {C}arlo Methods",
YEAR ="1993",
NUMBER ="CRG--TR--93--1",
INSTITUTION ="Dept. of Computer Science, University of Toronto"}
@TECHREPORT{AutoClassTR,
KEY ="Hanson, Stutz and Cheeseman",
AUTHOR ="R. Hanson and J. Stutz and P. Cheeseman",
TITLE ="{B}ayesian classification theory",
YEAR ="1991",
NUMBER ="FIA--90-12-7-01",
INSTITUTION ="NASA Ames"}
@INPROCEEDINGS{AutoClass,
KEY ="Hanson, Stutz and Cheeseman",
AUTHOR ="R. Hanson and J. Stutz and P. Cheeseman",
TITLE ="{B}ayesian classification with correlation and inheritance",
YEAR ="1991",
BOOKTITLE ="Proceedings of the 12th International Joint Conference on
Artificial Intelligence, Sydney, Australia",
publisher = "Morgan Kaufmann",
volume=2,
pages="692-698",
MONTH ="August"}
@Article{lauritzen-spiegelhalter-88,
key = "Lauritzen",
author = "S.~L. Lauritzen and D.~J. Spiegelhalter",
title = "Local computations with probabilities on graphical
structures and their application to expert systems",
journal = "Journal of the Royal Statistical Society B",
volume = "50",
pages = "157--224",
year = "1988"
}
@article{Shokrollahi1997,
title={A remark on matrix rigidity},
author={Shokrollahi, M. A. and Spielman, D. A. and Stemann, V.},
journal={Information Processing Letters},
year={1997},
volume={64},
number={6},
pages={283-285},
abstract={The rigidity of a matrix is defined to be the number of entries in
the matrix that have to be changed in order to reduce its rank below
a certain value. Using a simple combinatorial Lemma, we show that one
must alter at least c(n(2)/r)log(n/r) entries of an (n x n)-Cauchy
matrix to reduce its rank below r, for some constant c. We apply our
combinatorial lemma to matrices obtained from asymptotically good
algebraic geometric codes to obtain a similar result for r satisfying
2n/(root q - 1) < r less than or equal to n/4. (C) 1997 Elsevier
Science B.V.}
}
@article{spielman-96,
title={Linear-time encodable and decodable error-correcting codes},
author={Spielman, D. A.},
journal={IEEE Transactions on Information Theory},
year={1996},
volume={42},
number={6.1},
annote={no. 6 (Part 1), November},
month={November},
pages={1723-1731},
abstract={We present a new class of asymptotically good, linear error-
correcting codes. These codes can be both encoded and decoded in
linear time. They can also be encoded by logarithmic-depth circuits
of linear size and decoded by logarithmic depth circuits of size O(n
log n). We present both randomized and explicit constructions of
these codes.}
}
@article{Sipser1996,
title={Expander codes},
author={Sipser, M. and Spielman, D. A.},
journal={IEEE Transactions on Information Theory},
year={1996},
volume={42},
number={6Pt1},
pages={1710-1722},
abstract={Using expander graphs, we construct a new family of asymptotically
good, linear error-correcting codes, These codes have linear time
sequential decoding algorithms and logarithmic time parallel decoding
algorithms that use a linear number of processors. We present both
randomized and explicit constructions of these codes, Experimental
results demonstrate the good performance of the randomly chosen
codes.}
}
@article{Luby2001a,
author = {M. G. Luby and M. Mitzenmacher and M. Amin Shokrollahi and
D. A. Spielman},
title = {Efficient Erasure Correcting Codes},
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={569-584},
year = {2001}
}
@article{Luby2001b,
author = {M. G. Luby and M. Mitzenmacher and M. Amin Shokrollahi and
D. A. Spielman},
title = "Improved Low-Density
Parity-Check Codes Using Irregular Graphs and Belief Propagation",
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={585--598},
year = {2001}
}
@InProceedings{SpielPLRC,
author = {M. G. Luby and M. Mitzenmacher and M. Amin Shokrollahi and
D. A. Spielman and V. Stemann},
title = {Practical Loss-Resilient Codes},
booktitle = {Proceedings of the Twenty-Ninth Annual ACM Symposium on Theory of Computing (STOC)},
key = {Spielman},
year = {1997}
}
% aka SpielLDPC see Luby2001b
@InProceedings{spielman-98,
author = "M.~G. Luby and M. Mitzenmacher and M. A. Shokrollahi and
D.~A. Spielman",
title = "Improved Low-Density
Parity-Check Codes Using Irregular Graphs and Belief Propagation",
booktitle = {Proceedings of the IEEE International Symposium on Information Theory (ISIT)},
year = "1998",
pages = "117"
}
@unpublished{spielman-98-old,
key = "Spielman",
author = "M.~G. Luby and M. Mitzenmacher and M. A. Shokrollahi and
D.~A. Spielman",
title = "Improved Low-Density
Parity-Check Codes Using Irregular Graphs and Belief Propagation",
note = "Submitted to ISIT98",
year = "1998"
}
@Book{pearl,
author = "J. Pearl",
title = "Probabilistic Reasoning in Intelligent Systems: Networks of
Plausible Inference",
publisher = "Morgan Kaufmann",
year = 1988,
address = "San Mateo",
annote="2Y123, CL"
}
@INPROCEEDINGS{NW91,
KEY ="Weir",
AUTHOR ="N. Weir",
TITLE ="Applications of maximum entropy techniques to {HST} data",
BOOKTITLE ="Proceedings of the {ESO/ST--ECF} Data Analysis Workshop, {A}pril 1991",
ADDRESS = "Garching",
EDITOR = "P.J. Grosbol and R.H. Warmels",
PUBLISHER = "European Southern Observatory/Space Telescope -- European
Coordinating Facility",
PAGES = "115-129",
YEAR ="1991"}
@ARTICLE{Kashyap,
KEY ="Kashyap",
AUTHOR ="R. L. Kashyap",
TITLE ="A {B}ayesian comparison of different classes of dynamic
models using empirical data",
JOURNAL ="IEEE Transactions on Automatic Control",
YEAR ="1977",
VOLUME ="AC-22",
NUMBER ="5",
PAGES ="715--727"}
% This paper includes a rediscovery of {B}ayesian model comparison and the fact
% that it embodies Occam's razor. - In the context of models for time series.
% It also includes a thorough discussion of how this is different from `Hypothesis
% testing'. At a few points I disagree with his statements but nearly all of it
% gets full marks from me.
@BOOK{Lempers,
KEY ="Lempers",
AUTHOR ="F. B. Lempers",
TITLE ="Posterior probabilities of alternative linear models",
PUBLISHER ="Rotterdam University Press",
YEAR ="1971"}
% Has a lot of discussion of conjugate priors. No mention of Occam's razor.
% Looks readable in parts.
% Active learning
% Experimental design
@ARTICLE{Lindley,
KEY ="Lindley",
AUTHOR ="D.V. Lindley",
TITLE ="On a measure of the information provided
by an experiment",
JOURNAL ="Ann. Math. Statist.",
YEAR ="1956",
VOLUME ="27",
PAGES ="986-1005"}
@INPROCEEDINGS{Skilling92,
KEY ="Skilling",
AUTHOR ="J. Skilling",
TITLE ="{B}ayesian solution of ordinary differential equations",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {S}eattle 1991",
EDITOR ="C.R. Smith and G.J. Erickson and P.O. Neudorfer",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1992",
PAGES ="23-37"}
@BOOK{Fedorov,
KEY ="Fedorov",
AUTHOR ="V.V. Fedorov",
TITLE ="Theory of optimal experiments",
PUBLISHER ="Academic press",
YEAR ="1972"}
@BOOK{Fukunaga,
KEY ="Fukunaga",
AUTHOR ="K. Fukunaga",
TITLE ="Introduction to statistical pattern recognition",
PUBLISHER ="Academic press",
YEAR ="1972"}
@INPROCEEDINGS{El-Gamal,
KEY ="El-Gamal",
AUTHOR ="M. A. El-Gamal",
TITLE ="The role of priors in active {B}ayesian learning in the
sequential statistical decision framework",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {L}aramie, 1990",
YEAR ="1991",
EDITOR ="W. T. Grandy and L. Schick",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
PAGES ="33--38"}
@ARTICLE{Baum,
KEY ="Baum",
AUTHOR ="E. B. Baum",
TITLE ="Neural Net algorithms that learn in
polynomial time from examples and queries",
JOURNAL ="IEEE Trans. on neural
networks",
YEAR ="1991",
VOLUME ="2",
NUMBER ="1",
PAGES ="5--19"}
@Article{viterbi,
author = "A. J. Viterbi",
title = "Error bounds for convolutional codes
and an asymptotically optimum decoding algorithm",
journal = "IEEE
Transactions on Information Theory",
year = 1967,
volume = "IT-13",
pages = "260-269"
}
% review of Baum Welch algm
@ARTICLE{Baum_Welch,
AUTHOR ="S. E. Levinson and L. R. Rabiner and M. M. Sondhi",
TITLE ="An Introduction to the Application
of the Theory of Probabilistic Functions
of a {M}arkov Process to Automatic Speech Recognition",
JOURNAL ="Bell Sys. Tech. J.",
YEAR ="1983",
VOLUME ="62",
NUMBER ="4",
PAGES ="1035--1074"}
% a noddier intro
@ARTICLE{HMM_intro,
KEY ="",
AUTHOR ="L. R. Rabiner and B. H. Juang",
TITLE ="An Introduction to Hidden {M}arkov Models",
JOURNAL ="IEEE ASSP Magazine",
YEAR ="1986",
MONTH="Jan",
PAGES ="4-16"}
@ARTICLE{Speech_HMM,
KEY ="",
AUTHOR ="D. B. Paul",
TITLE ="Speech Recognition using Hidden {M}arkov Models",
JOURNAL ="The Lincoln Laboratory Journal",
YEAR ="1990",
VOLUME="3",
NUMBER = 1,
PAGES ="41-62"}
@ARTICLE{Baum_Welch_orig,
KEY ="Baum and Petrie",
AUTHOR ="L. E. Baum and T. Petrie",
TITLE ="Statistical Inference for Probabilistic Functions of
Finite-State {M}arkov Chains",
JOURNAL ="Ann. Math. Stat.",
YEAR ="1966",
VOLUME ="37",
PAGES ="1554--1563"}
@ARTICLE{Query91,
KEY ="Hwang \etal",
AUTHOR ="J-N. Hwang and J.J. Choi and S. Oh and R.J. Marks II",
TITLE ="Query--based learning applied to partially trained
multilayer perceptrons",
JOURNAL ="IEEE Trans. on Neural
networks",
YEAR ="1991",
VOLUME ="2",
NUMBER ="1",
PAGES ="131--136"}
@TECHREPORT{Plutowski_White,
KEY ="Plutowski and White",
AUTHOR ="M. Plutowski and H. White",
TITLE ="Active selection of training examples for Network learning
in noiseless environments",
YEAR ="1991",
NUMBER ="TR 90-011",
INSTITUTION ="Dept. Computer Science, UCSD"}
% MDL
% All three of these make clear that MDL = {B}ayes
@ARTICLE{Wallace_Freeman,
KEY ="Wallace and Freeman",
AUTHOR ="C. S. Wallace and P. R. Freeman",
TITLE ="Estimation and Inference by Compact Coding",
JOURNAL ="J. R. Statist. Soc. B",
YEAR ="1987",
VOLUME ="49",
NUMBER ="3",
PAGES ="240-265"}
@book{Wallace_book,
KEY ="Mosteller and Wallace",
AUTHOR ="F. Mosteller and D. L. Wallace",
title={Applied {B}ayesian and Classical Inference.
The case of {\em The {F}ederalist\/} papers},
year={1984},
publisher={Springer}
}
@INCOLLECTION{Patrick_Wallace,
KEY ="Patrick and Wallace",
AUTHOR ="J. D. Patrick and C. S. Wallace",
TITLE ="Stone circle geometries: an information
theory approach",
BOOKTITLE ="Archaeoastronomy in the {O}ld {W}orld",
YEAR ="1982",
EDITOR ="D. C. Heggie",
PAGES ="231-264",
PUBLISHER ="Cambridge Univ. Press"}
@ARTICLE{Schwarz,
KEY ="Schwarz",
AUTHOR ="G. Schwarz",
TITLE ="Estimating the dimension of a model",
JOURNAL ="Ann. Stat.",
YEAR ="1978",
VOLUME ="6 ",
NUMBER ="2",
PAGES ="461--464"}
@ARTICLE{WB,
KEY ="Wallace and Boulton",
AUTHOR ="C.S. Wallace and D.M. Boulton",
TITLE ="An information measure for classification",
JOURNAL ="Comput. J.",
YEAR ="1968",
VOLUME ="11 ",
NUMBER ="2",
PAGES ="185--194"}
% Marginalization
@ARTICLE{Spiegelhalter,
KEY ="Spiegelhalter and Lauritzen",
AUTHOR ="D. J. Spiegelhalter and S. L. Lauritzen",
TITLE ="Sequential updating of conditional probabilities on
directed graphical structures",
JOURNAL ="Networks",
YEAR ="1990",
VOLUME ="20",
NUMBER ="",
PAGES ="579--605"}
@Article{Spieg93,
author = "D. J. Spiegelhalter and A. P. Dawid and S. L.
Lauritzen and R. G. Cowell",
title = "Bayesian Analysis in Expert Systems",
journal = "Statistical Science",
volume = 8,
number = 3,
pages = "219-283",
year=1993
}
@book{lauritzen96,
author = "S. L. Lauritzen",
title = "Graphical Models",
publisher={Clarendon Press},
address={Oxford},
year={1996},
series={Oxford Statistical Science Series},
number={17}
}
% fundamental theory of graphical models
% Bayesian Monte Carlo methods
@incollection{LindleyLaplace,
author="D. V. Lindley",
TITLE ="Approximate {B}ayesian Methods",
booktitle = "Bayesian Statistics",
publisher = "Valencia University Press",
year = 1980,
editor = "J. M. Bernardo and M. H. DeGroot and D. V. Lindley and
A. F. M. Smith",
pages = "223-237",
address = "Valencia"
}
@incollection{bugs,
author="A. Thomas and D. J. Spiegelhalter and W. R. Gilks",
TITLE ="{BUGS}: A Program to Perform {B}ayesian Inference Using
{G}ibbs Sampling",
booktitle = "Bayesian Statistics 4",
publisher = "Clarendon Press",
year = 1992,
editor = "J. M. Bernardo and J. O. Berger and A. P. Dawid and
A. F. M. Smith",
pages = "837-842",
address = "Oxford"
}
@article{Adler1981,
title={Over-Relaxation Method for the {M}onte-{C}arlo Evaluation of the
Partition Function for Multiquadratic Actions},
author={Adler, S. L.},
journal={Physical Review D -- Particles and Fields},
year={1981},
volume={23},
number={12},
pages={2901-2904}
}
% importance resampling
@article{berzuini97dynamic,
author = "Carlo Berzuini and Nicola G. Best and Walter R. Gilks and Cristiana Larizza",
title = "Dynamic Conditional Independence Models and {Markov} Chain {Monte Carlo} Methods",
journal = "Journal of the American Statistical Association",
volume = "92",
number = "440",
pages = "1403--1412",
year = "1997",
url = "citeseer.nj.nec.com/berzuini97dynamic.html" }
% Gilks, WR, Berzuini, C (2001). Following a moving target - Monte Carlo inference for dynamic Bayesian models. Journal of the Royal Statistical Society Series B-Statistical Methodology , 63, 127-146.
@article{BerzuiniGilks2001,
title={Following a moving target - Monte Carlo inference for dynamic Bayesian models},
author= "Carlo Berzuini and Walter R. Gilks",
journal={Journal of the Royal Statistical Society Series B-Statistical Methodology},
Year=2001,
Volume=63,
Number=1,
Pages={127-146}
}
% http://www-sigproc.eng.cam.ac.uk/~ad2/arnaud_doucet.html
@article{Doucetetal2000,
title={On Sequential Monte Carlo Sampling Methods for Bayesian Filtering},
author={A. Doucet and S.J. Godsill and C. Andrieu},
journal={Statistics and Computing}, volume={10}, number={3}, pages={197-208},
year={2000}
}
@Article{Gilks_Wild,
author = "Gilks, W.R. and Wild, P.",
title = "Adaptive Rejection Sampling for {G}ibbs Sampling",
journal = "Applied Statistics",
year = 1992,
volume = 41,
pages = "337-348"
}
@Article{Gilks_RG_ADS,
author = "Gilks, W.R. and Roberts, G.O. and George, E.I.",
title = "Adaptive Direction Sampling",
journal = "Statistician",
year = 1994,
volume = 43,
pages = "179-189"
}
% neural net algorithm for MaxEnt:
@ARTICLE{MP2,
KEY ="Marrian and Peckerar",
AUTHOR ="C. R. K. Marrian and M. C. Peckerar",
TITLE ="Electronic Neural Net Algorithm for Maximum Entropy Solutions of
Ill-Posed Problems",
JOURNAL ="IEEE Trans. Circ. Sys.",
YEAR ="1989",
VOLUME ="36",
NUMBER ="",
PAGES ="288--294"}
@INPROCEEDINGS{MP1,
KEY ="Marrian and Peckerar",
AUTHOR ="C. R. K. Marrian and M. C. Peckerar",
TITLE ="Electronic Neural Net Algorithm for Maximum Entropy Solutions of
Ill-Posed Problems",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {C}ambridge 1988",
EDITOR ="J. Skilling",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1989"}
% NEURAL NETS OPTIMISATION OF number parameters, regularisers, etc.
@INCOLLECTION{Weigend,
KEY ="Weigend et. al.",
AUTHOR ="A. S. Weigend and D. E. Rumelhart
and B. A. Huberman",
TITLE ="Generalization by weight--elimination
with applications to forecasting",
BOOKTITLE ="Advances in Neural Information Processing Systems 3",
YEAR ="1991",
EDITOR ="R. P. Lippmann et. al.",
PAGES ="875--882",
PUBLISHER ="Morgan Kaufmann"}
@PHDTHESIS{Nowlan,
KEY ="Nowlan",
AUTHOR ="Steven J. Nowlan",
TITLE ="Soft competitive adaptation:
neural Network learning algorithms based on fitting statistical mixtures",
YEAR ="1991",
NOTE ="CS--91--126",
SCHOOL ="Carnegie Mellon University"}
@PHDTHESIS{GibbsPhD,
AUTHOR ="M. N. Gibbs",
TITLE ="Bayesian {G}aussian Processes for Regression and Classification",
YEAR ="1997",
SCHOOL ="Cambridge University",
note = "{\tt http://www.inference.phy.cam.ac.uk/mng10/}",
}
@article{Williams1998,
title={Computation with infinite neural networks},
author={Williams, C. K. I.},
journal={Neural Computation},
year={1998},
volume={10},
number={5},
pages={1203-1216},
abstract={For neural networks with a wide class of weight priors, it can be
shown that in the limit of an infinite number of hidden units, the
prior over functions tends to a gaussian process. In this article,
analytic forms are derived for the covariance function of the
gaussian processes corresponding to networks with sigmoidal and
gaussian hidden units. This allows predictions to be made efficiently
using networks with an infinite number of hidden units and shows,
somewhat paradoxically, that it may be easier to carry out Bayesian
prediction with infinite networks rather than finite ones.}
}
@INCOLLECTION{BM,
KEY ="Hinton and Sejnowski",
AUTHOR ="G. E. Hinton and T. J. Sejnowski",
TITLE ="Learning and relearning in {B}oltzmann machines",
BOOKTITLE ="Parallel Distributed Processing",
YEAR ="1986",
EDITOR ="D. E. Rumelhart and J. E. McClelland",
PAGES ="282--317",
PUBLISHER ="MIT Press",
ADDRESS ="Cambridge Mass."}
@ARTICLE{Ji,
KEY ="Ji \etal",
AUTHOR ="C. Ji and R. R. Snapp and D. Psaltis",
TITLE ="Generalizing smoothness constraints from discrete samples",
JOURNAL ="Neural Computation",
YEAR ="1990",
VOLUME ="2 ",
NUMBER ="2",
PAGES ="188-197"}
@TECHREPORT{LT,
KEY ="Lee and Tenorio",
AUTHOR ="W. T. Lee and M. F. Tenorio",
TITLE ="On Optimal Adaptive Classifier Design Criterion ---
How many hidden units are Necessary for an optimal Neural
network classifier?",
YEAR ="1991",
NUMBER ="TR-EE-91-5",
INSTITUTION ="Purdue University"}
@ARTICLE{Abu1,
KEY ="Abu-Mostafa",
AUTHOR ="Y. S. Abu-Mostafa",
TITLE ="The {V}apnik-{C}hervonenkis
dimension: information versus complexity in learning",
JOURNAL ="Neural Computation",
YEAR ="1990",
VOLUME ="1 ",
NUMBER ="3",
PAGES ="312--317"}
@ARTICLE{Abu,
KEY ="Abu-Mostafa",
AUTHOR ="Y. S. Abu-Mostafa",
TITLE ="Learning from hints in Neural Networks",
JOURNAL ="J. Complexity",
YEAR ="1990",
VOLUME ="6",
NUMBER ="",
PAGES ="192--198"}
% includes an example of a regulariser `hint'
@INPROCEEDINGS{Haussler,
KEY ="Haussler \etal",
AUTHOR ="D. Haussler and M. Kearns and R. Schapire",
TITLE ="Bounds on the sample complexity of {B}ayesian learning using information
theory and the {VC} dimension",
BOOKTITLE ="Proceedings of the fourth {COLT} workshop",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1991"
}
% OTHER PAPERS ON OCCAM
@INPROCEEDINGS{Ponting,
KEY ="Ponting",
AUTHOR ="K. M. Ponting",
TITLE ="A statistical approach to the determination of
hidden {M}arkov model structure",
BOOKTITLE ="7th {FASE} Symposium",
YEAR ="1988"}
%---------------------------------
@ARTICLE{Angel,
KEY ="Angel \etal",
AUTHOR ="J. R. P. Angel and P. Wizinowich and M. Lloyd-Hart and D. Sandler",
TITLE ="Adaptive optics for array telescopes using Neural-network techniques",
JOURNAL ="Nature",
YEAR ="1990",
VOLUME ="348",
PAGES ="221--224"}
% Nov 1990
% J. R. P. Angel
% Steward Observatory
% University of Arizona
% Tucson
% AZ 85721
% USA
@ARTICLE{Bayes,
KEY ="Bayes",
AUTHOR ="Thomas Bayes",
TITLE ="An essay towards solving a problem in the
doctrine of chances",
JOURNAL ="Philos. Trans. R. Soc. London",
YEAR ="1763",
VOLUME ="53",
PAGES ="370--418"}
% , reprinted in {\em Biometrika} (1958) {\bf 45}, 293--315
@BOOK{Bretthorst,
KEY ="Bretthorst",
AUTHOR ="G.L. Bretthorst",
TITLE ="{B}ayesian spectrum analysis
and parameter estimation",
PUBLISHER ="Springer",
YEAR ="1988",
NOTE={Also available at {\tt bayes.wustl.edu}}
}
% In this book, he studies the inference of exponential functions
% from noisy data, and especially oscillatory decaying functions
% exp(-l x)*(A sin omega x + phi).
@ARTICLE{Bretthorst.JMR,
KEY ="Bretthorst",
AUTHOR ="G.L. Bretthorst",
TITLE ="{B}ayesian Analysis.
{I}. Parameter Estimation Using Quadrature NMR Models.
{II}. Signal Detection and Model Selection.
{III}. Applications to NMR.",
JOURNAL ="J. Magnetic Resonance",
YEAR ="1990",
VOLUME ="88",
NUMBER ="3",
PAGES ="533--595"}
@INPROCEEDINGS{Gull:nonparam,
KEY ="Gull and Fielden",
AUTHOR ="S. F. Gull and J. Fielden",
TITLE ="{B}ayesian Non-Parametric Statistics",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods in
Applied Statistics",
EDITOR ="J. H. Justice",
PUBLISHER ="C.U.P.",
ADDRESS ="Cambridge",
YEAR ="1986",
PAGES ="85--94"}
@BOOK{Jeffreys,
KEY ="Jeffreys",
AUTHOR ="H. Jeffreys",
TITLE ="Theory of Probability",
PUBLISHER ="Oxford Univ. Press",
YEAR ="1939",
NOTE="3rd edition reprinted 1985"}
@ARTICLE{Rissanen1,
KEY ="Rissanen",
AUTHOR ="J. Rissanen",
TITLE ="Modeling by shortest data description",
JOURNAL ="Automatica",
YEAR ="1978",
VOLUME ="14",
PAGES ="465--471"}
@TECHREPORT{Seung,
KEY ="Seung \etal",
AUTHOR ="H. S. Seung and H. Sompolinsky and N. Tishby",
TITLE ="Statistical mechanics of learning from examples",
YEAR ="1991",
INSTITUTION ="preprint"}
@article{LeeSeung,
title={Learning the parts of objects by non-negative matrix factorization},
author={Daniel D. Lee and H. S. Seung},
journal={Nature},
volume={401},
pages={788--791},
year={1999},
abstract={
Is perception of the whole based on perception of its parts? There is psychological and
physiological evidence for parts-based representations in the brain, and certain
computational theories of object recognition rely on such representations. But little is
known about how brains or computers might learn the parts of objects. Here we
demonstrate an algorithm for non-negative matrix factorization that is able to learn
parts of faces and semantic features of text. This is in contrast to other methods, such as
principal components analysis and vector quantization, that learn holistic, not
parts-based, representations. Non-negative matrix factorization is distinguished from
the other methods by its use of non-negativity constraints. These constraints lead to a
parts-based representation because they allow only additive, not subtractive,
combinations. When non-negative matrix factorization is implemented as a neural
network, parts-based representations emerge by virtue of two properties: the firing rates
of neurons are never negative and synaptic strengths do not change sign.
}
}
@BOOK{Szeliski,
KEY ="Szeliski",
AUTHOR ="R. Szeliski",
TITLE ="{B}ayesian modeling of uncertainty in low level vision",
PUBLISHER ="Kluwer",
YEAR ="1989"}
@BOOK{Rao&Fujiwara,
KEY ="Rao&Fujiwara",
AUTHOR ="T. R. N. Rao and E. Fujiwara",
TITLE ="Error-control Coding for Computer Systems",
PUBLISHER ="Prentice-Hall",
YEAR ="1989"}
% From haussler
% We just did a journal version as an invited paper to the special
% issue of MAchine Learning, on the COLT `91 conference. However, that
% paper is still being reviewed. This long version will appear also as
% tech rep UCSC-CRL-91-44. Right now though, the best references to this
% and related work are:
@inproceedings{OH.colt,
author= "Opper, M. and Haussler, D.",
title= "Calculation of the learning curve of {B}ayes Optimal
classification algorithm for learning a perceptron with noise",
booktitle= "Computational Learning Theory: Proceedings of the
Fourth Annual Workshop",
publisher= "Morgan Kaufmann",
pages= "75--87",
year= 1991
}
@inproceedings{HKS,
author= "Haussler, D. and Kearns, M. and Schapire, R.",
title= "Bounds on the sample complexity of {B}ayesian learning
using information theory and the {VC} dimension",
booktitle= "Proceedings of the Fourth Workshop on Computational
Learning Theory",
pages= "61--74",
year= 1991
}
@article{OH.prl,
author= "Opper, M. and Haussler, D.",
title= "Generalization performance of {B}ayes Optimal classification
algorithm for learning a perceptron",
journal= "Physical Review Letters",
year= 1991,
volume= 66,
number= 20,
month= may,
pages= "2677--2680"
}
@misc{Postol,
author={M. Postol},
title={Quantum Low Density Parity Check Codes},
year={2001}
}
@ARTICLE{dirichlet,
KEY ="Zabell",
AUTHOR ="S. L. Zabell",
TITLE ="{W. E. Johnson}'s `sufficientness' postulate",
JOURNAL ="Annals of Statistics",
YEAR ="1982",
VOLUME ="10",
NUMBER ="4",
PAGES ="1091--1099"}
% Smoothing method in compression: was Cleary_compression
@ARTICLE{cleary84,
KEY ="Cleary and Witten",
AUTHOR ="Cleary, J. G. and Witten, I. H.",
TITLE ="Data compression using
adaptive coding and partial string matching",
JOURNAL ="IEEE Transactions on Communications",
YEAR ="1984",
VOLUME ="32",
PAGES ="396--402"}
% The model used isn't phrased in quite the same way as you do. For each
% symbol, they look at the longest context that has occurred before,
% and try to predict on the basis of the statistics available for that,
% but with a certain probability of an "escape" to a lower-order context.
% The book _Text Compression_ by Bell and Witten also covers this stuff
% (and is more recent).
% Bell, Timothy C.
% Cleary, John G.
% Witten, Ian H.
@book{ Hebb49,
author = "D. O. Hebb",
title = "The Organization of Behavior",
year = 1949,
publisher = "Wiley"}
@article{ Grayetal89,
author = "Charles M. Gray and Peter K{\"o}nig and Andreas K. Engel and Wolf Singer",
title = "Oscillatory responses in cat visual cortex exhibit inter-columnar synchronization which reflects global stimulus properties",
journal = "Nature",
year = 1989,
month = mar,
volume = 338,
pages = "334--337"}
@BOOK{Bell_Compression,
KEY ="Bell, Cleary and Witten",
AUTHOR ="Bell, T. C. and
Cleary, J. G. and
Witten, I. H.",
TITLE ="Text compression",
ADDRESS ="Englewood Cliffs",
PUBLISHER ="Prentice Hall",
YEAR ="1990"}
% Location: [Computer Laboratory] 2Y50
% Arithmetic coding: Witten, Neal and Cleary
@ARTICLE{arith_coding,
KEY ="Witten \etal",
AUTHOR ="I. H. Witten and R. M. Neal and J. G. Cleary",
TITLE ="Arithmetic Coding for Data Compression",
JOURNAL ="Communications of the ACM",
YEAR ="1987",
VOLUME ="30",
NUMBER ="6",
PAGES ="520--540",
annote={Communications of the Association for Computing Machinery}
}
@article{Gallager78,
author= {Gallager, R. G.},
title={Variations on a Theme by {H}uffman},
journal={IEEE Trans. on Information Theory},
volume={IT-24},
number={6},
month=nov,
year={1978},
pages={668--674}}
% Abstract:
% The state of the art in data compression is arithmetic coding, not the
% better known Huffman method. Arithmetic coding gives greater compression,
% is faster for adaptive models, and clearly separates the model from the
% channel encoding.
@ARTICLE{Rissanen_arith,
KEY ="Rissanen",
AUTHOR ="J. Rissanen",
TITLE ="Generalized {K}raft Inequality and Arithmetic Coding",
JOURNAL ="IBM J. Res. Dev.",
YEAR =1976,
MONTH =may,
VOLUME =20,
PAGES ="198--203"}
@ARTICLE{Rissanen_Langdon,
KEY ="Rissanen",
AUTHOR ="J. Rissanen and G. G. Langdon",
TITLE ="Universal Modeling and Coding",
JOURNAL ="IEEE Trans. Info. Theory",
YEAR ="1981",
VOLUME ="27",
NUMBER ="1",
PAGES ="12--23"}
@ARTICLE{Rissanen_Langdon:79,
KEY ="Rissanen and Langdon",
AUTHOR ="J. Rissanen and G. G. Langdon",
TITLE ="Arithmetic Coding",
JOURNAL ="IBM Journal of Research and Development",
YEAR ="1979",
VOLUME ="23",
PAGES ="149--162"}
% ``Smoothing" a la IBM
@ARTICLE{Bahl,
KEY ="Bahl et. al.",
AUTHOR ="L. R. Bahl and F. Jelinek and R. L. Mercer",
TITLE ="A maximum likelihood approach to continuous speech
recognition",
JOURNAL ="IEEE Transactions on Pattern Analysis and Machine Intelligence",
YEAR ="1983",
VOLUME ="PAMI-5",
NUMBER ="2",
PAGES ="179--190"}
% coding theorists forward-backward
@ARTICLE{BCJR,
KEY ="Bahl et. al.",
AUTHOR ="L. R. Bahl and J. Cocke and F. Jelinek and J. Raviv",
TITLE ="Optimal Decoding of Linear Codes for Minimizing
Symbol Error Rate",
JOURNAL ="IEEE Trans. Inform. Theory",
YEAR ="1974",
VOLUME ="IT-20",
PAGES ="284--287"}
@INPROCEEDINGS{Bahl2,
KEY ="Bahl et. al.",
AUTHOR ="L. R. Bahl and P.F. Brown and de Souza, P.V. and R. L. Mercer and D. Nahamoo",
TITLE ="A fast algorithm for deleted interpolation",
BOOKTITLE ="Proc. Eurospeech '91 Genoa",
YEAR ="1991",
PAGES ="1209--1212"}
@INPROCEEDINGS{Jelinek_Mercer,
KEY ="Jelinek and Mercer",
AUTHOR ="F. Jelinek and R. L. Mercer",
TITLE ="Interpolated estimation of {M}arkov source parameters
from sparse data",
BOOKTITLE ="Pattern recognition in practice",
EDITOR ="E. S. Gelsema and L. N. Kanal",
PUBLISHER ="North-Holland publishing company",
YEAR ="1980",
PAGES ="381--402"}
@ARTICLE{Nadas,
KEY ="Nadas",
AUTHOR ="A. Nadas",
TITLE ="Estimation of probabilities in the language model of the
{IBM} speech recognition system",
JOURNAL =ASSP,
YEAR ="1984",
VOLUME ="32",
NUMBER ="4",
PAGES ="859--861"}
% and a backing off paper:
@ARTICLE{katz-backoff,
AUTHOR = {S. M. Katz},
JOURNAL = ASSP,
PAGES = {400--401},
TITLE = {Estimation of probabilities from sparse data for the language model component of a speech recognizer},
VOLUME = {35},
NUMBER = {3},
MONTH = mar,
YEAR = {1987}
}
@inproceedings{Brown:88a,
author={Peter F. Brown and John Cocke and Stephen A. DellaPietra
and Vincent J. DellaPietra and Frederick Jelinek
and Robert L. Mercer and Paul S. Roossin},
title={A Statistical Approach to Language Translation},
booktitle={Proceedings of the 12th International Conference
on Computational Linguistics},
year={1988},
pages={71--76},
address={Budapest, Hungary},
month =aug}
@inproceedings{lafferty-codes,
author = "John Lafferty and Dan Rockmore",
title = "Codes and Iterative Decoding on Algebraic Expander Graphs",
booktitle= "International Symposium on Information Theory and its Applications",
year = "2000",
month = nov,
location = "Honolulu, HI",
url = "citeseer.nj.nec.com/lafferty00code.html"
}
@misc{ lafferty-codesB,
author = "John D. Lafferty and Dan Rockmore",
title = "Codes And Iterative Decoding on Algebraic Expander Graphs",
url = "citeseer.nj.nec.com/lafferty00code.html",
note={International Symposium on Information Theory and its Applications},
annote={Honolulu, Hawaii, USA, November 5-8},
year=2000}
@unpublished{MacKay_Lafferty,
title={Codes from {C}ayley graphs},
author={D. J. C. MacKay and J. Lafferty},
note={Work in progress},
year={1997}
}
@article{Brown:90b,
author={Peter F. Brown and John Cocke and Stephen A. DellaPietra and
Vincent J. DellaPietra and Frederick Jelinek
and John D. Lafferty
and Robert L. Mercer and Paul S. Roossin},
title={A Statistical Approach to Machine Translation},
journal={Computational Linguistics},
year={1990},
month=jun,
volume={16},
number={2},
pages={79--85}}
@article{Brown:91g,
author ={Peter F. Brown and Stephen A. DellaPietra
and Vincent J. DellaPietra
and Robert L. Mercer},
title={The Mathematics of Statistical
Machine Translation: Parameter Estimation},
year={1993},
journal={Computational Linguistics},
month=jun,
volume={19},
number={2},
pages={263--311}}
% The most comprehensive paper is Brown:91g.
@inproceedings{Brown:91f,
author ={Peter F. Brown and Stephen A. DellaPietra
and Vincent J. DellaPietra
and Robert L. Mercer},
title={A Statistical Approach to Sense Disambiguation in
Machine Translation},
booktitle={Fourth DARPA Workshop on Speech and Natural Language},
year={1991},
publisher={Morgan Kaufmann Publishers, Inc.},
pages={146--151},
address={Pacific Grove, California},
month=feb}
@inproceedings{Brown:92a,
author ={Peter F. Brown and Stephen A. DellaPietra
and Vincent J. DellaPietra
and John Lafferty
and Robert L. Mercer},
title={Analysis, Statistical Transfer, and Synthesis in Machine
Translation},
booktitle={Proceedings of the Fourth International Conference
on Theoretical and Methodological Issues in Machine Translation},
year={1992},
pages={83--100}}
@misc{Brown:92e,
author ={Peter F. Brown and Stanley F. Chen and Stephen A. DellaPietra
and Vincent J. DellaPietra and Andrew S. Kehler
and Robert L. Mercer},
title={Automatic Speech Recognition in Machine Aided Translation},
year={1992},
howpublished={Submitted to Computers Speech and Language}}
% Using speaker-independent data as a prior for a speaker-dependent machine
@INPROCEEDINGS{Lee_Gauvain,
KEY ="Lee and Gauvain",
AUTHOR ="C. H. Lee and J. L. Gauvain",
TITLE ="Speaker Adaptation Based on {MAP} Estimation of {HMM}
Parameters",
BOOKTITLE ="IEEE Proceedings",
PAGES ="II-558--561",
YEAR ="1993"}
@ARTICLE{Copas:83,
KEY ="Copas",
AUTHOR ="J. B. Copas",
TITLE ="Regression, Prediction and Shrinkage (with Discussion)",
JOURNAL ="J. R. Statist. Soc. B",
YEAR ="1983",
VOLUME ="45",
NUMBER ="3",
PAGES ="311--354"}
% This discusses ``Preshrunk predictors".
% It does also give the Bayesian answer for one case, but then rambles
% off again to terrible
% all-predictions-fudged-by-factor-k
% methods.
% This one is about including the possibility of incorrect binary labels
@ARTICLE{Copas:88,
KEY ="Copas",
AUTHOR ="J. B. Copas",
TITLE ="Binary Regression Models for Contaminated Data
(with Discussion)",
JOURNAL ="J. R. Statist. Soc. B",
YEAR ="1988",
VOLUME ="50",
NUMBER ="2",
PAGES ="225--265"}
@INPROCEEDINGS{Nowlan.sunspot,
KEY ="Nowlan and Hinton",
AUTHOR ="Steven J. Nowlan and G. E. Hinton",
TITLE ="Adaptive Soft Weight Tying using {G}aussian Mixtures",
BOOKTITLE ="Advances in Neural Information Processing Systems 4",
EDITOR ="J. E. Moody and S. J. Hanson and R. P. Lippmann",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1992",
PAGES ="993--1000"}
@BOOK{Reif,
KEY ="Reif",
AUTHOR ="F. Reif",
TITLE ="Fundamentals of Statistical and Thermal Physics",
PUBLISHER ="McGraw--Hill",
YEAR ="1965"}
@INPROCEEDINGS{Brain_Surgeon,
KEY ="Hassibi and Stork",
AUTHOR ="B. Hassibi and D. G. Stork",
TITLE ="Second Order Derivatives for Network Pruning:
Optimal Brain Surgeon",
BOOKTITLE ="Advances in Neural Information Processing Systems 5",
EDITOR ="C. L. Giles and S. J. Hanson and J. D. Cowan",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1993",
PAGES ="164--171"}
% They use a cute iterative procedure for calculating the inverse
% (Hessian+alpha I) in a single pass through the data, with a large number of
% matrix multiplications.
@INPROCEEDINGS{LSP:hessian,
KEY ="LeCun et. al.",
AUTHOR ="LeCun, Y. and P. Y. Simard and B. Pearlmutter",
TITLE ="Automatic Learning Rate Maximization by On-line
Estimation of the Hessian's Eigenvectors",
BOOKTITLE ="Advances in Neural Information Processing Systems 5",
EDITOR ="C. L. Giles and S. J. Hanson and J. D. Cowan",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1993",
PAGES ="156--163"}
@INPROCEEDINGS{SLD:nips5,
KEY ="Simard, LeCun and Denker",
AUTHOR ="P. Simard and LeCun, Y. and J. Denker",
TITLE ="Efficient Pattern Recognition Using a New
Transformation Distance",
BOOKTITLE ="Advances in Neural Information Processing Systems 5",
EDITOR ="C. L. Giles and S. J. Hanson and J. D. Cowan",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1993",
PAGES ="50--58"}
@INPROCEEDINGS{nips6,
KEY ="",
AUTHOR ="",
TITLE ="",
BOOKTITLE ="Advances in Neural Information Processing Systems 6",
EDITOR ="J. D. Cowan and G. Tesauro and J. Alspector",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1994",
PAGES =""}
@article{HintonGhahramani97,
author={G. E. Hinton and Z. Ghahramani},
title={Generative Models for Discovering Sparse Distributed
Representations},
journal={Proc.\ Roy.\ Soc.},
year={1997}
}
@INPROCEEDINGS{Hinton_Zemel:94,
AUTHOR ="Hinton, G. E. and Zemel, R. S.",
TITLE ="Autoencoders, Minimum Description Length and {H}elmholtz
Free Energy",
BOOKTITLE ="Advances in Neural Information Processing Systems 6",
EDITOR ="J. D. Cowan and G. Tesauro and J. Alspector",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1994",
PAGES =""}
@PHDTHESIS{Zemel_thesis,
AUTHOR ="Zemel, R. S.",
TITLE ="A Minimum Description Length Framework for Unsupervised
Learning",
SCHOOL ="University of Toronto",
YEAR =1993}
@UNPUBLISHED{Steeg:94,
AUTHOR ="E. Steeg",
TITLE ="",
NOTE ="Personal communication",
YEAR ="1994"}
@INPROCEEDINGS{boosting,
AUTHOR ="H. Drucker and R. Schapire and P. Simard",
TITLE ="Improving Performance in Neural Networks Using a
Boosting Algorithm",
BOOKTITLE ="Advances in Neural Information Processing Systems 5",
EDITOR ="C. L. Giles and S. J. Hanson and J. D. Cowan",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1993",
PAGES ="42--49"}
% I would call this a data selection procedure, plus a funky modelling
% rule. It depends theoretically on the assumption that the model can
% do better than 50% on any sub-ensemble from the training set.
@INPROCEEDINGS{nips5,
KEY ="",
AUTHOR ="",
TITLE ="",
BOOKTITLE ="Advances in Neural Information Processing Systems 5",
EDITOR ="C. L. Giles and S. J. Hanson and J. D. Cowan",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1993",
PAGES =""}
@INPROCEEDINGS{Wolpert_nips,
KEY ="Wolpert",
AUTHOR ="D. H. Wolpert",
TITLE ="On the use of evidence in Neural Networks",
BOOKTITLE ="Advances in Neural Information Processing Systems 5",
EDITOR ="C. L. Giles and S. J. Hanson and J. D. Cowan",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1993",
PAGES ="539-546"}
@INPROCEEDINGS{Neal_nips5,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="{B}ayesian learning via stochastic dynamics",
BOOKTITLE ="Advances in Neural Information Processing Systems 5",
EDITOR ="C. L. Giles and S. J. Hanson and J. D. Cowan",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1993",
PAGES ="475-482"}
% Disabled duplicate: the key NealHinton98 is also defined below as an
% @Incollection (the correct entry type for this book chapter); defining
% the same key twice is an error in BibTeX.  The leading "@" has been
% removed so this copy is ignored by the parser.
book{NealHinton98,
author="R. M. Neal and G. E. Hinton",
title="A New View of the {EM} Algorithm that Justifies Incremental, Sparse, and Other Variants",
publisher = "Kluwer Academic Publishers",
booktitle={Learning in Graphical Models},
year = "1998",
editor = "M. I. Jordan",
pages = "355-368",
series={NATO Science Series},
address={Dordrecht}
}
@Book{jordan98:_learn_graph_model,
editor = {M. I. Jordan},
title = {Learning in Graphical Models},
publisher = {Kluwer Academic Publishers},
year = 1998,
series = {NATO Science Series},
address={Dordrecht}
}
% A New View of the EM Algorithm
% that Justifies Incremental and
% Other Variants
%
% R. M. Neal and Geoffrey E. Hinton, Dept. of Computer Science, University of Toronto
@Incollection{NealHinton98,
author="R. M. Neal and G. E. Hinton",
title="A New View of the {EM} Algorithm that Justifies Incremental, Sparse, and Other Variants",
publisher = "Kluwer Academic Publishers",
booktitle={Learning in Graphical Models},
year = "1998",
editor = "M. I. Jordan",
pages = "355--368",
series={NATO Science Series},
address={Dordrecht}
}
@article{NealHinton93,
author="R. M. Neal and G. E. Hinton",
title="A New View of the {EM} Algorithm that Justifies Incremental, Sparse, and Other Variants",
journal="Biometrika",
month=feb,
year=1993,
note="submitted"}
% see erice volume NealHinton98
@article{Thodberg1996,
title={Review of {B}ayesian neural networks with an application to near
infrared spectroscopy},
author={Thodberg, H. H.},
journal={IEEE Transactions on Neural Networks},
year={1996},
volume={7},
number={1},
pages={56--72},
abstract={MacKay's Bayesian framework for backpropagation is a practical and
powerful means to improve the generalization ability of neural
networks, It is based on a Gaussian approximation to the posterior
weight distribution, The framework is extended, reviewed, and
demonstrated in a pedagogical way, The notation Is simplified using
the ordinary weight decay parameter, and a detailed and explicit
procedure for adjusting several weight decay parameters is given,
Bayesian backprop is applied in the prediction of fat content in
minced meat from near infrared spectra, It out performs ''early
stopping'' as well as quadratic regression, The evidence of a
committee of differently trained networks is computed, and the
corresponding improved generalization is verified, The error bars on
the predictions of the fat content are computed. There are three
contributors: The random noise, the uncertainty in the weights, and
the deviation among the committee members, The Bayesian framework is
compared to Moody's GPE. Finally, MacKay and Neal's automatic
relevance determination, in which the weight decay parameters depend
on the input number, is applied to the data with improved results.}
}
@TECHREPORT{Thodberg,
KEY ="Thodberg",
AUTHOR ="H. H. Thodberg",
TITLE ="Ace of {B}ayes: application
of Neural Networks with pruning",
YEAR ="1993",
NUMBER ="1132 E",
INSTITUTION ="Danish meat research institute"}
@TechReport{Gold94,
author = "S. Gold and C. P. Lu and A. Rangarajan and S. Pappu
and E. Mjolsness",
title = "New Algorithms for {2D} and {3D} Point Matching: Pose
Estimation and Correspondence",
institution = "Yale",
year = 1994,
number = "YALEU/DCS/RR-1035"
}
% this paper makes use of the idea of turning energies E_{ij}
% into probabilities like the marginals of the posterior of a
% permutation matrix by repeated carpet jumping over i then j.
@BOOK{HKP,
KEY ="Hertz \etal",
AUTHOR ="J. Hertz and A. Krogh and R. G. Palmer",
TITLE ="Introduction to the Theory of Neural Computation",
PUBLISHER ="Addison-Wesley",
YEAR ="1991"}
@INPROCEEDINGS{strauss,
KEY ="Strauss \etal",
AUTHOR ="C. E. M. Strauss and D. H. Wolpert and D. R. Wolf",
TITLE ="Alpha, Evidence, and the Entropic Prior",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {P}aris 1992",
EDITOR ="A. Mohammed-Djafari",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1993"}
@INPROCEEDINGS{Puetter,
KEY ="P",
AUTHOR ="Puetter",
TITLE ="The pixon model for image reconstruction",
BOOKTITLE ="Maximum Entropy and {B}ayesian Methods, {S}anta {B}arbara 1993",
EDITOR ="G. Heidbreder",
PUBLISHER ="Kluwer",
ADDRESS ="Dordrecht",
YEAR ="1996"}
@Article{NC:Bromley93,
author = "J. Bromley and J. S. Denker",
title = "Improving Rejection Performance on Handwritten Digits
by Training with `Rubbish'",
type = "Note",
journal = "Neural Computation",
volume = "5",
number = "3",
pages = "367--370",
year = "1993"
}
@TECHREPORT{Jervis_etal93,
AUTHOR = {T. T. Jervis and W. J. Fitzgerald},
ADDRESS = {Trumpington Street, Cambridge, England},
INSTITUTION = {Cambridge University Engineering Department},
MONTH = {August},
NUMBER = {CUED/F-INFENG/TR 144},
TITLE = {Optimization Schemes for Neural Networks},
YEAR = {1993},
SOURCE = {ftp://svr-ftp.eng.cam.ac.uk/pub/reports/jervis_tr144.ps.Z}
}
@ARTICLE{Gabor,
KEY ="Gabor",
AUTHOR ="D. Gabor",
TITLE ="Theory of communication",
JOURNAL ="J. Inst. Electr. Eng.",
YEAR ="1946",
VOLUME ="93",
PAGES ="429--457"}
@ARTICLE{Daug1,
KEY ="Daugman",
AUTHOR ="John G. Daugman",
TITLE ="Uncertainty relation for resolution in space, spatial frequency,
and orientation optimized by two-dimensional visual cortical filters",
JOURNAL ="J. Opt. Soc. Am. A",
YEAR ="1985",
VOLUME ="2",
NUMBER ="7",
PAGES ="1160--1169"}
@ARTICLE{Daug2,
KEY ="Daugman",
AUTHOR ="John G. Daugman",
TITLE ="Complete Discrete 2-D {Gabor} Transforms by Neural Networks for
Image Analysis and Compression",
JOURNAL ="IEEE Trans. Acoustics, Speech and Signal Proc.",
YEAR ="1988",
VOLUME ="36",
NUMBER ="7",
PAGES ="1169--1179"}
@ARTICLE{Wavelet1,
KEY ="Strang",
AUTHOR ="Gilbert Strang",
TITLE ="Wavelets and Dilation Equations: A Brief Introduction",
JOURNAL ="SIAM Review",
YEAR ="1989",
VOLUME ="31",
NUMBER ="4",
PAGES ="614--627"}
@ARTICLE{Wavelet2,
KEY ="Heil and Walnut",
AUTHOR ="C. E. Heil and D. F. Walnut",
TITLE ="Continuous and Discrete Wavelet Transforms",
JOURNAL ="SIAM Review",
YEAR ="1989",
VOLUME ="31",
NUMBER ="4",
PAGES ="628--666"}
@BOOK{Frac,
KEY ="Mandelbrot",
AUTHOR ="Benoit Mandelbrot",
TITLE ="The Fractal Geometry of Nature",
PUBLISHER ="W.H. Freeman and Co",
YEAR ="1982"}
@ARTICLE{ahb_me,
KEY ="ahb",
AUTHOR ="A. H. Barnett",
TITLE ="Statistical modelling of rough crack surfaces in metals",
JOURNAL ="Internal Report for Non-Destructive Testing
Applications Centre, Technology Division, Nuclear Electric plc",
YEAR ="1993"}
% David Field (J. Opt Soc Am A, 4(12) 2379-2394 (1987)) analyzed a bunch of
% natural images and decided that <|F(k)|^2> = k^{-2} on average, i.e.
% m = 1 in your notation.
%
@Article{field,
author = "D. Field",
title = "n/k",
journal = "J. Opt Soc Am A",
year = 1987,
volume = 4,
number = 12,
pages = "2379-2394"
}
@INPROCEEDINGS{tresp:nips5,
AUTHOR ="S. Ahmad and V. Tresp",
TITLE ="Some Solutions to the Missing Feature Problem in Vision",
BOOKTITLE ="Advances in Neural Information Processing Systems 5",
EDITOR ="C. L. Giles and S. J. Hanson and J. D. Cowan",
PUBLISHER ="Morgan Kaufmann",
ADDRESS ="San Mateo, California",
YEAR ="1993"}
@InProceedings{tresp:nips6,
author = "V. Tresp and S. Ahmad and R. Neuneier",
title = "Training Neural Networks with Deficient Data",
editor = "Cowan, J. D. and Tesauro, G. and Alspector, J.",
booktitle = "Advances in Neural Information Processing Systems 6",
year = 1994,
ADDRESS ="San Mateo, California",
publisher = "Morgan Kaufmann",
PAGES =""
}
@InProceedings{tresp:nips7,
author = "V. Tresp and R. Neuneier and S. Ahmad",
title = "Efficient Methods for Dealing with Missing Data in
Supervised Learning",
editor = "G. Tesauro and D. Touretzky and T. Leen",
booktitle = "Advances in Neural Information Processing Systems 7",
year = 1995,
ADDRESS ="San Mateo, California",
publisher = "Morgan Kaufmann",
PAGES =""
}
@TECHREPORT{breiman,
KEY ="Breiman",
AUTHOR ="L. Breiman",
TITLE ="Stacked regressions",
YEAR ="1992",
NUMBER ="367",
INSTITUTION ="Dept. of Stat., Univ. of Cal. Berkeley"}
@TECHREPORT{Radford_infinite_nets,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="Priors for infinite Networks",
YEAR ="1994",
NUMBER ="CRG-TR-94-1",
INSTITUTION ="Univ. of Toronto"}
% best ref for this is radford's thesis.
% Slice Sampling
@TECHREPORT{Radford_slice,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ={{M}arkov chain {M}onte {C}arlo methods based on `slicing' the density
function} ,
abstract={One way to sample from a distribution is to sample uniformly
from the region under the plot of its density function. A Markov chain
that converges to this uniform distribution can be constructed by
alternating uniform sampling in the vertical direction with uniform
sampling from the horizontal `slice' defined by the current vertical
position. Variations on such `slice sampling' methods can easily be
implemented for univariate distributions, and can be used to sample
from a multivariate distribution by updating each variable in
turn. This approach is often easier to implement than Gibbs sampling,
and may be more efficient than easily-constructed versions of the
Metropolis algorithm. Slice sampling is therefore attractive in
routine Markov chain Monte Carlo applications, and for use by software
that automatically generates a Markov chain sampler from a model
specification. One can also easily devise overrelaxed versions of
slice sampling, which sometimes greatly improve sampling efficiency by
suppressing random walk behaviour. Random walks can also be avoided in
some slice sampling schemes that simultaneously update all variables.
},
YEAR ="1997",
NUMBER ="9722",
INSTITUTION ="Dept. of Statistics, Univ. of Toronto",
url={http://www.cs.toronto.edu/~radford/slice.abstract.html}
}
% Slice Sampling
@article{Radford_slice2001,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ={Slice Sampling},
journal={Annals of Statistics},
note={In Press},
YEAR ="2002",
url={http://www.cs.toronto.edu/~radford/slice.abstract.html}
}
@TECHREPORT{Radford_ais,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ={Annealed Importance Sampling},
abstract={Simulated annealing - moving from a tractable distribution to a distribution of interest via a sequence of
intermediate distributions - has traditionally been used as an inexact method of handling isolated modes in
Markov chain samplers. Here, it is shown how one can use the Markov chain transitions for such an annealing
sequence to define an importance sampler. The Markov chain aspect allows this method to perform acceptably
even for high-dimensional problems, where finding good importance sampling distributions would otherwise be
very difficult, while the use of importance weights ensures that the estimates found converge to the correct
values as the number of annealing runs increases. This annealed importance sampling procedure resembles the
second half of the previously-studied tempered transitions, and can be seen as a generalization of a
recently-proposed variant of sequential importance sampling. It is also related to thermodynamic integration
methods for estimating ratios of normalizing constants. Annealed importance sampling is most attractive when
isolated modes are present, or when estimates of normalizing constants are required, but it may also be more
generally useful, since its independent sampling allows one to bypass some of the problems of assessing
convergence and autocorrelation in Markov chain samplers. },
YEAR ="1998",
NUMBER ="9805",
INSTITUTION ="Dept. of Statistics, Univ. of Toronto",
url={http://www.cs.toronto.edu/~radford/ais.abstract.html}
}
@PHDTHESIS{Radford_thesis,
AUTHOR ="R. M. Neal",
TITLE ="Bayesian Learning for Neural Networks",
YEAR ="1995",
school ="Dept. of Computer Science, Univ. of Toronto"}
@Book{Radford_book,
author = "R. M. Neal",
title = "Bayesian Learning for Neural Networks",
publisher = "Springer",
year = 1996,
number = 118,
series = "Lecture Notes in Statistics",
address = "New York"
}
@TECHREPORT{Neal_gp,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="{M}onte {C}arlo Implementation of {G}aussian Process
Models for {B}ayesian Regression and Classification",
YEAR ="1997",
NUMBER ="CRG--TR--97--2",
INSTITUTION ="Dept. of Computer Science, University of Toronto"}
% aka 9702
@TECHREPORT{Neal_mcdecoder,
KEY ="Neal",
AUTHOR ="R. M. Neal",
TITLE ="{M}onte {C}arlo decoding of {LDPC} codes",
note={Presented at ICTP Workshop on Statistical Physics and Capacity-Approaching Codes},
YEAR ="2001",
url={http://www.cs.toronto.edu/~radford/slides.html},
INSTITUTION ="Dept. of Computer Science, University of Toronto"}
% aka 9702
@Article {Pearlmutter,
author = "B. A. Pearlmutter",
title = "Fast Exact Multiplication by the {H}essian",
journal = "Neural Computation",
year = 1994,
volume = 6,
number = 1,
pages = "147--160",
annote = "Also available by ftp archive.cis.ohio-state.edu:
/pub/neuroprose/pearlmutter.hessian.ps.Z"
}
@unpublished {PearlmutterICA,
author = "B. A. Pearlmutter and L. C. Parra",
title = "A context-sensitive
generalization of ICA",
year = 1996,
note = "To appear in ICONIP. Also available at
\verb+http://www.cnl.salk.edu/+
\verb+~bap/papers/iconip-96-cica.ps.gz+"
}
@unpublished {AmariICA,
author = "S. Amari and A. Cichocki and H. Yang",
title = "A new learning algorithm for blind signal separation",
year = 1996,
note = "In NIPS96"
}
@Book{Knuth_vol1,
author = "D. E. Knuth",
title = "The art of computer programming. Volume 1: fundamental
algorithms",
publisher = "Addison Wesley",
year = 1968,
address = "Reading, Mass."
}
% [Univ. Lib.] 348:8.c.95.227 SF 4
@article{mollon92,
  key     = "Mollon and Bowmaker",
  author  = "Mollon, J. D. and Bowmaker, J. K.",
  title   = "The Spatial Arrangement of Cones in the Primate Fovea",
  journal = "Nature",
  year    = "1992",
  volume  = "360",
  pages   = "677--679"}
@INCOLLECTION{Lee:Adapt,
author = {Chin-Hui Lee and Jean-Luc Gauvain},
title = {Adaptive Learning in Acoustic and Language Modeling},
booktitle = {Speech Recognition and Coding: {N}ew Advances and Trends},
pages = {14-31},
crossref = {NATO:95},
source = {Bill},
status = {Photocopied}
}
@inproceedings{Hinton_bb,
  author    = "Hinton, G. E. and van Camp, D.",
  title     = "Keeping Neural Networks Simple by Minimizing the
               Description Length of the Weights",
  booktitle = "Proc. 6th Annu. Workshop on Comput. Learning Theory",
  publisher = "ACM Press",
  address   = "New York, NY",
  year      = "1993",
  pages     = "5--13"
}
@BOOK{Draper,
KEY ="Draper, Norman Richard",
AUTHOR ="Draper, N. R. and H. Smith",
TITLE ="Applied regression analysis",
PUBLISHER ="Wiley",
ADDRESS ="New York",
YEAR ="1966"
}
@Book{GrandyI,
author = "Grandy, W. T., Jr.",
title = "Foundations of Statistical Mechanics. Volume I:
Equilibrium Theory",
publisher = "D. Reidel",
year = 1987,
annote = "now Reidel is Kluwer"
}
@Book{GrandyII,
author = "Grandy, W. T., Jr.",
title = "Foundations of Statistical Mechanics. Volume II:
Nonequilibrium Phenomena",
publisher = "D. Reidel",
year = 1987,
annote = "now Reidel is Kluwer"
}
@article{Eddy94,
  author  = "Sean R. Eddy and Richard Durbin",
  title   = "{RNA} Sequence Analysis Using Covariance Models",
  journal = NAR,
  year    = 1994,
  volume  = 22,
  pages   = "2079--2088"
}
@article{Krogh94,
  key     = "Anders Krogh and Michael Brown and I. Saira Mian and
             Kimmen Sjolander and David Haussler",
  author  = "A. Krogh and M. Brown and I. S. Mian and
             K. Sj{\"o}lander and D. Haussler",
  title   = "Hidden {Markov} Models in Computational Biology:
             Applications to Protein Modeling",
  journal = JMolBiol,
  year    = 1994,
  volume  = 235,
  pages   = "1501--1531"
}
@InProceedings{Sakakibara94a,
key = "Yasubumi Sakakibara and Michael Brown and Rebecca C.
Underwood and I. Saira Mian and D. Haussler",
author = "Y. Sakakibara and M. Brown and R. C.
Underwood and I. S. Mian and D. Haussler",
title = "Stochastic Context-Free Grammars for Modeling {RNA}",
editor = "Lawrence Hunter",
volume = "V",
pages = "284-293",
booktitle = "Proceedings of the Twenty-Seventh Annual Hawaii
International Conference on System Sciences:
Biotechnology Computing",
year = 1994,
publisher = "IEEE Computer Society Press",
address = "Los Alamitos, CA"
}
@Unpublished{Sakakibara94b,
key = "Yasubumi Sakakibara and Michael Brown and Richard
Hughey and I. Saira Mian and Kimmen Sj{\"{o}}lander and
Rebecca C. Underwood and David Haussler",
author = "Yasubumi Sakakibara and M. Brown and R.
Hughey and I. S. Mian and K. Sj{\"{o}}lander and
R. C. Underwood and D. Haussler",
title = "The Application of Stochastic Context-Free Grammars
to Folding, Aligning and Modeling Homologous {RNA}
Sequences",
year = 1994,
note = "unpublished manuscript"
}
@inproceedings{saund-86,
  key       = "Saund",
  author    = "Saund, E.",
  year      = "1986",
  title     = "Abstraction and representation of continuous variables in
               connectionist networks",
  booktitle = "Proceedings of the Fifth National Conference on Artificial
               Intelligence",
  publisher = "Morgan Kaufmann",
  address   = "Los Altos, CA",
  pages     = "638--644"
}
@article{saund-89a,
  key     = "Saund",
  author  = "Saund, E.",
  title   = "Dimensionality-reduction using connectionist networks",
  journal = "IEEE Transactions on Pattern Analysis and Machine
             Intelligence",
  volume  = "11",
  number  = "3",
  pages   = "304--314",
  year    = "1989"
}
@inproceedings{saund-89b,
  key       = "Saund",
  author    = "Saund, E.",
  title     = "Adding Scale to the Primal Sketch",
  booktitle = cvpr,
  year      = "1989",
  pages     = "70--78"
}
@inproceedings{RobinsonFallside88-nips,
author= "A. J. Robinson and F. Fallside",
title= "Static and Dynamic Error Propagation Networks with
Application to Speech Coding",
editor= "D. Z. Anderson",
booktitle= "Neural Information Processing Systems",
publisher= "American Institute of Physics",
year= 1988}
@BOOK{Everitt,
Author= "Everitt, B. S.",
Title= "An Introduction to Latent Variable Models",
Publisher= "Chapman and Hall",
Address= "London",
Year= "1984"
}
% Location: [Univ. Lib.] 202.c.98.248 South Wing 5
@Article{Muller_variable_bandwidth,
Title ="Variable Bandwidth Kernel Estimators of Regression-Curves",
Author ="Muller, H. G. and Stadtmuller, U.",
Journal ="Annals of Statistics",
Year =1987,
Volume ="15",
Number ="1",
Pages ="182-201"
}
@Article{Freeman:1994,
Title ="The Generic Viewpoint Assumption in a Framework for Visual-Perception",
Author ="Freeman, W. T.",
Journal ="Nature",
Year =1994,
Volume ="368",
Number ="6471",
Pages ="542-545"
}
% NA- MITSUBISHI ELECT RES LABS,201 BROADWAY/CAMBRIDGE//MA/02139
% DT- ARTICLE
% AB- A VISUAL system makes assumptions in order to interpret visual data. The
% assumption of 'generic view'1-4 states that the observer is not in a special
% position relative to the scene. Researchers commonly use a binary decision
% of generic or accidental view to disqualify scene interpretations that
% assume accidental viewpoints5-10. Here we show how to use the generic view
% assumption, and others like it, to quantify the likelihood of a view, adding
% a new term to the probability of a given image interpretation. The resulting
% framework better models the visual world and reduces the reliance on other
% prior assumptions. It may lead to computer vision algorithms of greater
% power and accuracy, or to better models of human vision. We show
% applications to the problems of inferring shape surface reflectance
% properties and motion from images.
@article{Brown-et-al92b,
author = "Brown, Peter F. and {Della Pietra}, Stephen A. and {Della Pietra}, Vincent J. and Lai, Jennifer C. and Mercer, Robert L.",
title = "An Estimate of an Upper Bound for the Entropy of {E}nglish",
year = "1992",
journal = "Computational Linguistics",
volume = "18",
number = "1",
pages = "31-40"}
@inproceedings{Gale&Church91,
author = "Gale, William and Church, Kenneth",
title = "A program for aligning sentences in bilingual corpora",
year = "1991",
booktitle = "Proceedings of 29th Annual Meeting of the ACL",
pages = "177-184"}
@Book{Seneta,
author = "E. Seneta",
title = "Non-negative Matrices",
publisher = "Wiley",
year = 1973,
address = "New York"
}
% Title: Non-negative matrices: an introduction to theory and
% applications/ E. Seneta
% London: George Allen and Unwin, 1973
% x,214p; 24cm
@INPROCEEDINGS{Saul_Jordan:BC,
AUTHOR ="L. Saul and M. Jordan",
TITLE ="{B}oltzmann Chains and Hidden {M}arkov Models",
BOOKTITLE ="Advances in Neural Information Processing Systems 7",
EDITOR ="G. Tesauro and D. Touretzky and T. Leen",
PUBLISHER ="M.I.T. Press",
YEAR ="1995",
PAGES ="435-442"}
@MastersThesis{Williams:BC,
author = "C. K. I. Williams",
title = "Using Deterministic {B}oltzmann Machines to
Discriminate Temporally Distorted Strings",
school = "Dept. of Computer Science, Univ. of Toronto",
year = 1990
}
@incollection{williams-hinton-91,
  author    = "Williams, C. K. I. and Hinton, G. E.",
  title     = "Mean field networks that learn to discriminate temporally
               distorted strings",
  year      = "1991",
  publisher = "Morgan Kaufmann",
  address   = "San Mateo, CA",
  editor    = "Touretzky, D. S. and Elman, J. L. and Sejnowski, T. J.",
  booktitle = "Connectionist Models: Proceedings of the 1990 Summer School"
}
@article{Freedman_M100,
  author  = "W. L. Freedman and B. F. Madore and J. R. Mould
             and R. Hill and others",
  title   = "Distance to the {Virgo} Cluster Galaxy {M100} from
             {Hubble} Space Telescope Observations of {Cepheids}",
  journal = "Nature",
  year    = 1994,
  volume  = 371,
  pages   = "757--762",
  month   = oct
}
% Distance to the Leo cluster Galaxies M96 and UGC5889 from Hubble
% Space Telescope observations of Cepheids
@Article{shlyakhter_kammen_92,
author = "A. I. Shlyakhter and D. M. Kammen",
title = "Sea-level rise or fall?",
journal = "Nature",
year = 1992,
volume = 357,
pages = 25,
annote = "7 May 1992"
}
@TechReport{west.dirichlet,
author = "M. West",
title = "Hyperparameter Estimation in {D}irichlet
Process Mixture Models",
institution = "Duke Inst. of Stats. and Decision Sciences",
year = 1992,
type = "Working paper",
number = "92-A03"
}
@article{West1984,
title="Outlier Models and Prior Distributions in {B}ayesian Linear-Regression",
author="West, M.",
journal="Journal of the Royal Statistical Society Series B-Methodological",
year="1984",
volume="46",
number="3",
pages="431-439"
}
@article{Shephard1994,
title="Partial Non-{G}aussian State-Space",
author="Shephard, N.",
journal="Biometrika",
year="1994",
volume="81",
number="1",
pages="115-131",
abstract="In this paper we suggest the use of simulation techniques to extend
the applicability of the usual Gaussian state space filtering and
smoothing techniques to a class of non-Gaussian time series models.
This allows a fully Bayesian or maximum likelihood analysis of some
interesting models, including outlier models, discrete Markov chain
components, multiplicative models and stochastic variance models.
Finally we discuss at some length the use of a non-Gaussian model to
seasonally adjust the published money supply figures."
}
@article{Carter1994,
title="On {G}ibbs Sampling for State-Space Models",
author="Carter, C. K. and Kohn, R.",
journal="Biometrika",
year="1994",
volume="81",
number="3",
pages="541-553",
abstract="We show how to use the Gibbs sampler to carry out Bayesian inference
on a linear state space model with errors that are a mixture of
normals and coefficients that can switch over time. Our approach
simultaneously generates the whole of the state vector given the
mixture and coefficient indicator variables and simultaneously
generates all the indicator variables conditional on the state
vectors. The states are generated efficiently using the Kalman
filter. We illustrate our approach by several examples and
empirically compare its performance to another Gibbs sampler where
the states are generated one at a time. The empirical results suggest
that our approach is both practical to implement and dominates the
Gibbs sampler that generates the states one at a time."
}
@Article{antoniak,
author = "Antoniak, C. E.",
title = "Mixtures of {D}irichlet
processes with applications to non\-para\-met\-ric problems",
journal = "Annals of Statistics",
year = 1974,
volume = 2,
pages = "1152-1174"
}
@Article{kac47,
author = "M. Kac",
title = "Random Walk and the Theory of {B}rownian Motion",
journal = "Amer. Math. Monthly",
year = 1947,
volume = 54,
pages = "369-391",
annote = "I have not read this paper but it is cited as the original"
}
@Article{letac_takacs79,
author = "G. Letac and L. Takacs",
title = "Random Walk on the $m$-dimensional cube",
journal = "J. reine angew. Math.",
year = 1979,
volume = 310,
pages = "187-195"
}
@Article{takacs79,
author = "L. Takacs",
title = "On an Urn Problem of {P}aul and {T}atiana {E}hrenfest",
journal = "Math. Proc. Camb. Phil. Soc.",
year = 1979,
volume = 86,
pages = "127-130"
}
@Article{DiaGraMor90,
author = "Diaconis and Graham and Morrison",
title = "Asymptotic Analysis of a Random Walk on a Hypercube
with Many Dimensions",
journal = "Random Structures & Algorithms",
volume = "1",
year = "1990",
}
@Article{ChuDiaGra92,
author = "Chung and Diaconis and Graham",
title = "Universal Cycles for Combinatorial Structures",
journal = "Discrete Mathematics",
volume = "110",
year = "1992",
}
@Article{DiaconisSaloffCoste93,
author = "Persi Diaconis and Laurent Saloff-Coste",
title = "Comparison techniques for random walk on finite
groups",
journal = "Ann. Probab.",
volume = "21",
pages = "2131--2156",
year = "1993",
}
@Article{DiaconisSaloffCoste94,
author = "P.\ Diaconis and L. Saloff-Coste",
title = "Moderate growth and random walk on finite groups",
journal = "Geom. Funct. Anal.",
volume = "4",
pages = "1--36",
year = "1994",
}
@article{bin91,
author="N. H. Bingham",
title="Fluctuation Theory for the {E}hrenfest Urn",
journal=AAP,
pages="598-611",
volume=23,
year=1991}
@article{DGM90,
author="P. Diaconis and R.L. Graham and J.A. Morrison",
title="Asymptotic Analysis of a Random Walk on a Hypercube
with Many Dimensions",
journal=RSA,
volume=1,
pages="51-72",
year=1990}
@article{KLY93,
author="S. Karlin and B. Lindqvist and Y-C Yao",
title="Markov Chains on Hypercubes: Spectral Representations
and Several Majorization Relations",
journal=RSA,
volume=4,
pages="1-36",
year=1993}
@article{Randall93,
author="D. Randall",
title="Efficient Generation of Random Nonsingular Matrices",
journal=RSA,
volume=4, number=1,
pages="111-118",
year=1993}
@article{Tsfasman1991,
title="Algebraic--Geometric Codes and Asymptotic Problems",
author="Tsfasman, M. A.",
journal="Discrete Applied Mathematics",
year="1991",
volume="33",
number="1-3",
pages="241-256"
}
@article{Tsfasman1982,
title="Modular Curves, {S}himura curves, and {G}oppa codes, better
than the {Varshamov-Gilbert} Bound",
author="Tsfasman, M. A. and S. G. Vladut and T. Zink",
journal="Math. Nachr.",
year="1982",
volume="109",
pages="21-28"
}
@Article{Feng_Rao1993,
author = "G.-L. Feng and T. R. N. Rao",
title = "Decoding Algebraic-Geometric Codes up to the
Designed Minimum Distance",
journal = "IEEE Transactions on Information Theory",
year = 1993,
volume = 39,
number = 1,
pages = "37-45",
month = "January"
}
@article{Coffey1990,
title="Any Code of Which We Cannot Think Is Good",
author="Coffey, J. T. and Goodman, R. M.",
journal="IEEE Transactions on Information Theory",
year="1990",
volume="36",
number="6",
pages="1453-1461"
}
@article{Delsarte1982,
title="Algebraic Constructions of {S}hannon Codes for Regular Channels",
author="Delsarte, P. and Piret, P.",
journal="IEEE Transactions on Information Theory",
year="1982",
volume="28",
number="4",
pages="593-599"
}
@article{Ahlswede1982,
title="Good Codes Can Be Produced By A Few Permutations",
author="Ahlswede, R. and Dueck, G.",
journal="IEEE Transactions on Information Theory",
year="1982",
volume="28",
number="3",
pages="430-443"
}
% a theory of how to _value_ information is found in :
@Book{Russell_Wefald,
author = "S. Russell and E. Wefald",
title = "Do the Right Thing: Studies in Limited Rationality",
publisher = "MIT Press",
year = 1991
}
@article{Debuda1989,
Author="Debuda, R.",
Title="Some Optimal Codes have Structure",
Journal="IEEE Journal on Selected Areas in Communications",
Year=1989,
volume=7,
number=6,
pages="893-899"
}
% hestenes: `new foundations for classical mechanics' published Kluwer
% Thanks for the message - what an interesting application. My
% immediate reaction is to shudder at not doing it all exactly (I
% suppose the networks are too complex to triangulate?), but I
% shouldn't be too purist.
%
% The early work on MUNIN - the very large application that drove
% the HUGIN work, used this approach of ignoring the cycles by
% removing the `weak links' and hoping
% for the best, until they adopted the exact techniques. And there
% is later work from the Danish group in which they study in more
% detail the approximation incurred by removing edges. Some references are:
%
@inproceedings{munin,
author = " S. Andreassen and M. Woldbye and B. Falck and S. Andersen",
title = " {MUNIN} --- a causal probabilistic network for the interpretation of electromyographic
findings",
booktitle = "Proc. of the 10th {N}ational {C}onf. on {AI}, {AAAI}: {M}enlo {P}ark CA.",
year = "1987",
pages = "121-123",
}
% By the way the MUNIN article is actually in IJCAI 87 and not in AAAI, and
% it is on pages 366-372.
@Techreport{kjaerulff:93,
author = "Kj{\ae}rulff, U.",
title = "Approximation of {B}ayesian
networks by edge removals",
institution = "University of Aalborg, Denmark",
year = "1993",
type = "Technical Report",
number = "R~93-2021",
}
@Techreport{kjaerulff:94,
author = "Kj{\ae}rulff, U.",
title = "Reduction of complexity in {B}ayesian networks
through removal of weak dependencies",
institution = "University of Aalborg, Denmark",
year = "1994",
type = "Technical Report",
number = "R~93-2009",
}
@article{ProSite,
  author  = {Amos Bairoch},
  title   = {The {PROSITE} dictionary of sites and patterns in proteins,
             its current status},
  year    = 1993,
  journal = NAR,
  volume  = 21,
  pages   = {3097--3103}
}
@article{Staden1989, journal=CABIOS, author={Rodger Staden}, volume=5,
title={Methods for calculating the probabilities of
finding patterns in sequences}, year=1989, number=2,
pages={89-96} }
@article{PevznerBorodovskyMironov1989,
  author  = "Pavel A. Pevzner and Mark Yu. Borodovsky and
             Andrey A. Mironov",
  title   = {Linguistics of Nucleotide Sequences {I}: The Significance of
             Deviations from Mean Statistical Characteristics and
             Prediction of the Frequencies of Occurrence of Words},
  journal = JBSD,
  volume  = 6,
  number  = 5,
  year    = 1989,
  pages   = {1013--1026}
}
@article{blocks, author="S. Henikoff and J.G. Henikoff", year=1991,
journal=NAR, volume=19, pages={6565-6572},
title={Automatic assembly of protein blocks for
database searching} }
@book{durbin1998,
title={Biological Sequence Analysis.
Probabilistic Models of Proteins and Nucleic Acids},
author={Richard Durbin and Sean R. Eddy and Anders Krogh and Graeme Mitchison},
publisher={Cambridge University Press},
year={1998}}
@book{Welsh1988,
title={Codes and Cryptography},
author={Dominic Welsh},
year={1988},
publisher={Clarendon press}}
@article{Matchprobs, author="Roger F. Sewell and Richard Durbin",
year=1995, title={Method for Calculation of
Probability of Matching a Bounded Regular Expression
in a Random Data String}, journal=JCB, volume=2,
number=1, pages={25-31}}
@unpublished{Sewell_Fragments, author="Roger F. Sewell", year=1995, title={A
full probabilistic model for finding fragment matches to {H}idden {M}arkov
{M}odels in a data string}}
@unpublished{Sewell_Methods, author="Roger F. Sewell", year=1995,
title={Methods for applying {H}idden {M}arkov
{M}odels to sequence data: separation of score due to length and to
content; negative discriminative training; full
probability simulated annealing; site
discrimination; and multiple discriminative models
used successively}}
% see also bloj.bib
%
% TURBO CODES, Gallager codes, etc.
%
@Article{berrou-glavieux-96,
key = "Berrou",
author = "C. Berrou and A. Glavieux",
title = "Near Optimum Error Correcting Coding and Decoding:
{T}urbo-Codes",
journal = "IEEE Transactions on Communications",
volume = "44",
pages = "1261--1271",
month = "October",
year = "1996"
}
@inproceedings{Berrou93:Turbo,
author = "C. Berrou and A. Glavieux and P. Thitimajshima",
title = "Near {S}hannon Limit Error-correcting Coding and
Decoding: {T}urbo-Codes",
year = 1993,
booktitle = "Proc. 1993 IEEE International Conference on
Communications, Geneva, Switzerland",
pages = "1064-1070"
}
% pages = "23-27"
@inproceedings{Divsalar1998,
title="Coding Theorems for `Turbo-Like' Codes",
author="Divsalar, D. and Jin, H. and McEliece, R. J.",
BOOKTITLE ="Proceedings of the 36th Allerton Conference on Communication, Control, and Computing, Sept.\ 1998",
EDITOR ="",
PUBLISHER ="Allerton House",
ADDRESS ="Monticello, Illinois",
YEAR ="1998",
PAGES ="201-210",
}
@article{Divsalar1996,
title="Effective Free Distance of Turbo Codes",
author="Divsalar, D. and McEliece, R. J.",
journal="Electronics Letters",
year="1996",
volume="32",
number="5",
pages="445-446",
abstract="The authors define and study the effective free distance of a turbo
code. If a turbo code is constructed from a number of component
codes, they argue that the effective free distance can be maximised
by choosing the component codes to be IIR convolutional code
fragments with maximal input weight-2 free distance. They then
present some theoretical bounds for, and some numerical tables of;
IIR code fragments with maximal input weight-2 free distance."
}
@techreport{Divsalar95,
  author      = "D. Divsalar and F. Pollara",
  title       = "On the Design of {Turbo} Codes",
  institution = "Jet Propulsion Laboratory",
  year        = 1995,
  number      = "TDA 42-123",
  address     = "Pasadena",
  month       = nov
}
@techreport{Benedetto96,
  author      = "S. Benedetto and G. Montorsi and D. Divsalar and F. Pollara",
  title       = "Serial Concatenation of Interleaved Codes:
                 Performance Analysis, Design, and Iterative Decoding",
  institution = "Jet Propulsion Laboratory",
  year        = 1996,
  number      = "TDA 42-126",
  address     = "Pasadena",
  month       = aug
}
@InProceedings{divsalar-pollara-95,
key = "Divsalar",
author = "D. Divsalar and F. Pollara",
title = "Turbo-codes for {PCS} Applications",
booktitle = "Proceedings of ICC'95",
year = "1995",
pages = "54--59",
place = "Seattle WA."
}
@phdthesis{wiberg:phd,
author = "N. Wiberg",
title = "Codes and Decoding on General Graphs",
school = "Dept. of Electrical Engineering, Link{\"o}ping, Sweden",
year = 1996,
note = "Link{\"o}ping studies in Science and Technology.
Dissertation No. 440"
}
@Article{wiberg95,
key = "Wiberg",
author = "N. Wiberg and H.-A. Loeliger and R. K{\"o}tter",
title = "Codes and Iterative Decoding on General Graphs",
journal = "European Transactions on Telecommunications",
volume = "6",
pages = "513--525",
year = "1995"
}
% national defence research establishment, linkoping sweden
@Inproceedings{koetter_nilsson94,
author = "R. K{\"o}tter and J. Nilsson",
title = "Interleaving Strategies for Product Codes",
booktitle = "Proc. EIDMA, Veldhoven, Netherlands, Dec.\ 19--21, 1994",
year = 1994,
pages = 37
}
@Inproceedings{nilsson_koetter94,
author = "J. Nilsson and R. K{\"o}tter",
title = "Iterative Decoding of Product Code Constructions",
booktitle = "Proc.\ ISITA94, Sydney, Nov.\ 1994",
year = 1994,
pages = {1059--1064}
}
@unpublished{jimenez-zigangirov97,
author={Jimenez, A. and Zigangirov, K. Sh.},
title={Time-varying Periodical Convolutional Codes with Low-Density
Parity-Check Matrix},
note={preprint},
year={1997}
}
@Article{Sorokine98I,
author = "V. Sorokine and Kschischang, F. R. and S. Pasupathy",
title = "Gallager Codes for {CDMA} Applications I: Generalizations, Constructions and Performance Bounds",
journal = "IEEE Trans. Communications",
year = 1998,
volume = {},
number = {},
pages = "",
note={Submitted},
annote = "www.comm.utoronto.ca/frank/"
}
@Article{Sorokine98II,
author = "V. Sorokine and Kschischang, F. R. and S. Pasupathy",
title = "Gallager Codes for {CDMA} Applications {II}: Implementations, Complexity and System Capacity",
journal = "IEEE Trans. Communications",
year = 1998,
volume = {},
number = {},
pages = "",
note={submitted},
annote = "www.comm.utoronto.ca/frank/"
}
@article{Kschischang2001,
author = "Kschischang, F. R. and Frey, B. J. and Loeliger, H.-A.",
TITLE ="Factor Graphs and the Sum-Product Algorithm",
year={2001},
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={498-519}
}
@Article{Kschischang_,
author = "Kschischang, F. R. and V. Sorokine",
title = "On the Trellis Structure of Block Codes",
journal = "IEEE Trans. in Inform. Theory",
year = 1995,
volume = 41,
number = 6,
pages = "1924--1937",
month = "Nov",
annote = "www.comm.utoronto.ca/frank/"
}
% papers on construction of sparse graphs
% Wong Cages a survey
% Benson
@Article{Jung-Nasshan94:turbo,
author = "P. Jung and M. Nasshan",
title = "Performance evaluations of turbo codes for short
frame transmission systems",
journal = "Electronics Letters",
year = 1994,
volume = 30,
number = 2,
pages = "111-113"
}
@article{Bendetto-Montorsi95:turbo,
  author  = "S. Benedetto and G. Montorsi",
  title   = "Performance evaluations of turbo codes",
  journal = "Electronics Letters",
  year    = 1995,
  volume  = 31,
  number  = 3,
  pages   = "163--165"
}
@article{Bendetto-Montorsi95:turboRCC,
  author  = "S. Benedetto and G. Montorsi",
  title   = "Role of recursive convolutional codes in turbo codes",
  journal = "Electronics Letters",
  year    = 1995,
  volume  = 31,
  number  = 11,
  pages   = "858--859"
}
@article{Bendetto-Montorsi95:turboPCBC,
  author  = "S. Benedetto and G. Montorsi",
  title   = "Average performance of parallel concatenated block codes",
  journal = "Electronics Letters",
  year    = 1995,
  volume  = 31,
  number  = 3,
  pages   = "156--159"
}
@article{Zyablov,
author={V. V. Zyablov and M. S. Pinsker},
title={Estimation of the Error-Correction Complexity for {G}allager
Low-Density Codes},
journal={Problemy Peredachi Informatsii},
volume={11},
number={1},
pages={23-36},
year={1975}
}
@Article{TAP1977,
author = "D. J. Thouless and P. W. Anderson and R. G. Palmer",
title = "Solutions of `Solvable Models of a Spin Glass'",
journal = "Philosophical Magazine",
year = 1977,
volume = 35,
number = 3,
pages = "593-601"}
%BTW, at your talk on codes at the Cavendish lab. last month I mentioned
%the TAP equations. The reference is
% D J Thouless, P W Anderson, and R G Palmer, 1977, Phil Mag v 35, p 3271
%The self-consistent equation they derive for the average magnetization
%at i is
%
%m_i = tanh[\beta (\sum_j J_{ij} m_j - \beta \sum_j J_{ij}^2 (1-m_j^2)m_i)]
%
%which I found in the book "Spin glasses" by Fischer and Hertz
%(cambr univ. press 1994). Spin Glasses K H Fischer and J A Hertz
@Article{Bein89,
author = "F. Bein",
title = "Construction of Telephone Networks",
journal = "Notices Amer. Math. Soc.",
year = 1989,
volume = 36,
month = "Jan"
}
@Article{Lubotsky88,
author = "A. Lubotsky",
title = "Ramanujan Graphs",
journal = "Combinatorica",
year = 1988,
volume = 8
}
@Book{Bishop95,
author = "C. M. Bishop",
title = "Neural Networks for Pattern Recognition",
publisher = "Oxford University Press",
year = 1995
}
% 5000 sold first run
@InCollection{Battail93_cando,
title = "We Can Think of Good Codes, and Even Decode Them",
year = 1993,
author = "G. Battail",
booktitle = "Eurocode '92. Udine, Italy, 26-30 October",
number = 339,
series = "CISM Courses and Lectures",
publisher = "Springer",
editor = "P. Camion and P. Charpin and S. Harari",
pages = "353-368",
address = "Wien"
}
@InCollection{Battail95,
title = "Can We Implement Random Coding?",
year = 1995,
author = "G. Battail",
booktitle = "Codes and Cyphers",
publisher = "Formara",
editor = "P. G. Farrell",
pages = "1-15",
address = "Southend"
}
@article{promhouse78,
title={The Minimum Distance of
All Binary Cyclic Codes of Odd Lengths from 69 to 99},
author={G. Promhouse and S. E. Tavares},
journal={IT Trans. Inform. Theory},
volume={24}, number={4}, year={1978},
pages={438-442},
annote={Gary Promhouse and Stafford E. Tavares, "The Minimum Distance of
All Binary Cyclic Codes of Odd Lengths from 69 to 99", IT Trans.
Inform. Theory, Vol. IT-24, No. 4, July 1978, pp. 438-442.}
}
@article{Kou2001,
  author  = {Y. Kou and S. Lin and M. P. C. Fossorier},
  title   = {Low Density Parity Check Codes Based on Finite Geometries:
             A Rediscovery and New Results},
  journal = {IEEE Transactions on Information Theory},
  volume  = {47},
  pages   = {2711--2736},
  month   = nov,
  year    = 2001}
%Y. Kou, S. Lin, and M. P. C. Fossorier, "Low Density Parity Check Codes Based on Finite Geometries: A Rediscovery and New Results", IEEE Transactions on Information Theory, vol. IT-47, pp. 2711-2736, Nov., 2001.
%2. R. Lucas, M. P. C. Fossorier, Y. Kou and S. Lin, "Iterative Decoding of One-Step Majority Logic Decodable Codes Based on Belief Propagation". IEEE Trans. on Communication, Vol. 48, pp. 931-938, June 2000.
@article{Lucas99,
author={R. Lucas and M. Fossorier and Y. Kou and S. Lin},
title={Iterative Decoding of One-Step Majority Logic Decodable Codes
based on Belief Propagation},
year={2000},
journal={IEEE Transactions on Communications}, volume={48}, pages={931-937}, month={June},
annote={Submitted 1999}
}
@article{Tanner1981,
title="A Recursive Approach to Low Complexity Codes",
author="Tanner, R. M.",
journal="IEEE Transactions on Information Theory",
year="1981",
volume="27",
number="5",
pages="533-547"
}
@article{Karplus1991,
title={A Semi-systolic Decoder for the {PDSC--73} Error-Correcting Code},
author={K. Karplus and H. Krit},
journal={Discrete Applied Mathematics},
year={1991},
volume={33},
pages={109-128},
annote={Tanner chip}
}
@article{Margulis1982,
title="Explicit Constructions of Graphs Without Short Cycles and Low-Density
Codes",
author="Margulis, G. A.",
journal="Combinatorica",
year="1982",
volume="2",
number="1",
pages="71-78"
}
@article{Gacs1986,
title="Reliable Computation with Cellular Automata",
author="Gacs, P.",
journal="Journal of Computer and System Sciences",
year="1986",
volume="32",
number="1",
pages="15-78"
}
@article{Thommesen1987,
title="Error-Correcting Capabilities of Concatenated Codes with {MDS} Outer
Codes on Memoryless Channels with Maximum-Likelihood Decoding",
author="Thommesen, C.",
journal="IEEE Transactions on Information Theory",
year="1987",
volume="33",
number="5",
pages="632-640"
}
@article{Gallager74,
  author  = {R. G. Gallager},
  title   = {Capacity and Coding for Degraded Broadcast Channels},
  journal = {Problemy Peredachi Informatsii},
  volume  = {10},
  number  = {3},
  pages   = {3--14},
  year    = {1974}
}
@article{Gallager1988,
title="Finding Parity in a Simple Broadcast Network",
author="Gallager, R. G.",
journal="IEEE Transactions on Information Theory",
year="1988",
volume="34",
number="2",
pages="176-180"
}
@article{Pippenger1991a,
title="On a Lower Bound for the Redundancy of Reliable Networks with Noisy
Gates",
author="Pippenger, N. and Stamoulis, G. D. and Tsitsiklis, J. N.",
journal="IEEE Transactions on Information Theory",
year="1991",
volume="37",
number="3",
pages="639-643",
abstract="A proof is provided that a logarithmic redundancy factor is necessary
for the reliable computation of the parity function by means of a
network with noisy gates. This is the same as the main result of
Dobrushin and Ortyukov except that the analysis therein seems to be
not entirely correct."
}
@article{Voss1991,
title="Asymptotically Good Families of Geometric {G}oppa Codes and the
{G}ilbert-{V}arshamov Bound",
author="Voss, C.",
journal="Lecture Notes in Computer Science",
year="1991",
volume="514",
pages="150-157",
abstract="This note presents a generalization of the fact that most of the
classical Goppa codes lie arbitrarily close to the Gilbert-Varshamov
bound (cf.[2, p. 229])."
}
@article{Slinko1991,
title="Design of Experiments to Detect Nonnegligible Variables in a Linear
Model",
author="Slinko, A. M.",
journal="Cybernetics",
year="1991",
volume="27",
number="3",
pages="433-442",
abstract="The design of sifting experiments is considered. The properties of
'superoptimal' designs discovered by Meshalkin [3, 4] are
investigated."
}
@article{Radosavljevic1992,
title="Sequential-Decoding of Low-Density Parity-Check Codes by Adaptive
Reordering of Parity Checks",
author="Radosavljevic, B. and Arikan, E. and Hajek, B.",
journal="IEEE Transactions on Information Theory",
year="1992",
volume="38",
number="6",
pages="1833-1839",
abstract="Decoding algorithms are investigated in which unpruned codeword trees
are generated from an ordered list of parity checks. The order is
computed from the received message, and low-density parity-check
codes are used to help control the growth of the tree. Simulation
results are given for the binary erasure channel. They suggest that
for small erasure probability, the method is computationally feasible
at rates above the computational cutoff rate."
}
@article{Mihaljevic1991,
title="A Comparison of Cryptanalytic Principles Based on Iterative Error-
Correction",
AUTHOR ="M. J. Mihaljevi\'c and J. D. Goli\'c",
key="Mihaljevic, M. J. and Golic, J. D.",
journal="Lecture Notes in Computer Science",
year="1991",
volume="547",
pages="527-531",
abstract="A cryptanalytic problem of a linear feedback shift register initial
state reconstruction using a noisy output sequence is considered. The
main underlying principles of three recently proposed cryptanalytic
procedures based on the iterative error-correction are pointed out
and compared."
}
@article{Chepyzhov1991,
title="On a Fast Correlation Attack on Certain Stream Ciphers",
author="Chepyzhov, V. and Smeets, B.",
journal="Lecture Notes in Computer Science",
year="1991",
volume="547",
pages="176--185",
abstract="In this paper we present a new algorithm for the recovery of the
initial state of a linear feedback shift register when a noisy output
sequence is given. Our work is focussed on the investigation of the
asymptotical behaviour of the recovery process rather than on the
construction of an optimal recovery procedure. Our results show the
importance of low-weight checks and show also that the complexity of
the recovery problem grows less than exponentially with the length of
the shift register, even if the number of taps grows linearly with
the register length. Our procedure works for shift register with
arbitrary feedback polynomial."
}
@article{Zemor1995,
title="The Threshold Probability of a Code",
author="Zemor, G. and Cohen, G. D.",
journal="IEEE Transactions on Information Theory",
year="1995",
volume="41",
number="2",
pages="469--477",
abstract="We define and estimate the threshold probability theta of a linear
code, using a theorem of Margulis originally conceived for the study
of the probability of disconnecting a graph. We then apply this
concept to the study of the erasure and Z-channels, for which we
propose linear coding schemes that admit simple decoding. We show
that theta is particularly relevant to the erasure channel since
linear codes achieve a vanishing error probability as long as p less
than or equal to theta, where p is the probability of erasure. In
effect, theta can be thought of as a capacity notion designed for
codes rather than for channels. Binomial codes have highest possible
theta (and achieve capacity). As for the Z-channel, a subcapacity is
derived with respect to the linear coding scheme. For a transition
probability in the range ]log (3/2); 1[, we show how to achieve this
subcapacity. As a by-product we obtain improved constructions and
existential results for intersecting codes (linear Sperner families)
which are used in our coding schemes."
}
@article{Caves1990,
title="Quantitative Limits on the Ability of a Maxwell Demon to Extract Work
From Heat",
author="Caves, C. M.",
journal="Physical Review Letters",
year="1990",
volume="64",
number="18",
pages="2111--2114"
}
% does not cite {G}allager codes in fact
@article{Pippenger1991b,
title="The Expected Capacity of Concentrators",
author="Pippenger, N.",
journal="SIAM Journal on Discrete Mathematics",
year="1991",
volume="4",
number="1",
pages="121--129",
abstract="The expected capacity of a class of sparse concentrators called
modular concentrators is determined. In these concentrators, each
input is connected to exactly two outputs, each output is connected
to exactly three inputs, and the girth (the length of the shortest
cycle in the connexion graph) is large. Two definitions of expected
capacity are considered. For the first (which is due to Masson and
Morris), it is assumed that a batch of customers arrive at a random
set of inputs and that a maximum matching of these customers to
servers at the outputs is found. The number of unsatisfied requests
is negligible if customers arrive at fewer than one-half of the
inputs, and it grows quite gracefully even beyond this threshold. The
situation in which customers arrive sequentially is considered, and
the decision as to how to serve each is made randomly, without
knowledge of future arrivals. In this case, the number of unsatisfied
requests is larger but still quite modest."
}
@article{Zivkovic1991,
title="On Two Probabilistic Decoding Algorithms for Binary Linear Codes",
author="Zivkovic, M.",
journal="IEEE Transactions on Information Theory",
year="1991",
volume="37",
number="6",
pages="1707--1716",
abstract="A generalization of the Sullivan inequality on the ratio of the
probability of a linear code to that of any of its cosets is proved.
Starting from this inequality, a sufficient condition for successful
decoding of linear codes by a probabilistic method is derived. A
probabilistic decoding algorithm for 'low-density parity-check codes'
is also analyzed. The results obtained allow one to estimate
experimentally the probability of successful decoding using these
probabilistic algorithms."
}
@book{Golomb1994,
  author    = {Golomb, S. W. and Peile, R. E. and Scholtz, R. A.},
  title     = {Basic Concepts in Information Theory and Coding: {T}he
               Adventures of Secret Agent {00111}},
  publisher = {Plenum Press},
  year      = 1994,
  address   = {New York},
  annote    = {On p.369 'there are many...codes and the optimal
               code for a given set of channel conditions may not
               resemble the optimal code for another'. On p.309,
               this book is well aware that decoding beyond the
               minimum distance is possible}
}
@Book{Duff,
author = "Duff, I. S. and Erisman, A. M. and Reid, J. K.",
title = "Direct Methods for Sparse Matrices",
publisher = "Clarendon",
year = 1986,
address = "Oxford"
}
@article{Comon1991,
title="Blind Separation of Sources. 2. Problems Statement",
author="Comon, P. and Jutten, C. and Herault, J.",
journal="Signal Processing",
year="1991",
volume="24",
number="1",
pages="11--20",
abstract="Though it arouses more and more curiosity, the HJ iterative algorithm
has never been derived in mathematical terms to date. We attempt in
this paper to describe it from a statistical point of view. For
instance the updating term of the synaptic efficacies matrix cannot
be the gradient of a single C2 functional contrary to what is
sometimes understood. In fact, we show that the HJ algorithm is
actually searching common zeros of n functionals by pipelined
stochastic iterations. Based on simulation results, advantages and
limitations as well as possible improvements are pointed out after a
short theoretical analysis."
}
@article{Jutten1991,
title="Blind Separation of Sources. 1. An Adaptive Algorithm Based on
Neuromimetic Architecture",
author="Jutten, C. and Herault, J.",
journal="Signal Processing",
year="1991",
volume="24",
number="1",
pages="1--10",
abstract="The separation of independent sources from an array of sensors is a
classical but difficult problem in signal processing. Based on some
biological observations, an adaptive algorithm is proposed to
separate simultaneously all the unknown independent sources. The
adaptive rule, which constitutes an independence test using non-
linear functions, is the main original point of this blind
identification procedure. Moreover, a new concept, that of
INdependent Components Analysis (INCA), more powerful than the
classical Principal Components Analysis (in decision tasks) emerges
from this work."
}
@article{Bell_Sejnowski,
title="An Information Maximization Approach to Blind Separation and Blind
Deconvolution",
author="Bell, A. J. and Sejnowski, T. J.",
journal="Neural Computation",
year="1995",
volume="7",
number="6",
pages="1129--1159",
abstract="We derive a new self-organizing learning algorithm that maximizes the
information transferred in a network of nonlinear units. The
algorithm does not assume any knowledge of the input distributions,
and is defined here for the zero-noise limit. Under these conditions,
information maximization has extra properties not found in the linear
case (Linsker 1989). The nonlinearities in the transfer function are
able to pick up higher-order moments of the input distributions and
perform something akin to true redundancy reduction between units in
the output representation. This enables the network to separate
statistically independent components in the inputs: a higher-order
generalization of principal components analysis. We apply the network
to the source separation (or cocktail party) problem, successfully
separating unknown mixtures of up to 10 speakers. We also show that a
variant on the network architecture is able to perform blind
deconvolution (cancellation of unknown echoes and reverberation in a
speech signal). Finally, we derive dependencies of information
transfer on time delays. We suggest that information maximization
provides a unifying framework for problems in `blind' signal
processing."
}
% Helmholtz95
@article{Dayan1995,
title="The {H}elmholtz Machine",
author="Dayan, P. and Hinton, G. E. and Neal, R. M. and Zemel, R. S.",
journal="Neural Computation",
year="1995",
volume="7",
number="5",
pages="889--904",
abstract="Discovering the structure inherent in a set of patterns is a
fundamental aim of statistical inference or learning. One fruitful
approach is to build a parameterized stochastic generative model,
independent draws from which are likely to produce the patterns. For
all but the simplest generative models, each pattern can be generated
in exponentially many ways. It is thus intractable to adjust the
parameters to maximize the probability of the observed patterns. We
describe a way of finessing this combinatorial explosion by
maximizing an easily computed lower bound on the probability of the
observations. Our method can be viewed as a form of hierarchical
self-supervised learning that may relate to the function of bottom-up
and top-down cortical processing pathways."
}
@unpublished{NealHarvey2000,
  author = {Harvey, M. and Neal, R. M.},
  title  = {Inference for Belief Networks using Coupling From the Past},
  note   = {submitted to UAI 2000},
  year   = {2000}
}
@article{Hinton1995,
title="The Wake-Sleep Algorithm for Unsupervised Neural Networks",
author="Hinton, G. E. and Dayan, P. and Frey, B. J. and Neal, R. M.",
journal="Science",
year="1995",
volume="268",
number="5214",
pages="1158--1161",
abstract="An unsupervised learning algorithm for a multilayer network of
stochastic neurons is described. Bottom-up `recognition'
connections convert the input into representations in successive
hidden layers, and top-down `generative' connections reconstruct
the representation in one layer from the representation in the layer
above. In the `wake' phase, neurons are driven by recognition
connections, and generative connections are adapted to increase the
probability that they would reconstruct the correct activity vector
in the layer below. In the `sleep' phase, neurons are driven by
generative connections, and recognition connections are adapted to
increase the probability that they would produce the correct activity
vector in the layer above."
}
@article{Montgomery1993,
title= "Navier-Stokes relaxation to sinh-Poisson states at finite
Reynolds numbers",
author= "D. Montgomery and X. Shan and W. H. Matthaeus",
journal= "Phys. Fluids A",
volume= "5",
number = "9",
year = "1993",
abstract = "A math. framework is proposed in which it seems
possible to justify the computationally-observed
relaxation of a two-dimensional N-S fluid to a ...
maximum entropy state..... "
}
@INPROCEEDINGS{Ripley94,
KEY ="Ripley",
AUTHOR ="B. D. Ripley",
TITLE ="Flexible Non-linear Approaches to Classification",
BOOKTITLE ="From Statistics to Neural Networks. Theory and
Pattern Recognition Applications",
editor = "V. Cherkassky and J. H. Friedman and H. Wechsler",
series = "ASI Proceedings (F)",
YEAR =1994,
publisher = "Springer-Verlag"
}
%Ripley95
@book{Ripley96,
  author    = {B. D. Ripley},
  title     = {Pattern Recognition and Neural Networks},
  publisher = {Cambridge},
  year      = 1996,
  annote    = {ISBN 0-521-46086-7}
}
% obsolete:
@book{Ripley95,
  author    = {B. D. Ripley},
  title     = {Pattern Recognition and Neural Networks},
  publisher = {Cambridge},
  year      = 1996,
  annote    = {ISBN 0-521-46086-7}
}
% critical phenomena, Ising models
@book{binney92,
title="The theory of critical phenomena: an introduction to the
renormalization group",
publisher="Oxford",
year=1992,
author="Binney, J. J. and Dowrick, N. J. and Fisher, A. J.",
annote ="[Rayleigh Lib.] 31 B 28"}
@phdthesis{steeg-phd,
key = "Steeg",
author = "Steeg, E. W.",
title = "Automated Motif Discovery in Protein Structure Prediction",
school = "Department of Computer Science, University of Toronto",
year = "1997"
}
@book{yeomans92,
author="Yeomans, J. M.",
title = "Statistical mechanics of phase transitions",
publisher="Clarendon Press",
address="Oxford",
year= 1992,
annote="[Rayleigh Lib.] 31 Y 2"}
% this is a very nice book... chapter 5 describes the transfer matrix
@book{Cardy96,
  author    = {Cardy, J. L.},
  title     = {Scaling and renormalization in statistical physics},
  publisher = {Cambridge University Press},
  address   = {Cambridge},
  year      = 1996,
  annote    = {[Rayleigh Lib.] 31 C 12}
}
@article{Propp1996,
title={Exact Sampling with Coupled {M}arkov Chains and Applications to
Statistical Mechanics},
author={Propp, J. G. and Wilson, D. B.},
journal=RSA,
year={1996},
volume={9},
number={1-2},
pages={223--252}
}
@InCollection{Temperley,
author = "Temperley",
title = "Two-dimensional {I}sing Models",
booktitle = "Phase Transitions and Critical Phenomena",
publisher = "Academic Press",
year = 1972,
editor = "C. Domb and M. S. Green",
volume = "1. Exact Results",
chapter = 6,
pages = "227--267",
address = "London",
annote = "Gives Historical review of Ising model"
}
% Plane triangular lattice, anit-ferromagnetic - frustruations -> the
% transition temperature is decreased to zero.
% the entropy per site is finite at absolute zero.
% NOTE(review): editor/volume/chapter/annote below look copy-pasted from the
% Temperley entry above ("1. Exact Results", chapter 6, "Historical review of
% Ising model" do not fit a Monte Carlo survey) -- verify against the actual
% volume of the Domb & Green series before relying on this entry.
@InCollection{Binder,
author = "Binder",
title = "Monte Carlo Investigations of Phase Transitions and
Critical Phenomena",
booktitle = "Phase Transitions and Critical Phenomena",
publisher = "Academic Press",
year = 1972,
editor = "C. Domb and M. S. Green",
volume = "1. Exact Results",
chapter = 6,
pages = "1--105",
address = "London",
annote = "Gives Historical review of Ising model"
}
% Plane triangular lattice, anit-ferromagnetic - frustruations -> the
% transition temperature is decreased to zero.
% the entropy per site is finite at absolute zero.
@unpublished{heckerman,
  author = {D. M. Chickering and D. Heckerman},
  title  = {Efficient Approximations for the Marginal Likelihood of
            {B}ayesian Networks With Hidden Variables},
  note   = {Microsoft Research Technical Report MSR-TR-96-08},
  year   = 1996,
  url    = {http://www.research.microsoft.com/research/dtg/heckerma/TR-96-08.htm}
}
%To appear in Machine Learning
% was Efficient Approximations for Learning {B}ayesian
% Networks given Incomplete Data",
@InCollection{kanerva:spatter,
author = "P. Kanerva",
title = "Binary Spatter-Coding of $K$-tuples",
booktitle = "Artificial Neural Networks --- ICANN 96 Proceedings
(Bochum, Germany)",
publisher = "Springer",
year = 1996,
OPTeditor = "von der Malsburg, C. and von Seelen, W. and J. B.
Vorbr{\"u}ggen and B. Sendhoff",
pages = "869--873",
address = "Berlin"
}
@book{Sivia:96,
  author    = {D. S. Sivia},
  title     = {Data Analysis. A {B}ayesian Tutorial},
  publisher = {Oxford University Press},
  year      = 1996,
  annote    = {ISBN 0-19-851889-7}
}
@TechReport{jaakkola_jordan:logistic,
author = "T. S. Jaakkola and M. I. Jordan",
title = "{B}ayesian Logistic Regression: a Variational Approach",
institution = "MIT",
year = 1996
}
@article{jaakkola_jordan:logistic00,
title={{B}ayesian parameter estimation via variational methods},
author={T. S. Jaakkola and M. I. Jordan},
journal={Statistics and Computing},
volume={10},
number={1},
pages={25--37},
month=jan,
year={2000}
}
@techreport{jaakkola_jordan:recursive,
  author      = {T. S. Jaakkola and M. I. Jordan},
  title       = {Recursive Algorithms for Approximating Probabilities
                 in Graphical Models},
  institution = {MIT},
  year        = 1996,
  OPTtype     = {Computational Cognitive Science},
  OPTnumber   = {9604}
}
@InCollection{jaakkola_jordan:bounds,
author = "T. S. Jaakkola and M. I. Jordan",
title = "Computing Upper and Lower Bounds on Likelihoods in
Intractable Networks",
booktitle = "Proceedings of the Twelfth Conference on Uncertainty
in {AI}",
publisher = "Morgan Kaufmann",
year = 1996
}
@InCollection{williams_rasmussen:96,
author = "C. K. I. Williams and C. E. Rasmussen",
title = "Gaussian Processes for Regression",
booktitle = "Advances in Neural Information Processing Systems 8",
publisher = "MIT Press",
year = 1996,
editor = "D. S. Touretzky and M. C. Mozer and M. E. Hasselmo",
annote = "The Bayesian analysis of neural networks is difficult because a simple prior over weights implies a
complex prior distribution over functions. In this paper we investigate the use of Gaussian process priors over
functions, which permit the predictive Bayesian analysis for fixed values of hyperparameters to be carried out
exactly using matrix operations. Two methods, using optimization and averaging (via Hybrid Monte Carlo) over
hyperparameters have been tested on a number of challenging problems and have produced excellent results.
"
}
@unpublished{williams:95,
  author = {C. K. I. Williams},
  title  = {Regression with {G}aussian Processes},
  note   = {To appear in Annals of Mathematics and Artificial Intelligence.},
  year   = 1995
}
%Bayesian Classification with Gaussian Processes gzipped postscript
% C. K. I. Williams and David Barber
% In: IEEE Trans Pattern Analysis and Machine Intelligence , 20(12) 1342-1351, (1998).
@Inproceedings{williams:96,
author = "D. Barber and C. K. I. Williams",
title = "{G}aussian Processes for {B}ayesian Classification via
Hybrid {M}onte {C}arlo",
booktitle = "Neural Information Processing Systems 9",
publisher = "MIT Press",
editor = "M. C. Mozer and M. I. Jordan and T. Petsche",
pages = "340--346",
year = 1997
}
@unpublished{williams:01,
  author = {C. K. I. Williams},
  title  = {Personal communication},
  year   = 2001
}
@mastersthesis{Agakov-2000,
author = "F. Agakov",
title = {Investigations of {G}aussian Products-of-Experts Models},
school = "Division of Informatics, The University of Edinburgh",
note = "Available at \url{http://www.dai.ed.ac.uk/homes/felixa/all.ps.gz}",
year = 2000}
@PhdThesis{rasmussen:phd,
author = "C. E. Rasmussen",
title = "Evaluation of Gaussian Processes and Other Methods
for Non-Linear Regression",
school = "University of Toronto",
year = 1996
}
@article{Herz1995,
title="Earthquake Cycles and Neural Reverberations - Collective Oscillations
in Systems with Pulse-Coupled Threshold Elements",
author="Herz, A. V. M. and Hopfield, J. J.",
journal="Physical Review Letters",
year="1995",
volume="75",
number="6",
pages="1222--1225",
abstract="Driven systems of interconnected blocks with stick-slip friction
capture main features of earthquake processes. The microscopic
dynamics closely resemble those of spiking nerve cells. We analyze
the differences in the collective behavior and introduce a class of
solvable models. We prove that the models exhibit rapid phase
locking, a phenomenon of particular interest to both geophysics and
neurobiology. We study the dependence upon initial conditions and
system parameters, and discuss implications for earthquake modeling
and neural computation."
}
@article{Hopfield_Herz1995,
title="Rapid Local Synchronization of Action-Potentials - Toward Computation
with Coupled Integrate-And-Fire Neurons",
author="Hopfield, J. J. and Herz, A. V. M.",
journal=PNAS,
year="1995",
volume="92",
number="15",
pages="6655--6662",
abstract="The collective behavior of interconnected spiking nerve cells is
investigated. It is shown that a variety of model systems exhibit the
same short-time behavior and rapidly converge to (approximately)
periodic firing patterns with locally synchronized action potentials.
The dynamics of one model can be described by a downhill motion on an
abstract energy landscape. Since an energy landscape makes it
possible to understand and program computation done by an attractor
network, the results will extend our understanding of collective
computation from models based on a firing-rate description to
biologically more realistic systems with integrate-and-fire neurons."
}
@article{Hopfield1995,
title="Pattern-Recognition Computation Using Action-Potential Timing for
Stimulus Representation",
author="Hopfield, J. J.",
journal="Nature",
year="1995",
volume="376",
number="6535",
pages="33--36",
abstract="A computational model is described in which the sizes of variables
are represented by the explicit times at which action potentials
occur, rather than by the more usual 'firing rate' of neurons. The
comparison of patterns over sets of analogue variables is done by a
network using different delays for different information paths. This
mode of computation explains how one scheme of neuroarchitecture can
be used for very different sensory modalities and seemingly different
computations. The oscillations and anatomy of the mammalian olfactory
systems have a simple interpretation in terms of this representation,
and relate to processing in the auditory system. Single-electrode
recording would not detect such neural computing. Recognition
'units' in this style respond more like radial basis function units
than elementary sigmoid units."
}
@article{Hopfield1994,
title="Physics, Computation, and Why Biology Looks So Different",
author="Hopfield, J. J.",
journal="Journal of Theoretical Biology",
year="1994",
volume="171",
number="1",
pages="53--60",
abstract="The biological world is a physical system whose properties and
behaviors seem entirely foreign to physics. The origins of this
discrepancy lie in the very high information content in biological
systems (the large amount of dynamically broken symmetry) and the
evolutionary value placed on predicting the future (computation) in
an environment which is inhomogeneous in time and in space. Within
this context, 'free will' can be described as a useful predictive
myth."
}
@article{Hendin1994,
title="Decomposition of a Mixture of Signals in a Model of the Olfactory
Bulb",
author="Hendin, O. and Horn, D. and Hopfield, J. J.",
journal=PNAS,
year="1994",
volume="91",
number="13",
pages="5942--5946",
abstract="We describe models for the olfactory bulb which perform separation
and decomposition of mixed odor inputs from different sources. The
odors are unknown to the system; hence this is an analog and
extension of the engineering problem of blind separation of signals.
The separation process makes use of the different temporal
fluctuations of the input odors which occur under natural conditions.
We discuss two possibilities, one relying on a specific architecture
connecting modules with the same sensory inputs and the other
assuming that the modules (e.g., glomeruli) have different receptive
fields in odor space. We compare the implications of these models for
the testing of mixed odors from a single source."
}
% Chib, S. (1995) Marginal likelihood from the Gibbs output, Journal
% of the American Statistical Association, v. 90, pp. 1313-1321.
%
% search for this string
%%%%%%%% changed since last publication list %%%%%%%%%%%
@Article{Gelman96,
author = "A. Gelman",
title = "{B}ayesian Model-Building by Pure Thought: Some
Principles and Examples",
journal = "Statistica Sinica",
year = 1996,
volume = 6,
pages = "215--232"
}
% Bernardo, J. M. and Smith, A. F. M. (1994) Bayesian Theory, New
% York: John Wiley.
% 13 B 27
%
@book{gelman1995,
  author    = {Gelman, A. and Carlin, J.B. and Stern, H.S. and Rubin, D.B.},
  title     = {Bayesian Data Analysis},
  publisher = {Chapman and Hall},
  address   = {London},
  year      = {1995},
  ISBN      = {0-412-03991-5},
  annote    = {cav 13 G 25}
}
@ARTICLE{OHagan78,
AUTHOR = "A. O'Hagan",
TITLE = "On curve fitting and optimal design for regression",
JOURNAL = "Journal of the Royal Statistical Society, B",
YEAR = 1978,
VOLUME = 40,
PAGES = "1--42"
}
@ARTICLE{Matheron63b,
AUTHOR = "G. Matheron",
TITLE = "Principles of Geostatistics",
JOURNAL = "Economic Geology",
YEAR = 1963,
VOLUME = 58,
PAGES = "1246--1266"
}
@ARTICLE{Omre87,
AUTHOR = "H. Omre",
TITLE = "{B}ayesian kriging - merging observations and
qualified guesses in kriging",
JOURNAL = "Mathematical Geology",
YEAR = 1987,
VOLUME = 19,
PAGES = "25--39"
}
@ARTICLE{Kitanidis86,
AUTHOR = "P. K. Kitanidis",
TITLE = "Parameter uncertainty in estimation of spatial
functions: {B}ayesian analysis",
JOURNAL = "Water Resources Research",
YEAR = 1986,
VOLUME = 22,
PAGES = "499--507"
}
@ARTICLE{Lowe95,
AUTHOR = "D. G. Lowe",
TITLE = "Similarity Metric Learning for a Variable Kernel
Classifier",
JOURNAL = "Neural Computation",
YEAR = 1995,
VOLUME = 7,
PAGES = "72--85"
}
@BOOK{Cressie,
author = "N. A. C. Cressie",
title = "Statistics for Spatial Data",
publisher = "Wiley",
year = 1993 }
@book{Barnett,
  author    = {S. Barnett},
  title     = {Matrix Methods for Engineers and Scientists},
  publisher = {McGraw-Hill},
  year      = 1979
}
%
@book{ohagan94,
  author    = {O'Hagan, A.},
  title     = {{B}ayesian Inference},
  series    = {{K}endall's Advanced Theory of Statistics},
  volume    = {2B},
  publisher = {Edward Arnold},
  year      = {1994}
}
%
% O'Hagan, A. (1994) Bayesian Inference (Volume 2B in Kendall's Advanced Theory of Statistics), ISBN
% 0-340-52922-9.
% 13 K 29
%
% Robert, C. P. (1995) The Bayesian Choice, New York: Springer-Verlag.
@Article{drugowich,
author = "Drugowich de Felicio, J. R. and Libero, V. L.",
title = "Updating {M}onte {C}arlo Algorithms",
journal = "Am. J. Phys.",
year = 1996,
volume = 64,
number = 10,
pages = "1281--1285",
month = oct
}
% Swendsen, R. H., Wang, J-S., and Ferrenberg, A. M. (1992) ``New
% Monte Carlo methods for improved efficiency of computer simulations
% in statistical mechanics'', in K. Binder (editor) {\em The Monte
% Carlo Method in Condensed Matter Physics}, Berlin: Springer-Verlag.
@Article{Swendsen1987,
author = "Swendsen, R. H. and Wang, J.-S.",
title = "Nonuniversal critical dynamics
in {M}onte {C}arlo simulations",
journal = "Physical Review Letters",
year = 1987,
volume = 58,
pages = "86--88"
}
@InCollection{Swendsen1992,
author = "Swendsen, R. H. and Wang, J.-S. and Ferrenberg, A. M.",
title = "New {M}onte {C}arlo
methods for improved efficiency of computer simulations
in statistical mechanics",
publisher = "Springer-Verlag",
year = 1992,
editor = "K. Binder",
booktitle = "The {M}onte {C}arlo Method in Condensed Matter Physics",
address = "Berlin"
}
@Article{Ziv_Lempel77,
author = "J. Ziv and A. Lempel",
title = "A Universal Algorithm for Sequential Data
Compression",
journal = "IEEE Transactions on Information Theory",
year = 1977,
volume = 23,
number = 3,
pages = "337--343",
month = may
}
@Book{Gardner:Carnival,
author = "M. Gardner",
title = "Mathematical Carnival",
publisher = "Random House Vintage Books",
year = 1977,
annote = "orig pub Alfred Knopf Inc Oct 1975"
}% the ch 9 puzzle on the glass was first in
% Barr's 'Second Miscellany of Puzzles'
@book{EliasACmentionedpages61to62,
  author    = {N. Abramson},
  title     = {Information theory and coding},
  publisher = {McGraw-Hill},
  address   = {New York},
  year      = {1963}
}
@Article{Elias75,
author = "P. Elias",
title = "Universal Codeword Sets and Representations of the Integers",
journal = "IEEE Transactions on Information Theory",
year = 1975,
volume = 21,
number = 2,
pages = "194--203",
month = mar
}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% evolution
@Article{EWK99,
author = {Eyre-Walker, A. and Keightley, P.},
title = {High genomic deleterious mutation rates in hominids},
journal = {Nature},
year = 1999,
volume= 397,
pages = {344--347}
}
% should the mtn rate be close as pos to zero?
%13 Maynard Smith J. (1989), "The limitations of evolutionary theory", in Did Darwin Get It Right?,
% J. M. Smith (Ed.), New York: Chapman and Hall, pp. 180-191.
% 16 Williams G. C. (1966), Adaptation and Natural Selection, Princeton: Princeton University Press.
%
% E.B. Baum and W.D. Smith. A bayesian approach to relevance in game playing.
%Artificial Intelligence, 97(1--2):195--242, 1997.
@article{baum97bayesian,
author = "Eric B. Baum and Warren D. Smith",
title = "A {B}ayesian Approach to Relevance in Game Playing",
journal = "Artificial Intelligence",
volume = "97",
number = "1-2",
pages = "195--242",
year = "1997",
url = "citeseer.nj.nec.com/baum97bayesian.html" }
@techreport{ baum93best,
  author      = {Eric B. Baum and Warren D. Smith},
  title       = {Best Play for Imperfect Players and Game Tree Search},
  institution = {NEC},
  address     = {Princeton, NJ},
  year        = {1993},
  url         = {citeseer.nj.nec.com/baum95best.html}
}
@unpublished{Baum96,
  author  = {E.B. Baum and Dan Boneh and C. Garrett},
  title   = {Where Genetic Algorithms Excel},
  year    = {1996},
  note    = {To appear in {\em Evolutionary Computation}},
  annote  = {Jul 10 1996},
  url     = {http://www.neci.nj.nec.com/homepages/eric/eric.html},
  address = {NEC Research Institute, Princeton}
}
@inproceedings{Baum95,
title = "On Genetic Algorithms",
author={E.B. Baum and Dan Boneh and C. Garrett},
year={1995},
booktitle={Proceedings of the Eighth Annual Conference on Computational
Learning Theory},
pages={230--239},
publisher={ACM},
address={New York},
url={http://www.neci.nj.nec.com/homepages/eric/eric.html},
authoraddress={NEC Research Institute, Princeton}
}
@article{Worden95,
title={A speed limit for evolution},
author={R. P. Worden},
journal={Journal of Theoretical Biology},
volume={176},
number={1},
month=sep,
year={1995},
pages={137--152}
}
@book{Bulmer1985,
  author    = {M.G. Bulmer},
  title     = {The Mathematical Theory of Quantitative Genetics},
  publisher = {Oxford University Press},
  address   = {Oxford},
  year      = {1985}
}
@Article{Muhlenbein93,
author = {H. M\"uhlenbein and D. Schlierkamp-Voosen},
title = {Predictive Models for the Breeder Genetic Algorithm {I.}
{C}ontinuous Parameter Optimization},
journal = {Evolutionary Computation},
year = 1993,
volume = 1,
pages = {25--50}
}
% what is the mutation rate?:
%Li, W.-H., C.-I. Wu, and C.-C. Luo. 1985. A new method
%for estimating synonymous and nonsynonymous rates
%in nucleotide substitution considering the relative
%likelihood of nucleotide and codon changes. Mol. Biol. Evol. 2:
%150-174.
%Li, W.-H., M. Tanimura, and P. Sharp. 1987. An evaluation
%of the molecular clock hypothesis using mammalian
%DNA sequences. J. Mol. Evol. 25: 330-342.
%Blackman, R.K., and M. Meselson. 1986. Interspecific nucleotide
%
%sequence comparisons used to identify
%regulatory and structural features of the Drosophila hsp82 gene.
%J. Mol. Biol. 188: 499-515.
% what is the E coli mutation rate?
% from http://proks.bio.cmu.edu/term-papers/mutator/
% due to the
%editing function of the e subunit of DNA polymerase III, the error
%rate of an incorrect base incorporation is 10^-7 - 10^-6
% /bp/cell/generation (For bacteria the unit of which the mutation rate
%for a particular trait is expressed is measured in
%mutations/bacterium/cell division and the observable quantities are
%total number of bacteria at the beginning and end of the experiment as
%well as total number of mutant cells) (Miller, 112) However, if a
%mutation were to occur at the e subunit of DNA Polymerase III, it will
%result in communication loss between this subunit and other subunits
%of the Polymerase III (especially subunit a) (Fijalkowska, 5979-5985)
%, altering the capability of the enzyme's proofreading. This effect
%will be further discussed in the mutD.
%
%%%%%%%%%%%%%%5 %
%%%%%%%%%%%%%%
@Book{Ridley1993,
author = {Ridley, M.},
title = {The Red Queen},
publisher = {Penguin},
year = 1993,
address = {London}
}
@book{Fisher1930,
  author    = {Fisher, Ronald A.},
  title     = {The genetical theory of natural selection},
  publisher = {Clarendon},
  address   = {Oxford},
  year      = {1930}
}
@article{Fersht1988,
title={Relationships Between Apparent Binding-Energies Measured in
Site-Directed Mutagenesis Experiments and Energetics of Binding and
Catalysis},
author={Fersht, A. R.},
journal={Biochemistry},
year={1988},
volume={27},
number={5},
pages={1577--1580}
}
@article{Fersht1987,
title={Structure-Activity-Relationships in Engineered Proteins - Analysis of
Use of Binding-Energy by Linear Free-Energy Relationships},
author={Fersht, A. R. and Leatherbarrow, R. J. and Wells, T. N. C.},
journal={Biochemistry},
year={1987},
volume={26},
number={19},
pages={6030--6038}
}
@article{Wells1987,
  author  = {Wells, T. N. C. and Fersht, A. R.},
  title   = {Using Protein Engineering to Understand Catalytic Yield},
  journal = {Protein Engineering},
  year    = {1987},
  volume  = {1},
  number  = {3},
  pages   = {261}
}
@article{Fersht1986,
title={Quantitative-Analysis of Structure-Activity-Relationships in
Engineered Proteins by Linear Free-Energy Relationships},
author={Fersht, A. R. and Leatherbarrow, R. J. and Wells, T. N. C.},
journal={Nature},
year={1986},
volume={322},
number={6076},
pages={284--286}
}
@Book{JMSES95,
author = {Maynard Smith, John and Szathm{\'a}ry, E{\"o}rs},
title = {The Major Transitions in Evolution},
publisher = {Freeman},
year = 1995,
address = {Oxford}
}
@Book{JMSES99,
author = {Maynard Smith, John and Szathm{\'a}ry, E{\"o}rs},
title = {The Origins of Life},
publisher = {O.U.P.},
year = 1999,
address = {Oxford}
}
@Book{JMS88,
author = {Maynard Smith, John},
title = {Games, Sex and Evolution},
publisher = {Harvester--Wheatsheaf},
year = 1988,
address = {Hertfordshire},
annote = {379:5.c.95.333 U.L. SF 5}
}
@Book{JMS58,
author = {Maynard Smith, John},
title = {The Theory of Evolution},
publisher = {C.U.P.},
year = 1958,
address = {Cambridge},
annote = {}
}
@Book{JMS78,
author = {Maynard Smith, John},
title = {The Evolution of Sex},
publisher = {C.U.P.},
year = {1978},
address = {Cambridge},
annote = {379:5.c.95.131 U.L. SF 5, W.465 in Whipple, GGE in Eth (arch and anth, haddon)}
}
@InCollection{Felsenstein85,
author = {Felsenstein, J.},
title = {Recombination and sex: is {M}aynard {S}mith necessary?},
booktitle = {Evolution. Essays in honour of {J}ohn {M}aynard {S}mith},
pages = {209--220},
publisher = {C.U.P.},
year = 1985,
editor = {P. J. Greenwood and P. H. Harvey and M. Slatkin},
address = {Cambridge}
}
@article{Goebel1995,
title="The 11-Micron Emissions of Carbon Stars",
author="Goebel, J. H. and Cheeseman, P. and Gerbault, F.",
journal="Astrophysical Journal",
year="1995",
volume="449",
number="1Pt1",
pages="246--257",
abstract="A new classification scheme of the IRAS LRS carbon stars is
presented. It comprises the separation of 718 probable carbon stars
into 12 distinct self-similar spectral groupings. Continuum
temperatures are assigned and range from 470 to 5000 K. Three
distinct dust species are identifiable: SiC, alpha:C-H, and MgS. In
addition to the narrow 11+ mu m emission feature that is commonly
attributed to SiC, a broad 11+ mu m emission feature, that is
correlated with the 8.5 and 7.7 mu m features, is found and
attributed to alpha:C-H. SiC and alpha:C-H band strengths are found
to correlate with the temperature progression among the Classes. We
find a spectral sequence of Classes that reflects the carbon star
evolutionary sequence of spectral types, or alternatively
developmental sequences of grain condensation in carbon-rich
circumstellar shells. If decreasing temperature corresponds to
increasing evolution, then decreasing temperature corresponds to
increasing C/O resulting in increasing amounts of carbon rich dust,
namely alpha:C-H. If decreasing the temperature corresponds to a
grain condensation sequence, then heterogeneous, or induced
nucleation scenarios are supported. SiC grains precede alpha:C-H and
form the nuclei for the condensation of the latter material. At still
lower temperatures, MgS appears to be quite prevalent. No 11.3 mu m
PAH features are identified in any of the 718 carbon stars. However,
one of the coldest objects, IRAS 15048-5702, and a few others,
displays an 11.9 mu m emission feature characteristic of laboratory
samples of coronene. That feature corresponds to the C-H out of plane
deformation mode of aromatic hydrocarbon. This band indicates the
presence of unsaturated, sp(3), hydrocarbon bonds that may
subsequently evolve into saturated bonds, sp(2), if, and when, the
star enters the planetary nebulae phase of stellar evolution. The
effusion of hydrogen from the hydrocarbon grain results in the
evolution in wavelength of this 11.9 mu m emission feature to the
11.3 mu m feature."
}
@article{Goebel1989,
title="A {B}ayesian Classification of the {IRAS} {LRS} Atlas",
author="Goebel, J. and Volk, K. and Walker, H. and Gerbault, F. and Cheeseman, P. and Self, M. and Stutz, J. and
Taylor, W.",
journal="Astronomy and Astrophysics",
year="1989",
volume="222",
number="1-2",
pages="L5--L8"
}
@book{goldie91,
author={Goldie, C. M. and Pinch, R. G. E.},
title={Communication theory},
series={London Mathematical Society Student Texts},
volume={20},
address={Cambridge},
publisher={Cambridge University Press},
year= 1991
}
% Series title: London Mathematical Society student texts; 20
% Subjects: Communication
% Other entries: Pinch, Richard G. E.
% Location: [Univ. Lib.] 351:5.c.95.185 South Front 4
%
% currently available at paperback price.
%COMMUNICATION THEORY - Social Sciences title
%D Crowley
%Binding: Hardback
%ISBN: 0804723486
%Published: January 1995
%Format: pages;
%UK Price: 33.75
%There is also a Paperback available
%ISBN: 0804723478
%UK Price: 11.96
@Book{applebaum,
author = "D. Applebaum",
title = "Probability and Information: An Integrated Approach",
publisher = "Cambridge University Press",
year = 1996,
address = "Cambridge"
}
@Book{Ripley91,
author = "B. D. Ripley",
title = "Statistical Inference for Spatial Processes",
publisher = "Cambridge University Press",
year = 1991,
address = "Cambridge"
}
% Statistical Inference for Spatial Processes
%
% B D Ripley
%
% Title Details
%
% Binding: Paperback
% ISBN: 0521424208
% Published: July 1991
% Format: 154 pp pages; 229 x 153mm
% UK Price: 14.95
%
% There is also a Hardback available
% ISBN: 0521352347
% UK Price: £22.5
% MIT press
% Neural Network Learning and Expert
% Systems
% by Stephen I. Gallant
% 1993
% ISBN 0-262-07145-2
% 364 pp. 156 illus.
% $50.00 (cloth)
%
%
% search for this string
%%%%%%%% changed since last publication list %%%%%%%%%%%
@article{Marinari1992,
title={Simulated Tempering --- a New {M}onte-{C}arlo Scheme},
author={Marinari, E. and Parisi, G.},
journal={Europhysics Letters},
year={1992},
volume={19},
number={6},
pages={451--458},
abstract={We propose a new global optimization method (Simulated Tempering) for
simulating effectively a system with a rough free-energy landscape
(i.e., many coexisting states) at finite nonzero temperature. This
method is related to simulated annealing, but here the temperature
becomes a dynamic variable, and the system is always kept at
equilibrium. We analyse the method on the Random Field Ising Model,
and we find a dramatic improvement over conventional Metropolis and
cluster methods. We analyse and discuss the conditions under which
the method has optimal performances.}
}
@article{Kerler1994,
title={Simulated-Tempering Procedure for Spin-Glass Simulations},
author={Kerler, W. and Rehberg, P.},
journal={Physical Review E},
year={1994},
volume={50},
number={5},
pages={4220--4225}
}
@article{Hansmann1996,
title={{M}onte-{C}arlo Simulations in Generalized Ensemble
--- Multicanonical
Algorithm Versus Simulated Tempering},
author={Hansmann, U. H. E. and Okamoto, Y.},
journal={Physical Review E},
year={1996},
volume={54},
number={5},
pages={5863--5865},
abstract={It is shown that two Monte Carlo methods in generalized ensemble,
multicanonical algorithm and simulated tempering, are closely
related. The equivalence and effectiveness of the two methods
illustrated by taking an energy function for the protein folding
problem as an example.}
}
@article{Besag1993,
title={Spatial Statistics and {B}ayesian Computation},
author={Besag, J. and Green, P. J.},
journal={Journal of the Royal Statistical Society Series B-Methodological},
year={1993},
volume={55},
number={1},
pages={25--37},
abstract={Markov chain Monte Carlo (MCMC) algorithms, such as the Gibbs
sampler, have provided a Bayesian inference machine in image analysis
and in other areas of spatial statistics for several years, founded
on the pioneering ideas of Ulf Grenander. More recently, the
observation that hyperparameters can be included as part of the
updating schedule and the fact that almost any multivariate
distribution is equivalently a Markov random field has opened the way
to the use of MCMC in general Bayesian computation. In this paper, we
trace the early development of MCMC in Bayesian inference, review
some recent computational progress in statistical physics, based on
the introduction of auxiliary variables, and discuss its current and
future relevance in Bayesian applications. We briefly describe a
simple MCMC implementation for the Bayesian analysis of agricultural
field experiments, with which we have some practical experience.}
}
@article{Cowles1996a,
title={{M}arkov-Chain {M}onte-{C}arlo Convergence Diagnostics ---
a Comparative Review},
author={Cowles, M. K. and Carlin, B. P.},
journal={Journal of the American Statistical Association},
year={1996},
volume={91},
number={434},
pages={883--904},
abstract={A critical issue for users of Markov chain Monte Carlo (MCMC) methods
in applications is how to determine when it is safe to stop sampling
and use the samples to estimate characteristics of the distribution
of interest. Research into methods of computing theoretical
convergence bounds holds promise for the future but to date has
yielded relatively little of practical use in applied work.
Consequently, most MCMC users address the convergence problem by
applying diagnostic tools to the output produced by running their
samplers. After giving a brief overview of the area, we provide an
expository review of 13 convergence diagnostics, describing the
theoretical basis and practical implementation of each. We then
compare their performance in two simple models and conclude that all
of the methods can fail to detect the sorts of convergence failure
that they were designed to identify. We thus recommend a combination
of strategies aimed at evaluating and accelerating MCMC sampler
convergence, including applying diagnostic procedures to a small
number of parallel chains, monitoring autocorrelations and cross-
correlations, and modifying parameterizations or sampling algorithms
appropriately. We emphasize, however, that it is not possible to say
with certainty that a finite sample from an MCMC algorithm is
representative of an underlying stationary distribution.}
}
@article{Cowles1996b,
title={{B}ayesian {T}obit Modeling of Longitudinal Ordinal Clinical-Trial
Compliance Data with Nonignorable Missingness},
author={Cowles, M. K. and Carlin, B. P. and Connett, J. E.},
journal={Journal of the American Statistical Association},
year={1996},
volume={91},
number={433},
pages={86--98},
abstract={In the Lung Health Study (LHS), compliance with the use of inhaled
medication was assessed at each follow-up visit both by self-report
and by weighing the used medication canisters. One or both of these
assessments were missing if the participant failed to attend the
visit or to return all canisters. Approximately 30\% of canister-
weight data and 5\% to 15\% of self-report data were missing at
different visits. We use Gibbs sampling with data augmentation and a
multivariate Hastings update step to implement a Bayesian
hierarchical model for LHS inhaler compliance. Incorporating
individual-level random effects to account for correlations among
repeated measures on the same participant, our model is a
longitudinal extension of the Tobit models used in econometrics to
deal with partially unobservable data. It enables (a) assessment of
the relationships among visit attendance, canister return, self-
reported compliance level, and canister weight compliance, and (b)
determination of demographic, physiological, and behavioral
predictors of compliance. In addition to addressing the estimation
and prediction questions of substantive interest, we use sampling-
based methods for covariate screening and model selection and
investigate a range of informative priors on missing data.}
}
@book{Tanner96,
title={Tools for Statistical Inference: Methods for
the Exploration of Posterior Distributions and
Likelihood Functions},
series={Springer Series in Statistics},
author={M. A. Tanner},
edition={Third},
publisher={Springer Verlag},
year={1996},
isbn={0387946888}
}
% cav ordered
@article{Richardson98,
title={The Capacity of Low-Density Parity Check Codes under Message-Passing Decoding},
author={T. Richardson and R. Urbanke},
annote={Submitted to IEEE Trans.\ on Information Theory 1998},
year={2001},
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={599--618}
}
@article{Richardson2001b,
title={Design of Capacity-Approaching Irregular Low-Density Parity Check Codes},
author={T. Richardson and M. A. Shokrollahi and R. Urbanke},
year={2001},
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={619--637}
}
@article{Urbanke00,
title={Efficient Encoding of Low-Density Parity-Check Codes},
author={T. Richardson and R. Urbanke},
year={2001},
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={638--656}
}
@article{Chung2001,
title={Analysis of Sum-Product Decoding of Low-Density Parity-Check Codes Using a {G}aussian Approximation},
author={Chung, Sae-Young and Richardson, Thomas J. and Urbanke, R\"udiger L.},
year={2001},
journal={IEEE Transactions on Information Theory},
volume={47},
number={2},
pages={657--670}
}
@book{Spiegel,
title={Statistics},
author={Murray R. Spiegel},
edition={Second},
publisher={McGraw-Hill},
address={New York},
series={Schaum's outline series},
year={1988}
}
@book{MCMC96,
title={{M}arkov Chain {M}onte {C}arlo in Practice},
editor={W. R. Gilks and S. Richardson and D. J. Spiegelhalter},
publisher={Chapman and Hall},
address={London},
year={1996},
isbn={0412055511}
}
% cav ordered
%% RUBIN_DB, 1988 Vol.3 p.385, BAYESIAN STAT (OUP)
%% RUBIN_DB, 1987 Vol.82 p.543, J AM STAT ASSOC
%% TI- THE CALCULATION OF POSTERIOR DISTRIBUTIONS BY DATA AUGMENTATION -
%% COMMENT
%% AU- RUBIN, DB
%% NA- HARVARD UNIV,DEPT STAT,CAMBRIDGE,MA,02138
%% JN- JOURNAL OF THE AMERICAN STATISTICAL ASSOCIATION
%% PY- 1987
%% VO- 82
%% NO- 398
%% PG- 543-546
%% DT- Note
%% CR- COCHRAN_WG, 1977, SURVEY TECHNIQUES
%% RUBIN_DB, 1985 Vol.12 p.1151, ANN STATISTICS
%% RUBIN_DB, 1976 Vol.63 p.581, BIOMETRIKA
%% RUBIN_DB, 1986 Vol.81 p.366, J AM STATISTICAL ASS
%% RUBIN_DB, 1987, MULTIPLE IMPUTATION
%% RUBIN_DB, 1983, PROGR REPORT PROJECT
%% WILKS_SS, 1932 Vol.2 p.163, ANN MATH STATISTICS
@book{Mehta,
title={Random Matrices},
author={M. L. Mehta},
publisher={Academic Press},
year={1991},
edition={Second},
isbn={0124880517}
}
% first Publication date: November 1967
% ISBN: 0124880509
@book{Haake91,
title={Quantum Signatures of Chaos},
author={F. Haake},
publisher={Springer Verlag},
Cavendish={60 H 22},
year={1991}
}
%de Finetti's (famous?) prediction
%that Bayesian approaches would dominate science by the year 2020.
%
%The reference is: de Finetti, B. (1974) Theory of Probability: a critical
%introductory treatment. Vol 1. Wiley, New York.
%
@Article{turin96,
author = "L. Turin",
title = "A Spectroscopic Mechanism for Primary Olfactory Reception",
journal = "Chem. Senses",
year = 1996,
volume = 21,
number = 6,
pages = "773--791"
}
@article{Schell97,
title={D-Serine as a Neuromodulator: Regional and Developmental
Localizations in Rat Brain Glia Resemble {NMDA} Receptors},
author={Schell, M. J. and Brady, R. O. and Molliver, M. E. and Snyder, S. H.},
journal={Journal of Neuroscience},
year={1997},
volume={17},
number={5},
pages={1604--1615}
}
@Article{Lauritzen81,
author = {S. L. Lauritzen},
title = {Time Series Analysis in 1880, a Discussion of Contributions made
by {T.N.\ Thiele}},
journal = {ISI Review},
volume = {49},
year = {1981},
pages = {319--333}
}
@TechReport{Abrahamsen97,
author = "Abrahamsen, P.",
title = "A Review of {G}aussian Random Fields and
Correlation Functions",
institution = "Norwegian Computing Center",
year = 1997,
number = 917,
address={Box 114, Blindern, N-0314 Oslo, Norway},
note = "2nd edition"
}
@Book{Cybernetics,
author = "N. Wiener",
title = "Cybernetics",
publisher = "Wiley",
year = "1948"
}
@inproceedings{scholkopf95,
author={B. Sch{\"o}lkopf and C. Burges and V.~Vapnik},
title ={Extracting support data for a given task},
editor={U. M. Fayyad and R. Uthurusamy},
booktitle= {Proceedings First
International Conference on Knowledge Discovery and Data Mining},
publisher={AAAI Press},
address={Menlo Park, CA},
year= 1995
}
@book{vapnik95,
author={V. Vapnik},
title={The Nature of Statistical Learning Theory},
publisher={Springer Verlag},
address={New York},
year= 1995
}
@book{jensen96,
author={Jensen, F. V.},
title={An Introduction to {B}ayesian Networks},
year={1996},
publisher={UCL Press},
address={London}
}
@incollection{deSa1997b,
author={de Sa, V. R. and Ballard, D.},
title={Perceptual Learning from Cross-Modal Feedback},
editor={R. L. Goldstone and P. G. Schyns and D. L. Medin},
booktitle={Psychology of Learning and Motivation, Vol 36.},
year={1997},
address={San Diego, CA},
publisher={Academic Press}
}
%McRae, K., de Sa V.R., & Seidenberg, M.S. (1997). On the nature and scope of featural
%representations of word meaning. Journal of Experimental Psychology: General, Jun, 126(2),
%99-130. abstract.
% Caruana, R., & de Sa, V.R. (1997). Promoting Poor Features to Supervisors: Some Inputs Work
% Better as Outputs. To appear in M.C. Mozer, M.I. Jordan & T. Petsche (Eds.), Advances in Neural
% Information Processing Systems 9. MIT Press. postscript.
%
% de Sa, V.R. (1994). Unsupervised Classification Learning from Cross-Modal Environmental
% Structure. Doctoral dissertation, Department of Computer Science, University of Rochester, 96
% pages.
% Combining Uni-Modal Classifiers to Improve Learning: Taking Advantage of Cross-Modal
% Environmental Structure. Presented at the conference on Integration of Elementary Functions into
% Complex Behavior, Zentrum für interdisziplinäre Forschung, Universität Bielefeld, Bielefeld,
% Germany, July 12-15 1994. postscript.
@incollection{deSa1994a,
author={de Sa, V. R.},
title={Learning Classification with Unlabeled Data},
editor = "Cowan, J. D. and Tesauro, G. and Alspector, J.",
booktitle = "Advances in Neural Information Processing Systems 6",
year = 1994,
ADDRESS ="San Mateo, California",
publisher = "Morgan Kaufmann",
PAGES ="112--119"
}
@incollection{deSa1994b,
author={de Sa, V. R.},
title={Minimizing Disagreement for Self-Supervised Classification},
editor = "M. C. Mozer and P.
Smolensky and D. S. Touretzky and J. L. Elman",
booktitle = "Proceedings of the 1993 Connectionist Models
Summer School",
year = 1994,
ADDRESS ="San Mateo, California",
publisher = "Erlbaum Associates",
PAGES ="300--307"
}
% de Sa, V.R., & Ballard, D.H. (1993). A Note on Learning Vector Quantization. In C.L. Giles, S.J.
% Hanson & J.D. Cowan (Eds.), Advances in Neural Information Processing Systems 5, (pp.
% 220---227). Morgan Kaufmann. postscript.
% McRae, K., de Sa, V.R., & Seidenberg, M.S. (1993). Modeling Property Intercorrelations in
% Conceptual Memory. In Proceedings of the 15th Annual Meeting of the Cognitive Science Society
% (pp. 729---734).
@Article{BeckerHinton92,
author = "Suzanna Becker and Geoffrey E. Hinton",
title = "Self--organizing neural network that discovers surfaces
in random-dot stereograms",
journal = "Nature",
year = "1992",
volume = "355",
pages = "161--163",
ref = "VV25",
}
@InProceedings{Hinton90CX40,
author = "G. E. Hinton and S. Becker",
title = "An Unsupervised Learning Procedure that
Discovers Surfaces in Random--dot Stereograms",
booktitle = "Proceedings of the International Joint Conference on
Neural Networks",
year = "1990",
address = "Washington, DC",
month = jan,
ref = "CX40",
}
@TechReport{becker-hinton-89,
key = "becker",
author = "S. Becker and G.~E. Hinton",
title = "Spatial coherence as an internal teacher for a neural
network",
type = "Technical Report",
number = "CRG-TR-89-7",
institution = "University of Toronto",
year = "1989",
annote = "In CRG Library",
}
@InProceedings{Luttrell89b,
author = "Stephen P. Luttrell",
title = "Hierarchical self-organizing networks",
booktitle = "Proc. 1st IEE Conf. of Artificial Neural Networks",
year = "1989",
pages = "2--6",
publisher = "British Neural Network Society",
address = "London, UK",
}
@Article{Luttrell89c,
author = "S. P. Luttrell",
title = "Image compression using a multilayer neural network",
journal = "Pattern Recognition Letters",
year = "1989",
volume = "10",
pages = "1--7",
}@Article{Luttrell89d,
author = "S. P. Luttrell",
title = "Hierarchical vector quantisation",
journal = "Proc. IEE Part I",
year = "1989",
volume = "136",
pages = "405--413",
}
@InProceedings{Luttrell91b,
author = "S. P. Luttrell",
title = "Self-supervised training of hierarchical vector
quantisers",
booktitle = "Proc. 2nd IEE Conf. on Artificial Neural Networks",
year = "1991",
pages = "5--9",
publisher = "British Neural Network Society",
address = "London, UK",
}@TechReport{Luttrell91c,
author = "S. P. Luttrell",
title = "Self-supervision in multilayer adaptive networks",
institution = "RSRE",
year = "1991",
number = "4467",
address = "Malvern, UK",
}
@Misc{Luttrell92a,
author = "S. P. Luttrell",
title = "Image anomaly detector",
howpublished = "British Patent Application 9202752.3",
year = "1992",
}
@Article{Luttrell92b,
author = "S. P. Luttrell",
title = "Self-supervised adaptive networks",
journal = "IEE Proc. F [Radar and Signal Processing]",
year = "1992",
volume = "139",
number = "6",
pages = "371--377",
month = dec,
}
@TechReport{luttrell-90,
key = "Luttrell",
author = "S. P. Luttrell",
title = "A Trainable Texture Anomaly Detector Using the
Adaptive Cluster Expansion ({ACE}) Method",
type = "Technical Report",
number = "{RSRE} Memorandum Number 4437",
institution = "Royal Signals and Radar Establishment",
year = "1990",
}
@Article{Dushuang95,
author = "Huang Dushuang",
title = "An analysis of the statistical properties on the
self-supervised learning subspaces for pattern
recognition",
journal = "Acta Electronica Sinica",
year = "1995",
volume = "23",
number = "9",
pages = "99--102",
}
@InProceedings{Ossen93,
author = "Arnfried Ossen",
title = "Learning Topology-Preserving Maps Using
Self-Supervised Backpropagation",
booktitle = "Proc. ICANN'93, Int. Conf. on Artificial Neural
Networks",
year = "1993",
editor = "Stan Gielen and Bert Kappen",
pages = "586--591",
publisher = "Springer",
address = "London, UK",
}
@TechReport{MIT-AILab//AITR-1086,
bibdate = "February 27, 1995",
type = "Technical Report",
number = "AITR-1086",
title = "Optimal Unsupervised Learning in Feedforward Neural
Networks",
year = "1989",
month = jan,
institution = "Massachusetts Institute of Technology, Artificial
Intelligence Laboratory",
pages = "130",
author = "Terence D. Sanger",
abstract = "We investigate the properties of feedforward neural
networks trained with Hebbian learning algorithms. A
new unsupervised algorithm is proposed which produces
statistically uncorrelated outputs. The algorithm
causes the weights of the network to converge to the
eigenvectors of the input correlation with largest
eigenvalues. The algorithm is closely related to the
technique of Self-supervised Backpropagation, as well
as other algorithms for unsupervised learning.
Applications of the algorithm to texture processing,
image coding, and stereo depth edge detection are
given. We show that the algorithm can lead to the
development of filters qualitatively similar to those
found in primate visual cortex.",
notes = "Keywords: neural networks, learning, connectionism,
vision, eigenvector analysis Cost: \$8.00 AD-A207961",
}
@TechReport{Schmidhuber:1990c,
title = "Making the world differentiable: On Using
Self-Supervised Fully Recurrent Neural Networks for
Dynamic Reinforcement Learning and Planning in
Non-Stationary Environments",
author = "J{\"u}rgen H. Schmidhuber",
institution = "Institut f{\"u}r Informatik, Technische
Universit{\"a}t M{\"u}nchen",
year = "1990",
type = "Forschungsberichte K{\"u}nstliche Intelligenz",
number = "FKI-126-90(revised)"
}
@TechReport{Sa94,
author = "Virginia R. de Sa",
title = "Unsupervised Classification Learning from Cross-Modal
Environmental Structure",
institution = "University of Rochester, Computer Science Department",
number = "TR536",
month = nov,
year = "1994",
keywords = "cross-modal; classification; connectionist; learning
vector quantization (LVQ); neural networks;
self-supervised; unsupervised learning",
url = "ftp://ftp.cs.rochester.edu/pub/papers/ai/94.tr536.Unsupervised_classification_learning.ps.Z",
abstract = "This dissertation addresses the problem of
unsupervised learning for pattern classification or
category learning. A model that is based on gross
cortical anatomy and implements biologically plausible
computations is developed and shown to have
classification power approaching that of a supervised
discriminant algorithm. .pp The advantage of supervised
learning is that the final error metric is available
during training. Unfortunately, when modeling human
category learning, or in constructing classifiers for
autonomous robots, one must deal with not having an
omniscient entity labeling all incoming sensory
patterns. We show that we can substitute for the labels
by making use of structure between the pattern
distributions to different sensory modalities. For
example the co-occurrence of a visual image of a cow
with a ``moo'' sound can be used to simultaneously
develop appropriate visual features for distinguishing
the cow image and appropriate auditory features for
recognizing the moo. .pp We model human category
learning as a process of minimizing the disagreement
between outputs of sensory modalities processing
temporally coincident patterns. We relate this
mathematically to the optimal goal of minimizing the
number of misclassifications in each modality and apply
the idea to derive an algorithm for piecewise linear
classifiers in which each network uses the output of
the other networks as a supervisory signal. .pp Using
the Peterson-Barney vowel dataset we show that the
algorithm finds appropriate placement for the
classification boundaries. The algorithm is then
demonstrated on the task of learning to recognize
acoustic and visual speech from images of lips and
their emanating sounds Performance on these tasks is
within 1-7$\backslash$\% of the related supervised
algorithm (LVQ2.1). .pp Finally we compare the
algorithm to Becker's IMAX algorithm and give
suggestions as to how the algorithm may be implemented
in the brain using physiological results concerning the
relationship between two types of neural plasticity,
LTP and LTD, observed in visual cortical cells. We also
show how the algorithm can be used as an efficient
method for dealing with learning from data with missing
values.",
note = "Thu, 17 Jul 97 09:00:00 GMT",
}
% Hinton, G. E. and Nowlan, S. J. (1987) How learning can guide evolution. Complex Systems, 1,
% 495--502.
@Article{Smith87,
author = "Maynard Smith, J.",
title = "When learning guides evolution",
journal = "Nature",
volume = "329",
pages = "761--762",
year = "1987",
keywords = "machine learning, AI",
abstract = "via enews",
}
@article{HintonNowlan87,
author={Hinton, G. E. and Nowlan, S. J.},
year={1987},
title={How learning can guide evolution},
journal={Complex Systems},
volume={1},
pages={495--502}
}
@article{Baldwin1896,
author={Baldwin, J. M.},
year={1896},
title={A new factor in evolution},
journal={American Naturalist},
volume={30},
pages={441--451}
}
@article{mayley1996,
author={G. Mayley},
year={1996},
title={Landscapes, Learning Costs and Genetic Assimilation},
note={In Evolution, Learning, and Instinct:
100 Years of the Baldwin Effect, Special Issue},
journal={Evolutionary Computation},
volume={4},
number={3},
editor={P. Turney and D. Whitley and R. Anderson},
url={http://www.cogs.susx.ac.uk/users/gilesm/index.html}
}
@TechReport{87aHint,
author = "Geoffrey E. Hinton",
file = "nn.bib",
index = "NN Review",
title = "Connectionist learning procedures",
type = "Computer Science Technical Report",
number = "CMU-CS-87-115",
institution = "Carnegie-Mellon University",
address = "Pittsburgh, PA",
year = "1987",
month = jun,
status = "In hand",
equations = "26",
figures = "10",
refs = "83",
pagecount = "46",
annote = "The author reviews learning procedures under a number
of network paradigms. He defines the description of
knowledge representation in a network to be {"}local{"}
if a concept in the external descriptive language maps
to one or a small, fixed number of units in the
network, and {"}distributed{"} otherwise. He notes that
the classification of a network as one or the other
depends upon the descriptive language chosen. Each
output of a linear associator computes a linear
functions of the input. Hebbian learning consists of
adding to the weights on each connection the product of
the input and output activites for that association.
There are also nonlinear single-layer associative
memories. A common deficiency of single-layer networks
is that they can only perfectly encode inputs which are
linearly independent. Supervised learning amounts to
changing each weight by an amount proportional to the
local partial derivative of the error with respect to
that weight. This solution is optimal in the Least Mean
Square sense, and so is called LMS. In a multilayer
network, LMS is implemented by back-propagation. If the
outputs are binary-valued, then the outputs as a whole
can be taken to represent the probability distribution
of correctness for each output, given the
representation the network can achieve, i.e. the
network models a maximum likelihood estimator. Back
propagation can be applied to iterative (lattice)
networks. Reinforcement has also been used with
back-prop networks. A major drawback of back-prop is
that (on a sequential processor) its execution time
scales at least as order $N squared$. Back propagation
is probably not a good model of biological systems.
Boltzmann machines optimally adjust the weights in the
middle layer in a kind of stochastic physical model.
The information storage capabilities of Boltzmann
machines are well understood, by physical analog. Other
learning procedures include maximizing mutual
information between pairs of input classes,
unsupervised Hebbian learning, competitive learning,
and reinforcement learning. Unsupervised Hebbian
learning maximizes the covariances of the weighted
inputs to each unit. This maximizes the information
sent forward by each unit. Competitive learning is a
degenerate case of self-supervised back-propagation. A
version of competitive learning called genetic learning
produces new configurations by 'cross-breeding' good
ones in the current generation, and then re-running the
iteration toward a final state.",
audience = "Tutorial",
}
@InProceedings{Mel88,
author = "Bartlett W. Mel",
title = "{MURPHY}: {A} Robot that Learns by Doing",
booktitle = "Proc. First IEEE Conf. on Neural Information
Processing Systems",
editor = "Dana Z. Anderson",
year = "1988",
publisher = "IEEE Service Center",
address = "Piscataway, NJ",
pages = "544--553",
}
@TechReport{Mel??,
author = "B. Mel",
title = "The Sigma-Pi Column: {A} Model of Associative Learning
in Cerebral Neocortex",
institution = "Neuroprose",
type = "Technical Report",
pages = "44",
url = "ftp://archive.cis.ohio-state.edu/pub/neuroprose/mel.sigmapi?.ps.Z",
}
@InProceedings{Mel88V4,
author = "B. W. Mel",
title = "Building and Using Mental Models in a Sensory-Motor
Domain: {A} Connectionist Approach",
booktitle = "Proceedings of the Fifth International Conference on
Machine Learning",
year = "1988",
month = jun,
address = "University of Michigan, Ann Arbor",
pages = "207--213",
ref = "V4",
}
@TechReport{Mel90HH12,
author = "B.~W. Mel",
title = "The Sigma-Pi Column",
institution = "California Institute of Technology, Pasadena",
year = "1990",
number = "CNS 216-76",
month = apr,
ref = "HH12",
}
@Article{barlow-72,
key = "Barlow",
author = "H.~B. Barlow",
year = "1972",
title = "Single units and sensation: {A} neuron doctrine for
perceptual psychology?",
journal = "Perception",
volume = "1",
annote = "In CRG Library",
pages = "371--394",
}
@inproceedings{berkmannisit,
AUTHOR ={J. Berkmann and F. Burkert},
TITLE ={Turbo--decoding of nonbinary codes},
YEAR =1997,
booktitle={Proceedings of ISIT 1997},
address={Ulm, Germany}}
@book{zipf,
author={Zipf, G. K.},
title={Human Behavior and the Principle of Least Effort},
publisher={Addison-Wesley},
year={1949},
}
@article{KeelingRand95,
title={A Spatial Mechanism for the Evolution and Maintenance of Sexual
Reproduction},
author={Keeling, M. J. and Rand, D. A.},
journal={Oikos},
volume={74},
year={1995},
pages={414--424}
}
@article{Kimura61,
author={M. Kimura},
year={1961},
title={Natural Selection as the Process of Accumulating Genetic
Information in Adaptive Evolution},
journal={Genetical Research Cambridge},
volume={2},
pages={127--140}
}
@article{Kondrashov1988,
title={Deleterious Mutations and the Evolution of Sexual Reproduction},
author={Kondrashov, A. S.},
journal={Nature},
year={1988},
volume={336},
number={6198},
pages={435--440}
}
@Article{Smith68,
author = "Maynard Smith, J.",
title = {``{H}aldane's Dilemma'' and the Rate of Evolution},
journal = "Nature",
volume = "219",
number={5159},
pages = "1114--1116",
year = "1968"
}
@article{bdelloidsize98,
journal={Hydrobiologia},
volume={387/388},
pages={395--402},
year=1998,
publisher={Kluwer Academic Publishers},
title={Measurements of the genome size of the
monogonont rotifer {Brachionus plicatilis} and of
the bdelloid rotifers {Philodina roseola} and
{Habrotrocha constricta}},
author={Mark Welch, David B. and Matthew Meselson}
}
@article{Yeung1991,
title={A New Outlook on {S}hannon-Information Measures},
author={Yeung, R. W.},
journal={IEEE Transactions on Information Theory},
year={1991},
volume={37},
number={3},
pages={466--474},
abstract={Let X(i), i = 1,...,n, be discrete random variables, and X
approximately (i) be a set variable corresponding to X(i). Define the
universal set OMEGA to be union i(n) = 1X approximately (i) and let F be
the sigma-field generated by {X approximately (i), i = 1,...,n}. It
is shown that Shannon's information measures on the random variables
X(i), i = 1,...,n, constitute a unique measure mu* on F, which is
called the I-Measure. In other words, the Shannon information measure
(i.e., Shannon's information measures as a whole) is a measure on F,
thus establishing the analogy between information theory and set
theory. Therefore each information theoretic operation can formally
be viewed as a set theoretic operation, and vice versa. This point of
view, which we believe is of fundamental importance, has apparently
been overlooked in the past by information theorists. As a
consequence the I-Diagram is introduced, which is a geometrical
representation of the relationship among the information measures.
The I-Diagram is analogous to the Venn Diagram in set theory. The use
of the I-Diagram is discussed; some applications of which reveal
results that may otherwise be difficult to discover. A formula is
also derived for the value of the I-Measure of the atoms of F and its
sub-sigma-fields generated by some subsets of the basic set
variables.}
}
@Article{Wolf92,
author = {B. H. Marcus and P. H. Siegel and J. K. Wolf},
title = {Finite-State Modulation Codes for Data Storage},
journal = {IEEE Journal on Selected Areas in Communication},
year = {1992},
volume = {10},
number = {1},
pages = {5--38},
month = jan,
}
@unpublished{LubyDF0,
author={M. Luby},
year={1998},
title={Digital fountain},
note={Unpublished work, patents pending}
}
@inproceedings{LubyDF,
title={A Digital Fountain Approach to Reliable Distribution of Bulk Data},
url={http://www.dfountain.com/tech/techpapers/index.html},
author={John Byers and Michael Luby and Michael Mitzenmacher and Ashu Rege},
booktitle={Proceedings of ACM SIGCOMM '98, September 2-4, 1998},
annote={ACM SIGCOMM '98, September 2-4, 1998},
year={1998}
}
@inproceedings{LubyTC,
title={Accessing Multiple Mirror Sites in Parallel: Using Tornado Codes to Speed Up Downloads},
url={http://www.dfountain.com/tech/techpapers/index.html},
author={John Byers and Michael Luby and Michael Mitzenmacher},
booktitle={Proceedings of IEEE INFOCOMM '99, March
21-25, 1999, New York},
annote={IEEE INFOCOMM '99, March
21-25, 1999, New York},
year={1999}
}
@book{AliceLookingGlass,
author={Lewis Carroll},
title={Alice's adventures in Wonderland; and, Through the
looking-glass: and what Alice found there},
address={London},
publisher={Macmillan Children's Books},
year={1998}
}
@article{saad99,
author={Kanter, I. and Saad, D.},
journal={Physical Review Letters},
title={Error-correcting Codes that Nearly Saturate {S}hannon's Bound},
volume={83},
number={13},
pages={2660--2663},
year={1999}
}
@article{schulman-zuckerman99,
author={L. J. Schulman and D. Zuckerman},
title={Asymptotically Good Codes Correcting Insertions, Deletions, and Transpositions},
journal={IEEE Transactions on Information Theory},
volume={45},
number={7},
year={1999},
pages={2552--2557}
}
@PhdThesis{bours-phd94,
author = {P. A. H. Bours},
title = {Codes for Correcting Insertion and Deletion Errors},
school = {Eindhoven Technical University},
year = {1994},
month = jun,
annote = {Available from {\tt http://www.win.tue.nl/math/dw/pp/wsdwpb/thesis.html}},
}
@Article{Bours:1995:CPD,
author = "Patrick A. H. Bours",
title = "On the Construction of Perfect Deletion-Correcting
Codes using Design Theory",
journal = "Designs, Codes, and Cryptography",
volume = "6",
number = "1",
pages = "5--20",
month = jul,
year = "1995",
coden = "DCCREC",
ISSN = "0925-1022",
mrclass = "94B60 (05B05)",
mrnumber = "96c:94007",
bibdate = "Wed Feb 10 09:30:50 MST 1999",
url = "http://www.wkap.nl/oasis.htm/85445",
journalabr = "Des Codes Cryptography",
}
@article{MacKayMcCulloch1952,
author={MacKay, D. M. and McCulloch, W. S.},
year={1952},
title={The limiting information capacity of a neuronal link},
journal={Bull. Math. Biophys.},
volume={14},
pages={127--135}
}
@book{RussellNorvik,
author={Russell, S. and Norvig, P.},
year={1995},
title={Artificial Intelligence: A Modern Approach},
publisher={Prentice Hall},
address={Englewood Cliffs, New Jersey}
}
@article{KabaSaad99,
author={Kabashima, Y. and Saad, D.},
title={Statistical Mechanics of Error-correcting Codes},
year={1999},
journal={Europhys. Lett.},
volume={45},
pages={97--103}
}
@article{KabaMuraSaad00a,
author={Kabashima, Y. and Murayama, T. and Saad, D.},
title={Typical Performance of {G}allager-Type Error-Correcting Codes},
year={2000},
journal={Physical Review Letters},
volume={84},number={6},
pages={1355--1358}
}
@article{KabaMuraSaad00b,
author={Kabashima, Y. and Murayama, T. and Saad, D.},
title={Cryptographical Properties of Ising Spin Systems},
year={2000},
journal={Physical Review Letters},
volume={84},number={9},
pages={2030--2033}
}
@unpublished{Naka00,
author={Nakamura, K. and Kabashima, Y. and Saad, D.},
year={2000},
title={Statistical Mechanics of Low-Density Parity Check Error-Correcting Codes over {G}alois fields},
note={Submitted to Europhysics Letters}
}
@unpublished{IBMpc,
author={Evangelos Eleftheriou},
note={IBM Z\"urich Research Laboratories},
year={2000},
title={Personal communication}
}
@article{Immink90,
author={K. A. S. Immink},
title={Runlength-Limited Sequences},
journal={Proc. IEEE}, volume={78}, pages={1745},
month=nov,
year={1990}
}
@article{Immink97a,
author={K. A. S. Immink},
title={A Practical Method for Approaching the Channel Capacity of
Constrained Channels},
journal={IEEE Trans. Inform. Theory}, volume={43},
pages={1389--1399}, number={5}, month=sep, year={1997}
}
@article{Immink98,
author={K. A. S. Immink and Paul Siegel and Jack Wolf},
title={Codes for Digital Recorders},
journal={IEEE Trans. Inform. Theory}, volume={44}, pages={2260--2299},
month=oct, year={1998}
}
@article{Immink97b,
author={K. A. S. Immink},
title={Weakly constrained codes},
journal={Electronics Letters},
volume={33},
number={23},pages={1943--1944}, month=nov, year={1997}
}
@article{Immink95,
author={K. A. S. Immink},
title={Constructions of Almost Block-Decodable Runlength-Limited Codes},
journal={IEEE Transactions on Information Theory},
volume={41}, number={1}, month=jan, year={1995}
}
@article{DengHerro,
author={R. H. Deng and M. A. Herro},
title={{DC}-free coset codes},
journal={IEEE Trans. Inf. Th.}, volume={34},
year={1988},
pages={786--792}
}
@article{Makarian,
author={Markarian, G. S. and Naderi, M. and Honary, B. and Popplewell, A. and O'Reilly, J. J.},
title={Maximum likelihood decoding of {RLL-FEC} array codes on partial response channels},
journal={Electronics Letters},
year={1993},
volume={29}, number={16}, pages={1406--1408}
}
@article{Zigangirov1969,
author={Zigangirov, K. Sh.},
title={Sequential Decoding for a Binary Channel with Drop-outs and Insertions},
journal={Problemy Peredachi Informatsii},
volume={5},
number={2},
pages={23--30},
year={1969}
}
@article{Dobrushin1967,
author={Dobrushin, R. L.},
year={1967},
title={{Shannon}'s theorem for Channels with Synchronization Errors},
journal={Problemy Peredachi Informatsii},
volume={3},
number={4},
pages={18--36},
annote={ref 1 in Zigangirov1969}
}
@unpublished{Pinsker1965,
author={M. S. Pinsker},
title={Capacity of Channels with Synchronization Errors},
note={Report at the Second Conference on the Theory of Coding and its Applications,
Baku},
year={1965},
annote={ref 2 in Zigangirov1969}
}
@article{Dobrushin1968,
author={Vvedenskaya, N. D. and Dobrushin, R. L.},
year={1968},
title={The Computations on a Computer of the Channel Capacity of a Line with Symbol Drop-out},
journal={Problemy Peredachi Informatsii},
volume={4},
number={3},
pages={92--95},
annote={ref 3 in Zigangirov1969}
}
@Book{Wozencraft1965,
author = {Wozencraft, J. M. and Jacobs, I. M.},
year=1965,
title = {Principles of Communication Engineering},
publisher = {Wiley},
address = {New York},
annote={ref 4 in Zigangirov1969; [Univ. Lib.] 431.c.96.413
South Front, Floor 6}
}
@article{Zigangirov1966,
author={Zigangirov, K. Sh.},
title={Some Sequential Decoding Procedures},
journal={Problemy Peredachi Informatsii},
volume={2},
number={4},
pages={13--25},
year={1966},
annote={ref 5 in Zigangirov1969}
}
@Book{Gallager68,
author = "Gallager, R. G.",
title = "Information Theory and Reliable Communication",
publisher = "Wiley",
year = 1968,
address = "New York"
}
% South Front 6
% [Univ. Lib.] 431.c.96.694
@Unpublished{Gallager68b,
author = "Gallager, R. G.",
title = "Sequential Decoding for Binary Channels with Noise and Synchronization Errors",
note={unpublished Lincoln Lab report 25 G-2},
year={1961},
annote = "ref 6 in ZIGANGIROV1969.
The paper you mention was actually 1961 and was a group report
for MIT Lincoln Laboratories. You could probably get it from
the US Air Force, but I am asking my secretary to simply make
a copy of my copy and send it to you. It is a has few handwritten
comments on it by someone who read it (name lost in time), I
don't know how it ought to be referred to, probably just as
an unpublished Lincoln Lab report."
}
@Book{Wozencraft1963,
author = {Wozencraft, J. M. and Reiffen, B.},
year=1963,
title = {Sequential Decoding},
note={Russian translation - need to find English reference},
annote={ref 7 in Zigangirov1969. Key renamed from Wozencraft1965,
which duplicated the Wozencraft and Jacobs 1965 book entry.}
}
@article{Zigangirov1968,
author={Zigangirov, K. Sh.},
title={Sequential Decoding Procedures with Error Probability Exponent Given by Random Coding},
journal={Problemy Peredachi Informatsii},
volume={4},
number={2},
pages={83--85},
year={1968},
annote={ref 8 in Zigangirov1969}
}
@unpublished{ChungAppletb,
year={1999},
author={Chung, Sae-Young and Urbanke, R\"udiger L. and Richardson, Thomas J.},
title={{LDPC} code design applet},
note={{\tt http://truth.mit.edu/$\sim$sychung/gath.html}}
}
@unpublished{ChungApplet,
year={1999},
author={Chung, Sae-Young and Urbanke, R\"udiger L. and Richardson, Thomas J.},
title={{LDPC} code design applet},
note={{\tt http://truth.mit.edu/$\sim$sychung/gaopt.html}}
}
@ARTICLE{Turin97a,
author = {Turin, L.},
title = {Sensational subjects},
journal = {Chem. Ind.},
year = {1997},
pages = {924-+}
}
@ARTICLE{Turin97b,
author = {Turin, L.},
title = {The nose as spectroscopist},
journal = {Chem. Ind.},
year = {1997},
pages = {866--870}
}
@ARTICLE{Turin96,
author = {Turin, L.},
title = {A spectroscopic mechanism for primary olfactory
reception},
journal = {Chem. Senses},
year = {1996},
volume = {21},
pages = {773--791}
}
% (John Edensor), 1885-1977
@book{Littlewood1986,
Author={ Littlewood, J. E.},
annote={ Originally published as: 'A mathematician's miscellany',
1953;
UL: 348:6.c.95.124 South Front, Floor 4;
page 186-188 is of interest to me},
Title={ Littlewood's miscellany},
editor={B\'ela Bollob\'as},
address={ Cambridge},
publisher={ Cambridge University Press},
year={1986}
}
@incollection{Littlewood1952,
title={ On the problem of $n$ bodies},
Author={ Littlewood, J. E.},
booktitle={Communications du s\'eminaire math\'ematique de l'Universit\'e
de Lund, tome supplementaire, d\'edi\'e \`a Marcel Riesz },
year={1952},
pages={143-151}
}
@book{Bentley2,
author={Jon Bentley},
edition={second},
title={Programming Pearls},
publisher={Addison-Wesley},
address={Reading, Massachusetts},
year={2000},
}
@misc{ dietterich91error-correcting,
author = "T. Dietterich and G. Bakiri",
title = "Error-correcting output codes: A general method for improving multiclass
inductive learning programs",
note={In Proceedings
of the Ninth National Conference on Artificial Intelligence (AAAI-91), pages
572--577. AAAI Press, 1991.},
text = "T. G. Dietterich and G. Bakiri. Error-correcting output codes: A general
method for improving multiclass inductive learning programs. In Proceedings
of the Ninth National Conference on Artificial Intelligence (AAAI-91), pages
572--577. AAAI Press, 1991.",
year = "1991"
}
@misc{ dietterich95solving,
author = "T. Dietterich and G. Bakiri",
title = "Solving multiclass learning problems via error-correcting output codes",
text = "Thomas G. Dietterich and Ghulum Bakiri. Solving multiclass learning problems
via error-correcting output codes. Journal of Artificial Intelligence Research,
2:263--286, January 1995.",
year = "1995"
}
@book{zurek,
title={Complexity, Entropy and the Physics of Information},
editor={Wojciech H. Zurek},
series={SFI Studies in the Sciences of Complexity},
year={1990},
publisher={Addison Wesley Longman},
annote={0-201-51506-7 www.santafe.edu/sfi/publications/Bookinforev/cepinew.html}
}
@article{besag77some,
author = "J. Besag",
title = "Some methods of statistical analysis for spatial data",
journal = "Bull. Intern. Statist. Inst.",
volume = "47",
number = "2",
pages = "77--92",
year = "1977"
}
@techreport{YFW2000,
author={J. S. Yedidia and W. T. Freeman and Y. Weiss},
title={Generalized Belief Propagation},
institution={Mitsubishi},
note={MERL TR-2000-26},
year={2000}
}
@techreport{YFW2002,
author={J. S. Yedidia and W. T. Freeman and Y. Weiss},
title={Constructing Free Energy Approximations and Generalized Belief Propagation
Algorithms},
institution={Mitsubishi},
note={MERL TR-2002-??},
year={2002}
}
@techreport{YFW2001short,
author={J. S. Yedidia and W. T. Freeman and Y. Weiss},
title={Characterization of belief propagation and its
generalizations},
institution={Mitsubishi},
note={MERL TR-2001-15},
year={2001}
}
@techreport{YFW2001long,
author={J. S. Yedidia and W. T. Freeman and Y. Weiss},
title={{B}ethe
Free Energy, {K}ikuchi Approximations and Belief Propagation Algorithms},
institution={Mitsubishi},
note={MERL TR-2001-16},
year={2001}
}
@techreport{Yedidia2000,
author={J. S. Yedidia},
title={An Idiosyncratic Journey Beyond Mean Field Theory},
institution={Mitsubishi},
note={MERL TR-2000-27},
year={2000}
}
@techreport{SmolaBartlett,
author={A. Smola and P. Bartlett},
title={Sparse Greedy Gaussian Process Regression},
institution={Australian National University},
year={2000},
note={NIPS 13}
}
@techreport{Welling2001,
title={Belief optimization for Binary Networks: A Stable
Alternative to Loopy Belief Propagation},
author={Max Welling and Yee Whye Teh},
institution={Gatsby Computational Neuroscience Unit},
note={Check with Max for correct bibtex},
year={2001}
}
@unpublished{Yuille2001,
author = "A. L. Yuille",
title = "A Double-Loop Algorithm to Minimize the {B}ethe and {K}ikuchi Free
Energies",
year = 2001,
note={Unpublished}}
@article{ fisher43relation,
author = "R. Fisher and A. Corbet and C. Williams",
title = "The relation between the number of species and the number of individuals
in a random sample of an animal population",
annote = "Fisher, R.A., Corbet, A.S., and Williams, C.B. 1943. The relation between
the number of species and the number of individuals in a random sample of
an animal population. Journal of Animal Ecology 12, 42-58.",
journal={ Journal of Animal Ecology},
volume={12},
pages={42--58},
year = "1943"
}
@Book{Schneier96,
author = {B. Schneier},
title = {Applied Cryptography},
publisher = {Wiley},
year = 1996,
address = {New York}
}
@book{Layzer84,
author={David Layzer},
title={Constructing the Universe},
publisher={Scientific American Library},
year={1984}
}
@book{Gingerich92,
title={ The Great Copernicus Chase : And Other Adventures in Astronomical History},
author={ Owen Gingerich },
publisher={Cambridge University Press },
year={ 1992}
}
@book{Gingerich93,
author={ Owen Gingerich },
title={The Eye of Heaven : Ptolemy, Copernicus, Kepler (Masters of Modern Physics)},
publisher={ American Institute of Physics },
year={ 1993}
}
% author={C. J. Colbourn and J. H. Dinitz (editors)},
@book{Designs,
editor={C. J. Colbourn and J. H. Dinitz},
title={The {CRC} Handbook of Combinatorial Designs},
publisher={CRC Press},
address={New York},
year=1996
}
@inproceedings{RosenthalVontobel,
author={Joachim Rosenthal and Pascal O. Vontobel},
title={Constructions of {LDPC} codes using {R}amanujan graphs and ideas from {M}argulis},
URL={http://www.nd.edu/~rosen/preprints.html},
conference={38th Annual Allerton Conference on Communication, Control, and Computing, 2000},
booktitle={Proceedings of the 38th Annual Allerton Conference on Communication, Control, and Computing},
pages={248--257},
year=2000
}
% girth properties of gallager
@InProceedings{mao01,
author = {Yongyi Mao and Amir Banihashemi},
title = {A Heuristic Search for Good {LDPC} Codes at Short Block Lengths},
booktitle = {IEEE International Conference on Communications},
OPTpages = {},
year = {2001},
OPTeditor = {},
month = {June},
}
@InProceedings{mao00,
author = {Yongyi Mao and Amir Banihashemi},
title = {Design of Good {LDPC} Codes Using Girth Distribution},
booktitle={IEEE International Symposium on Information Theory, Italy, June, 2000},
conference={IEEE International Symposium on Information Theory, Italy, June, 2000},
year = {2000},
OPTeditor = {},
month = {June},
}
%Dr. Amir H. Banihashemi
% Y. Mao and A. H. Banihashemi, "Design of Good LDPC Codes Using Girth Distribution", presented (by A. Banihashemi. People knowing me will realize that those slides are not my style) at IEEE International Symposium on Information Theory, Italy, June, 2000
@book{babel,
author="Jorge Luis Borges",
title="The {L}ibrary of {B}abel",
publisher="David R. Godine, Inc.",
address={Boston, Massachusetts},
year="1941", ISBN={ 156792123X},
translator={Andrew Hurley}
}
% ,illustrator={Erik Desmazieres}
% Borges wrote "La Biblioteca de Babel" in 1941, and it was published the same year in a collection of stories entitled "El jardin de senderos que se bifurcan" ("The Garden of Forking Paths").
@misc{dasher164,
title={Dasher version 1.6.4},
author={David J. Ward},
note={Dasher version 1.6.4, available from
\verb+www.inference.phy.cam.ac.uk/dasher/+, (2001)},
annoteyear={2001}
}
@phdthesis{teahan97modelling,
author = "W. Teahan",
title = "Modelling {E}nglish {T}ext",
school="Univ. of Waikato, N.Z.",
year = "1997" }
% Valiant's "PAC learning"
% http://yoda.cis.temple.edu:8080/UGAIWWW/lectures95/learn/pac/pac.html
% Valiant, L .G.: A Theory of the Learnable, CACM, 27(11):1134-1142,1984
@Article{valiant1984,
author = "L. G. Valiant",
title = "A Theory of the Learnable",
journal = "Communications of the ACM",
year = "1984",
volume=27,
number=11,
pages = "1134--1142"
}
% PPMD5 reference reformatted to meet the Nature citation-style requirements
@inproceedings{Teahan95a,
AUTHOR ="Teahan, W. J.",
TITLE ="Probability estimation for {PPM}",
booktitle={Proceedings {NZCSRSC}'95},
note={Probability estimation for {PPM}. Available from \verb+http://citeseer.nj.nec.com/+%
\verb+teahan95probability.html+},
annote={Available from \verb+http://www.cs.waikato.ac.nz/~wjt/+%
\verb+papers/NZCSRSC.ps.gz+},
year="1995",
}
% Theres a copy of the paper in /home/djw30/papers/NZCSRSC.ps
%
@misc{teahan95probability,
author = "W. J. Teahan",
title = "Probability estimation for {PPM}",
note = "Probability estimation for {PPM}. In {\em Proc. of the N.Z. Comp.
Sci. Research Students' Conf.,} available from
\verb+citeseer.nj.nec.com/teahan95probability.html+ (1995).",
annote="Univ. of Waikato, Hamilton, New Zealand.",
annoteyear = "1995",
url = "citeseer.nj.nec.com/teahan95probability.html" }
@inproceedings{moffat95,
author = "A. Moffat and R. M. Neal and I. H. Witten",
title = "Arithmetic Coding Revisited",
booktitle="Proceedings of the Data Compression Conference 1995",
publisher="IEEE Computer Society Press",
editor="J. A. Storer and M. Cohn",
address="Los Alamitos: CA",
pages = "202--211",
year = "1995"
}
@article{ moffat90,
author = "A. Moffat",
title = "Implementing the {PPM} Data Compression Scheme",
journal = "IEEE Transactions on Communications",
volume = "38",
year = "1990",
pages={1917--1921}
}