{ "id": "1507.04783", "version": "v1", "published": "2015-07-16T22:09:36.000Z", "updated": "2015-07-16T22:09:36.000Z", "title": "Maximum entropy method: sampling bias", "authors": [ "Jorge Fernandez-de-Cossio", "Jorge Fernandez-de-Cossio Diaz" ], "categories": [ "cond-mat.stat-mech" ], "abstract": "Maximum entropy method is a constructive criterion for setting up a probability distribution maximally non-committal to missing information on the basis of partial knowledge, usually stated as constrains on expectation values of some functions. In connection with experiments sample average of those functions are used as surrogate of the expectation values. We address sampling bias in maximum entropy approaches with finite data sets without forcedly equating expectation values to corresponding experimental average values. Though we rise the approach in a general formulation, the equations are unfortunately complicated. We bring simple case examples, hopping clear but sufficient illustration of the concepts.", "revisions": [ { "version": "v1", "updated": "2015-07-16T22:09:36.000Z" } ], "analyses": { "keywords": [ "maximum entropy method", "sampling bias", "bring simple case examples", "corresponding experimental average values", "probability distribution maximally non-committal" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }