Brofos, James A.; Lederman, Roy R. Magnetic Manifold Hamiltonian Monte Carlo. Technical Report, 2020. arXiv: 2010.07753. URL: http://arxiv.org/abs/2010.07753. Tags: Algorithms, Computer Science - Machine Learning, HMC, Manifolds, MCMC, Statistics - Machine Learning.
Abstract: Markov chain Monte Carlo (MCMC) algorithms offer various strategies for sampling; the Hamiltonian Monte Carlo (HMC) family of samplers comprises MCMC algorithms that often exhibit improved mixing properties. The recently introduced magnetic HMC, a generalization of HMC motivated by the physics of particles influenced by magnetic field forces, has been demonstrated to improve the performance of HMC. In many applications, one wishes to sample from a distribution restricted to a constrained set, often manifested as an embedded manifold (for example, the surface of a sphere). We introduce magnetic manifold HMC, an HMC algorithm on embedded manifolds motivated by the physics of particles constrained to a manifold and moving under magnetic field forces. We discuss the theoretical properties of magnetic Hamiltonian dynamics on manifolds, and introduce a reversible and symplectic integrator for the HMC updates. We demonstrate that magnetic manifold HMC produces favorable sampling behaviors relative to the canonical variant of manifold-constrained HMC.
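For context, the sketch below shows the canonical flat-space HMC update (leapfrog integration plus a Metropolis correction) that magnetic manifold HMC generalizes. It is a minimal illustration only: the step size, trajectory length, and function names are assumptions, and this is the baseline sampler, not the manifold-constrained magnetic integrator introduced in the report.

```python
import numpy as np

def hmc_step(q, log_prob, grad_log_prob, eps=0.1, n_leapfrog=20, rng=np.random):
    """One canonical (flat-space) HMC update: leapfrog integration of the
    Hamiltonian dynamics followed by a Metropolis accept/reject step."""
    p = rng.standard_normal(q.shape)                 # resample Gaussian momentum
    q_new, p_new = q.copy(), p.copy()
    p_new += 0.5 * eps * grad_log_prob(q_new)        # half momentum step
    for _ in range(n_leapfrog - 1):
        q_new += eps * p_new                         # full position step
        p_new += eps * grad_log_prob(q_new)          # full momentum step
    q_new += eps * p_new
    p_new += 0.5 * eps * grad_log_prob(q_new)        # final half momentum step
    h_old = -log_prob(q) + 0.5 * np.dot(p, p)        # total energy before
    h_new = -log_prob(q_new) + 0.5 * np.dot(p_new, p_new)
    return q_new if rng.uniform() < np.exp(h_old - h_new) else q
```

Magnetic manifold HMC modifies both the dynamics (a magnetic field term) and the state space (an embedded constraint manifold), and replaces this leapfrog with the reversible, symplectic integrator described in the report.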
Bandeira, Afonso S.; Chen, Yutong; Lederman, Roy R.; Singer, Amit. Non-unique games over compact groups and orientation estimation in cryo-EM. Inverse Problems, 36(6), 064002, 2020. ISSN: 0266-5611, 1361-6420. DOI: 10.1088/1361-6420/ab7d2c. URL: https://iopscience.iop.org/article/10.1088/1361-6420/ab7d2c. Tags: Algorithms, cryo-EM, Non-unique games, Representation Theory.
Brofos, James A.; Lederman, Roy R. Non-Canonical Hamiltonian Monte Carlo. Technical Report, 2020. arXiv: 2008.08191. URL: http://arxiv.org/abs/2008.08191. Tags: Algorithms, Computer Science - Machine Learning, HMC, MCMC, Statistics - Machine Learning.
Abstract: Hamiltonian Monte Carlo is typically based on the assumption of an underlying canonical symplectic structure. Numerical integrators designed for the canonical structure are incompatible with motion generated by non-canonical dynamics. These non-canonical dynamics, motivated by examples in physics and symplectic geometry, correspond to techniques such as preconditioning which are routinely used to improve algorithmic performance. Indeed, recently, a special case of non-canonical structure, magnetic Hamiltonian Monte Carlo, was demonstrated to provide advantageous sampling properties. We present a framework for Hamiltonian Monte Carlo using non-canonical symplectic structures. Our experimental results demonstrate sampling advantages associated with Hamiltonian Monte Carlo with non-canonical structure. To summarize our contributions: (i) we develop non-canonical HMC from foundations in symplectic geometry; (ii) we construct an HMC procedure using implicit integration that satisfies detailed balance; (iii) we propose to accelerate the sampling using an approximate explicit methodology; (iv) we study two novel, randomly-generated non-canonical structures: magnetic momentum and the coupled magnet structure, with implicit and explicit integration.
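The magnetic special case mentioned in the abstract can be illustrated with a simple splitting integrator for the flat-space dynamics dq/dt = p, dp/dt = ∇ log π(q) + G p. The sketch below assumes an antisymmetric, invertible field matrix G and alternates half momentum kicks with the exact flow of the linear part; it is only an illustration of that special case, not the report's general non-canonical construction, which uses implicit and approximate explicit integration.

```python
import numpy as np
from scipy.linalg import expm

def magnetic_leapfrog(q, p, grad_log_prob, G, eps=0.1, n_steps=20):
    """Splitting integrator for dq/dt = p, dp/dt = grad_log_prob(q) + G @ p,
    assuming G is antisymmetric and invertible (illustrative sketch only)."""
    R = expm(eps * G)                              # exact rotation of the momentum
    M = np.linalg.solve(G, R - np.eye(len(q)))     # G^{-1} (expm(eps*G) - I)
    for _ in range(n_steps):
        p = p + 0.5 * eps * grad_log_prob(q)       # half kick
        q = q + M @ p                              # exact drift under the field term
        p = R @ p                                  # exact rotation under the field term
        p = p + 0.5 * eps * grad_log_prob(q)       # half kick
    return q, p
```

A full sampler would wrap such an integrator in a Metropolis correction, as in the standard HMC case.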
Lederman, Roy R.; Singer, Amit. A representation theory perspective on simultaneous alignment and classification. Applied and Computational Harmonic Analysis, 2019. ISSN: 1063-5203. DOI: 10.1016/j.acha.2019.05.005. URL: https://linkinghub.elsevier.com/retrieve/pii/S1063520319301034. Tags: Algorithms, cryo-EM, Non-unique games, Representation Theory.
Lederman, Roy R.; Talmon, Ronen. Learning the geometry of common latent variables using alternating-diffusion. Applied and Computational Harmonic Analysis, 44(3), pp. 509–536, 2018. ISSN: 1063-5203. DOI: 10.1016/j.acha.2015.09.002. URL: http://www.sciencedirect.com/science/article/pii/S1063520315001190. Tags: Algorithms, Alternating Diffusion, Common variable, Diffusion maps, Multi-view, Multimodal analysis.
Abstract: One of the challenges in data analysis is to distinguish between different sources of variability manifested in data. In this paper, we consider the case of multiple sensors measuring the same physical phenomenon, such that the properties of the physical phenomenon are manifested as a hidden common source of variability (which we would like to extract), while each sensor has its own sensor-specific effects (hidden variables which we would like to suppress); the relations between the measurements and the hidden variables are unknown. We present a data-driven method based on alternating products of diffusion operators and show that it extracts the common source of variability. Moreover, we show that it extracts the common source of variability in a multi-sensor experiment as if it were a standard manifold learning algorithm used to analyze a simple single-sensor experiment, in which the common source of variability is the only source of variability.
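A minimal NumPy sketch of the alternating-diffusion idea described in the abstract: build a row-stochastic diffusion operator for each sensor from a Gaussian affinity kernel, take their product, and embed with the leading nontrivial eigenvectors of that product. The kernel choice, the bandwidths eps1 and eps2, the function names, and the use of a single alternating product are illustrative assumptions, not the paper's reference implementation.

```python
import numpy as np

def diffusion_operator(X, eps):
    """Row-stochastic diffusion operator from a Gaussian affinity kernel."""
    d2 = np.sum((X[:, None, :] - X[None, :, :]) ** 2, axis=-1)   # pairwise squared distances
    W = np.exp(-d2 / eps)
    return W / W.sum(axis=1, keepdims=True)

def alternating_diffusion_embedding(X1, X2, eps1, eps2, n_components=2):
    """Embed the common source of variability of two co-registered sensors
    using one alternating product of their diffusion operators."""
    K = diffusion_operator(X1, eps1) @ diffusion_operator(X2, eps2)   # alternating product
    vals, vecs = np.linalg.eig(K)                  # K is not symmetric in general
    order = np.argsort(-np.abs(vals))
    # Skip the trivial constant eigenvector; keep the next n_components coordinates.
    return np.real(vecs[:, order[1:n_components + 1]])
```

Here X1 and X2 are (n_samples, n_features) arrays of simultaneous measurements of the same n_samples by the two sensors.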
Lederman, Roy R. Numerical Algorithms for the Computation of Generalized Prolate Spheroidal Functions. Technical Report, 2017. arXiv: 1710.02874. URL: https://arxiv.org/abs/1710.02874v1. Tags: Algorithms, cryo-EM, Fourier Transform, Numerical Analysis, Prolate, Slepian, Software.
Abstract: Generalized Prolate Spheroidal Functions (GPSFs) are the eigenfunctions of the truncated Fourier transform, restricted to D-dimensional balls in the spatial domain and frequency domain. Despite their useful properties in many applications, GPSFs are often replaced by crude approximations. The purpose of this paper is to review the elements of computing GPSFs and associated eigenvalues. This paper is accompanied by open-source code.
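To make the defining eigenproblem concrete, the sketch below discretizes the one-dimensional truncated Fourier transform F[phi](x) = ∫_{-1}^{1} e^{i c x t} phi(t) dt on a Gauss-Legendre grid and computes a few eigenfunctions (the classical prolate spheroidal wave functions). This direct discretization is precisely the kind of crude approach the report improves upon and is not one of its numerically careful algorithms; the grid size, bandlimit c, and function name are assumptions.

```python
import numpy as np

def prolate_1d(c, n=200, n_funcs=4):
    """Eigenfunctions of the 1-D truncated Fourier transform
    F[phi](x) = int_{-1}^{1} exp(1j*c*x*t) phi(t) dt, via direct discretization."""
    x, w = np.polynomial.legendre.leggauss(n)        # quadrature nodes/weights on [-1, 1]
    A = np.exp(1j * c * np.outer(x, x)) * w[None, :] # discretized integral operator
    s = np.sqrt(w)
    B = (s[:, None] * A) / s[None, :]                # symmetrize with sqrt-weights
    vals, vecs = np.linalg.eig(B)
    order = np.argsort(-np.abs(vals))
    phi = vecs[:, order[:n_funcs]] / s[:, None]      # undo the weighting
    return x, np.abs(vals[order[:n_funcs]]), phi
```

The eigenvalue magnitudes decay sharply after roughly 2c/π of them, a classical property of prolate spheroidal wave functions.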
Lederman, Roy R.; Rokhlin, V. On the Analytical and Numerical Properties of the Truncated Laplace Transform. Part II. SIAM Journal on Numerical Analysis, 54(2), pp. 665–687, 2016. ISSN: 0036-1429, 1095-7170. DOI: 10.1137/15M1028583. URL: http://epubs.siam.org/doi/10.1137/15M1028583. Tags: Algorithms, Laplace Transform, Numerical Analysis.
Lederman, Roy R.; Rokhlin, V. On the Analytical and Numerical Properties of the Truncated Laplace Transform I. SIAM Journal on Numerical Analysis, 53(3), pp. 1214–1235, 2015. ISSN: 0036-1429, 1095-7170. DOI: 10.1137/140990681. URL: http://epubs.siam.org/doi/10.1137/140990681. Tags: Algorithms, Laplace Transform, Numerical Analysis.
Lederman, Roy R.; Talmon, Ronen. Common Manifold Learning Using Alternating-Diffusion. Technical Report YALEU/DCS/TR-1497, Yale CS, 2014, 42 pp. URL: https://cpsc.yale.edu/sites/default/files/files/tr1497.pdf. Tags: AD, Algorithms, Alternating Diffusion, Manifold Learning.
Lederman, Roy R. On the Analytical and Numerical Properties of the Truncated Laplace Transform. Technical Report YALEU/DCS/TR-1490, Yale CS, 2014, 82 pp. Tags: Algorithms, Laplace Transform, Numerical Analysis.
Lederman, Roy R. A permutations-based algorithm for fast alignment of long paired-end reads. Technical Report YALEU/DCS/TR-1474, Yale CS, 2013, 11 pp. Tags: Algorithms, DNA sequencing, Fast algorithms, Randomized algorithms, Sequencing.
Lederman, Roy R. Homopolymer Length Filters. Technical Report YALEU/DCS/TR-1465, Yale CS, 2012, 12 pp. Tags: Algorithms, DNA sequencing, Sequence Alignment, Sequencing.