Katz, Ori; Lederman, Roy R.; Talmon, Ronen: Spectral Flow on the Manifold of SPD Matrices for Multimodal Data Processing. Technical Report, 2020 (arXiv: 2009.08062). Abstract | Links | BibTeX | Tags: Common variable, Computer Science - Machine Learning, Manifold Learning, Multiview, multimodal, SPD Matrices, Statistics - Machine Learning
@techreport{katz_spectral_2020,
  title      = {Spectral Flow on the Manifold of {SPD} Matrices for Multimodal Data Processing},
  author     = {Katz, Ori and Lederman, Roy R. and Talmon, Ronen},
  url        = {http://arxiv.org/abs/2009.08062},
  eprint     = {2009.08062},
  eprinttype = {arXiv},
  year       = {2020},
  date       = {2020-09-01},
  urldate    = {2020-11-25},
  abstract   = {In this paper, we consider data acquired by multimodal sensors capturing complementary aspects and features of a measured phenomenon. We focus on a scenario in which the measurements share mutual sources of variability but might also be contaminated by other measurement-specific sources such as interferences or noise. Our approach combines manifold learning, which is a class of nonlinear data-driven dimension reduction methods, with the well-known Riemannian geometry of symmetric and positive-definite (SPD) matrices. Manifold learning typically includes the spectral analysis of a kernel built from the measurements. Here, we take a different approach, utilizing the Riemannian geometry of the kernels. In particular, we study the way the spectrum of the kernels changes along geodesic paths on the manifold of SPD matrices. We show that this change enables us, in a purely unsupervised manner, to derive a compact, yet informative, description of the relations between the measurements, in terms of their underlying components. Based on this result, we present new algorithms for extracting the common latent components and for identifying common and measurement-specific components.},
  keywords   = {Common variable, Computer Science - Machine Learning, Manifold Learning, Multiview, multimodal, SPD Matrices, Statistics - Machine Learning},
  pubstate   = {published},
  tppubtype  = {techreport}
}
In this paper, we consider data acquired by multimodal sensors capturing complementary aspects and features of a measured phenomenon. We focus on a scenario in which the measurements share mutual sources of variability but might also be contaminated by other measurement-specific sources such as interferences or noise. Our approach combines manifold learning, which is a class of nonlinear data-driven dimension reduction methods, with the well-known Riemannian geometry of symmetric and positive-definite (SPD) matrices. Manifold learning typically includes the spectral analysis of a kernel built from the measurements. Here, we take a different approach, utilizing the Riemannian geometry of the kernels. In particular, we study the way the spectrum of the kernels changes along geodesic paths on the manifold of SPD matrices. We show that this change enables us, in a purely unsupervised manner, to derive a compact, yet informative, description of the relations between the measurements, in terms of their underlying components. Based on this result, we present new algorithms for extracting the common latent components and for identifying common and measurement-specific components.
Shnitzer, Tal; Lederman, Roy R.; Liu, Gi-Ren; Talmon, Ronen; Wu, Hau-Tieng: Diffusion operators for multimodal data analysis. In: Handbook of Numerical Analysis, 20, pp. 1-39, Elsevier, 2019, ISBN: 978-0-444-64140-3. Links | BibTeX | Tags: Alternating Diffusion, Book Chapter, Common variable, diffusion maps, Manifold Learning, Multiview, multimodal, Multimodal data, Sensor fusion, Shape differences
@incollection{shnitzer_diffusion_2019,
  title     = {Diffusion operators for multimodal data analysis},
  author    = {Shnitzer, Tal and Lederman, Roy R. and Liu, Gi-Ren and Talmon, Ronen and Wu, Hau-Tieng},
  url       = {https://linkinghub.elsevier.com/retrieve/pii/S1570865919300213},
  doi       = {10.1016/bs.hna.2019.07.008},
  isbn      = {978-0-444-64140-3},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2020-08-13},
  booktitle = {Handbook of Numerical Analysis},
  volume    = {20},
  pages     = {1--39},
  publisher = {Elsevier},
  keywords  = {Alternating Diffusion, Book Chapter, Common variable, diffusion maps, Manifold Learning, Multiview, multimodal, Multimodal data, Sensor fusion, Shape differences},
  pubstate  = {published},
  tppubtype = {incollection}
}

Lederman, Roy R.; Talmon, Ronen: Learning the geometry of common latent variables using alternating-diffusion. In: Applied and Computational Harmonic Analysis, 44 (3), pp. 509-536, 2018, ISSN: 1063-5203. Abstract | Links | BibTeX | Tags: Algorithms, Alternating Diffusion, Alternating-diffusion, Common variable, diffusion maps, Diffusion-maps, Multiview, multimodal, Multimodal analysis
@article{lederman_learning_2018,
  title     = {Learning the geometry of common latent variables using alternating-diffusion},
  author    = {Lederman, Roy R. and Talmon, Ronen},
  url       = {http://www.sciencedirect.com/science/article/pii/S1063520315001190},
  doi       = {10.1016/j.acha.2015.09.002},
  issn      = {1063-5203},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2020-08-13},
  journal   = {Applied and Computational Harmonic Analysis},
  volume    = {44},
  number    = {3},
  pages     = {509--536},
  abstract  = {One of the challenges in data analysis is to distinguish between different sources of variability manifested in data. In this paper, we consider the case of multiple sensors measuring the same physical phenomenon, such that the properties of the physical phenomenon are manifested as a hidden common source of variability (which we would like to extract), while each sensor has its own sensor-specific effects (hidden variables which we would like to suppress); the relations between the measurements and the hidden variables are unknown. We present a data-driven method based on alternating products of diffusion operators and show that it extracts the common source of variability. Moreover, we show that it extracts the common source of variability in a multi-sensor experiment as if it were a standard manifold learning algorithm used to analyze a simple single-sensor experiment, in which the common source of variability is the only source of variability.},
  keywords  = {Algorithms, Alternating Diffusion, Alternating-diffusion, Common variable, diffusion maps, Diffusion-maps, Multiview, multimodal, Multimodal analysis},
  pubstate  = {published},
  tppubtype = {article}
}
One of the challenges in data analysis is to distinguish between different sources of variability manifested in data. In this paper, we consider the case of multiple sensors measuring the same physical phenomenon, such that the properties of the physical phenomenon are manifested as a hidden common source of variability (which we would like to extract), while each sensor has its own sensor-specific effects (hidden variables which we would like to suppress); the relations between the measurements and the hidden variables are unknown. We present a data-driven method based on alternating products of diffusion operators and show that it extracts the common source of variability. Moreover, we show that it extracts the common source of variability in a multi-sensor experiment as if it were a standard manifold learning algorithm used to analyze a simple single-sensor experiment, in which the common source of variability is the only source of variability.
Shaham, Uri; Lederman, Roy R.: Learning by coincidence: Siamese networks and common variable learning. In: Pattern Recognition, 74, pp. 52-63, 2018, ISSN: 0031-3203. Links | BibTeX | Tags: Common variable, Deep Learning, Multiview, multimodal, Siamese networks
@article{shaham_learning_2018,
  title     = {Learning by coincidence: {Siamese} networks and common variable learning},
  author    = {Shaham, Uri and Lederman, Roy R.},
  url       = {https://linkinghub.elsevier.com/retrieve/pii/S0031320317303588},
  doi       = {10.1016/j.patcog.2017.09.015},
  issn      = {0031-3203},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2020-08-13},
  journal   = {Pattern Recognition},
  volume    = {74},
  pages     = {52--63},
  keywords  = {Common variable, Deep Learning, Multiview, multimodal, Siamese networks},
  pubstate  = {published},
  tppubtype = {article}
}

Shaham, Uri; Lederman, Roy R.: Common Variable Learning and Invariant Representation Learning using Siamese Neural Networks. Technical Report, 2015. Abstract | Links | BibTeX | Tags: Common variable, Deep Learning, Multiview
@techreport{shaham_common_2015,
  title      = {Common Variable Learning and Invariant Representation Learning using {Siamese} Neural Networks},
  author     = {Shaham, Uri and Lederman, Roy R.},
  url        = {https://arxiv.org/abs/1512.08806v3},
  eprint     = {1512.08806},
  eprinttype = {arXiv},
  year       = {2015},
  date       = {2015-12-01},
  urldate    = {2020-08-13},
  abstract   = {We consider the statistical problem of learning common source of variability
in data which are synchronously captured by multiple sensors, and demonstrate
that Siamese neural networks can be naturally applied to this problem. This
approach is useful in particular in exploratory, data-driven applications,
where neither a model nor label information is available. In recent years, many
researchers have successfully applied Siamese neural networks to obtain an
embedding of data which corresponds to a "semantic similarity". We present an
interpretation of this "semantic similarity" as learning of equivalence
classes. We discuss properties of the embedding obtained by Siamese networks
and provide empirical results that demonstrate the ability of Siamese networks
to learn common variability.},
  keywords   = {Common variable, Deep Learning, Multiview},
  pubstate   = {published},
  tppubtype  = {techreport}
}
We consider the statistical problem of learning common source of variability
in data which are synchronously captured by multiple sensors, and demonstrate
that Siamese neural networks can be naturally applied to this problem. This
approach is useful in particular in exploratory, data-driven applications,
where neither a model nor label information is available. In recent years, many
researchers have successfully applied Siamese neural networks to obtain an
embedding of data which corresponds to a "semantic similarity". We present an
interpretation of this "semantic similarity" as learning of equivalence
classes. We discuss properties of the embedding obtained by Siamese networks
and provide empirical results that demonstrate the ability of Siamese networks
to learn common variability. 
Lederman, Roy R.; Talmon, Ronen; Wu, Hau-Tieng; Lo, Yu-Lun; Coifman, Ronald R.: Alternating diffusion for common manifold learning with application to sleep stage assessment. In: 2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), pp. 5758-5762, 2015, ISSN: 2379-190X. Abstract | Links | BibTeX | Tags: Alternating Diffusion, Common variable, diffusion maps, Kernel, learning (artificial intelligence), Manifolds, multimodal, multimodal respiratory signals, multimodal signal processing, Physiology, Sensitivity, Sensor phenomena and characterization, signal processing, sleep, sleep stage assessment, standard manifold learning method, time series
@inproceedings{lederman_alternating_2015,
  title     = {Alternating diffusion for common manifold learning with application to sleep stage assessment},
  author    = {Lederman, Roy R. and Talmon, Ronen and Wu, Hau-Tieng and Lo, Yu-Lun and Coifman, Ronald R.},
  doi       = {10.1109/ICASSP.2015.7179075},
  issn      = {2379-190X},
  year      = {2015},
  date      = {2015-01-01},
  booktitle = {2015 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  pages     = {5758--5762},
  abstract  = {In this paper, we address the problem of multimodal signal processing and present a manifold learning method to extract the common source of variability from multiple measurements. This method is based on alternating-diffusion and is particularly adapted to time series. We show that the common source of variability is extracted from multiple sensors as if it were the only source of variability, extracted by a standard manifold learning method from a single sensor, without the influence of the sensor-specific variables. In addition, we present application to sleep stage assessment. We demonstrate that, indeed, through alternating-diffusion, the sleep information hidden inside multimodal respiratory signals can be better captured compared to single-modal methods.},
  keywords  = {Alternating Diffusion, Common variable, diffusion maps, Kernel, learning (artificial intelligence), Manifolds, multimodal, multimodal respiratory signals, multimodal signal processing, Physiology, Sensitivity, Sensor phenomena and characterization, signal processing, sleep, sleep stage assessment, standard manifold learning method, time series},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
In this paper, we address the problem of multimodal signal processing and present a manifold learning method to extract the common source of variability from multiple measurements. This method is based on alternating-diffusion and is particularly adapted to time series. We show that the common source of variability is extracted from multiple sensors as if it were the only source of variability, extracted by a standard manifold learning method from a single sensor, without the influence of the sensor-specific variables. In addition, we present application to sleep stage assessment. We demonstrate that, indeed, through alternating-diffusion, the sleep information hidden inside multimodal respiratory signals can be better captured compared to single-modal methods.