Recent Publications

@article{ablin-etal:2017b,
 adsurl = {http://adsabs.harvard.edu/abs/2017arXiv171110873A},
 archiveprefix = {arXiv},
 author = {Ablin, P. and Cardoso, J. and Gramfort, A.},
 eprint = {1711.10873},
 journal = {ArXiv e-prints},
 keywords = {Statistics - Machine Learning},
 month = {nov},
 primaryclass = {stat.ML},
 title = {Faster ICA under orthogonal constraint},
 year = {2017}
}

@article{bekhti-etal:17,
 adsurl = {http://adsabs.harvard.edu/abs/2017arXiv171008747B},
 archiveprefix = {arXiv},
 author = {Bekhti, Y. and Lucka, F. and Salmon, J. and Gramfort, A.},
 eprint = {1710.08747},
 journal = {ArXiv e-prints},
 keywords = {Statistics - Applications},
 month = {oct},
 primaryclass = {stat.AP},
 title = {A hierarchical Bayesian perspective on majorization-minimization for non-convex sparse regression: application to M/EEG source imaging},
 year = {2017}
}

@article{duprelatour-etal:2017b,
 abstract = {Author summary Neural oscillations synchronize information across brain areas at various anatomical and temporal scales. Of particular relevance, slow fluctuations of brain activity have been shown to affect high frequency neural activity, by regulating the excitability level of neural populations. Such cross-frequency-coupling can take several forms. In the most frequently observed type, the power of high frequency activity is time-locked to a specific phase of slow frequency oscillations, yielding phase-amplitude-coupling (PAC). Even when readily observed in neural recordings, such non-linear coupling is particularly challenging to formally characterize. Typically, neuroscientists use band-pass filtering and Hilbert transforms with ad-hoc correlations. Here, we explicitly address current limitations and propose an alternative probabilistic signal modeling approach, for which statistical inference is fast and well-posed. To statistically model PAC, we propose to use non-linear auto-regressive models which estimate the spectral modulation of a signal conditionally to a driving signal. This conditional spectral analysis enables easy model selection and clear hypothesis-testing by using the likelihood of a given model. We demonstrate the advantage of the model-based approach on three datasets acquired in rats and in humans. We further provide novel neuroscientific insights on previously reported PAC phenomena, capturing two mechanisms in PAC: influence of amplitude and directionality estimation.},
 author = {Dupré la Tour, T. and Tallot, L. and Grabot, L. and Doyère, V. and van Wassenhove, V. and Grenier, Y. and Gramfort, A.},
 doi = {10.1371/journal.pcbi.1005893},
 journal = {PLOS Computational Biology},
 link = {https://doi.org/10.1371/journal.pcbi.1005893},
 month = {dec},
 number = {12},
 pages = {1-32},
 publisher = {Public Library of Science},
 title = {Non-linear auto-regressive models for cross-frequency coupling in neural time series},
 volume = {13},
 year = {2017}
}
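
The abstract above contrasts its model-based approach with the conventional band-pass-filter-plus-Hilbert way of quantifying phase-amplitude coupling. As a point of reference, here is a minimal NumPy/SciPy sketch of that conventional estimate (a mean-vector-length coupling index); the frequency bands, sampling rate and synthetic signal are illustrative choices, not the paper's method.

    # Conventional PAC estimate via band-pass filtering and Hilbert transforms,
    # i.e. the baseline approach the paper's model-based alternative is compared against.
    import numpy as np
    from scipy.signal import butter, filtfilt, hilbert

    def bandpass(x, low, high, sfreq, order=4):
        """Zero-phase Butterworth band-pass filter."""
        b, a = butter(order, [low / (sfreq / 2), high / (sfreq / 2)], btype="band")
        return filtfilt(b, a, x)

    def modulation_index(x, sfreq, phase_band=(3.0, 5.0), amp_band=(60.0, 90.0)):
        """Mean-vector-length estimate of phase-amplitude coupling."""
        phase = np.angle(hilbert(bandpass(x, *phase_band, sfreq)))  # slow phase
        amp = np.abs(hilbert(bandpass(x, *amp_band, sfreq)))        # fast amplitude
        return np.abs(np.mean(amp * np.exp(1j * phase))) / np.mean(amp)

    # Toy signal whose 80 Hz amplitude is locked to the phase of a 4 Hz rhythm.
    sfreq = 1000.0
    t = np.arange(0, 10, 1 / sfreq)
    slow = np.sin(2 * np.pi * 4 * t)
    fast = (1 + slow) * np.sin(2 * np.pi * 80 * t)
    x = slow + 0.5 * fast + 0.1 * np.random.randn(t.size)
    print(modulation_index(x, sfreq))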

@inproceedings{schulz:hal-01633096,
 address = {Long Beach, United States},
 author = {Schulz, M. and Varoquaux, G. and Gramfort, A. and Thirion, B. and Bzdok, D.},
 booktitle = {Neural Information Processing Systems, Machine Learning in Health Workshop},
 hal_id = {hal-01633096},
 hal_version = {v1},
 link = {https://hal.archives-ouvertes.fr/hal-01633096},
 month = {dec},
 title = {Label scarcity in biomedicine: Data-rich latent factor discovery enhances phenotype prediction},
 year = {2017}
}

@article{ndiaye-etal:17,
 abstract = {In high dimensional settings, sparse structures are crucial for efficiency, both in terms of memory, computation and performance. It is customary to consider an ℓ1 penalty to enforce sparsity in such scenarios. Sparsity enforcing methods, the Lasso being a canonical example, are popular candidates to address high dimension. For efficiency, they rely on tuning a parameter trading data fitting versus sparsity. For the Lasso theory to hold, this tuning parameter should be proportional to the noise level, yet the latter is often unknown in practice. A possible remedy is to jointly optimize over the regression parameter as well as over the noise level. This has been considered under several names in the literature: Scaled-Lasso, Square-root Lasso and Concomitant Lasso estimation, for instance, and could be of interest for uncertainty quantification. In this work, after illustrating numerical difficulties for the Concomitant Lasso formulation, we propose a modification, coined the Smoothed Concomitant Lasso, aimed at increasing numerical stability. We propose an efficient and accurate solver leading to a computational cost no more expensive than the one for the Lasso. We leverage standard ingredients behind the success of fast Lasso solvers: a coordinate descent algorithm, combined with safe screening rules to achieve speed efficiency by eliminating irrelevant features early.},
 author = {Ndiaye, E. and Fercoq, O. and Gramfort, A. and Leclère, V. and Salmon, J.},
 journal = {Journal of Physics: Conference Series},
 link = {http://stacks.iop.org/1742-6596/904/i=1/a=012006},
 number = {1},
 pages = {012006},
 title = {Efficient Smoothed Concomitant Lasso Estimation for High Dimensional Regression},
 volume = {904},
 year = {2017}
}
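
For readers of this abstract, the joint objective usually written for the concomitant Lasso, and the smoothed variant obtained by bounding the noise level from below, can be sketched as follows; this is the standard formulation from the literature, and the exact constants or normalization may differ from the paper.

    % Concomitant Lasso: jointly estimate the coefficients beta and the noise level sigma.
    \min_{\beta \in \mathbb{R}^p,\; \sigma > 0}
        \frac{\|y - X\beta\|_2^2}{2 n \sigma} + \frac{\sigma}{2} + \lambda \|\beta\|_1
    % Smoothed Concomitant Lasso: add a lower bound sigma >= sigma_0 > 0 on the noise
    % level, which removes the numerical instability as sigma approaches zero.
    \min_{\beta \in \mathbb{R}^p,\; \sigma \ge \sigma_0}
        \frac{\|y - X\beta\|_2^2}{2 n \sigma} + \frac{\sigma}{2} + \lambda \|\beta\|_1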

@inproceedings{8081206,
 author = {Bekhti, Y. and Badeau, R. and Gramfort, A.},
 booktitle = {2017 25th European Signal Processing Conference (EUSIPCO)},
 doi = {10.23919/EUSIPCO.2017.8081206},
 pages = {246-250},
 title = {Hyperparameter estimation in maximum a posteriori regression using group sparsity with an application to brain imaging},
 year = {2017}
}

@article{NisoGalan-etal:17,
 abstract = {We present a significant extension of the Brain Imaging Data Structure (BIDS) to support the specific aspects of magnetoencephalography (MEG) data. MEG provides direct measurement of brain activity with millisecond temporal resolution and unique source imaging capabilities. So far, BIDS has provided a solution to structure the organization of magnetic resonance imaging (MRI) data, whose nature and acquisition parameters are different. Despite the lack of a standard data format for MEG, MEG-BIDS is a principled solution to store, organize and share the typically large data volumes produced. It builds on BIDS for MRI, and therefore readily yields a multimodal data organization by construction. This is particularly valuable for the anatomical and functional registration of MEG source imaging with MRI. With MEG-BIDS and a growing range of software adopting the standard, the MEG community has a solution to minimize curation overheads, reduce data handling errors and optimize usage of computational resources for analytics. The standard also includes well-defined metadata to facilitate future data harmonization and sharing efforts.},
 author = {Niso Galan, J. and Gorgolewski, K. and Bock, E. and Brooks, T. and Flandin, G. and Gramfort, A. and Henson, R. and Jas, M. and Litvak, V. and Moreau, J. and Oostenveld, R. and Schoffelen, J. and Tadel, F. and Wexler, J. and Baillet, S.},
 doi = {10.1101/172684},
 eprint = {https://www.biorxiv.org/content/early/2017/08/08/172684.full.pdf},
 journal = {bioRxiv},
 link = {https://www.biorxiv.org/content/early/2017/08/08/172684},
 publisher = {Cold Spring Harbor Laboratory},
 title = {MEG-BIDS: an extension to the Brain Imaging Data Structure for magnetoencephalography},
 year = {2017}
}
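
To make the kind of organization described in this abstract concrete, here is a small, purely illustrative Python sketch of a BIDS-style layout for one MEG recording; the directory and file names are hypothetical placeholders, and the authoritative rules are in the BIDS/MEG-BIDS specification, not in this sketch.

    # Illustrative BIDS-style layout for a single subject's MEG recording.
    # All names below are placeholders; consult the BIDS specification for the
    # actual required files, entities and sidecar metadata.
    from pathlib import Path

    root = Path("my_bids_dataset")                 # dataset root (placeholder)
    meg_dir = root / "sub-01" / "ses-01" / "meg"   # one subject, one session
    layout = [
        root / "dataset_description.json",               # dataset-level metadata
        root / "participants.tsv",                        # one row per participant
        meg_dir / "sub-01_ses-01_task-rest_meg.fif",      # raw MEG recording
        meg_dir / "sub-01_ses-01_task-rest_meg.json",     # acquisition metadata sidecar
        meg_dir / "sub-01_ses-01_task-rest_channels.tsv", # channel descriptions
    ]
    for path in layout:
        print(path)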

Full list of publications

Short Bio

I am currently a researcher at Inria in the Parietal Team. My work covers statistical machine learning, signal and image processing, optimization, scientific computing and software engineering, with primary applications in functional brain imaging (MEG, EEG, fMRI). Before joining Inria, I was an assistant professor for five years in the signal processing and machine learning department at Telecom ParisTech, and before that I was at the Martinos Center for Biomedical Imaging at Harvard University in Boston. I am also an active member of the Center for Data Science at Université Paris-Saclay.

Contact

Email: alexandre.gramfort@inria.fr

Address: Inria Saclay Île-de-France, Bâtiment Alan Turing, 1 rue Honoré d'Estienne d'Orves, Campus de l'École Polytechnique 91120 Palaiseau

Software

  • scikit-learn - A Python project for machine learning.
  • openmeeg - A C++ package for low-frequency bio-electromagnetism, including the EEG/MEG forward problem. OpenMEEG implements the symmetric BEM, which has been shown to provide very accurate solutions. Some features: parallel processing, Python bindings, and MATLAB integration with FieldTrip and Brainstorm.
  • MNE - A complete package to process EEG and MEG data: forward and inverse problems (MNE, dSPM, MxNE), statistics, and time-frequency analysis (a minimal usage sketch follows this list).
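
As a quick illustration of the MNE workflow mentioned above, here is a minimal MNE-Python sketch going from a raw recording to an averaged evoked response; the file name, event id and time window are placeholders, and the recording is assumed to contain a stimulus channel.

    # Minimal MNE-Python sketch: read raw data, filter, epoch around events,
    # and average to an evoked response. File name and parameters are placeholders.
    import mne

    raw = mne.io.read_raw_fif("sample_raw.fif", preload=True)  # placeholder path
    raw.filter(1.0, 40.0)                                       # band-pass 1-40 Hz
    events = mne.find_events(raw)                               # assumes a stim channel
    epochs = mne.Epochs(raw, events, event_id=1, tmin=-0.2, tmax=0.5,
                        baseline=(None, 0), preload=True)
    evoked = epochs.average()                                   # evoked response
    evoked.plot()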

More on my GitHub page

Team

Engineers

Post-docs

PhD Students

Alumni

Positions

  • PhD/Post-doc positions on machine learning and signal processing with applications in neuroimaging (MEG, EEG)

This list is not exhaustive, so please contact me directly about potential opportunities.

Teaching

News