I'm currently a researcher at Inria in the Parietal Team. My work is on statistical machine learning, signal and image processing, optimization, scientific computing and software engineering with primary applications in brain functional imaging (MEG, EEG, fMRI). Before joining Inria, I was an assistant professor for 5 years at Telecom ParisTech in the signal processing and machine learning department, and before that I was at the Martinos Center for Biomedical Imaging at Harvard University in Boston. I am also an active member of the Center for Data Science at Université Paris-Saclay.

# Recent Publications

@article{ablin-etal:2017b,
  author        = {Ablin, P. and Cardoso, J. and Gramfort, A.},
  title         = {Faster {ICA} under orthogonal constraint},
  journal       = {ArXiv e-prints},
  year          = {2017},
  month         = nov,
  eprint        = {1711.10873},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ML},
  keywords      = {Statistics - Machine Learning},
  adsurl        = {http://adsabs.harvard.edu/abs/2017arXiv171110873A},
  adsnote       = {Provided by the SAO/NASA Astrophysics Data System},
}

@article{bekhti-etal:17,
  author        = {Bekhti, Y. and Lucka, F. and Salmon, J. and Gramfort, A.},
  title         = {A hierarchical {Bayesian} perspective on majorization-minimization for non-convex sparse regression: application to {M/EEG} source imaging},
  journal       = {ArXiv e-prints},
  year          = {2017},
  month         = oct,
  eprint        = {1710.08747},
  archiveprefix = {arXiv},
  primaryclass  = {stat.AP},
  keywords      = {Statistics - Applications},
  adsurl        = {http://adsabs.harvard.edu/abs/2017arXiv171008747B},
  adsnote       = {Provided by the SAO/NASA Astrophysics Data System},
}

@article{duprelatour-etal:2017b,
  author    = {{Dupré la Tour}, T. and Tallot, L. and Grabot, L. and Doyere, V. and van Wassenhove, V. and Grenier, Y. and Gramfort, A.},
  title     = {Non-linear auto-regressive models for cross-frequency coupling in neural time series},
  journal   = {PLOS Computational Biology},
  year      = {2017},
  month     = dec,
  volume    = {13},
  number    = {12},
  pages     = {1--32},
  publisher = {Public Library of Science},
  doi       = {10.1371/journal.pcbi.1005893},
  abstract  = {Author summary Neural oscillations synchronize information across brain areas at various anatomical and temporal scales. Of particular relevance, slow fluctuations of brain activity have been shown to affect high frequency neural activity, by regulating the excitability level of neural populations. Such cross-frequency-coupling can take several forms. In the most frequently observed type, the power of high frequency activity is time-locked to a specific phase of slow frequency oscillations, yielding phase-amplitude-coupling (PAC). Even when readily observed in neural recordings, such non-linear coupling is particularly challenging to formally characterize. Typically, neuroscientists use band-pass filtering and Hilbert transforms with ad-hoc correlations. Here, we explicitly address current limitations and propose an alternative probabilistic signal modeling approach, for which statistical inference is fast and well-posed. To statistically model PAC, we propose to use non-linear auto-regressive models which estimate the spectral modulation of a signal conditionally to a driving signal. This conditional spectral analysis enables easy model selection and clear hypothesis-testing by using the likelihood of a given model. We demonstrate the advantage of the model-based approach on three datasets acquired in rats and in humans. We further provide novel neuroscientific insights on previously reported PAC phenomena, capturing two mechanisms in PAC: influence of amplitude and directionality estimation.},
}

@inproceedings{schulz:hal-01633096,
  author      = {Schulz, M. and Varoquaux, G. and Gramfort, A. and Thirion, B. and Bzdok, D.},
  title       = {Label scarcity in biomedicine: Data-rich latent factor discovery enhances phenotype prediction},
  booktitle   = {Neural Information Processing Systems, Machine Learning in Health Workshop},
  year        = {2017},
  month       = dec,
  address     = {Long Beach, United States},
  url         = {https://hal.archives-ouvertes.fr/hal-01633096},
  hal_id      = {hal-01633096},
  hal_version = {v1},
}

@article{ndiaye-etal:17,
  author   = {Ndiaye, E. and Fercoq, O. and Gramfort, A. and Leclère, V. and Salmon, J.},
  title    = {Efficient Smoothed Concomitant {Lasso} Estimation for High Dimensional Regression},
  journal  = {Journal of Physics: Conference Series},
  year     = {2017},
  volume   = {904},
  number   = {1},
  pages    = {012006},
  url      = {http://stacks.iop.org/1742-6596/904/i=1/a=012006},
  abstract = {In high dimensional settings, sparse structures are crucial for efficiency, both in term of memory, computation and performance. It is customary to consider ℓ 1 penalty to enforce sparsity in such scenarios. Sparsity enforcing methods, the Lasso being a canonical example, are popular candidates to address high dimension. For efficiency, they rely on tuning a parameter trading data fitting versus sparsity. For the Lasso theory to hold this tuning parameter should be proportional to the noise level, yet the latter is often unknown in practice. A possible remedy is to jointly optimize over the regression parameter as well as over the noise level. This has been considered under several names in the literature: Scaled-Lasso, Square-root Lasso, Concomitant Lasso estimation for instance, and could be of interest for uncertainty quantification. In this work, after illustrating numerical difficulties for the Concomitant Lasso formulation, we propose a modification we coined Smoothed Concomitant Lasso, aimed at increasing numerical stability. We propose an efficient and accurate solver leading to a computational cost no more expensive than the one for the Lasso. We leverage on standard ingredients behind the success of fast Lasso solvers: a coordinate descent algorithm, combined with safe screening rules to achieve speed efficiency, by eliminating early irrelevant features.},
}

@inproceedings{8081206,
  author    = {Bekhti, Y. and Badeau, R. and Gramfort, A.},
  title     = {Hyperparameter estimation in maximum a posteriori regression using group sparsity with an application to brain imaging},
  booktitle = {2017 25th European Signal Processing Conference (EUSIPCO)},
  year      = {2017},
  pages     = {246--250},
  doi       = {10.23919/EUSIPCO.2017.8081206},
  keywords  = {Bayes methods;brain;convex programming;electroencephalography;inverse problems;iterative methods;maximum likelihood estimation;medical image processing;regression analysis;Bayesian inference;brain activations;brain imaging;faster algorithms;high-dimensional sparse synthesis models;hyperparameter estimation;inverse problem;nonconvex penalty;posteriori regression;recurrent problem;sparse regression models;Bayes methods;Brain modeling;Estimation;Europe;Inverse problems;Sensors;Signal processing},
}

@article{NisoGalan-etal:17,
  author    = {Niso Galan, J. and Gorgolewski, K. and Bock, E. and Brooks, T. and Flandin, G. and Gramfort, A. and Henson, R. and Jas, M. and Litvak, V. and Moreau, J. and Oostenveld, R. and Schoffelen, J. and Tadel, F. and Wexler, J. and Baillet, S.},
  title     = {{MEG-BIDS}: an extension to the {Brain Imaging Data Structure} for magnetoencephalography},
  journal   = {bioRxiv},
  year      = {2017},
  publisher = {Cold Spring Harbor Laboratory},
  doi       = {10.1101/172684},
  url       = {https://www.biorxiv.org/content/early/2017/08/08/172684},
  eprint    = {https://www.biorxiv.org/content/early/2017/08/08/172684.full.pdf},
  abstract  = {We present a significant extension of the Brain Imaging Data Structure (BIDS) to support the specific aspects of magnetoencephalography (MEG) data. MEG provides direct measurement of brain activity with millisecond temporal resolution and unique source imaging capabilities. So far, BIDS has provided a solution to structure the organization of magnetic resonance imaging (MRI) data, which nature and acquisition parameters are different. Despite the lack of standard data format for MEG, MEG-BIDS is a principled solution to store, organize and share the typically-large data volumes produced. It builds on BIDS for MRI, and therefore readily yields a multimodal data organization by construction. This is particularly valuable for the anatomical and functional registration of MEG source imaging with MRI. With MEG-BIDS and a growing range of software adopting the standard, the MEG community has a solution to minimize curation overheads, reduce data handling errors and optimize usage of computational resources for analytics. The standard also includes well-defined metadata, to facilitate future data harmonization and sharing efforts.},
}

# Short Bio

# Contact

**Email:** alexandre.gramfort@inria.fr

**Address:** Inria Saclay Île-de-France,
Bâtiment Alan Turing,
1 rue Honoré d'Estienne d'Orves,
Campus de l'École Polytechnique
91120 Palaiseau

# Software

- scikit-learn - A Python project for machine learning.
- openmeeg - C++ package for low-frequency bio-electromagnetism including the EEG/MEG forward problem. OpenMEEG implements the Symmetric BEM which has shown to provide very accurate solutions. Some features: parallel processing, Python Bindings, Matlab integration with Fieldtrip and BrainStorm.
- MNE - A complete package to process EEG and MEG data: forward and inverse problems (MNE, dSPM, MxNE), stats, time-frequency analysis.

More on my Github Page

# Team

### Engineers

### Post-docs

### PhD Students

- Yousra Bekhti (coadvised with Roland Badeau)
- Mainak Jas
- Tom Dupré La Tour (coadvised with Yves Grenier)
- Stanislas Chambon (coadvised with Gilles Wainrib)
- Pierre Ablin (coadvised with Jean-François Cardoso)
- Mathurin Massias (coadvised with Joseph Salmon)
- Hicham Janati (coadvised with Marco Cuturi)

### Alumni

- Albert Thomas (coadvised with Stéphan Clémençon) [PhD]
- Jaakko Leppäkangas [Engineer]
- Romain Laby (coadvised with François Roueff) (now at Criteo) [PhD]
- Thierry Guillemot (now at Rythm Inc.) [Engineer]
- Fabian Pedregosa (coadvised with Francis Bach) (now at UC Berkeley) [PhD]
- Michael Eickenberg (coadvised with Bertrand Thirion) (now at UC Berkeley) [PhD]
- Jair Montoya [Post Doc]
- Daniel Strohmeier (coadvised with Jens Haueisen) [PhD]
- Raghav Rajagopalan [Engineer]

# Positions

- PhD/Post-doc positions on machine learning and signal processing with applications in neuroimaging (MEG, EEG)

This list is fuzzy so please contact me directly for potential opportunities.

# Teaching

- **Optimization for Data Science** course in the Master program Data Science at Ecole Polytechnique: Covers the different algorithms to minimize the cost functions that come up in machine learning (first / second order methods, batch / accelerated / stochastic gradient methods, coordinate descent).
- **Data Camp** course in the Master program Data Science at Ecole Polytechnique: Purpose is to build a working predictive model on an applied scientific or industrial problem, but also to be able to formulate a data problem as a machine learning task.
- Course on Advanced Modeling and Analysis for Neuroimaging Data in the Master on biomedical engineering at Paris Descartes.

# News

- June 2017: I'll be at the Scipy Conference in Austin, Texas to give the scikit-learn tutorial and the talk on MNE in the neuroscience symposium.
- June 2017: I'll be visiting the University of Washington to give a talk on non-linear signal models for neural time-series on my way to the OHBM conference where I'll be presenting 2 posters.
- June 2017: Time for the annual scikit-learn code sprint in Paris!
- Mar. 2017: I'll attend the MNE code sprint at New York University Center for Data Science
- Mar. 2017: I'll be giving a talk on Statistical Learning and optimization for MRI data mining at Inria Grenoble Macaron workshop
- Mar. 2017: I'll be visiting the MEG lab and give an MNE tutorial at Université Libre de Bruxelles in Brussels
- Jan. 2017: I'll be giving a talk on inverse problems, optimization and machine learning at Learning in Astrophysics day
- Jan. 2017: I'll be giving an introductory talk on statistical machine learning for brain imaging at Karolinska Institute in Stockholm at the Machine learning for functional brain imaging workshop
- Oct. 2016: I'll be giving a talk on statistical machine learning and neuroscience at JSTAR Conf in Rennes
- Oct. 2016: I'll be giving a talk on optimal transport for neuroscience applications at Workshop Optimisation et Transport optimal en Imagerie
- Oct. 2016: I'll be giving talks on group analysis with MNE-Software and source localization at Biomag Conf. in Seoul, Korea
- Sept. 2016: I'll be a panel member at France is AI Startup Event at BPI France
- Sept. 2016: I'll be giving a talk about the Paris-Saclay Center for Data Science at the Data Science Game kick-off event at Microsoft France
- Aug. 2016: I'll be giving a talk on Gap Safe screening rules at MAS Conf. in Grenoble
- July 2016: I'll be one of the keynote speakers at CAP Conf. (Conférence Francophone pour l'apprentissage automatique)
- June 2016: I'll be giving two talks on teaching and doing anomaly detection with scikit-learn at PyData Paris
- June 2016: I'll be teaching MEG/EEG data analysis with MNE for 2 days at PRNI 2016 conf.
- May 2016: I'll be teaching MEG/EEG data analysis with MNE at Dalhousie University in Halifax, Canada
- Nov. 2015: I'll be giving a talk on the use of open source in machine learning at the Big Data Business Convention BDBC on the HEC Campus
- Nov. 2015: I'll be giving a talk on supervised learning on MEG/EEG signals at Paris Workshop on Decoding of Sound and Brain (slides pdf)
- Oct. 2015: I have been awarded an **ERC Starting Grant**! My project is called "Signal processing and Learning for Brain data (SLAB)"
- 22 Sept. 2015: I'll give a seminar on Brain Decoding using fMRI at the Donders Center for cognitive neuroscience.
- Sept. 2015: I'll be presenting our latest work on MEG/EEG inverse modeling at BACI conference in Utrecht.
- July. 2015: My work presented at IPMI conf. in Scotland got a runner up award !
- July. 2015: I'll be one of the keynote speakers at the machine learning in neuroscience workshop at ICML in Lille
- July. 2015: I'll present our work on speeding up the Lasso estimator using SAFE Screening rules at ICML in Lille. Video lecture
- July. 2015: I'll give an oral presentation on my work with G. Peyré and M. Cuturi at IPMI conf. in Scotland
- June. 2015: My work with D. Engemann and D. Strohmeier will be presented in oral presentations at PRNI conference in Stanford
- June. 2015: My work with my student Michael Eickenberg will be presented in an oral presentation at OHBM conference in Hawai
- May. 2015: Talk at Chalmers University "The impact of tools and modeling assumptions on neuroscience"
- Apr. 2015: I'll be giving a tutorial on Scikit-Learn for medical imaging at IEEE ISBI conference in NYC.
- 5 Dec. 2014: I'll be talking at Gipsa Lab at Workshop on challenges in multimodality about the problems and the benefits of combining MEG and EEG for source analysis.
- 5 and 12 Nov. 2014: MNE training session at Telecom ParisTech.
- 6 June. 2014: My student Daniel Strohmeier gets the Best Paper Award at PRNI conference for our work on the M/EEG inverse problem.
- 17 June. 2014: I'll be talking on the MNE project at the INCF Paris workshop
- 24 Aug. 2014: My symposium on supervised learning for M/EEG data analysis has been accepted at BIOMAG 2014
- 3 Sept. 2014: I'll give a talk at the workshop on Statistical Challenges in Neuroscience in Warwick UK
- 8 Jun. 2014: My work will be presented in 2 oral presentations at Human Brain Mapping conference.
- 14-16 May 2014: I'll be teaching MEG/EEG data analysis with MNE at MRC Lab in Cambridge UK
- 20 Jan. 2014: I'll be teaching MEG/EEG data analysis with MNE in Stockholm Karolinska Institute NatMEG data analysis workshop
- 4 Dec. 2013: MNE Training Session in Paris, ICM
- 23 Oct. 2013: I'll be at BrainHack in Paris
- 7 Oct. 2013: I'll be at CIMEC in Trento, Italy to teach MNE and Scikit-Learn
- 23 Sept. 2013: I'll be in Magdeburg, Germany to teach MNE for the Timely Workshop