ERC Starting Grant:
Signal and Learning Applied to Brain data (SLAB)

Summary: Understanding how the brain works in healthy and pathological conditions is considered one of the major challenges of the 21st century. After the first electroencephalography (EEG) measurements in 1929, the 1990s saw the birth of modern functional brain imaging with the first functional MRI (fMRI) and full head magnetoencephalography (MEG) systems. By offering unique noninvasive insights into the living brain, imaging has revolutionized both clinical and cognitive neuroscience over the last twenty years. After pioneering breakthroughs in physics and engineering, the field of neuroscience now faces two major challenges. The size of the datasets keeps growing with ambitious projects such as the Human Connectome Project (HCP), which will release terabytes of data. The answers to current neuroscience questions are limited by the complexity of the observed signals: non-stationarity, high noise levels, heterogeneity of sensors, and the lack of accurate models for the signals. SLAB will provide the next generation of models and algorithms for mining electrophysiology signals, which offer unique ways to image the brain at a millisecond time scale. SLAB will develop dedicated machine learning and statistical signal processing methods and favor the emergence of new challenges for these fields, focusing on five open problems: 1) source localization with M/EEG for brain imaging at high temporal resolution; 2) representation learning from multivariate (M/EEG) signals to boost statistical power and reduce acquisition costs; 3) fusion of heterogeneous sensors to improve spatiotemporal resolution; 4) modeling of non-stationary spectral interactions to identify functional coupling between neural ensembles; 5) development of algorithms tractable on large datasets and easy to use by non-experts. SLAB aims to strengthen the mathematical and computational foundations of neuroimaging data analysis. The methods developed will have applications across fields (e.g.
computational biology, astronomy, econometrics). Yet, the primary users of the technologies developed will be in the cognitive and clinical neuroscience community. The tools and high-quality open software produced in SLAB will facilitate the analysis of electrophysiology data, offering new perspectives to understand how the brain works at a mesoscale, and for clinical applications (epilepsy, autism, essential tremor, sleep disorders).

Publications related to the SLAB Project:

@article{2019arXiv190204812J,
 adsnote = {Provided by the SAO/NASA Astrophysics Data System},
 adsurl = {https://ui.adsabs.harvard.edu/abs/2019arXiv190204812J},
 archiveprefix = {arXiv},
 author = {Janati, H. and Bazeille, T. and Thirion, B. and Cuturi, M. and Gramfort, A.},
 eprint = {1902.04812},
 journal = {arXiv e-prints},
 keywords = {Statistics - Machine Learning, Computer Science - Machine Learning},
 month = feb,
 pdf = {https://arxiv.org/pdf/1902.04812.pdf},
 primaryclass = {stat.ML},
 title = {Group level {MEG}/{EEG} source imaging via optimal transport: minimum {Wasserstein} estimates},
 year = {2019}
}

@article{2019arXiv190202509B,
 adsnote = {Provided by the SAO/NASA Astrophysics Data System},
 adsurl = {https://ui.adsabs.harvard.edu/abs/2019arXiv190202509B},
 archiveprefix = {arXiv},
 author = {Bertrand, Q. and Massias, M. and Gramfort, A. and Salmon, J.},
 eprint = {1902.02509},
 journal = {arXiv e-prints},
 keywords = {Statistics - Machine Learning, Computer Science - Machine Learning, Mathematics - Optimization and Control, Statistics - Applications},
 month = feb,
 pdf = {https://arxiv.org/pdf/1902.02509.pdf},
 primaryclass = {stat.ML},
 title = {Concomitant {Lasso} with Repetitions ({CLaR}): beyond averaging multiple realizations of heteroscedastic noise},
 year = {2019}
}

@article{2019arXiv190109235M,
 adsnote = {Provided by the SAO/NASA Astrophysics Data System},
 adsurl = {https://ui.adsabs.harvard.edu/abs/2019arXiv190109235M},
 archiveprefix = {arXiv},
 author = {Moreau, T. and Gramfort, A.},
 eprint = {1901.09235},
 journal = {arXiv e-prints},
 keywords = {Computer Science - Machine Learning, Computer Science - Distributed, Parallel, and Cluster Computing, Statistics - Machine Learning},
 month = jan,
 pdf = {https://arxiv.org/pdf/1901.09235.pdf},
 primaryclass = {cs.LG},
 title = {Distributed Convolutional Dictionary Learning ({DiCoDiLe}): Pattern Discovery in Large Images and Signals},
 year = {2019}
}

@inproceedings{ablin-etal:18c,
 author = {Ablin, P. and Cardoso, J. and Gramfort, A.},
 booktitle = {European Symposium on Artificial Neural Networks, Computational Intelligence and Machine Learning (ESANN)},
 month = apr,
 pdf = {https://hal.archives-ouvertes.fr/hal-01936887/file/main.pdf},
 title = {Beyond {Pham}'s algorithm for joint diagonalization},
 url = {https://hal.archives-ouvertes.fr/hal-01936887},
 year = {2019}
}

@inproceedings{ablin-etal:18b,
 author = {Ablin, P. and Fagot, D. and Wendt, H. and Gramfort, A. and Fevotte, C.},
 booktitle = {ICASSP 2019 - 2019 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
 doi = {10.1109/ICASSP.2019.8683291},
 issn = {2379-190X},
 keywords = {Nonnegative matrix factorization (NMF);transform learning;source separation;non-convex optimization;manifolds;audio signal processing},
 month = may,
 pages = {700--704},
 pdf = {https://arxiv.org/pdf/1811.02225.pdf},
 title = {A Quasi-{Newton} algorithm on the orthogonal manifold for {NMF} with transform learning},
 url = {https://arxiv.org/abs/1811.02225},
 year = {2019}
}

@article{Grabot-etal:19,
 abstract = {Precise timing makes the difference between harmony and cacophony, but how the brain achieves precision during timing is unknown. In this study, human participants (7 females, 5 males) generated a time interval while being recorded with magnetoencephalography. Building on the proposal that the coupling of neural oscillations provides a temporal code for information processing in the brain, we tested whether the strength of oscillatory coupling was sensitive to self-generated temporal precision. On a per individual basis, we show the presence of alpha{\textendash}beta phase{\textendash}amplitude coupling whose strength was associated with the temporal precision of self-generated time intervals, not with their absolute duration. Our results provide evidence that active oscillatory coupling engages {$\alpha$} oscillations in maintaining the precision of an endogenous temporal motor goal encoded in {$\beta$} power; the when of self-timed actions. We propose that oscillatory coupling indexes the variance of neuronal computations, which translates into the precision of an individual{\textquoteright}s behavioral performance.SIGNIFICANCE STATEMENT Which neural mechanisms enable precise volitional timing in the brain is unknown, yet accurate and precise timing is essential in every realm of life. In this study, we build on the hypothesis that neural oscillations, and their coupling across time scales, are essential for the coding and for the transmission of information in the brain. We show the presence of alpha{\textendash}beta phase{\textendash}amplitude coupling ({$\alpha$}{\textendash}{$\beta$} PAC) whose strength was associated with the temporal precision of self-generated time intervals, not with their absolute duration. {$\alpha$}{\textendash}{$\beta$} PAC indexes the temporal precision with which information is represented in an individual{\textquoteright}s brain. Our results link large-scale neuronal variability on the one hand, and individuals{\textquoteright} timing precision, on the other.},
 author = {Grabot, L. and Kononowicz, T. and Dupr{\'e} la Tour, T. and Gramfort, A. and Doy{\`e}re, V. and van Wassenhove, V.},
 doi = {10.1523/JNEUROSCI.2473-18.2018},
 eprint = {http://www.jneurosci.org/content/39/17/3277.full.pdf},
 issn = {0270-6474},
 journal = {Journal of Neuroscience},
 number = {17},
 pages = {3277--3291},
 publisher = {Society for Neuroscience},
 title = {The Strength of Alpha{\textendash}Beta Oscillatory Coupling Predicts Motor Timing Precision},
 url = {http://www.jneurosci.org/content/39/17/3277},
 volume = {39},
 year = {2019}
}

@inproceedings{Ablin-etal:19a,
 author = {Ablin, P. and Gramfort, A. and Cardoso, J. and Bach, F.},
 booktitle = {Proceedings of the 22nd International Conference on Artificial Intelligence and Statistics (AISTATS)},
 editor = {Chaudhuri, Kamalika and Sugiyama, Masashi},
 month = apr,
 pages = {1564--1573},
 pdf = {http://proceedings.mlr.press/v89/ablin19a/ablin19a.pdf},
 publisher = {PMLR},
 series = {Proceedings of Machine Learning Research},
 title = {Stochastic algorithms with descent guarantees for {ICA}},
 url = {http://proceedings.mlr.press/v89/ablin19a.html},
 volume = {89},
 year = {2019}
}

@inproceedings{dupre-etal:18,
 author = {Dupr{\'e} la Tour, T. and Moreau, T. and Jas, M. and Gramfort, A.},
 booktitle = {Advances in Neural Information Processing Systems 31},
 editor = {S. Bengio and H. Wallach and H. Larochelle and K. Grauman and N. Cesa-Bianchi and R. Garnett},
 pages = {3292--3302},
 publisher = {Curran Associates, Inc.},
 title = {Multivariate Convolutional Sparse Coding for Electromagnetic Brain Signals},
 url = {http://papers.nips.cc/paper/7590-multivariate-convolutional-sparse-coding-for-electromagnetic-brain-signals.pdf},
 year = {2018}
}

@inproceedings{janati-etal:19,
 author = {Janati, H. and Cuturi, M. and Gramfort, A.},
 booktitle = {Proceedings of the 22nd International Conference on Artificial Intelligence and Statistics (AISTATS)},
 editor = {Chaudhuri, Kamalika and Sugiyama, Masashi},
 month = apr,
 pages = {1407--1416},
 pdf = {http://proceedings.mlr.press/v89/janati19a/janati19a.pdf},
 publisher = {PMLR},
 series = {Proceedings of Machine Learning Research},
 title = {{Wasserstein} regularization for sparse multi-task regression},
 url = {http://proceedings.mlr.press/v89/janati19a.html},
 volume = {89},
 year = {2019}
}

@article{jas-etal:18,
 author = {Jas, M. and Larson, E. and Engemann, D. and Lepp{\"a}kangas, J. and Taulu, S. and H{\"a}m{\"a}l{\"a}inen, M. and Gramfort, A.},
 comment = {[Code]},
 doi = {10.3389/fnins.2018.00530},
 issn = {1662-453X},
 journal = {Frontiers in Neuroscience},
 pages = {530},
 title = {A Reproducible {MEG/EEG} Group Study With the {MNE} Software: Recommendations, Quality Assessments, and Good Practices},
 url = {https://www.frontiersin.org/article/10.3389/fnins.2018.00530},
 volume = {12},
 year = {2018}
}

@inproceedings{Massias_Gramfort_Salmon18,
 author = {Massias, M. and Gramfort, A. and Salmon, J.},
 booktitle = {Proceedings of the 35th International Conference on Machine Learning},
 comment = {[Code]},
 pages = {3321--3330},
 pdf = {https://arxiv.org/pdf/1802.07481},
 series = {Proceedings of Machine Learning Research},
 title = {Celer: a Fast Solver for the {Lasso} with Dual Extrapolation},
 url = {https://arxiv.org/abs/1802.07481},
 volume = {80},
 year = {2018}
}

@article{ablin-etal:2017,
 author = {Ablin, P. and Cardoso, J. and Gramfort, A.},
 comment = {[Code]},
 doi = {10.1109/TSP.2018.2844203},
 issn = {1053-587X},
 journal = {IEEE Transactions on Signal Processing},
 keywords = {Approximation algorithms;Brain modeling;Data models;Electronic mail;Neuroscience;Signal processing algorithms;Tensile stress;Blind source separation;Independent Component Analysis;maximum likelihood estimation;preconditioning;quasi-Newton methods;second order methods},
 number = {15},
 pages = {4040--4049},
 pdf = {https://hal.inria.fr/hal-01552340/file/quasi-newton-methods%20%286%29.pdf},
 title = {Faster independent component analysis by preconditioning with {Hessian} approximations},
 volume = {66},
 year = {2018}
}

@article{bekhti-etal:17,
 author = {Bekhti, Y. and Lucka, F. and Salmon, J. and Gramfort, A.},
 comment = {[Code]},
 journal = {Inverse Problems},
 pdf = {https://arxiv.org/pdf/1710.08747},
 title = {A hierarchical {Bayesian} perspective on majorization-minimization for non-convex sparse regression: application to {M/EEG} source imaging},
 url = {http://iopscience.iop.org/article/10.1088/1361-6420/aac9b3/meta},
 year = {2018}
}

@inproceedings{ablin-etal:2018b,
 address = {Cham},
 author = {Ablin, P. and Cardoso, J. and Gramfort, A.},
 booktitle = {Latent Variable Analysis and Signal Separation (LVA-ICA)},
 comment = {[Code]},
 editor = {Deville, Yannick and Gannot, Sharon and Mason, Russell and Plumbley, Mark D. and Ward, Dominic},
 isbn = {978-3-319-93764-9},
 pages = {151--160},
 pdf = {https://hal.inria.fr/hal-01822602/document},
 publisher = {Springer International Publishing},
 title = {Accelerating Likelihood Optimization for {ICA} on Real Signals},
 year = {2018}
}

@inproceedings{ablin-etal:2018a,
 address = {Calgary, Canada},
 author = {Ablin, P. and Cardoso, J. and Gramfort, A.},
 booktitle = {International Conference on Acoustics, Speech, and Signal Processing (ICASSP)},
 comment = {[Code]},
 month = apr,
 pdf = {https://arxiv.org/pdf/1711.10873},
 title = {Faster {ICA} under orthogonal constraint},
 year = {2018}
}

@inproceedings{duprelatour-etal:18,
 address = {Calgary, Canada},
 author = {Dupr{\'e} la Tour, T. and Grenier, Y. and Gramfort, A.},
 booktitle = {International Conference on Acoustics, Speech, and Signal Processing (ICASSP)},
 hal_id = {hal-01696786},
 hal_version = {v1},
 keywords = {cross-frequency coupling ; non-linear autoregressive models ; spectrum estimation ; electrophysiology},
 month = apr,
 pdf = {https://hal.archives-ouvertes.fr/hal-01696786/file/duprelatour2018icassp.pdf},
 title = {Driver estimation in non-linear autoregressive models},
 url = {https://hal.archives-ouvertes.fr/hal-01696786},
 year = {2018}
}

@inproceedings{schiratti-etal:2018a,
 address = {Calgary, Canada},
 author = {Schiratti, J. and Le Douget, J. and Le Van Quyen, M. and Essid, S. and Gramfort, A.},
 booktitle = {International Conference on Acoustics, Speech, and Signal Processing (ICASSP)},
 comment = {[Code]},
 month = apr,
 pdf = {https://hal.archives-ouvertes.fr/hal-01724272/document},
 title = {An ensemble learning approach to detect epileptic seizures from long intracranial {EEG} recordings},
 year = {2018}
}

@article{duprelatour-etal:2017b,
 abstract = {Author summary Neural oscillations synchronize information across brain areas at various anatomical and temporal scales. Of particular relevance, slow fluctuations of brain activity have been shown to affect high frequency neural activity, by regulating the excitability level of neural populations. Such cross-frequency-coupling can take several forms. In the most frequently observed type, the power of high frequency activity is time-locked to a specific phase of slow frequency oscillations, yielding phase-amplitude-coupling (PAC). Even when readily observed in neural recordings, such non-linear coupling is particularly challenging to formally characterize. Typically, neuroscientists use band-pass filtering and Hilbert transforms with ad-hoc correlations. Here, we explicitly address current limitations and propose an alternative probabilistic signal modeling approach, for which statistical inference is fast and well-posed. To statistically model PAC, we propose to use non-linear auto-regressive models which estimate the spectral modulation of a signal conditionally to a driving signal. This conditional spectral analysis enables easy model selection and clear hypothesis-testing by using the likelihood of a given model. We demonstrate the advantage of the model-based approach on three datasets acquired in rats and in humans. We further provide novel neuroscientific insights on previously reported PAC phenomena, capturing two mechanisms in PAC: influence of amplitude and directionality estimation.},
 author = {Dupr{\'e} la Tour, T. and Tallot, L. and Grabot, L. and Doy{\`e}re, V. and van Wassenhove, V. and Grenier, Y. and Gramfort, A.},
 doi = {10.1371/journal.pcbi.1005893},
 journal = {PLOS Computational Biology},
 month = dec,
 number = {12},
 pages = {1--32},
 pdf = {http://journals.plos.org/ploscompbiol/article/file?id=10.1371/journal.pcbi.1005893&type=printable},
 publisher = {Public Library of Science},
 title = {Non-linear auto-regressive models for cross-frequency coupling in neural time series},
 url = {https://doi.org/10.1371/journal.pcbi.1005893},
 volume = {13},
 year = {2017}
}

@article{ndiaye-etal:17b,
 author = {Ndiaye, E. and Fercoq, O. and Gramfort, A. and Salmon, J.},
 journal = {Journal of Machine Learning Research},
 number = {128},
 pages = {1--33},
 pdf = {http://jmlr.org/papers/volume18/16-577/16-577.pdf},
 title = {Gap Safe Screening Rules for Sparsity Enforcing Penalties},
 url = {http://jmlr.org/papers/v18/16-577.html},
 volume = {18},
 year = {2017}
}

@article{ndiaye-etal:17,
 abstract = {In high dimensional settings, sparse structures are crucial for efficiency, both in term of memory, computation and performance. It is customary to consider {$\ell_1$} penalty to enforce sparsity in such scenarios. Sparsity enforcing methods, the Lasso being a canonical example, are popular candidates to address high dimension. For efficiency, they rely on tuning a parameter trading data fitting versus sparsity. For the Lasso theory to hold this tuning parameter should be proportional to the noise level, yet the latter is often unknown in practice. A possible remedy is to jointly optimize over the regression parameter as well as over the noise level. This has been considered under several names in the literature: Scaled-Lasso, Square-root Lasso, Concomitant Lasso estimation for instance, and could be of interest for uncertainty quantification. In this work, after illustrating numerical difficulties for the Concomitant Lasso formulation, we propose a modification we coined Smoothed Concomitant Lasso, aimed at increasing numerical stability. We propose an efficient and accurate solver leading to a computational cost no more expensive than the one for the Lasso. We leverage on standard ingredients behind the success of fast Lasso solvers: a coordinate descent algorithm, combined with safe screening rules to achieve speed efficiency, by eliminating early irrelevant features.},
 author = {Ndiaye, E. and Fercoq, O. and Gramfort, A. and Lecl{\`e}re, V. and Salmon, J.},
 internal-note = {Same article as entry ndiaye-etal:16a; keep only one of the two keys},
 journal = {Journal of Physics: Conference Series},
 number = {1},
 pages = {012006},
 pdf = {https://arxiv.org/pdf/1606.02702},
 title = {Efficient Smoothed Concomitant {Lasso} Estimation for High Dimensional Regression},
 url = {http://stacks.iop.org/1742-6596/904/i=1/a=012006},
 volume = {904},
 year = {2017}
}

@inproceedings{bekhti-etal:17a,
 author = {Bekhti, Y. and Badeau, R. and Gramfort, A.},
 booktitle = {2017 25th European Signal Processing Conference (EUSIPCO)},
 doi = {10.23919/EUSIPCO.2017.8081206},
 keywords = {Bayes methods;brain;convex programming;electroencephalography;inverse problems;iterative methods;maximum likelihood estimation;medical image processing;regression analysis;Bayesian inference;brain activations;brain imaging;faster algorithms;high-dimensional sparse synthesis models;hyperparameter estimation;inverse problem;nonconvex penalty;posteriori regression;recurrent problem;sparse regression models;Bayes methods;Brain modeling;Estimation;Europe;Inverse problems;Sensors;Signal processing},
 month = aug,
 pages = {246--250},
 pdf = {https://hal.archives-ouvertes.fr/hal-01531238/document},
 title = {Hyperparameter estimation in maximum a posteriori regression using group sparsity with an application to brain imaging},
 year = {2017}
}

@article{NisoGalan-etal:18,
 author = {Niso, G. and Gorgolewski, K. and Bock, E. and Brooks, T. and Flandin, G. and Gramfort, A. and Henson, R. and Jas, M. and Litvak, V. and Moreau, T. and Oostenveld, R. and Schoffelen, J. and Tadel, F. and Wexler, J. and Baillet, S.},
 day = {19},
 journal = {Scientific Data},
 month = jun,
 pdf = {https://www.biorxiv.org/content/early/2017/08/08/172684.full.pdf},
 title = {{MEG-BIDS}, the brain imaging data structure extended to magnetoencephalography},
 url = {http://dx.doi.org/10.1038/sdata.2018.110},
 volume = {5},
 year = {2018}
}

@article{jas-etal:17b,
 author = {Jas, M. and Engemann, D. and Bekhti, Y. and Raimondo, F. and Gramfort, A.},
 doi = {10.1016/j.neuroimage.2017.06.030},
 issn = {1053-8119},
 journal = {NeuroImage},
 pages = {417--429},
 pdf = {https://arxiv.org/pdf/1612.08194.pdf},
 title = {Autoreject: Automated artifact rejection for {MEG} and {EEG} data},
 url = {http://www.sciencedirect.com/science/article/pii/S1053811917305013},
 volume = {159},
 year = {2017}
}

@inproceedings{massias-etal:2017,
 author = {Massias, M. and Fercoq, O. and Gramfort, A. and Salmon, J.},
 booktitle = {Proceedings of the 21st International Conference on Artificial Intelligence and Statistics (AISTATS)},
 pages = {998--1007},
 pdf = {http://proceedings.mlr.press/v84/massias18a/massias18a.pdf},
 series = {Proceedings of Machine Learning Research},
 title = {Generalized Concomitant Multi-Task {Lasso} for Sparse Multimodal Regression},
 volume = {84},
 year = {2018}
}

@inproceedings{jas-etal:2017,
 author = {Jas, M. and Dupr{\'e} la Tour, T. and Simsekli, U. and Gramfort, A.},
 booktitle = {Advances in Neural Information Processing Systems (NIPS) 30},
 editor = {I. Guyon and U. V. Luxburg and S. Bengio and H. Wallach and R. Fergus and S. Vishwanathan and R. Garnett},
 pages = {1099--1108},
 pdf = {http://papers.nips.cc/paper/6710-learning-the-morphology-of-brain-signals-using-alpha-stable-convolutional-sparse-coding.pdf},
 publisher = {Curran Associates, Inc.},
 title = {Learning the Morphology of Brain Signals Using Alpha-Stable Convolutional Sparse Coding},
 year = {2017}
}

@misc{1703.07285,
 archiveprefix = {arXiv},
 author = {Massias, M. and Gramfort, A. and Salmon, J.},
 eprint = {1703.07285},
 pdf = {http://arxiv.org/pdf/1703.07285.pdf},
 title = {From safe screening rules to working sets for faster {Lasso}-type solvers},
 year = {2017}
}

@inproceedings{dupre-etal:2017,
 address = {New Orleans, USA},
 author = {Dupr{\'e} la Tour, T. and Grenier, Y. and Gramfort, A.},
 booktitle = {International Conference on Acoustics, Speech, and Signal Processing (ICASSP)},
 month = feb,
 pdf = {https://hal.archives-ouvertes.fr/hal-01448603/document},
 title = {Parametric estimation of spectrum driven by an exogenous signal},
 url = {https://hal.archives-ouvertes.fr/hal-01448603/},
 year = {2017}
}

@inproceedings{montoya-etal:2017,
 address = {Grenoble, France},
 author = {Montoya-Martinez, J. and Cardoso, J. and Gramfort, A.},
 booktitle = {International Conference on Latent Variable Analysis, Independent Component Analysis LVA-ICA},
 month = feb,
 pdf = {https://hal.archives-ouvertes.fr/hal-01451432/document},
 title = {Caveats with stochastic gradient and maximum likelihood based {ICA} for {EEG}},
 url = {https://hal.archives-ouvertes.fr/hal-01451432/},
 year = {2017}
}

@inproceedings{ndiaye-etal:16b,
 author = {Ndiaye, E. and Fercoq, O. and Gramfort, A. and Salmon, J.},
 booktitle = {Advances in Neural Information Processing Systems (NIPS) 29},
 pdf = {http://arxiv.org/pdf/1602.06225v1.pdf},
 title = {{GAP} Safe Screening Rules for {Sparse-Group Lasso}},
 year = {2016}
}

@article{ndiaye-etal:16a,
 abstract = {In high dimensional settings, sparse structures are crucial for efficiency, both in term of memory, computation and performance. It is customary to consider {$\ell_1$} penalty to enforce sparsity in such scenarios. Sparsity enforcing methods, the Lasso being a canonical example, are popular candidates to address high dimension. For efficiency, they rely on tuning a parameter trading data fitting versus sparsity. For the Lasso theory to hold this tuning parameter should be proportional to the noise level, yet the latter is often unknown in practice. A possible remedy is to jointly optimize over the regression parameter as well as over the noise level. This has been considered under several names in the literature: Scaled-Lasso, Square-root Lasso, Concomitant Lasso estimation for instance, and could be of interest for uncertainty quantification. In this work, after illustrating numerical difficulties for the Concomitant Lasso formulation, we propose a modification we coined Smoothed Concomitant Lasso, aimed at increasing numerical stability. We propose an efficient and accurate solver leading to a computational cost no more expensive than the one for the Lasso. We leverage on standard ingredients behind the success of fast Lasso solvers: a coordinate descent algorithm, combined with safe screening rules to achieve speed efficiency, by eliminating early irrelevant features.},
 author = {Ndiaye, E. and Fercoq, O. and Gramfort, A. and Lecl{\`e}re, V. and Salmon, J.},
 internal-note = {Same article as entry ndiaye-etal:17; keep only one of the two keys},
 journal = {Journal of Physics: Conference Series},
 number = {1},
 pages = {012006},
 pdf = {https://arxiv.org/pdf/1606.02702v1.pdf},
 title = {Efficient Smoothed Concomitant {Lasso} Estimation for High Dimensional Regression},
 url = {http://stacks.iop.org/1742-6596/904/i=1/a=012006},
 volume = {904},
 year = {2017}
}