@InProceedings{CI-seraphim-2024,
author = {Mathieu Seraphim and Alexis Lechervy and Florian Yger and Luc Brun and Olivier Etard},
title = {Structure-preserving transformers for sequences of {SPD} matrices},
booktitle = {Proceedings of EUSIPCO 2024},
year = 2024,
pages = {1451--1455},
month = {August},
address = {Lyon, France},
organization = {EURASIP},
url = "Eurasip:=https://eurasip.org/Proceedings/Eusipco/Eusipco2024/pdfs/0001451.pdf, HAL:=https://hal.science/hal-04638595",
theme = "pattern",
abstract ="In recent years, Transformer-based auto-attention
mechanisms have been successfully applied to the analysis of a
variety of context-reliant data types, from texts to images and
beyond, including data from non-Euclidean geometries. In this
paper, we present such a mechanism, designed to classify se-
quences of Symmetric Positive Definite matrices while preserving
their Riemannian geometry throughout the analysis. We apply
our method to automatic sleep staging on timeseries of EEG-
derived covariance matrices from a standard dataset, obtaining
high levels of stage-wise performance."
}