@article{vonarnim2024dynamic,
  author       = {von Arnim, Axel and Lecomte, Jules and Wozniak, Stanislaw and Elosegui, Naima and Pantazi, Angeliki},
  title        = {Dynamic Event-based Optical Identification and Communication},
  journal      = {Frontiers in Neurorobotics},
  volume       = {18},
  year         = {2024},
  month        = feb,
  organization = {fortiss},
  issn         = {1662-5218},
  doi          = {10.3389/fnbot.2024.1290965},
  url          = {https://www.frontiersin.org/articles/10.3389/fnbot.2024.1290965},
  keywords     = {Neuromorphic Computing, Event-Based Sensing, Optical Camera Communication, Optical Flow},
  abstract     = {Optical identification is often done with spatial or temporal visual pattern recognition and localization. Temporal pattern recognition, depending on the technology, involves a trade-off between communication frequency, range, and accurate tracking. We propose a solution with light-emitting beacons that improves this trade-off by exploiting fast event-based cameras and, for tracking, sparse neuromorphic optical flow computed with spiking neurons. In an asset monitoring use case, we demonstrate that the system, embedded in a simulated drone, is robust to relative movements and enables simultaneous communication with, and tracking of, multiple moving beacons. Finally, in a hardware lab prototype, we achieve state-of-the-art optical camera communication frequencies in the kHz magnitude.},
}

@inproceedings{schnider2023neuromorphic,
  author       = {Schnider, Yannick and Wozniak, Stanislaw and Gehrig, Mathias and Lecomte, Jules and von Arnim, Axel and Benini, Luca and Scaramuzza, Davide and Pantazi, Angeliki},
  title        = {Neuromorphic Optical Flow and Real-time Implementation with Event Cameras},
  booktitle    = {IEEE/CVF Conference on Computer Vision and Pattern Recognition Workshops (CVPRW)},
  year         = {2023},
  month        = apr,
  address      = {Vancouver},
  organization = {IBM Research Z{\"u}rich; ETH Z{\"u}rich; University of Z{\"u}rich},
  doi          = {10.48550/arXiv.2304.07139},
  url          = {https://arxiv.org/abs/2304.07139},
  abstract     = {Optical flow provides information on relative motion that is an important component in many computer vision pipelines. Neural networks provide high-accuracy optical flow, yet their complexity is often prohibitive for application at the edge or in robots, where efficiency and latency play a crucial role. To address this challenge, we build on the latest developments in event-based vision and spiking neural networks. We propose a new network architecture, inspired by Timelens, that improves the state-of-the-art self-supervised optical flow accuracy when operated both in spiking and non-spiking mode. To implement a real-time pipeline with a physical event camera, we propose a methodology for principled model simplification based on activity and latency analysis. We demonstrate high-speed optical flow prediction with almost two orders of magnitude reduced complexity while maintaining the accuracy, opening the path for real-time deployments.},
}