@Article{benseddik:ijrr:2020,
  author    = {Benseddik, Houssem Eddine and Morbidi, Fabio and Caron, Guillaume},
  title     = {PanoraMIS: An Ultra-wide Field of View Image Dataset for Vision-based Robot-Motion Estimation},
  journal   = {The International Journal of Robotics Research},
  year      = {2020},
  volume    = {39},
  number    = {9},
  pages     = {1037--1051},
  month     = {March},
  doi       = {10.1177/0278364920915248},
  url       = {https://hal.archives-ouvertes.fr/hal-02492460/document},
  keywords  = {Panoramic cameras, omnidirectional vision, visual-inertial odometry, image-based localization, autonomous robots},
  abstract  = {This paper presents a new dataset of ultra-wide field of view images with accurate ground truth, called PanoraMIS. The dataset covers a large spectrum of panoramic cameras (catadioptric, twin-fisheye), robotic platforms (wheeled, aerial and industrial robots), and testing environments (indoors and outdoors), and it is well suited to rigorously validate novel image-based robot-motion estimation algorithms, including visual odometry, visual SLAM, and deep learning-based methods. PanoraMIS and the accompanying documentation are publicly available on the Internet for the entire research community.},
  publisher = {Sage Publications Ltd},
  address   = {1 Olivers Yard, 55 City Road, London EC1Y 1SP, England}
}