@inproceedings{paolillo:sii:2017,
  author    = {Paolillo, Antonio and Bolotnikova, Anastasia and Chappellet, Kevin and Kheddar, Abderrahmane},
  title     = {Visual Estimation of Articulated Objects Configuration During Manipulation with a Humanoid},
  booktitle = {{IEEE/SICE} International Symposium on System Integration},
  year      = {2017},
  month     = dec,
  eventdate = {2017-12-11/2017-12-14},
  address   = {Taipei, Taiwan},
  publisher = {IEEE},
  doi       = {10.1109/SII.2017.8279234},
  url       = {https://hal.archives-ouvertes.fr/hal-01646158/document},
  keywords  = {Cameras, Visualization, Computational modeling, Tracking, Robot Sensing systems, Task analysis},
  abstract  = {Robotic manipulation tasks require on-line knowledge of the operated objects' configuration. Thus, we need to estimate online the state of the (articulated) objects that are not equipped with positioning sensors. This estimated state w.r.t the robot control frame is required by our controller to update the model and close the loop. Indeed, in the controller we use the models of the (articulated) objects as additional `robots' so that it computes the overall `robots-objects' augmented system's motion and contact interaction forces that fulfill all the limitation constraints together with the physics. Because of the uncertainties due to the floating-base nature of humanoids, we address the problem of estimating the configuration of articulated objects using a virtual visual servoing-based approach. Experimental results obtained with the humanoid robot HRP-4 manipulating the paper drawer of a printer show the effectiveness of the approach.},
}