@article{yamaguchi:ras:2020,
  author    = {Yamaguchi, Naoya and Hasegawa, Shun and Murooka, Masaki and Inaba, Masayuki},
  title     = {Selective grasp in occluded space by all-around proximity perceptible finger},
  journal   = {Robotics and Autonomous Systems},
  year      = {2020},
  volume    = {127},
  pages     = {103464},
  month     = may,
  doi       = {10.1016/j.robot.2020.103464},
  url       = {https://www.sciencedirect.com/science/article/pii/S092188901930274X},
  keywords  = {Proximity sensor, Robot hand, Environment map, Grasp, Shape Classification},
  abstract  = {The goal of this research is to develop a ``Selective Grasp'' system with which robots can grasp and identify the target object even in occluded environments. In pursuit of this goal, we first develop a robot hand on which proximity sensors are mounted all around. In addition to the development, we propose a sensor model of the robot hand. By using the sensor model, robots can estimate the distance to the object and calibrate the sensors. With our robot hand, robots can accurately recognize their surroundings without touch. Secondly, we propose an approach in which robots can memorize spatial information of surroundings by building an environment map. The building map motion is generated by a combination of manipulation primitives based on proximity sensors. Thirdly, we propose a grasp planning method and an object shape classification method based on the environment map. By these methods, robots can grasp objects and classify shapes of the objects in occluded spaces. Lastly, we conduct real robot experiments, through which we validate the effectiveness of our proposed Selective Grasp system.},
  publisher = {Elsevier Science BV},
  address   = {Po Box 211, 1000 AE Amsterdam, Netherlands},
}