@article{dominey:ijhr:2009,
  author    = {Dominey, Peter Ford and Mallet, Anthony and Yoshida, Eiichi},
  title     = {Real-Time Spoken-Language Programming for Cooperative Interaction with a Humanoid Apprentice},
  journal   = {International Journal of Humanoid Robotics},
  year      = {2009},
  volume    = {6},
  number    = {2},
  pages     = {147--171},
  month     = jun,
  doi       = {10.1142/S0219843609001711},
  url       = {https://www.researchgate.net/publication/220065756\_Real-Time\_Spoken-Language\_Programming\_for\_Cooperative\_Interaction\_with\_a\_Humanoid\_Apprentice},
  keywords  = {Real-time spoken language; cooperative interaction; humanoid apprentice.},
  abstract  = {An apprentice is an able-bodied individual that will interactively assist an expert, and through this interaction, acquire knowledge and skill in the given task domain. A humanoid apprentice should have a useful repertoire of sensory-motor acts that the human can command with spoken language, along with a real-time behavioral sequence acquisition ability. The learned sequences should function as executable procedures that can operate in a flexible manner that are not rigidly sensitive to initial conditions. Our study integrates these capabilities in a real-time system on the HRP-2 humanoid, for learning a cooperative assembly task. We previously defined a system for Spoken Language Programming (SLP) that allowed the user to guide the robot through an arbitrary, task relevant, motor sequence via spoken commands, and to store this sequence as re-usable macro. Here, we significantly extend the SPL system: It integrates vision and motion planning into the SLP framework, providing a new level of flexibility in the actions that can be created, and it allows the user to create ``generic'' functions with arguments (e.g. Give me X), and it allows multiple functions to be created.},
  publisher = {World Scientific Publ Co PTE Ltd},
  address   = {5 Toh Tuck Link, Singapore 596224, Singapore},
}