Commit 82a9e1f5 authored by Marcus M. Scheunemann

some more publications

parent 23062f43
% Encoding: UTF-8
@Misc{DijkScheunemann-18,
  author        = {van Dijk, Sander G. and Scheunemann, Marcus M.},
  title         = {Deep Learning for Semantic Segmentation on Minimal Hardware},
  year          = {2018},
  abstract      = {Deep learning has revolutionised many fields, but it is still challenging to transfer its success to small mobile robots with minimal hardware. Specifically, some work has been done to this effect in the RoboCup humanoid football domain, but results that are performant and efficient and still generally applicable outside of this domain are lacking. We propose an approach conceptually different from those taken previously. It is based on semantic segmentation and does achieve these desired properties. In detail, it is being able to process full VGA images in real-time on a low-power mobile processor. It can further handle multiple image dimensions without retraining, it does not require specific domain knowledge for achieving a high frame rate and it is applicable on a minimal mobile hardware.},
  archiveprefix = {arXiv},
  eprint        = {1807.05597},
  keywords      = {deep learning, semantic segmentation, mobile robotics, computer vision, minimal hardware},
  pdf           = {/publications/DijkScheunemann-18.pdf},
  primaryclass  = {cs.LG},
  tags          = {Conference (peer-reviewed)},
}
@TechReport{boldhearts-18,
  author        = {Scheunemann, Marcus M. and van Dijk, Sander G. and Miko, Rebecca and Barry, Daniel and Evans, George M. and Polani, Daniel},
  title         = {{Bold Hearts} Team Description for {RoboCup} 2019 (Humanoid Kid Size League)},
  institution   = {School of Computer Science, University of Hertfordshire},
  year          = {2018},
  address       = {College Lane, AL10 9AB, UK},
  month         = dec,
  abstract      = {We participated in the RoboCup 2018 competition in Montreal with our newly developed BoldBot based on the Darwin-OP and mostly self-printed custom parts. This paper is about the lessons learnt from that competition and further developments for the RoboCup 2019 competition. Firstly, we briefly introduce the team along with an overview of past achievements. We then present a simple, standalone 2D simulator we use for simplifying the entry for new members with making basic RoboCup concepts quickly accessible. We describe our approach for semantic-segmentation for our vision used in the 2018 competition, which replaced the lookup-table (LUT) implementation we had before. We also discuss the extra structural support we plan to add to the printed parts of the BoldBot and our transition to ROS 2 as our new middleware. Lastly, we will present a collection of open-source contributions of our team.},
  author1_email = {marcus@mms.ai},
  author1_url   = {https://mms.ai},
  contact       = {marcus@mms.ai},
  group         = {Adaptive Systems Research Group},
  pages         = {8},
  pdf           = {/publications/tdp-18.pdf},
  tags          = {Technical Report},
}
@TechReport{boldhearts-17,
  author        = {Scheunemann, Marcus M. and van Dijk, Sander G. and Rossi, Alessandra and Barry, Daniel and Polani, Daniel},
  title         = {{Bold Hearts} Team Description {RoboCup} 2018 Kid Size},
  institution   = {School of Computer Science, University of Hertfordshire},
  year          = {2017},
  address       = {College Lane, AL10 9AB, UK},
  month         = dec,
  abstract      = {In this paper we describe the structure and the software of the RoboCup Humanoid Kid Size league team Bold Hearts. The development of a new platform is at the core of our work, in addition to new possibilities offered by the software. We discuss the construction and the aspects of our new robotic platform, assembled with new computational more powerful hardware and design, and highlight our solutions to vision and sensorimotor modelling. We then provide an overview of past achievements and relevant experience of our team.},
  author1_email = {marcus@mms.ai},
  author1_url   = {https://mms.ai},
  contact       = {marcus@mms.ai},
  group         = {Adaptive Systems Research Group},
  pages         = {7},
  pdf           = {/publications/tdp-17.pdf},
  url           = {https://www.robocuphumanoid.org/qualification/2018/KidSize/BoldHearts/tdp.pdf},
  tags          = {Technical Report},
}
@InProceedings{ScheunemannDautenhahnEtAl-16,
  author    = {Scheunemann, Marcus M. and Dautenhahn, Kerstin and Salem, Maha and Robins, Ben},
  title     = {Utilizing {Bluetooth Low Energy} to Recognize Proximity, Touch and Humans},
  booktitle = {2016 25th IEEE International Symposium on Robot and Human Interactive Communication (RO-MAN)},
  year      = {2016},
  pages     = {362--367},
  month     = aug,
  publisher = {IEEE},
  abstract  = {Interacting with humans is one of the main challenges for mobile robots in a human inhabited environment. To enable adaptive behavior, a robot needs to recognize touch gestures and/or the proximity to interacting individuals. Moreover, a robot interacting with two or more humans usually needs to distinguish between them. However, this remains both a configuration and cost intensive task. In this paper we utilize inexpensive Bluetooth Low Energy (BLE) devices and propose an easy and configurable technique to enhance the robot's capabilities to interact with surrounding people. In a noisy laboratory setting, a mobile spherical robot is utilized in three proof-of-concept experiments of the proposed system architecture. Firstly, we enhance the robot with proximity information about the individuals in the surrounding environment. Secondly, we exploit BLE to utilize it as a touch sensor. And lastly, we use BLE to distinguish between interacting individuals. Results show that observing the raw received signal strength (RSS) between BLE devices already enhances the robot's interaction capabilities and that the provided infrastructure can be facilitated to enable adaptive behavior in the future. We show one and the same sensor system can be used to detect different types of information relevant in human-robot interaction (HRI) experiments.},
  doi       = {10.1109/ROMAN.2016.7745156},
  issn      = {1944-9437},
  keywords  = {Bluetooth;human-robot interaction;mobile robots;tactile sensors;BLE devices;HRI experiments;RSS;adaptive behavior;configurable technique;cost intensive task;human inhabited environment;human-robot interaction;humans;inexpensive bluetooth low energy;mobile spherical robots;noisy laboratory setting;proximity information;raw received signal strength;recognize proximity;sensor system;touch sensor;Bluetooth;Cameras;Mobile robots;Robot vision systems;Tactile sensors},
  location  = {New York, NY, USA},
  tags      = {Conference (peer-reviewed)},
  timestamp = {2017.10.13},
}
@InProceedings{ScheunemannDautenhahn-17, @InProceedings{ScheunemannDautenhahn-17,
...@@ -28,36 +75,34 @@ ...@@ -28,36 +75,34 @@
series = {HRI '17}, series = {HRI '17},
pages = {52--52}, pages = {52--52},
address = {New York, NY, USA}, address = {New York, NY, USA},
month = feb, month = {Feb},
publisher = {ACM}, publisher = {ACM},
abstract = {This demonstration shows how inexpensive, off-the-shelf, and unobtrusive Bluetooth Low Energy (BLE) devices can be utilized for enabling robots to recognize touch gestures, to perceive proximity information, and to distinguish between interacting individuals autonomously. The received signal strength (RSS) between the BLE device attached to the robot and BLE devices attached to the interacting individuals is used to achieve this. Almost no software configuration is needed and the setup can be applied to most everyday environments and robot platforms.}, abstract = {This demonstration shows how inexpensive, off-the-shelf, and unobtrusive Bluetooth Low Energy (BLE) devices can be utilized for enabling robots to recognize touch gestures, to perceive proximity information, and to distinguish between interacting individuals autonomously. The received signal strength (RSS) between the BLE device attached to the robot and BLE devices attached to the interacting individuals is used to achieve this. Almost no software configuration is needed and the setup can be applied to most everyday environments and robot platforms.},
acmid = {3036663}, acmid = {3036663},
check = {from ACM, but address shown where published not the conference venue (location).}, check = {from ACM, but address shown where published not the conference venue (location).},
doi = {10.1145/3029798.3036663}, doi = {10.1145/3029798.3036663},
file = {ScheunemannDautenhahn-17.pdf:ScheunemannDautenhahn-17.pdf:PDF},
isbn = {978-1-4503-4885-0}, isbn = {978-1-4503-4885-0},
keywords = {autonomous interaction, bluetooth low energy, human-robot interaction, proxemics, robot design}, keywords = {autonomous interaction, bluetooth low energy, human-robot interaction, proxemics, robot design},
location = {Vienna, Austria}, location = {Vienna, Austria},
numpages = {1}, numpages = {1},
tags = {Conference (peer-reviewed)},
owner = {scheunemann}, owner = {scheunemann},
timestamp = {2017.10.13}, timestamp = {2017.10.13},
url = {http://doi.acm.org/10.1145/3029798.3036663},
} }
@InProceedings{ScheunemannDautenhahnEtAl-16b,
  author    = {Scheunemann, Marcus M. and Dautenhahn, Kerstin and Salem, Maha and Robins, Ben},
  title     = {Utilizing {Bluetooth Low Energy} for Human-Robot Interaction},
  booktitle = {2016 25th IEEE International Symposium on Robot and Human Interactive Communication (RO-MAN)},
  year      = {2016},
  numpages  = {1},
  month     = aug,
  publisher = {IEEE},
  doi       = {10.1109/ROMAN.2016.7745156},
  issn      = {1944-9437},
  keywords  = {Bluetooth;human-robot interaction;mobile robots;tactile sensors;BLE devices;HRI experiments;RSS;adaptive behavior;configurable technique;cost intensive task;human inhabited environment;human-robot interaction;humans;inexpensive bluetooth low energy;mobile spherical robots;noisy laboratory setting;proximity information;raw received signal strength;recognize proximity;sensor system;touch sensor;Bluetooth;Cameras;Mobile robots;Robot vision systems;Tactile sensors},
  location  = {New York, NY, USA},
  owner     = {scheunemann},
  tags      = {Conference (peer-reviewed)},
  timestamp = {2017.10.13},
}
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment