@comment{Publication list exported from yan-wu.com. The original export carried a rendered-HTML copy of each citation here; it duplicated the entries below verbatim and has been condensed to this note. Entries: taunyazov2020fast (IROS 2020), gao2020supervised (IROS 2020), taunyazov2019towards (ICRA 2019). --- 2020 entries follow ---}
@inproceedings{taunyazov2020fast,
  title     = {Fast Texture Classification Using Tactile Neural Coding and Spiking Neural Network},
  author    = {Taunyazov, Tasbolat and Chua, Yansong and Gao, Ruihan and Soh, Harold and Wu, Yan},
  url       = {http://yan-wu.com/wp-content/uploads/2020/08/taunyazov2020fast.pdf},
  doi       = {10.1109/IROS45743.2020.9340693},
  isbn      = {978-1-7281-6212-6},
  year      = {2020},
  date      = {2020-10-31},
  booktitle = {2020 {IEEE/RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
  publisher = {IEEE},
  address   = {Las Vegas, USA},
  abstract  = {Touch is arguably the most important sensing modality in physical interactions. However, tactile sensing has been largely under-explored in robotics applications owing to the complexity in making perceptual inferences until the recent advancements in machine learning or deep learning in particular. Touch perception is strongly influenced by both its temporal dimension similar to audition and its spatial dimension similar to vision. While spatial cues can be learned episodically, temporal cues compete against the system's response/reaction time to provide accurate inferences. In this paper, we propose a fast tactile-based texture classification framework which makes use of the spiking neural network to learn from the neural coding of the conventional tactile sensor readings. The framework is implemented and tested on two independent tactile datasets collected in sliding motion on 20 material textures. Our results show that the framework is able to make much more accurate inferences ahead of time as compared to that by the state-of-the-art learning approaches.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
@inproceedings{gao2020supervised,
  title     = {Supervised Autoencoder Joint Learning on Heterogeneous Tactile Sensory Data: Improving Material Classification Performance},
  author    = {Gao, Ruihan and Taunyazov, Tasbolat and Lin, Zhiping and Wu, Yan},
  url       = {http://yan-wu.com/wp-content/uploads/2020/08/gao2020supervised.pdf},
  doi       = {10.1109/IROS45743.2020.9341111},
  isbn      = {978-1-7281-6212-6},
  year      = {2020},
  date      = {2020-10-31},
  booktitle = {2020 {IEEE/RSJ} International Conference on Intelligent Robots and Systems ({IROS})},
  publisher = {IEEE},
  address   = {Las Vegas, USA},
  abstract  = {The sense of touch is an essential sensing modality for a robot to interact with the environment as it provides rich and multimodal sensory information upon contact. It enriches the perceptual understanding of the environment and closes the loop for action generation. One fundamental area of perception that touch dominates over other sensing modalities, is the understanding of the materials that it interacts with, for example, glass versus plastic. However, unlike the senses of vision and audition which have standardized data format, the format for tactile data is vastly dictated by the sensor manufacturer, which makes it difficult for large-scale learning on data collected from heterogeneous sensors, limiting the usefulness of publicly available tactile datasets. This paper investigates the joint learnability of data collected from two tactile sensors performing a touch sequence on some common materials. We propose a supervised recurrent autoencoder framework to perform joint material classification task to improve the training effectiveness. The framework is implemented and tested on the two sets of tactile data collected in sliding motion on 20 material textures using the iCub RoboSkin tactile sensors and the SynTouch BioTac sensor respectively. Our results show that the learning efficiency and accuracy improve for both datasets through the joint learning as compared to independent dataset training. This suggests the usefulness for large-scale open tactile datasets sharing with different sensors.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
@comment{--- 2019 entries follow ---}
@inproceedings{taunyazov2019towards,
  title     = {Towards Effective Tactile Identification of Textures using a Hybrid Touch Approach},
  author    = {Taunyazov, Tasbolat and Koh, Hui Fang and Wu, Yan and Cai, Caixia and Soh, Harold},
  url       = {http://www.yan-wu.com/docs/taunyanov2019towards.pdf},
  doi       = {10.1109/ICRA.2019.8793967},
  isbn      = {978-1-5386-6027-0},
  year      = {2019},
  date      = {2019-05-24},
  booktitle = {2019 International Conference on Robotics and Automation ({ICRA})},
  pages     = {4269--4275},
  publisher = {IEEE},
  address   = {Montreal, Canada},
  abstract  = {The sense of touch is arguably the first human sense to develop. Empowering robots with the sense of touch may augment their understanding of interacted objects and the environment beyond standard sensory modalities (e.g., vision). This paper investigates the effect of hybridizing touch and sliding movements for tactile-based texture classification. We develop three machine-learning methods within a framework to discriminate between surface textures; the first two methods use hand-engineered features, whilst the third leverages convolutional and recurrent neural network layers to learn feature representations from raw data. To compare these methods, we constructed a dataset comprising tactile data from 23 textures gathered using the iCub platform under a loosely constrained setup, i.e., with nonlinear motion. In line with findings from neuroscience, our experiments show that a good initial estimate can be obtained via touch data, which can be further refined via sliding; combining both touch and sliding data results in 98% classification accuracy over unseen test data.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
@comment{End of exported publication list. The export's trailing "Publications" section repeated the rendered citations for the three entries above (IROS 2020 x2, ICRA 2019) and has been condensed to this note.}