@article{10.3897/jucs.66375,
  author    = {Alessia D'Andrea and Maria Chiara Caschera and Fernando Ferri and Patrizia Grifoni},
  title     = {MuBeFE: Multimodal Behavioural Features Extraction Method},
  journal   = {JUCS - Journal of Universal Computer Science},
  publisher = {Journal of Universal Computer Science},
  volume    = {27},
  number    = {3},
  pages     = {254--284},
  year      = {2021},
  issn      = {0948-695X},
  doi       = {10.3897/jucs.66375},
  url       = {https://doi.org/10.3897/jucs.66375},
  eprint    = {https://doi.org/10.3897/jucs.66375},
  abstract  = {The paper provides a method to analyse and observe the characteristics that distinguish an individual's communication style, such as voice intonation, the size and slant used in handwriting, and the trait, pressure and dimension used in sketching. These features are referred to as Communication Extensional Features. From the Communication Extensional Features, the user's behavioural features, such as communicative intention, social style and personality traits, can be extracted. These behavioural features are referred to as Communication Intentional Features. For the extraction of Communication Intentional Features, a method based on Hidden Markov Models is provided in the paper. The Communication Intentional Features have been extracted at both the modal and multimodal levels; this is an important novelty of the paper. The accuracy of the method was tested at both levels. The evaluation results indicate an accuracy of 93.3% for the modal layer (handwriting) and 95.3% for the multimodal layer.}
}