@article{OwoyeleTrujillodeMeloetal.2022,
  author    = {Owoyele, Babajide and Trujillo, James and de Melo, Gerard and Pouw, Wim},
  title     = {Masked-Piper: Masking personal identities in visual recordings while preserving multimodal information},
  journal   = {SoftwareX},
  volume    = {20},
  publisher = {Elsevier},
  address   = {Amsterdam},
  issn      = {2352-7110},
  doi       = {10.1016/j.softx.2022.101236},
  pages     = {4},
  year      = {2022},
  abstract  = {In this increasingly data-rich world, visual recordings of human behavior often cannot be shared due to privacy concerns. Consequently, data sharing in fields such as behavioral science, multimodal communication, and human movement research is often limited. In addition, in legal and other non-scientific contexts, privacy-related concerns may preclude the sharing of video recordings and thus remove the rich multimodal context that humans recruit to communicate. Minimizing the risk of identity exposure while preserving critical behavioral information would maximize the utility of public resources (e.g., research grants) and of the time invested in audio-visual research. Here we present an open-source computer vision tool that masks the identities of humans while maintaining rich information about communicative body movements. Furthermore, this masking tool can easily be applied to many videos, leveraging computational tools to augment the reproducibility and accessibility of behavioral research. The tool is designed for researchers and practitioners engaged in kinematic and affective research. Application areas include teaching/education, communication and human movement research, CCTV, and legal contexts.},
  language  = {en}
}