@article{ArnoldBallierLissonetal.2019,
  author    = {Arnold, Taylor and Ballier, Nicolas and Lisson, Paula and Tilton, Lauren},
  title     = {Beyond lexical frequencies: using {R} for text analysis in the digital humanities},
  journal   = {Language Resources and Evaluation},
  volume    = {53},
  number    = {4},
  pages     = {707--733},
  year      = {2019},
  publisher = {Springer},
  address   = {Dordrecht},
  issn      = {1574-020X},
  doi       = {10.1007/s10579-019-09456-6},
  abstract  = {This paper presents a combination of R packages-user contributed toolkits written in a common core programming language-to facilitate the humanistic investigation of digitised, text-based corpora. Our survey of text analysis packages includes those of our own creation (cleanNLP and fasttextM) as well as packages built by other research groups (stringi, readtext, hyphenatr, quanteda, and hunspell). By operating on generic object types, these packages unite research innovations in corpus linguistics, natural language processing, machine learning, statistics, and digital humanities. We begin by extrapolating on the theoretical benefits of R as an elaborate gluing language for bringing together several areas of expertise and compare it to linguistic concordancers and other tool-based approaches to text analysis in the digital humanities. We then showcase the practical benefits of an ecosystem by illustrating how R packages have been integrated into a digital humanities project. Throughout, the focus is on moving beyond the bag-of-words, lexical frequency model by incorporating linguistically-driven analyses in research.},
  language  = {en},
}