├── .github
│   ├── .gitignore
│   └── workflows
│       ├── static.yml
│       ├── R-CMD-check.yaml
│       └── pkgdown.yaml
├── .Rprofile
├── vignettes
│   ├── .gitignore
│   ├── ConversationAlign_Step4_Analytics.Rmd
│   ├── ConversationAlign_Step1_Read.Rmd
│   ├── ConversationAlign_Step3_Summarize.Rmd
│   └── ConversationAlign_Step2_Prep.Rmd
├── renv
│   ├── .gitignore
│   └── settings.json
├── docs
│   ├── logo.png
│   ├── favicon.ico
│   ├── favicon-96x96.png
│   ├── apple-touch-icon.png
│   ├── reference
│   │   ├── figures
│   │   │   ├── logo.png
│   │   │   └── convo_demo.jpg
│   │   ├── ConversationAlign.html
│   │   ├── MaronGross_2013.html
│   │   ├── read_1file.html
│   │   ├── NurseryRhymes.html
│   │   └── generate_shams.html
│   ├── web-app-manifest-192x192.png
│   ├── web-app-manifest-512x512.png
│   ├── deps
│   │   ├── bootstrap-5.3.1
│   │   │   ├── fonts
│   │   │   │   ├── S6uyw4BMUTPHjx4wXg.woff2
│   │   │   │   ├── 4iCs6KVjbNBYlgoKfw72.woff2
│   │   │   │   ├── S6u8w4BMUTPHjxsAXC-q.woff2
│   │   │   │   ├── S6uyw4BMUTPHjxAwXjeu.woff2
│   │   │   │   ├── XRXV3I6Li01BKofINeaB.woff2
│   │   │   │   ├── q5uGsou0JOdh94bfvQlt.woff2
│   │   │   │   ├── 1Ptug8zYS_SKggPNyC0ITw.woff2
│   │   │   │   ├── 4iCs6KVjbNBYlgoKcQ72j00.woff2
│   │   │   │   ├── 4iCs6KVjbNBYlgoKcg72j00.woff2
│   │   │   │   ├── 4iCs6KVjbNBYlgoKcw72j00.woff2
│   │   │   │   ├── 4iCs6KVjbNBYlgoKew72j00.woff2
│   │   │   │   ├── 4iCs6KVjbNBYlgoKfA72j00.woff2
│   │   │   │   ├── S6u8w4BMUTPHjxsAUi-qJCY.woff2
│   │   │   │   ├── S6u9w4BMUTPHh6UVSwiPGQ.woff2
│   │   │   │   ├── S6u9w4BMUTPHh7USSwiPGQ.woff2
│   │   │   │   ├── XRXV3I6Li01BKofIMeaBXso.woff2
│   │   │   │   ├── XRXV3I6Li01BKofIO-aBXso.woff2
│   │   │   │   ├── XRXV3I6Li01BKofIOOaBXso.woff2
│   │   │   │   ├── XRXV3I6Li01BKofIOuaBXso.woff2
│   │   │   │   ├── q5uGsou0JOdh94bfuQltOxU.woff2
│   │   │   │   ├── 1Ptug8zYS_SKggPNyCAIT5lu.woff2
│   │   │   │   ├── 1Ptug8zYS_SKggPNyCIIT5lu.woff2
│   │   │   │   ├── 1Ptug8zYS_SKggPNyCMIT5lu.woff2
│   │   │   │   ├── 1Ptug8zYS_SKggPNyCkIT5lu.woff2
│   │   │   │   ├── 4iCv6KVjbNBYlgoCxCvjsGyN.woff2
│   │   │   │   ├── S6u9w4BMUTPHh6UVSwaPGR_p.woff2
│   │   │   │   ├── S6u9w4BMUTPHh7USSwaPGR_p.woff2
│   │   │   │   ├── 4iCv6KVjbNBYlgoCxCvjs2yNL4U.woff2
│   │   │   │   ├── 4iCv6KVjbNBYlgoCxCvjtGyNL4U.woff2
│   │   │   │   ├── 4iCv6KVjbNBYlgoCxCvjvGyNL4U.woff2
│   │   │   │   ├── 4iCv6KVjbNBYlgoCxCvjvWyNL4U.woff2
│   │   │   │   ├── 4iCv6KVjbNBYlgoCxCvjvmyNL4U.woff2
│   │   │   │   ├── CSR64z1Qlv-GDxkbKVQ_fO4KTet_.woff2
│   │   │   │   ├── CSR64z1Qlv-GDxkbKVQ_fOAKTQ.woff2
│   │   │   │   ├── JTUSjIg1_i6t8kCHKm459W1hyzbi.woff2
│   │   │   │   ├── JTUSjIg1_i6t8kCHKm459WRhyzbi.woff2
│   │   │   │   ├── JTUSjIg1_i6t8kCHKm459WZhyzbi.woff2
│   │   │   │   ├── JTUSjIg1_i6t8kCHKm459Wdhyzbi.woff2
│   │   │   │   ├── JTUSjIg1_i6t8kCHKm459Wlhyw.woff2
│   │   │   │   ├── QGYpz_kZZAGCONcK2A4bGOj8mNhN.woff2
│   │   │   │   ├── CSR54z1Qlv-GDxkbKVQ_dFsvWNReuQ.woff2
│   │   │   │   ├── 07d40e985ad7c747025dabb9f22142c4.woff2
│   │   │   │   ├── 1f5e011d6aae0d98fc0518e1a303e99a.woff2
│   │   │   │   ├── 626330658504e338ee86aec8e957426b.woff2
│   │   │   │   ├── 6xK3dSBYKcSV-LCoeQqfX1RYOo3qOK7l.woff2
│   │   │   │   ├── CSR54z1Qlv-GDxkbKVQ_dFsvWNpeudwk.woff2
│   │   │   │   ├── HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2
│   │   │   │   ├── c2f002b3a87d3f9bfeebb23d32cfd9f8.woff2
│   │   │   │   ├── ee91700cdbf7ce16c054c2bb8946c736.woff2
│   │   │   │   ├── 6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7nsDI.woff2
│   │   │   │   ├── 6xK3dSBYKcSV-LCoeQqfX1RYOo3qN67lqDY.woff2
│   │   │   │   ├── 6xK3dSBYKcSV-LCoeQqfX1RYOo3qNK7lqDY.woff2
│   │   │   │   ├── 6xK3dSBYKcSV-LCoeQqfX1RYOo3qNa7lqDY.woff2
│   │   │   │   ├── 6xK3dSBYKcSV-LCoeQqfX1RYOo3qNq7lqDY.woff2
│   │   │   │   ├── 6xK3dSBYKcSV-LCoeQqfX1RYOo3qO67lqDY.woff2
│   │   │   │   ├── 6xK3dSBYKcSV-LCoeQqfX1RYOo3qPK7lqDY.woff2
│   │   │   │   ├── HI_QiYsKILxRpg3hIP6sJ7fM7PqlONvUlMI.woff2
│   │   │   │   ├── HI_SiYsKILxRpg3hIP6sJ7fM7PqlM-vWjMY.woff2
│   │   │   │   ├── HI_SiYsKILxRpg3hIP6sJ7fM7PqlMOvWjMY.woff2
│   │   │   │   ├── HI_SiYsKILxRpg3hIP6sJ7fM7PqlMevWjMY.woff2
│   │   │   │   ├── HI_SiYsKILxRpg3hIP6sJ7fM7PqlMuvWjMY.woff2
│   │   │   │   ├── HI_SiYsKILxRpg3hIP6sJ7fM7PqlOevWjMY.woff2
│   │   │   │   ├── HI_SiYsKILxRpg3hIP6sJ7fM7PqlPuvWjMY.woff2
│   │   │   │   ├── 6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7jsDJT9g.woff2
│   │   │   │   ├── 6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7ksDJT9g.woff2
│   │   │   │   ├── 6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7osDJT9g.woff2
│   │   │   │   ├── 6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7psDJT9g.woff2
│   │   │   │   ├── 6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7qsDJT9g.woff2
│   │   │   │   ├── 6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7rsDJT9g.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3i54rwlxdu.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwlxdu.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ik4zwlxdu.woff2
│   │   │   │   ├── HI_QiYsKILxRpg3hIP6sJ7fM7PqlONvQlMIXxw.woff2
│   │   │   │   ├── HI_QiYsKILxRpg3hIP6sJ7fM7PqlONvXlMIXxw.woff2
│   │   │   │   ├── HI_QiYsKILxRpg3hIP6sJ7fM7PqlONvYlMIXxw.woff2
│   │   │   │   ├── HI_QiYsKILxRpg3hIP6sJ7fM7PqlONvZlMIXxw.woff2
│   │   │   │   ├── HI_QiYsKILxRpg3hIP6sJ7fM7PqlONvalMIXxw.woff2
│   │   │   │   ├── HI_QiYsKILxRpg3hIP6sJ7fM7PqlONvblMIXxw.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3i54rwkxduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3i54rwlBduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3i54rwmBduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3i54rwmRduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3i54rwmhduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3i54rwmxduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwkxduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwlBduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmBduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmRduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmhduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmxduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ik4zwkxduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ik4zwlBduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ik4zwmBduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ik4zwmRduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ik4zwmhduz8A.woff2
│   │   │   │   ├── 6xKydSBYKcSV-LCoeQqfX1RYOo3ik4zwmxduz8A.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTS-muw.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWuU6F.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTS2mu1aB.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTSCmu1aB.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTSGmu1aB.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTSKmu1aB.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTSOmu1aB.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTSumu1aB.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTSymu1aB.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTUGmu1aB.woff2
│   │   │   │   ├── memvYaGs126MiZpBA-UvWbX2vVnXBbObj2OVTVOmu1aB.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqW106F15M.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWt06F15M.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWtE6F15M.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWtU6F15M.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWtk6F15M.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWu06F15M.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWuk6F15M.woff2
│   │   │   │   ├── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWvU6F15M.woff2
│   │   │   │   └── memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWxU6F15M.woff2
│   │   │   └── font.css
│   │   ├── font-awesome-6.5.2
│   │   │   └── webfonts
│   │   │       ├── fa-brands-400.ttf
│   │   │       ├── fa-regular-400.ttf
│   │   │       ├── fa-solid-900.ttf
│   │   │       ├── fa-solid-900.woff2
│   │   │       ├── fa-brands-400.woff2
│   │   │       ├── fa-regular-400.woff2
│   │   │       ├── fa-v4compatibility.ttf
│   │   │       └── fa-v4compatibility.woff2
│   │   ├── headroom-0.11.0
│   │   │   ├── jQuery.headroom.min.js
│   │   │   └── headroom.min.js
│   │   ├── data-deps.txt
│   │   └── bootstrap-toc-1.0.1
│   │       └── bootstrap-toc.min.js
│   ├── site.webmanifest
│   ├── pkgdown.yml
│   ├── katex-auto.js
│   ├── link.svg
│   ├── lightswitch.js
│   ├── sitemap.xml
│   ├── pkgdown.js
│   ├── articles
│   │   └── index.html
│   ├── 404.html
│   └── authors.html
├── paper
│   ├── paper.pdf
│   ├── overview.png
│   └── paper.bib
├── man
│   ├── figures
│   │   ├── logo.png
│   │   └── convo_demo.jpg
│   ├── read_1file.Rd
│   ├── MaronGross_2013.Rd
│   ├── generate_shams.Rd
│   ├── NurseryRhymes.Rd
│   ├── corpus_analytics.Rd
│   ├── load_github_data.Rd
│   ├── NurseryRhymes_Prepped.Rd
│   ├── read_dyads.Rd
│   ├── ConversationAlign-package.Rd
│   ├── summarize_dyads.Rd
│   └── prep_dyads.Rd
├── data
│   ├── NurseryRhymes.rda
│   ├── MaronGross_2013.rda
│   └── NurseryRhymes_Prepped.rda
├── _pkgdown.yml
├── inst
│   ├── pkgdown
│   │   └── favicon
│   │       ├── favicon.ico
│   │       ├── favicon-96x96.png
│   │       ├── apple-touch-icon.png
│   │       ├── web-app-manifest-192x192.png
│   │       ├── web-app-manifest-512x512.png
│   │       └── site.webmanifest
│   └── NEWS.md
├── R
│   ├── ConversationAlign-package.R
│   ├── globals.R
│   ├── data.R
│   ├── generate_shams.R
│   ├── utils.R
│   ├── read_1file.R
│   ├── zzz.R
│   └── replacements_25.R
├── .gitignore
├── .Rbuildignore
├── tests
│   ├── testthat.R
│   └── testthat
│       ├── test-compute_lagcorr.R
│       ├── test-summarize_dyads.R
│       └── test-compute_auc.R
├── ConversationAlign.Rproj
├── doc
│   ├── CA_Step2_Prep.R
│   ├── CA_Step1_Read.R
│   ├── CA_Step3_Summarize.R
│   ├── CA_Step4_Analytics.R
│   ├── CA_Intro.R
│   ├── CA_Step3_Summarize.Rmd
│   ├── CA_Step4_Analytics.Rmd
│   ├── CA_Step1_Read.Rmd
│   ├── CA_Step2_Prep.Rmd
│   ├── CA_Step3_Summarize.html
│   └── CA_Step4_Analytics.html
├── data-raw
│   └── DATASET.R
├── DESCRIPTION
└── NAMESPACE

/.github/.gitignore:
--------------------------------------------------------------------------------
*.html
--------------------------------------------------------------------------------

/.Rprofile:
--------------------------------------------------------------------------------
source("renv/activate.R")
--------------------------------------------------------------------------------

/vignettes/.gitignore:
--------------------------------------------------------------------------------
*.html
*.R
--------------------------------------------------------------------------------

/renv/.gitignore:
--------------------------------------------------------------------------------
library/
local/
cellar/
lock/
python/
sandbox/
staging/
--------------------------------------------------------------------------------
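The .Rprofile sources renv/activate.R so the project's renv library is activated on startup, and renv/.gitignore keeps that library out of version control. As a minimal sketch (not part of the repository), a collaborator who clones the package would typically restore the locked library and later record changes like this:

    install.packages("renv")   # assumes renv is not yet installed locally
    renv::restore()            # rebuild the project library from renv.lock
    # ... install or upgrade packages while working ...
    renv::snapshot()           # write the new library state back to renv.lock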
/_pkgdown.yml:
--------------------------------------------------------------------------------
url: https://reilly-conceptscognitionlab.github.io/ConversationAlign/
template:
  bootstrap: 5
  bootswatch: lux
--------------------------------------------------------------------------------
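_pkgdown.yml points the documentation site at its GitHub Pages URL and selects Bootstrap 5 with the lux Bootswatch theme. A hedged sketch of how such a site is usually built locally, assuming the pkgdown package is installed (the .github/workflows/pkgdown.yaml action presumably does the equivalent on CI):

    pkgdown::build_site()   # render the full site into docs/

    # Rebuild a single article while iterating; the name below assumes the
    # article derives from the vignette of the same name
    pkgdown::build_article("ConversationAlign_Step1_Read")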
/R/ConversationAlign-package.R:
--------------------------------------------------------------------------------
#' @keywords internal
"_PACKAGE"

## usethis namespace: start
#' @importFrom rlang sym
## usethis namespace: end
NULL
--------------------------------------------------------------------------------
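The package-level file imports a single function, rlang::sym, into the namespace. A minimal sketch of the tidy-evaluation pattern that import typically serves; the function, data frame, and column name below are hypothetical illustrations, not taken from the package:

    library(dplyr)

    # Turn a column name held as a string into a symbol, then unquote it
    # with !! so dplyr evaluates it as a column reference
    mean_of <- function(df, var) {
      df %>% summarise(mean_val = mean(!!rlang::sym(var), na.rm = TRUE))
    }

    mean_of(mtcars, "mpg")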
/.gitignore:
--------------------------------------------------------------------------------
.Rproj.user
.Rhistory
.RData
.Ruserdata
.DS_Store
/doc/
/Meta/
--------------------------------------------------------------------------------
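The root .gitignore covers the usual RStudio artifacts plus macOS .DS_Store files and the locally built doc/ and Meta/ vignette folders. Entries like these are often added with usethis rather than edited by hand; a sketch, assuming the usethis package is available:

    # Appends any patterns not already present to .gitignore
    usethis::use_git_ignore(c(".DS_Store", ".Rhistory"))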
https://raw.githubusercontent.com/Reilly-ConceptsCognitionLab/ConversationAlign/HEAD/docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWtU6F15M.woff2 -------------------------------------------------------------------------------- /docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWtk6F15M.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Reilly-ConceptsCognitionLab/ConversationAlign/HEAD/docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWtk6F15M.woff2 -------------------------------------------------------------------------------- /docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWu06F15M.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Reilly-ConceptsCognitionLab/ConversationAlign/HEAD/docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWu06F15M.woff2 -------------------------------------------------------------------------------- /docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWuk6F15M.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Reilly-ConceptsCognitionLab/ConversationAlign/HEAD/docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWuk6F15M.woff2 -------------------------------------------------------------------------------- /docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWvU6F15M.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Reilly-ConceptsCognitionLab/ConversationAlign/HEAD/docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWvU6F15M.woff2 -------------------------------------------------------------------------------- /docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWxU6F15M.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Reilly-ConceptsCognitionLab/ConversationAlign/HEAD/docs/deps/bootstrap-5.3.1/fonts/memtYaGs126MiZpBA-UFUIcVXSCEkx2cmqvXlWqWxU6F15M.woff2 -------------------------------------------------------------------------------- /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^renv$ 2 | ^renv\.lock$ 3 | ^.*\.Rproj$ 4 | ^\.Rproj\.user$ 5 | ^\.github$ 6 | ^docs$ 7 | ^LICENSE\.md$ 8 | ^data-raw$ 9 | ^README\.Rmd$ 10 | ^_pkgdown\.yml$ 11 | ^doc$ 12 | ^paper/ 13 | ^Meta$ 14 | -------------------------------------------------------------------------------- /docs/reference/ConversationAlign.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /renv/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "bioconductor.version": null, 3 | "external.libraries": [], 4 | "ignored.packages": [], 5 | "package.dependency.fields": [ 6 | "Imports", 7 | "Depends", 8 | "LinkingTo" 9 | ], 10 | "r.version": null, 11 | "snapshot.type": "all", 12 | "use.cache": true, 13 | "vcs.ignore.cellar": true, 14 | "vcs.ignore.library": true, 15 | "vcs.ignore.local": true, 16 | "vcs.manage.ignores": true 17 | } 18 | 
-------------------------------------------------------------------------------- /tests/testthat.R: -------------------------------------------------------------------------------- 1 | # This file is part of the standard setup for testthat. 2 | # It is recommended that you do not modify it. 3 | # 4 | # Where should you do additional test configuration? 5 | # Learn more about the roles of various files in: 6 | # * https://r-pkgs.org/testing-design.html#sec-tests-files-overview 7 | # * https://testthat.r-lib.org/articles/special-files.html 8 | 9 | library(testthat) 10 | library(ConversationAlign) 11 | 12 | test_check("ConversationAlign") 13 | -------------------------------------------------------------------------------- /ConversationAlign.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | ProjectId: 968863a1-e8e0-4236-b501-3ebd3a502684 3 | 4 | RestoreWorkspace: Default 5 | SaveWorkspace: Default 6 | AlwaysSaveHistory: Default 7 | 8 | EnableCodeIndexing: Yes 9 | UseSpacesForTab: Yes 10 | NumSpacesForTab: 2 11 | Encoding: UTF-8 12 | 13 | RnwWeave: Sweave 14 | LaTeX: pdfLaTeX 15 | 16 | AutoAppendNewline: Yes 17 | StripTrailingWhitespace: Yes 18 | 19 | BuildType: Package 20 | PackageUseDevtools: Yes 21 | PackageInstallArgs: --no-multiarch --with-keep.source 22 | -------------------------------------------------------------------------------- /docs/site.webmanifest: -------------------------------------------------------------------------------- 1 | { 2 | "name": "", 3 | "short_name": "", 4 | "icons": [ 5 | { 6 | "src": "/web-app-manifest-192x192.png", 7 | "sizes": "192x192", 8 | "type": "image/png", 9 | "purpose": "maskable" 10 | }, 11 | { 12 | "src": "/web-app-manifest-512x512.png", 13 | "sizes": "512x512", 14 | "type": "image/png", 15 | "purpose": "maskable" 16 | } 17 | ], 18 | "theme_color": "#ffffff", 19 | "background_color": "#ffffff", 20 | "display": "standalone" 21 | } -------------------------------------------------------------------------------- /inst/pkgdown/favicon/site.webmanifest: -------------------------------------------------------------------------------- 1 | { 2 | "name": "", 3 | "short_name": "", 4 | "icons": [ 5 | { 6 | "src": "/web-app-manifest-192x192.png", 7 | "sizes": "192x192", 8 | "type": "image/png", 9 | "purpose": "maskable" 10 | }, 11 | { 12 | "src": "/web-app-manifest-512x512.png", 13 | "sizes": "512x512", 14 | "type": "image/png", 15 | "purpose": "maskable" 16 | } 17 | ], 18 | "theme_color": "#ffffff", 19 | "background_color": "#ffffff", 20 | "display": "standalone" 21 | } -------------------------------------------------------------------------------- /man/read_1file.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/read_1file.R 3 | \name{read_1file} 4 | \alias{read_1file} 5 | \title{read_1file} 6 | \usage{ 7 | read_1file(my_dat) 8 | } 9 | \arguments{ 10 | \item{my_dat}{one conversation transcript already in the R environment} 11 | } 12 | \value{ 13 | a dataframe formatted with 'Event_ID', 'Participant_ID', and 'Text_Raw' fields -- ready for prep_dyads() 14 | } 15 | \description{ 16 | Reads a pre-formatted dyadic (2-interlocutor) conversation transcript already imported into your R environment. 
17 | } 18 | -------------------------------------------------------------------------------- /doc/CA_Step2_Prep.R: -------------------------------------------------------------------------------- 1 | ## ----message=FALSE, warning=F, echo=F----------------------------------------- 2 | # Check if devtools is installed, if not install it 3 | if (!require("devtools", quietly = TRUE)) { 4 | install.packages("devtools") 5 | } 6 | 7 | # Load devtools 8 | library(devtools) 9 | 10 | # Check if ConversationAlign is installed, if not install from GitHub 11 | if (!require("ConversationAlign", quietly = TRUE)) { 12 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 13 | } 14 | 15 | # Load ConversationAlign 16 | library(ConversationAlign) 17 | 18 | -------------------------------------------------------------------------------- /docs/deps/headroom-0.11.0/jQuery.headroom.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * headroom.js v0.9.4 - Give your page some headroom. Hide your header until you need it 3 | * Copyright (c) 2017 Nick Williams - http://wicky.nillia.ms/headroom.js 4 | * License: MIT 5 | */ 6 | 7 | !function(a){a&&(a.fn.headroom=function(b){return this.each(function(){var c=a(this),d=c.data("headroom"),e="object"==typeof b&&b;e=a.extend(!0,{},Headroom.options,e),d||(d=new Headroom(this,e),d.init(),c.data("headroom",d)),"string"==typeof b&&(d[b](),"destroy"===b&&c.removeData("headroom"))})},a("[data-headroom]").each(function(){var b=a(this);b.headroom(b.data())}))}(window.Zepto||window.jQuery); -------------------------------------------------------------------------------- /man/MaronGross_2013.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{MaronGross_2013} 5 | \alias{MaronGross_2013} 6 | \title{Sample Dyadic Interview Transcript: Marc Maron and Terry Gross Radio Interview 2013} 7 | \format{ 8 | ## "MaronGross_2013" 9 | A data.frame with 546 obs, 2 vars: 10 | \describe{ 11 | \item{text}{text from interview} 12 | \item{speaker}{speaker identity} 13 | ... 14 | } 15 | } 16 | \usage{ 17 | MaronGross_2013 18 | } 19 | \description{ 20 | Text and talker information delineated, raw transcript, multiple lines per talker 21 | } 22 | \keyword{datasets} 23 | -------------------------------------------------------------------------------- /man/generate_shams.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/generate_shams.R 3 | \name{generate_shams} 4 | \alias{generate_shams} 5 | \title{generate_shams} 6 | \usage{ 7 | generate_shams(df_prep, seed = NULL) 8 | } 9 | \arguments{ 10 | \item{df_prep}{Output dataframe of prep_dyads().} 11 | 12 | \item{seed}{(Optional) a seed for reproducibility in random sampling} 13 | } 14 | \value{ 15 | A dataframe similar to prepped dyads, with each participant's time series randomly shuffled. 16 | } 17 | \description{ 18 | Generates a permutation of each individual dyad. Shuffled dyads may act as controls to their originals. 
19 | } 20 | -------------------------------------------------------------------------------- /docs/pkgdown.yml: -------------------------------------------------------------------------------- 1 | pandoc: 3.7.0.2 2 | pkgdown: 2.1.3 3 | pkgdown_sha: ~ 4 | articles: 5 | ConversationAlign_Introduction: ConversationAlign_Introduction.html 6 | ConversationAlign_Step1_Read: ConversationAlign_Step1_Read.html 7 | ConversationAlign_Step2_Prep: ConversationAlign_Step2_Prep.html 8 | ConversationAlign_Step3_Summarize: ConversationAlign_Step3_Summarize.html 9 | ConversationAlign_Step4_Analytics: ConversationAlign_Step4_Analytics.html 10 | last_built: 2025-11-12T16:48Z 11 | urls: 12 | reference: https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference 13 | article: https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles 14 | -------------------------------------------------------------------------------- /docs/katex-auto.js: -------------------------------------------------------------------------------- 1 | // https://github.com/jgm/pandoc/blob/29fa97ab96b8e2d62d48326e1b949a71dc41f47a/src/Text/Pandoc/Writers/HTML.hs#L332-L345 2 | document.addEventListener("DOMContentLoaded", function () { 3 | var mathElements = document.getElementsByClassName("math"); 4 | var macros = []; 5 | for (var i = 0; i < mathElements.length; i++) { 6 | var texText = mathElements[i].firstChild; 7 | if (mathElements[i].tagName == "SPAN") { 8 | katex.render(texText.data, mathElements[i], { 9 | displayMode: mathElements[i].classList.contains("display"), 10 | throwOnError: false, 11 | macros: macros, 12 | fleqn: false 13 | }); 14 | }}}); 15 | -------------------------------------------------------------------------------- /doc/CA_Step1_Read.R: -------------------------------------------------------------------------------- 1 | ## ----include = FALSE---------------------------------------------------------- 2 | knitr::opts_chunk$set( 3 | collapse = TRUE, 4 | comment = "#>" 5 | ) 6 | 7 | ## ----message=FALSE, warning=F, echo=F----------------------------------------- 8 | # Check if devtools is installed, if not install it 9 | if (!require("devtools", quietly = TRUE)) { 10 | install.packages("devtools") 11 | } 12 | 13 | # Load devtools 14 | library(devtools) 15 | 16 | # Check if ConversationAlign is installed, if not install from GitHub 17 | if (!require("ConversationAlign", quietly = TRUE)) { 18 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 19 | } 20 | 21 | # Load ConversationAlign 22 | library(ConversationAlign) 23 | 24 | -------------------------------------------------------------------------------- /doc/CA_Step3_Summarize.R: -------------------------------------------------------------------------------- 1 | ## ----include = FALSE---------------------------------------------------------- 2 | knitr::opts_chunk$set( 3 | collapse = TRUE, 4 | comment = "#>" 5 | ) 6 | 7 | ## ----message=FALSE, warning=F, echo=F----------------------------------------- 8 | # Check if devtools is installed, if not install it 9 | if (!require("devtools", quietly = TRUE)) { 10 | install.packages("devtools") 11 | } 12 | 13 | # Load devtools 14 | library(devtools) 15 | 16 | # Check if ConversationAlign is installed, if not install from GitHub 17 | if (!require("ConversationAlign", quietly = TRUE)) { 18 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 19 | } 20 | 21 | # Load ConversationAlign 22 | library(ConversationAlign) 23 | 24 | 
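25 | ## ----eval=FALSE----------------------------------------------------------------- 26 | # A minimal usage sketch: summarize one prepped conversation corpus. It assumes 27 | # the packaged NurseryRhymes_Prepped example data; arguments mirror the 28 | # summarize_dyads() documentation defaults, and the name 'sums' is arbitrary. 29 | sums <- summarize_dyads(NurseryRhymes_Prepped, sumdat_only = TRUE, corr_type = "Pearson") 30 | head(sums) 31 | 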
-------------------------------------------------------------------------------- /doc/CA_Step4_Analytics.R: -------------------------------------------------------------------------------- 1 | ## ----include = FALSE---------------------------------------------------------- 2 | knitr::opts_chunk$set( 3 | collapse = TRUE, 4 | comment = "#>" 5 | ) 6 | 7 | ## ----message=FALSE, warning=F, echo=F----------------------------------------- 8 | # Check if devtools is installed, if not install it 9 | if (!require("devtools", quietly = TRUE)) { 10 | install.packages("devtools") 11 | } 12 | 13 | # Load devtools 14 | library(devtools) 15 | 16 | # Check if ConversationAlign is installed, if not install from GitHub 17 | if (!require("ConversationAlign", quietly = TRUE)) { 18 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 19 | } 20 | 21 | # Load ConversationAlign 22 | library(ConversationAlign) 23 | 24 | -------------------------------------------------------------------------------- /docs/link.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 8 | 12 | 13 | -------------------------------------------------------------------------------- /man/NurseryRhymes.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{NurseryRhymes} 5 | \alias{NurseryRhymes} 6 | \title{Sample Conversation Transcript: Nursery Rhymes} 7 | \format{ 8 | ## "NurseryRhymes" 9 | A data.frame with 100 observations, 3 vars: 10 | \describe{ 11 | \item{Event_ID}{factor 3 different simulated conversations} 12 | \item{Participant_ID}{fictional speaker names, 2 each conversation} 13 | \item{Text_Raw}{simulated language production, actually looped phrases from nursery rhymes} 14 | ... 
15 | } 16 | } 17 | \usage{ 18 | NurseryRhymes 19 | } 20 | \description{ 21 | Text and talker information delineated, 3 separate nursery rhymes, good for computing analytics and word counts 22 | } 23 | \keyword{datasets} 24 | -------------------------------------------------------------------------------- /tests/testthat/test-compute_lagcorr.R: -------------------------------------------------------------------------------- 1 | # unit testing for computing correlations 2 | 3 | # prepare data for lagged corr testing 4 | # should give corr of 1 when lagged by 2 5 | input <- data.frame( 6 | Event_ID = "corr_test", 7 | Participant_ID = rep(c("P1", "P2"), 50), 8 | Exchange_Count = rep(1:50, each = 2), 9 | emo_anger = c(rbind(1:50, c(0, 0, 1:48))) # interleave two sequences, one lagged by 2 10 | ) 11 | 12 | test_that("computed pearson correlation is correct", { 13 | # lagged by two should be equal to 1 14 | p_output <- compute_lagcorr(input, corr_type = "Pearson") 15 | expect_equal(p_output$TurnCorr_Lag2[1], 1) 16 | 17 | }) 18 | 19 | 20 | test_that("computed spearman correlation is correct", { 21 | # lagged by two should be equal to 1 22 | p_output <- compute_lagcorr(input, corr_type = "Spearman") 23 | expect_equal(p_output$TurnCorr_Lag2[1], 1) 24 | }) 25 | -------------------------------------------------------------------------------- /tests/testthat/test-summarize_dyads.R: -------------------------------------------------------------------------------- 1 | # tests for the larger summarize_dyads function 2 | 3 | # data to input 4 | one_rhyme <- ConversationAlign::NurseryRhymes_Prepped[ConversationAlign::NurseryRhymes_Prepped$Event_ID == "LittleLamb",] 5 | one_rhyme <- droplevels(one_rhyme) 6 | 7 | test_that("required column names are present", { 8 | # compute summarize dyads on single nursery rhyme 9 | output <- ConversationAlign::summarize_dyads(one_rhyme) 10 | # define list of required names and evaluate 11 | col_vec <- c("Event_ID", "Participant_ID", "Talked_First", "Dimension", "Dimension_Mean", "AUC_raw_Immediate", 12 | "AUC_scaled50_Immediate", "AUC_raw_Lag1", "AUC_scaled50_Lag1", "TurnCorr_Lead2", "TurnCorr_Immediate", "TurnCorr_Lag2") 13 | expect_in(col_vec, colnames(output)) 14 | }) 15 | -------------------------------------------------------------------------------- /docs/deps/data-deps.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /man/corpus_analytics.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/corpus_analytics.R 3 | \name{corpus_analytics} 4 | \alias{corpus_analytics} 5 | \title{corpus_analytics} 6 | \usage{ 7 | corpus_analytics(dat_prep) 8 | } 9 | \arguments{ 10 | \item{dat_prep}{takes the dataframe produced by the prep_dyads() function} 11 | } 12 | \value{ 13 | dataframe with summary statistics (mean, SD, range) for numerous corpus analytics (e.g., token count, type-token-ratio, word-count-per-turn) for the target conversation corpus. Summary data structured in table format for easy export to a journal method section. 
14 | } 15 | \description{ 16 | Produces a table of corpus analytics including numbers of complete observations at each step, word counts, lexical diversity (e.g., TTR), stopword ratios, etc. Granularity of the summary statistics is guided by the user (e.g., by conversation; by conversation and speaker; collapsed across all conversations) 17 | } 18 | -------------------------------------------------------------------------------- /man/load_github_data.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/utils.R 3 | \name{load_github_data} 4 | \alias{load_github_data} 5 | \title{Load all .rda files from a GitHub data folder into the package environment} 6 | \usage{ 7 | load_github_data( 8 | repo = "Reilly-ConceptsCognitionLab/ConversationAlign_Data", 9 | branch = "main", 10 | data_folder = "data", 11 | envir = parent.frame() 12 | ) 13 | } 14 | \arguments{ 15 | \item{repo}{GitHub repository (e.g., "username/repo")} 16 | 17 | \item{branch}{Branch name (default: "main")} 18 | 19 | \item{data_folder}{Remote folder containing .rda files (default: "data")} 20 | 21 | \item{envir}{Environment to load into (default: package namespace)} 22 | } 23 | \value{ 24 | nothing, loads data (as rda files) from the github repository needed for other package functions 25 | } 26 | \description{ 27 | Load all .rda files from a GitHub data folder into the package environment 28 | } 29 | -------------------------------------------------------------------------------- /R/globals.R: -------------------------------------------------------------------------------- 1 | #' @importFrom utils globalVariables 2 | NULL 3 | 4 | utils::globalVariables( 5 | c("CA_orig_stops", 6 | "clean_me", 7 | "clean_dyads", 8 | "df", 9 | "df_prep", 10 | "df_read", 11 | "df_summarize", 12 | "Exchanges", 13 | "Exchange_Count", 14 | 'Event_ID', 15 | "freq_lg10", 16 | "letter_count", 17 | "lex_freqlg10", 18 | "lex_n_morphemes", 19 | "lookup_Jul25", 20 | "measure", 21 | "MIT_Stops", 22 | "morpheme_count", 23 | "n_participants", 24 | "participant_var", 25 | 'Participant_ID', 26 | "Participant_Pair", 27 | "phon_n_lett", 28 | "phon_nsyll", 29 | "reshaped", 30 | "score", 31 | "SMART_stops", 32 | "stdev", 33 | "sumdat", 34 | "sumdat_only", 35 | "summary_statistic", 36 | 'switch_mark', 37 | "syllable_count", 38 | "Temple_stops25", 39 | "Text_Clean", 40 | "Text_Prep", 41 | "Text_Raw", 42 | "Turn_Count", 43 | "value", 44 | "word", 45 | "words_per_turn_clean", 46 | "words_per_turn_raw" 47 | )) 48 | -------------------------------------------------------------------------------- /man/NurseryRhymes_Prepped.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{NurseryRhymes_Prepped} 5 | \alias{NurseryRhymes_Prepped} 6 | \title{Sample Conversation Transcript: Nursery Rhymes Prepped} 7 | \format{ 8 | ## "NurseryRhymes_Prepped" 9 | A data.frame with 1507 observations, 6 vars: 10 | \describe{ 11 | \item{Event_ID}{factor 3 different simulated conversations} 12 | \item{Participant_ID}{fictional speaker names, 2 each conversation} 13 | \item{Exchange_Count}{sequential numbering of exchanges by conversation, 1 exchange = 2 turns} 14 | \item{Turn_Count}{sequential numbering of turns by conversation} 15 | \item{Text_Clean}{content words} 16 | \item{emo_anger}{raw 
value of anger salience yoked to each word} 17 | ... 18 | } 19 | } 20 | \usage{ 21 | NurseryRhymes_Prepped 22 | } 23 | \description{ 24 | Text and talker information delineated, 3 separate nursery rhymes, good for computing analytics and word counts 25 | } 26 | \keyword{datasets} 27 | -------------------------------------------------------------------------------- /doc/CA_Intro.R: -------------------------------------------------------------------------------- 1 | ## ----include = FALSE---------------------------------------------------------- 2 | knitr::opts_chunk$set( 3 | collapse = TRUE, 4 | comment = "#>" 5 | ) 6 | 7 | ## ----message=FALSE, warning=F------------------------------------------------- 8 | # Check if devtools is installed, if not install it 9 | if (!require("devtools", quietly = TRUE)) { 10 | install.packages("devtools") 11 | } 12 | 13 | # Load devtools 14 | library(devtools) 15 | 16 | # Check if ConversationAlign is installed, if not install from GitHub 17 | if (!require("ConversationAlign", quietly = TRUE)) { 18 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 19 | } 20 | 21 | # Load ConversationAlign 22 | library(ConversationAlign) 23 | 24 | ## ----eval=T, message=F, warning=F--------------------------------------------- 25 | knitr::kable(head(NurseryRhymes, 20), format = "simple") 26 | str(NurseryRhymes) 27 | 28 | ## ----------------------------------------------------------------------------- 29 | knitr::kable(head(MaronGross_2013, 20), format = "simple") 30 | str(MaronGross_2013) 31 | 32 | -------------------------------------------------------------------------------- /doc/CA_Step3_Summarize.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: CA Step 3 Summarize Dyads 3 | subtitle: summarize_dyads() 4 | author: Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion 5 | date: "`r format(Sys.Date(), '%B %d, %Y')`" 6 | show_toc: true 7 | slug: ConversationAlign Summarize 8 | output: 9 | rmarkdown::html_vignette: 10 | toc: yes 11 | vignette: | 12 | %\VignetteEngine{knitr::rmarkdown} 13 | %\VignetteIndexEntry{CA Step 3 Summarize Dyads} 14 | %\VignetteEncoding{UTF-8} 15 | --- 16 | 17 | ```{r, include = FALSE} 18 | knitr::opts_chunk$set( 19 | collapse = TRUE, 20 | comment = "#>" 21 | ) 22 | ``` 23 | 24 | ```{r, message=FALSE, warning=F, echo=F} 25 | # Check if devtools is installed, if not install it 26 | if (!require("devtools", quietly = TRUE)) { 27 | install.packages("devtools") 28 | } 29 | 30 | # Load devtools 31 | library(devtools) 32 | 33 | # Check if ConversationAlign is installed, if not install from GitHub 34 | if (!require("ConversationAlign", quietly = TRUE)) { 35 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 36 | } 37 | 38 | # Load ConversationAlign 39 | library(ConversationAlign) 40 | ``` 41 | 42 | ## summarize_dyads() 43 | 44 | -------------------------------------------------------------------------------- /doc/CA_Step4_Analytics.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: CA Step 4 Corpus Analytics 3 | subtitle: corpus_analytics() 4 | author: Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion 5 | date: "`r format(Sys.Date(), '%B %d, %Y')`" 6 | show_toc: true 7 | slug: ConversationAlign Analytics 8 | output: 9 | rmarkdown::html_vignette: 10 | toc: yes 11 | vignette: > 12 | %\VignetteEngine{knitr::rmarkdown} 13 | %\VignetteIndexEntry{CA Step 4 Corpus Analytics} 
14 | %\VignetteEncoding{UTF-8} 15 | --- 16 | 17 | ```{r, include = FALSE} 18 | knitr::opts_chunk$set( 19 | collapse = TRUE, 20 | comment = "#>" 21 | ) 22 | ``` 23 | 24 | ```{r, message=FALSE, warning=F, echo=F} 25 | # Check if devtools is installed, if not install it 26 | if (!require("devtools", quietly = TRUE)) { 27 | install.packages("devtools") 28 | } 29 | 30 | # Load devtools 31 | library(devtools) 32 | 33 | # Check if ConversationAlign is installed, if not install from GitHub 34 | if (!require("ConversationAlign", quietly = TRUE)) { 35 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 36 | } 37 | 38 | # Load ConversationAlign 39 | library(ConversationAlign) 40 | ``` 41 | 42 | # Generate corpus analytics 43 | 44 | 45 | -------------------------------------------------------------------------------- /data-raw/DATASET.R: -------------------------------------------------------------------------------- 1 | ## code to prepare `DATASET` dataset goes here 2 | library(stringi) 3 | 4 | # load reilly lab default stopwords list 5 | omPath <- r"(..\ConversationAlignPackageDevelopment\data\ReillyLab_Stopwords_25.rData)" 6 | load(omPath) 7 | 8 | View(ReillyLab_Stopwords_25) 9 | 10 | # load lookup database 11 | lookupPath <- r"(..\ConversationAlignPackageDevelopment\data\lookup_db.rda)" 12 | load(lookupPath) 13 | 14 | # test lookup database for non-ascii chars 15 | testASCII_1 <- sapply(lookup_db$word, stringi::stri_enc_mark) 16 | testASCII_1u <- unique(testASCII_1) 17 | print(testASCII_1u) 18 | # print words with native encoding 19 | nativeRows <- lookup_db[testASCII_1 == "native", "word"] 20 | print(nativeRows) 21 | # convert any native encodings to ascii encodings 22 | lookup_db$word <- sapply(lookup_db$word, stringi::stri_escape_unicode) 23 | # check that native encodings have been converted to ascii 24 | nativeRows <- lookup_db[testASCII_1 == "native", "word"] 25 | print(nativeRows) 26 | testASCII_2 <- unique(sapply(lookup_db$word, stringi::stri_enc_mark)) 27 | print(testASCII_2) 28 | 29 | 30 | usethis::use_data(ReillyLab_Stopwords_25, lookup_db, 31 | internal = TRUE, overwrite = TRUE, compress = T) 32 | -------------------------------------------------------------------------------- /doc/CA_Step1_Read.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: CA Step 1 Read_Dyads 3 | subtitle: Read and Format Data for ConversationAlign 4 | author: Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion 5 | date: "`r Sys.Date()`" 6 | show_toc: true 7 | slug: ConversationAlign Read 8 | output: 9 | rmarkdown::html_vignette: 10 | toc: yes 11 | vignette: > 12 | %\VignetteEngine{knitr::rmarkdown} 13 | %\VignetteIndexEntry{CA Step 1 Read_Dyads} 14 | %\VignetteEncoding{UTF-8} 15 | --- 16 | 17 | ```{r, include = FALSE} 18 | knitr::opts_chunk$set( 19 | collapse = TRUE, 20 | comment = "#>" 21 | ) 22 | ``` 23 | 24 | # Installation 25 | Install and load the development version of `ConversationAlign` from [GitHub](https://github.com/) using the `devtools` package. 
26 | ```{r, message=FALSE, warning=F, echo=F} 27 | # Check if devtools is installed, if not install it 28 | if (!require("devtools", quietly = TRUE)) { 29 | install.packages("devtools") 30 | } 31 | 32 | # Load devtools 33 | library(devtools) 34 | 35 | # Check if ConversationAlign is installed, if not install from GitHub 36 | if (!require("ConversationAlign", quietly = TRUE)) { 37 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 38 | } 39 | 40 | # Load ConversationAlign 41 | library(ConversationAlign) 42 | ``` 43 | 44 | # Prep_Data 45 | -------------------------------------------------------------------------------- /man/read_dyads.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/read_dyads.R 3 | \name{read_dyads} 4 | \alias{read_dyads} 5 | \title{read_dyads} 6 | \usage{ 7 | read_dyads(my_path = "my_transcripts") 8 | } 9 | \arguments{ 10 | \item{my_path}{folder of conversation transcripts in csv or txt format} 11 | } 12 | \value{ 13 | a dataframe where each individual conversation transcript in a user's directory has been concatenated. 14 | read_dyads() appends a unique document identifier to each conversation transcript, adding its filename as a factor level of 'Event_ID'. 15 | } 16 | \description{ 17 | Reads pre-formatted dyadic (2 interlocutor) conversation transcripts from your machine. Transcripts must be either csv or txt format. If you are supplying a txt file, your transcript must be formatted as an otter.ai txt file export. Your options for using csv files are more flexible. ConversationAlign minimally requires a csv file with two columns, denoting interlocutor and text. Each separate conversation transcript should be saved as a separate file. ConversationAlign will use the file names as a document ID. Within the read_dyads() function, set the my_path argument as the directory path to the local folder containing your transcripts on your machine (e.g., "my_transcripts"). Please see our github page for examples of properly formatted transcripts: https://github.com/Reilly-ConceptsCognitionLab/ConversationAlign 18 | } 19 | -------------------------------------------------------------------------------- /.github/workflows/static.yml: -------------------------------------------------------------------------------- 1 | # Simple workflow for deploying static content to GitHub Pages 2 | name: Deploy static content to Pages 3 | 4 | on: 5 | # Runs on pushes targeting the default branch 6 | push: 7 | branches: ["main"] 8 | 9 | # Allows you to run this workflow manually from the Actions tab 10 | workflow_dispatch: 11 | 12 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages 13 | permissions: 14 | contents: read 15 | pages: write 16 | id-token: write 17 | 18 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. 19 | # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 
20 | concurrency: 21 | group: "pages" 22 | cancel-in-progress: false 23 | 24 | jobs: 25 | # Single deploy job since we're just deploying 26 | deploy: 27 | environment: 28 | name: github-pages 29 | url: ${{ steps.deployment.outputs.page_url }} 30 | runs-on: ubuntu-latest 31 | steps: 32 | - name: Checkout 33 | uses: actions/checkout@v4 34 | - name: Setup Pages 35 | uses: actions/configure-pages@v5 36 | - name: Upload artifact 37 | uses: actions/upload-pages-artifact@v3 38 | with: 39 | # Upload entire repository 40 | path: '.' 41 | - name: Deploy to GitHub Pages 42 | id: deployment 43 | uses: actions/deploy-pages@v4 44 | -------------------------------------------------------------------------------- /.github/workflows/R-CMD-check.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | branches: [main, master] 8 | 9 | name: R-CMD-check 10 | 11 | jobs: 12 | R-CMD-check: 13 | runs-on: ${{ matrix.config.os }} 14 | 15 | name: ${{ matrix.config.os }} (${{ matrix.config.r }}) 16 | 17 | strategy: 18 | fail-fast: false 19 | matrix: 20 | config: 21 | - {os: macos-latest, r: 'release'} 22 | - {os: windows-latest, r: 'release'} 23 | - {os: ubuntu-latest, r: 'devel', http-user-agent: 'release'} 24 | - {os: ubuntu-latest, r: 'release'} 25 | - {os: ubuntu-latest, r: 'oldrel-1'} 26 | 27 | env: 28 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 29 | R_KEEP_PKG_SOURCE: yes 30 | 31 | steps: 32 | - uses: actions/checkout@v4 33 | 34 | - uses: r-lib/actions/setup-pandoc@v2 35 | 36 | - uses: r-lib/actions/setup-r@v2 37 | with: 38 | r-version: ${{ matrix.config.r }} 39 | http-user-agent: ${{ matrix.config.http-user-agent }} 40 | use-public-rspm: true 41 | 42 | - uses: r-lib/actions/setup-r-dependencies@v2 43 | with: 44 | extra-packages: any::rcmdcheck 45 | needs: check 46 | 47 | - uses: r-lib/actions/check-r-package@v2 48 | with: 49 | upload-snapshots: true 50 | -------------------------------------------------------------------------------- /vignettes/ConversationAlign_Step4_Analytics.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: ConversationAlign_Step4_Analytics 3 | subtitle: corpus_analytics() 4 | author: Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion 5 | date: "`r format(Sys.Date(), '%B %d, %Y')`" 6 | show_toc: true 7 | output: 8 | rmarkdown::html_vignette: 9 | toc: yes 10 | vignette: > 11 | %\VignetteEngine{knitr::rmarkdown} 12 | %\VignetteIndexEntry{ConversationAlign_Step4_Analytics} 13 | %\VignetteEncoding{UTF-8} 14 | --- 15 | 16 | ```{r, include = FALSE} 17 | knitr::opts_chunk$set( 18 | collapse = TRUE, 19 | comment = "#>" 20 | ) 21 | ``` 22 | 23 | ```{r, message=FALSE, warning=F, echo=F} 24 | # Load ConversationAlign 25 | library(ConversationAlign) 26 | ``` 27 | 28 | This is a helpful addition to `ConversationAlign` that will generate a variety of corpus analytics (e.g., word count, type-token-ratio) for your conversation corpus. The output is a summary table that is readily exportable to the specific journal format of your choice using any number of packages such as `flextable` or `tinytable`.
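For instance, a minimal export sketch (illustrative only: it assumes `flextable` is installed -- it is not a `ConversationAlign` dependency -- and `analytics_tab` is a placeholder name for the dataframe returned by `corpus_analytics()`):

```{r, eval=FALSE}
# hypothetical export sketch: analytics_tab stands in for corpus_analytics() output
library(flextable)
ft <- flextable(analytics_tab)                    # wrap the summary table
save_as_docx(ft, path = "corpus_analytics.docx")  # write a journal-ready Word table
```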
29 | 30 | Generate your corpus analytics on the dataframe you created with `prep_dyads`. 31 | 32 | Arguments to `corpus_analytics` include:
33 | 1) **dat_prep** = dataframe created by the ``prep_dyads()`` function
34 | 35 | ```{r, eval=T, warning=F, message=F} 36 | NurseryRhymes_Analytics <- corpus_analytics(dat_prep=NurseryRhymes_Prepped) 37 | knitr::kable(head(NurseryRhymes_Analytics, 15), format = "simple", digits = 2) 38 | ``` 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /tests/testthat/test-compute_auc.R: -------------------------------------------------------------------------------- 1 | # unit testing for computing area under the curve 2 | 3 | # single dyad for testing 4 | one_rhyme <- NurseryRhymes_Prepped[NurseryRhymes_Prepped$Event_ID == "LittleLamb",] 5 | one_rhyme <- droplevels(one_rhyme) 6 | 7 | 8 | test_that("auc formats properly", { 9 | # computed previously 10 | expected_auc_df <- data.frame( 11 | Event_ID = "LittleLamb", Exchanges = 50, Talked_First = as.factor("Mary"), 12 | AUC_emo_anger_raw_Immediate = 1.485925, AUC_emo_anger_scaled50_Immediate = 1.485925, 13 | AUC_emo_anger_raw_Lag1 = 1.4556, AUC_emo_anger_scaled50_Lag1 = 1.4556 14 | ) 15 | expected_auc_df$Talked_First <- factor(expected_auc_df$Talked_First, levels = c("Dave", "Mary")) 16 | # compare to computed df 17 | expect_equal(ConversationAlign:::compute_auc(one_rhyme), expected_auc_df) 18 | }) 19 | 20 | # remove all but first three exchanges 21 | one_rhyme_small <- one_rhyme[one_rhyme$Exchange_Count < 3, ] 22 | 23 | test_that("auc fills with na when under 3 exchanges", { 24 | expected_small_auc_df <- data.frame( 25 | Event_ID = "LittleLamb", Exchanges = 2, Talked_First = as.factor("Mary"), 26 | AUC_emo_anger_raw_Immediate = as.double(NA), AUC_emo_anger_scaled50_Immediate = as.double(NA), 27 | AUC_emo_anger_raw_Lag1 = as.double(NA), AUC_emo_anger_scaled50_Lag1 = as.double(NA) 28 | ) 29 | expected_small_auc_df$Talked_First <- factor(expected_small_auc_df$Talked_First, levels = c("Dave", "Mary")) 30 | # compare to computed df 31 | expect_equal(ConversationAlign:::compute_auc(one_rhyme_small), expected_small_auc_df) 32 | 33 | }) 34 | -------------------------------------------------------------------------------- /man/ConversationAlign-package.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ConversationAlign-package.R 3 | \docType{package} 4 | \name{ConversationAlign-package} 5 | \alias{ConversationAlign} 6 | \alias{ConversationAlign-package} 7 | \title{ConversationAlign: Process Text and Compute Linguistic Alignment in Conversation Transcripts} 8 | \description{ 9 | \if{html}{\figure{logo.png}{options: style='float: right' alt='logo' width='120'}} 10 | 11 | Imports conversation transcripts into R, concatenates them into a single dataframe appending event identifiers, cleans and formats the text, then yokes user-specified psycholinguistic database values to each word. 'ConversationAlign' then computes alignment indices between two interlocutors across each transcript for >40 possible semantic, lexical, and affective dimensions. In addition to alignment, 'ConversationAlign' also produces a table of analytics (e.g., token count, type-token-ratio) in a summary table describing your particular text corpus. 
12 | } 13 | \seealso{ 14 | Useful links: 15 | \itemize{ 16 | \item \url{https://github.com/Reilly-ConceptsCognitionLab/ConversationAlign} 17 | } 18 | 19 | } 20 | \author{ 21 | \strong{Maintainer}: Jamie Reilly \email{jamie_reilly@temple.edu} (\href{https://orcid.org/0000-0002-0891-438X}{ORCID}) 22 | 23 | Authors: 24 | \itemize{ 25 | \item Virginia Ulichney 26 | \item Ben Sacks 27 | } 28 | 29 | Other contributors: 30 | \itemize{ 31 | \item Sarah Weinstein [contributor] 32 | \item Chelsea Helion [contributor] 33 | \item Gus Cooney [contributor] 34 | } 35 | 36 | } 37 | \keyword{internal} 38 | -------------------------------------------------------------------------------- /man/summarize_dyads.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/summarize_dyads.R 3 | \name{summarize_dyads} 4 | \alias{summarize_dyads} 5 | \title{summarize_dyads} 6 | \usage{ 7 | summarize_dyads( 8 | df_prep, 9 | custom_lags = NULL, 10 | sumdat_only = TRUE, 11 | corr_type = "Pearson" 12 | ) 13 | } 14 | \arguments{ 15 | \item{df_prep}{dataframe produced by the prep_dyads() function} 16 | 17 | \item{custom_lags}{integer vector, should any lags be added in addition to -2, 0, 2} 18 | 19 | \item{sumdat_only}{default=TRUE, group and summarize data, two rows per conversation, one row for each participant; FALSE will fill down summary statistics across all exchanges} 20 | 21 | \item{corr_type}{correlation method for computing lagged turn-by-turn covariance (default='Pearson')} 22 | } 23 | \value{ 24 | either: 25 | - a grouped dataframe with summary data aggregated by conversation (Event_ID) and participant if sumdat_only=T. 26 | - the original dataframe 'filled down' with summary data (e.g., AUC, turn-by-turn correlations) for each conversation if sumdat_only=F. 27 | } 28 | \description{ 29 | Calculates and appends 3 measures for quantifying alignment. Appends the averaged value for each selected dimension by turn and speaker. Calculates Spearman's rank correlation between interlocutor time series and appends it by transcript. Calculates the area under the curve of the absolute difference between interlocutor time series. The length of the difference time series can be standardized to the shortest number of exchanges present in the group using an internally defined resampling function, called with resample = TRUE. Spearman's rank correlation and area under the curve become less reliable for dyads under 30 exchanges. 30 | } 31 | -------------------------------------------------------------------------------- /.github/workflows/pkgdown.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? 
Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | release: 8 | types: [published] 9 | workflow_dispatch: 10 | 11 | name: pkgdown.yaml 12 | 13 | permissions: read-all 14 | 15 | jobs: 16 | pkgdown: 17 | runs-on: ubuntu-latest 18 | # Only restrict concurrency for non-PR jobs 19 | concurrency: 20 | group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }} 21 | env: 22 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 23 | permissions: 24 | contents: write 25 | steps: 26 | - uses: actions/checkout@v4 27 | 28 | - uses: r-lib/actions/setup-pandoc@v2 29 | 30 | - uses: r-lib/actions/setup-r@v2 31 | with: 32 | use-public-rspm: true 33 | 34 | - uses: r-lib/actions/setup-r-dependencies@v2 35 | with: 36 | extra-packages: any::pkgdown, local::. 37 | needs: website 38 | 39 | # - name: Build site 40 | # run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) 41 | # shell: Rscript {0} 42 | # 43 | # - name: Deploy to GitHub pages 🚀 44 | # if: github.event_name != 'pull_request' 45 | # uses: JamesIves/github-pages-deploy-action@v4.5.0 46 | # with: 47 | # clean: false 48 | # branch: gh-pages 49 | # folder: docs 50 | 51 | - name: Deploy package 52 | run: | 53 | git config --local user.name "$GITHUB_ACTOR" 54 | git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com" 55 | Rscript -e 'pkgdown::deploy_to_branch(new_process = FALSE)' 56 | -------------------------------------------------------------------------------- /R/data.R: -------------------------------------------------------------------------------- 1 | #' Sample Dyadic Interview Transcript: Marc Maron and Terry Gross Radio Interview 2013 2 | #' 3 | #' Text and talker information delineated, raw transcript, multiple lines per talker 4 | #' 5 | #' @format ## "MaronGross_2013" 6 | #' A data.frame with 546 obs, 2 vars: 7 | #' \describe{ 8 | #' \item{text}{text from interview} 9 | #' \item{speaker}{speaker identity} 10 | #' ... 11 | #' } 12 | "MaronGross_2013" 13 | 14 | 15 | #' Sample Conversation Transcript: Nursery Rhymes 16 | #' 17 | #' Text and talker information delineated, 3 separate nursery rhymes, good for computing analytics and word counts 18 | #' 19 | #' @format ## "NurseryRhymes" 20 | #' A data.frame with 100 observations, 3 vars: 21 | #' \describe{ 22 | #' \item{Event_ID}{factor 3 different simulated conversations} 23 | #' \item{Participant_ID}{fictional speaker names, 2 each conversation} 24 | #' \item{Text_Raw}{simulated language production, actually looped phrases from nursery rhymes} 25 | #' ... 26 | #' } 27 | "NurseryRhymes" 28 | 29 | 30 | #' Sample Conversation Transcript: Nursery Rhymes Prepped 31 | #' 32 | #' Text and talker information delineated, 3 separate nursery rhymes, good for computing analytics and word counts 33 | #' 34 | #' @format ## "NurseryRhymes_Prepped" 35 | #' A data.frame with 1507 observations, 6 vars: 36 | #' \describe{ 37 | #' \item{Event_ID}{factor 3 different simulated conversations} 38 | #' \item{Participant_ID}{fictional speaker names, 2 each conversation} 39 | #' \item{Exchange_Count}{sequential numbering of exchanges by conversation, 1 exchange = 2 turns} 40 | #' \item{Turn_Count}{sequential numbering of turns by conversation} 41 | #' \item{Text_Clean}{content words} 42 | #' \item{emo_anger}{raw value of anger salience yoked to each word} 43 | #' ... 
44 | #' } 45 | "NurseryRhymes_Prepped" 46 | 47 | -------------------------------------------------------------------------------- /man/prep_dyads.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/prep_dyads.R 3 | \name{prep_dyads} 4 | \alias{prep_dyads} 5 | \title{prep_dyads} 6 | \usage{ 7 | prep_dyads( 8 | dat_read, 9 | lemmatize = TRUE, 10 | omit_stops = TRUE, 11 | which_stoplist = "Temple_stops25", 12 | remove_backchannel = FALSE, 13 | verbose = TRUE 14 | ) 15 | } 16 | \arguments{ 17 | \item{dat_read}{dataframe produced from read_dyads() function} 18 | 19 | \item{lemmatize}{logical, should words be lemmatized (switched to base morphological form), default is TRUE} 20 | 21 | \item{omit_stops}{option to remove stopwords, default TRUE} 22 | 23 | \item{which_stoplist}{user-specified stopword removal method with options including "none", "SMART", "MIT_stops", "CA_OriginalStops", or "Temple_stops25". 24 | "Temple_stops25" is the default list} 25 | 26 | \item{remove_backchannel}{logical, should turns that are full of stopwords (e.g., "Uhm yeah") be preserved as NAs or removed. Removal will 'squish' the turn before and after together into one. If NAs are preserved they are later interpolated.} 27 | 28 | \item{verbose}{display detailed output such as error messages and progress (default is TRUE)} 29 | } 30 | \value{ 31 | dataframe with text cleaned and vectorized to a one-word-per-row format. 32 | Lexical norms and metadata are appended to each content word. Cleaned text appears under a new column 33 | called 'Text_Clean'. Any selected dimensions (e.g., word length) and metadata are also appended to each word along 34 | with speaker identity, turn, and Event_ID (conversation identifier). 35 | } 36 | \description{ 37 | Cleans, vectorizes and appends lexical norms to all content words in a language corpus. 38 | The user sets options for stopword removal and lemmatization, and selects up to three psycholinguistic dimensions to yoke norms 39 | to each content word in the original conversation transcript. 40 | } 41 | -------------------------------------------------------------------------------- /R/generate_shams.R: -------------------------------------------------------------------------------- 1 | #' generate_shams 2 | #' 3 | #' Generates a permutation of each individual dyad. Shuffled dyads may act as controls to their originals. 4 | #' 5 | #' @param df_prep Output dataframe of prep_dyads(). 6 | #' @param seed (Optional) a seed for reproducibility in random sampling 7 | #' @returns 8 | #' A dataframe similar to prepped dyads, with each participant's time series randomly shuffled. 
9 | #' @importFrom magrittr %>% 10 | #' @importFrom dplyr group_by 11 | #' @importFrom dplyr summarize 12 | #' @importFrom dplyr across 13 | #' @importFrom dplyr mutate 14 | #' @importFrom dplyr n 15 | #' @export 16 | 17 | generate_shams <- function(df_prep, seed = NULL) { 18 | # if a seed is given, set it 19 | if (is.null(seed)) { # if not given, pick a random seed 20 | seed = sample(1:100000, size = 1) 21 | } 22 | 23 | # summarize down to turn means 24 | turn_mean_df <- df_prep %>% 25 | dplyr::group_by(Event_ID, Exchange_Count, Participant_ID) %>% 26 | dplyr::summarize( 27 | dplyr::across( 28 | matches("^(emo_|lex_|phon_|sem_|df_)"), 29 | ~mean(.x, na.rm = T) 30 | ), 31 | # these can be included as a sanity check 32 | Text_Prep = paste(Text_Prep, collapse = " "), 33 | Text_Clean = paste(Text_Clean, collapse = " "), 34 | .groups = "drop" 35 | ) 36 | 37 | # define function that will allow each column to be sampled identically 38 | sample_seed <- function(x, seed) { 39 | set.seed(seed) 40 | return(sample(x, size = length(x), replace = F)) 41 | } 42 | 43 | # shuffle each participant's time series 44 | sham_df <- turn_mean_df %>% 45 | dplyr::group_by(Event_ID, Participant_ID) %>% 46 | dplyr::mutate( 47 | dplyr::across( 48 | c(matches("^(emo_|lex_|phon_|sem_|df_)"), Text_Prep, Text_Clean), 49 | ~sample_seed(.x, seed = seed) 50 | ) 51 | ) %>% 52 | dplyr::group_by(Event_ID) %>% 53 | dplyr::mutate(Turn_Count = 1:dplyr::n(), .after = Event_ID) 54 | 55 | return(sham_df) 56 | } 57 | -------------------------------------------------------------------------------- /docs/deps/bootstrap-toc-1.0.1/bootstrap-toc.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap Table of Contents v1.0.1 (http://afeld.github.io/bootstrap-toc/) 3 | * Copyright 2015 Aidan Feldman 4 | * Licensed under MIT (https://github.com/afeld/bootstrap-toc/blob/gh-pages/LICENSE.md) */ 5 | !function(a){"use strict";window.Toc={helpers:{findOrFilter:function(e,t){var n=e.find(t);return e.filter(t).add(n).filter(":not([data-toc-skip])")},generateUniqueIdBase:function(e){return a(e).text().trim().replace(/\'/gi,"").replace(/[& +$,:;=?@"#{}|^~[`%!'<>\]\.\/\(\)\*\\\n\t\b\v]/g,"-").replace(/-{2,}/g,"-").substring(0,64).replace(/^-+|-+$/gm,"").toLowerCase()||e.tagName.toLowerCase()},generateUniqueId:function(e){for(var t=this.generateUniqueIdBase(e),n=0;;n++){var r=t;if(0')},createChildNavList:function(e){var t=this.createNavList();return e.append(t),t},generateNavEl:function(e,t){var n=a('');n.attr("href","#"+e),n.text(t);var r=a("
  • ");return r.append(n),r},generateNavItem:function(e){var t=this.generateAnchor(e),n=a(e),r=n.data("toc-text")||n.text();return this.generateNavEl(t,r)},getTopLevel:function(e){for(var t=1;t<=6;t++){if(1 16 | Description: Imports conversation transcripts into R, concatenates them into a single dataframe appending event identifiers, cleans and formats the text, then yokes user-specified psycholinguistic database values to each word. 'ConversationAlign' then computes alignment indices between two interlocutors across each transcript for >40 possible semantic, lexical, and affective dimensions. In addition to alignment, 'ConversationAlign' also produces a table of analytics (e.g., token count, type-token-ratio) in a summary table describing your particular text corpus. 17 | License: LGPL (>= 3) 18 | Encoding: UTF-8 19 | Depends: 20 | R (>= 3.5) 21 | Imports: 22 | DescTools, 23 | dplyr (>= 0.4.3), 24 | httr, 25 | magrittr, 26 | purrr, 27 | rlang, 28 | stringi, 29 | stringr, 30 | textstem, 31 | tibble, 32 | tidyr, 33 | tidyselect, 34 | stats, 35 | utils, 36 | YRmisc, 37 | zoo 38 | Suggests: 39 | devtools, 40 | knitr, 41 | rmarkdown, 42 | testthat (>= 3.0.0) 43 | URL: https://github.com/Reilly-ConceptsCognitionLab/ConversationAlign 44 | RoxygenNote: 7.3.3 45 | LazyData: true 46 | VignetteBuilder: knitr 47 | Collate: 48 | 'ConversationAlign-package.R' 49 | 'compute_auc.R' 50 | 'compute_lagcorr.R' 51 | 'corpus_analytics.R' 52 | 'data.R' 53 | 'generate_shams.R' 54 | 'globals.R' 55 | 'prep_dyads.R' 56 | 'read_1file.R' 57 | 'read_dyads.R' 58 | 'replacements_25.R' 59 | 'summarize_dyads.R' 60 | 'utils.R' 61 | 'zzz.R' 62 | Config/testthat/edition: 3 63 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | export(corpus_analytics) 4 | export(generate_shams) 5 | export(prep_dyads) 6 | export(read_1file) 7 | export(read_dyads) 8 | export(summarize_dyads) 9 | importFrom(DescTools,AUC) 10 | importFrom(dplyr,across) 11 | importFrom(dplyr,bind_cols) 12 | importFrom(dplyr,bind_rows) 13 | importFrom(dplyr,consecutive_id) 14 | importFrom(dplyr,distinct) 15 | importFrom(dplyr,everything) 16 | importFrom(dplyr,filter) 17 | importFrom(dplyr,first) 18 | importFrom(dplyr,group_by) 19 | importFrom(dplyr,lag) 20 | importFrom(dplyr,left_join) 21 | importFrom(dplyr,matches) 22 | importFrom(dplyr,mutate) 23 | importFrom(dplyr,n) 24 | importFrom(dplyr,n_distinct) 25 | importFrom(dplyr,na_if) 26 | importFrom(dplyr,rename) 27 | importFrom(dplyr,rename_at) 28 | importFrom(dplyr,rename_with) 29 | importFrom(dplyr,select) 30 | importFrom(dplyr,summarise) 31 | importFrom(dplyr,summarize) 32 | importFrom(dplyr,ungroup) 33 | importFrom(httr,GET) 34 | importFrom(magrittr,"%>%") 35 | importFrom(purrr,map_dfr) 36 | importFrom(rlang,":=") 37 | importFrom(rlang,sym) 38 | importFrom(stats,cor.test) 39 | importFrom(stats,na.omit) 40 | importFrom(stats,sd) 41 | importFrom(stringi,stri_replace_all_fixed) 42 | importFrom(stringi,stri_replace_all_regex) 43 | importFrom(stringr,str_c) 44 | importFrom(stringr,str_replace) 45 | importFrom(stringr,str_replace_all) 46 | importFrom(stringr,str_squish) 47 | importFrom(stringr,str_subset) 48 | importFrom(stringr,str_trim) 49 | importFrom(textstem,lemmatize_strings) 50 | importFrom(tibble,tibble) 51 | importFrom(tidyr,drop_na) 52 | importFrom(tidyr,fill) 53 | importFrom(tidyr,pivot_longer) 54 | 
importFrom(tidyr,pivot_wider) 55 | importFrom(tidyr,separate) 56 | importFrom(tidyr,separate_rows) 57 | importFrom(tidyselect,all_of) 58 | importFrom(tidyselect,any_of) 59 | importFrom(tidyselect,contains) 60 | importFrom(tidyselect,ends_with) 61 | importFrom(tidyselect,everything) 62 | importFrom(tidyselect,starts_with) 63 | importFrom(tidyselect,where) 64 | importFrom(tools,R_user_dir) 65 | importFrom(utils,download.file) 66 | importFrom(utils,globalVariables) 67 | importFrom(utils,read.csv) 68 | importFrom(utils,select.list) 69 | importFrom(utils,tail) 70 | importFrom(zoo,na.approx) 71 | -------------------------------------------------------------------------------- /R/read_1file.R: -------------------------------------------------------------------------------- 1 | #' read_1file 2 | #' 3 | #' Reads a pre-formatted dyadic (2 interlocutor) conversation transcript already imported into your R environment. 4 | #' 5 | #' @name read_1file 6 | #' @param my_dat one conversation transcript already in the R environment 7 | #' @returns 8 | #' a dataframe formatted with 'Event_ID', 'Participant_ID', and 'RawText' fields -- ready for prep_dyads() 9 | #' @export 10 | 11 | read_1file <- function(my_dat) { 12 | # capture the name (not the contents) of my_dat for use as Event_ID 13 | object_name <- deparse(substitute(my_dat)) 14 | 15 | # Convert to data frame if not already 16 | if (!is.data.frame(my_dat)) { 17 | my_dat <- as.data.frame(my_dat) 18 | } 19 | 20 | # Store original column names for reference 21 | original_cols <- colnames(my_dat) 22 | 23 | # Standardize column names (case-insensitive) 24 | colnames(my_dat) <- tolower(colnames(my_dat)) 25 | 26 | # Initialize standardized columns 27 | standardized_cols <- colnames(my_dat) 28 | 29 | # Participant ID detection and standardization 30 | participant_pattern <- "speaker|speaker_names_raw|participant|interlocutor|patient|person|partner|source|pid|talker" 31 | participant_idx <- grepl(participant_pattern, colnames(my_dat)) 32 | if (sum(participant_idx) > 0) { 33 | standardized_cols[participant_idx] <- "Participant_ID" 34 | } 35 | 36 | # RawText detection and standardization 37 | text_pattern <- "text|turn|talker|mytext|utterance|my_text" 38 | text_idx <- grepl(text_pattern, colnames(my_dat)) 39 | if (sum(text_idx) > 0) { 40 | standardized_cols[text_idx] <- "RawText" 41 | } 42 | 43 | # Apply standardized names 44 | colnames(my_dat) <- standardized_cols 45 | 46 | # Check required columns exist 47 | required_cols <- c("Participant_ID", "RawText") 48 | missing_cols <- setdiff(required_cols, colnames(my_dat)) 49 | 50 | if (length(missing_cols) > 0) { 51 | stop(paste("Missing required columns:", 52 | paste(missing_cols, collapse = ", "), 53 | "\nAvailable columns:", 54 | paste(original_cols, collapse = ", "), 55 | "\nExpected participant columns should match:", participant_pattern, 56 | "\nExpected text columns should match:", text_pattern), 57 | call.
= FALSE) 58 | } 59 | 60 | # Add Event_ID using the object's name 61 | my_dat$Event_ID <- object_name 62 | 63 | # Convert ID columns to factors 64 | id_cols <- c("Event_ID", "Participant_ID") 65 | for (col in id_cols) { 66 | if (col %in% colnames(my_dat)) { 67 | my_dat[[col]] <- as.factor(my_dat[[col]]) 68 | } 69 | } 70 | 71 | # Reorder columns to put standard ones first 72 | standard_cols <- c("Event_ID", "Participant_ID", "RawText") 73 | other_cols <- setdiff(colnames(my_dat), standard_cols) 74 | my_dat <- my_dat[, c(standard_cols, other_cols)] 75 | 76 | return(my_dat) 77 | } 78 | -------------------------------------------------------------------------------- /docs/lightswitch.js: -------------------------------------------------------------------------------- 1 | 2 | /*! 3 | * Color mode toggler for Bootstrap's docs (https://getbootstrap.com/) 4 | * Copyright 2011-2023 The Bootstrap Authors 5 | * Licensed under the Creative Commons Attribution 3.0 Unported License. 6 | * Updates for {pkgdown} by the {bslib} authors, also licensed under CC-BY-3.0. 7 | */ 8 | 9 | const getStoredTheme = () => localStorage.getItem('theme') 10 | const setStoredTheme = theme => localStorage.setItem('theme', theme) 11 | 12 | const getPreferredTheme = () => { 13 | const storedTheme = getStoredTheme() 14 | if (storedTheme) { 15 | return storedTheme 16 | } 17 | 18 | return window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light' 19 | } 20 | 21 | const setTheme = theme => { 22 | if (theme === 'auto') { 23 | document.documentElement.setAttribute('data-bs-theme', (window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light')) 24 | } else { 25 | document.documentElement.setAttribute('data-bs-theme', theme) 26 | } 27 | } 28 | 29 | function bsSetupThemeToggle () { 30 | 'use strict' 31 | 32 | const showActiveTheme = (theme, focus = false) => { 33 | var activeLabel, activeIcon; 34 | 35 | document.querySelectorAll('[data-bs-theme-value]').forEach(element => { 36 | const buttonTheme = element.getAttribute('data-bs-theme-value') 37 | const isActive = buttonTheme == theme 38 | 39 | element.classList.toggle('active', isActive) 40 | element.setAttribute('aria-pressed', isActive) 41 | 42 | if (isActive) { 43 | activeLabel = element.textContent; 44 | activeIcon = element.querySelector('span').classList.value; 45 | } 46 | }) 47 | 48 | const themeSwitcher = document.querySelector('#dropdown-lightswitch') 49 | if (!themeSwitcher) { 50 | return 51 | } 52 | 53 | themeSwitcher.setAttribute('aria-label', activeLabel) 54 | themeSwitcher.querySelector('span').classList.value = activeIcon; 55 | 56 | if (focus) { 57 | themeSwitcher.focus() 58 | } 59 | } 60 | 61 | window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', () => { 62 | const storedTheme = getStoredTheme() 63 | if (storedTheme !== 'light' && storedTheme !== 'dark') { 64 | setTheme(getPreferredTheme()) 65 | } 66 | }) 67 | 68 | window.addEventListener('DOMContentLoaded', () => { 69 | showActiveTheme(getPreferredTheme()) 70 | 71 | document 72 | .querySelectorAll('[data-bs-theme-value]') 73 | .forEach(toggle => { 74 | toggle.addEventListener('click', () => { 75 | const theme = toggle.getAttribute('data-bs-theme-value') 76 | setTheme(theme) 77 | setStoredTheme(theme) 78 | showActiveTheme(theme, true) 79 | }) 80 | }) 81 | }) 82 | } 83 | 84 | setTheme(getPreferredTheme()); 85 | bsSetupThemeToggle(); 86 | -------------------------------------------------------------------------------- /docs/sitemap.xml: 
-------------------------------------------------------------------------------- 1 | 2 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/404.html 3 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/LICENSE.html 4 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles/ConversationAlign Step1 Read.html 5 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles/ConversationAlign Step3 Summarize.html 6 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles/ConversationAlign_Introduction.html 7 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles/ConversationAlign_Step1_Read.html 8 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles/ConversationAlign_Step2_Prep.html 9 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles/ConversationAlign_Step3_Summarize.html 10 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles/ConversationAlign_Step4_Analytics.html 11 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/articles/index.html 12 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/authors.html 13 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/index.html 14 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/ConversationAlign-package.html 15 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/MaronGross_2013.html 16 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/NurseryRhymes.html 17 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/NurseryRhymes_Prepped.html 18 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/corpus_analytics.html 19 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/generate_shams.html 20 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/index.html 21 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/load_github_data.html 22 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/prep_dyads.html 23 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/read_1file.html 24 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/read_dyads.html 25 | https://reilly-conceptscognitionlab.github.io/ConversationAlign/reference/summarize_dyads.html 26 | 27 | 28 | -------------------------------------------------------------------------------- /R/zzz.R: -------------------------------------------------------------------------------- 1 | #' @include utils.R 2 | NULL 3 | 4 | #' Package Loading and Data Initialization 5 | #' 6 | #' @description Handles package initialization including loading required datasets 7 | #' from GitHub or local fallback files. 
8 | #' @keywords internal 9 | #' @returns 10 | #' nothing, loads data from external repository that will be needed by other package functions 11 | #' @importFrom utils download.file 12 | #' @importFrom tools R_user_dir 13 | #' @noRd 14 | #' 15 | .onLoad <- function(libname, pkgname) { 16 | # Create package environment 17 | pkg_env <- asNamespace(pkgname) 18 | 19 | # Critical datasets 20 | critical_datasets <- c("MIT_stops", "lookup_Jul25", "SMART_stops", 21 | "CA_orig_stops", "Temple_stops25") 22 | 23 | #load from GitHub repo 24 | loaded_from <- tryCatch({ 25 | repo_url <- "https://raw.githubusercontent.com/Reilly-ConceptsCognitionLab/ConversationAlign_Data/main/data/" 26 | temp_dir <- tempdir() 27 | 28 | for(ds in critical_datasets) { 29 | temp_file <- file.path(temp_dir, paste0(ds, ".rda")) 30 | utils::download.file( 31 | url = paste0(repo_url, ds, ".rda"), 32 | destfile = temp_file, 33 | mode = "wb", 34 | quiet = TRUE 35 | ) 36 | load(temp_file, envir = pkg_env) 37 | unlink(temp_file) 38 | } 39 | "github" 40 | }, error = function(e) { 41 | # Fallback to cache 42 | cache_dir <- tools::R_user_dir(pkgname, which = "cache") 43 | cached_files <- file.path(cache_dir, paste0(critical_datasets, ".rda")) 44 | 45 | available <- file.exists(cached_files) 46 | if(any(available)) { 47 | for(cf in cached_files[available]) { 48 | load(cf, envir = pkg_env) 49 | } 50 | "cache" 51 | } else { 52 | "none" 53 | } 54 | }) 55 | 56 | # Set package option 57 | options(ConversationAlign.data_source = loaded_from) 58 | } 59 | 60 | .onAttach <- function(libname, pkgname) { 61 | pkg_env <- asNamespace(pkgname) 62 | critical_datasets <- c("MIT_stops", "lookup_Jul25", "SMART_stops", 63 | "CA_orig_stops", "Temple_stops25") 64 | 65 | still_missing <- setdiff(critical_datasets, ls(envir = pkg_env)) 66 | 67 | if(length(still_missing) > 0) { 68 | loaded_from <- getOption("ConversationAlign.data_source", default = "none") 69 | 70 | msg_type <- if(loaded_from == "none") "error" else "warning" 71 | msg <- switch( 72 | msg_type, 73 | "error" = paste( 74 | "Critical data missing:", paste(still_missing, collapse = ", "), 75 | "\nPlease use refresh_data() or contact maintainers" 76 | ), 77 | "warning" = paste( 78 | "Using cached data (missing:", paste(still_missing, collapse = ", "), ")", 79 | "\nSome features unavailable - try refresh_data()" 80 | ) 81 | ) 82 | 83 | if(msg_type == "error") { 84 | warning(msg, call. = FALSE, immediate. = TRUE) 85 | } else { 86 | packageStartupMessage(msg) 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /doc/CA_Step2_Prep.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: CA Step 2 Prep Data 3 | subtitle: prep_dyads() 4 | author: Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion 5 | date: "`r format(Sys.Date(), '%B %d, %Y')`" 6 | show_toc: true 7 | slug: ConversationAlign Prep 8 | output: 9 | rmarkdown::html_vignette: 10 | toc: yes 11 | vignette: | 12 | %\VignetteEngine{knitr::rmarkdown} 13 | %\VignetteIndexEntry{ConversationAlign Step_2 Prep_Dyads} 14 | %\VignetteEncoding{UTF-8} 15 | --- 16 | 17 | Install and load the development version of `ConversationAlign` from [GitHub](https://github.com/) using the `devtools` package. 
18 | ```{r, message=FALSE, warning=F, echo=F} 19 | # Check if devtools is installed, if not install it 20 | if (!require("devtools", quietly = TRUE)) { 21 | install.packages("devtools") 22 | } 23 | 24 | # Load devtools 25 | library(devtools) 26 | 27 | # Check if ConversationAlign is installed, if not install from GitHub 28 | if (!require("ConversationAlign", quietly = TRUE)) { 29 | devtools::install_github("Reilly-ConceptsCognitionLab/ConversationAlign") 30 | } 31 | 32 | # Load ConversationAlign 33 | library(ConversationAlign) 34 | ``` 35 | 36 | ## prep_dyads() 37 | ``prep_dyads()`` uses numerous regular expressions to clean and format the data you just read into R in the previous step. ``ConversationAlign`` applies an ordered sequence of cleaning steps on the road toward vectorizing your original text into a one-word-per-row format. These steps include: converting all text to lowercase, expanding contractions, and omitting all non-alphabetic characters (e.g., numbers, punctuation, line breaks). In addition to text cleaning, users control options for stopword removal and lemmatization. During formatting, ``prep_dyads()`` will prompt you to select up to three variables on which to compute alignment. This works by joining values from a large internal lookup database to each word in your language transcript. ``prep_dyads()`` is customizable via the following arguments. 38 | 39 | ### Stopword removal 40 | There are two important arguments regarding stopword removal. ``omit_stops`` specifies whether or not to remove stopwords. ``which_stoplist`` specifies which stopword list you would like to apply, with the default being ``Temple_stops25``. The full list of choices is: ``none``, ``SMART_stops``, ``CA_orig_stops``, ``MIT_stops``, and ``Temple_stops25``. Stopword removal is an important yet controversial step in text cleaning.
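To make the stopword step concrete, here is a toy sketch of what removal does (illustrative only; `toy_stops` is a made-up mini-list, and the packaged lists such as ``Temple_stops25`` are far larger):

```{r, eval=FALSE}
# hypothetical mini-stoplist for illustration
toy_stops <- c("the", "a", "is", "of")
words <- c("the", "dog", "is", "happy")
# words surviving stopword removal
words[!words %in% toy_stops]
#> [1] "dog"   "happy"
```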
41 | 42 | ### Lemmatization 43 | ``ConversationAlign`` calls the ``textstem`` package as a dependency to lemmatize your language transcript. This converts morphological derivatives to their root forms. The default is `lemmatize=TRUE`. Sometimes you want to retain language output in its native form. If this is the case, set `lemmatize=FALSE` in ``prep_dyads()``. ``prep_dyads()`` outputs word count metrics pre/post cleaning by dyad and interlocutor. This can be useful if you are interested in whether one person just doesn't produce many words or produces a great deal of empty utterances.
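You can get a feel for lemmatization by calling ``textstem`` directly (a minimal sketch; the exact output depends on the lemma dictionary ``textstem`` uses):

```{r, eval=FALSE}
library(textstem)
# with the default dictionary this should return something like "the dog be run"
lemmatize_strings("the dogs were running")
```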
44 | 45 | ### Dimension Selection 46 | This is where the magic happens. ``prep_dyads()`` will yoke published norms for up to 45 possible dimensions to every content word in your transcript. This join is executed by merging your vectorized conversation transcript with a large internal lexical database with norms spanning over 100k English words. ``prep_dyads()`` will prompt you to select anywhere from 1 to 3 target dimensions at a time. Enter the number corresponding to each dimension of interest separated by spaces and then hit enter (e.g., 10 14 19). ``ConversationAlign`` will append a published norm if available (e.g., concreteness, word length) to every running word in your transcript. These quantitative values are used in the subsequent ``summarize_dyads()`` step to compute alignment.
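Putting the steps together, a minimal end-to-end sketch looks like this (assuming your transcripts live in a folder called `my_transcripts`; the interactive dimension menu appears at the `prep_dyads()` step):

```{r, eval=FALSE}
# read and concatenate all transcripts in the folder
MyConvos <- read_dyads(my_path = "my_transcripts")
# clean, vectorize, and yoke norms (you will be prompted to pick dimensions)
MyConvos_Prepped <- prep_dyads(dat_read = MyConvos, lemmatize = TRUE,
                               omit_stops = TRUE, which_stoplist = "Temple_stops25")
# compute main effects and alignment statistics
MySumDat <- summarize_dyads(df_prep = MyConvos_Prepped, sumdat_only = TRUE)
```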
47 | 48 | -------------------------------------------------------------------------------- /vignettes/ConversationAlign_Step1_Read.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: ConversationAlign_Step1_Read 3 | subtitle: Read and Format Data for ConversationAlign 4 | author: Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion 5 | date: "`r Sys.Date()`" 6 | show_toc: true 7 | output: 8 | rmarkdown::html_vignette: 9 | toc: yes 10 | vignette: > 11 | %\VignetteEngine{knitr::rmarkdown} 12 | %\VignetteIndexEntry{ConversationAlign_Step1_Read} 13 | %\VignetteEncoding{UTF-8} 14 | --- 15 | 16 | ```{r, include = FALSE} 17 | knitr::opts_chunk$set( 18 | collapse = TRUE, 19 | comment = "#>" 20 | ) 21 | ``` 22 | 23 | ```{r, message=FALSE, warning=F, echo=F} 24 | # Load ConversationAlign 25 | library(ConversationAlign) 26 | ``` 27 | 28 | # Reading data into R for ConversationAlign 29 | Half the battle with R is getting your data imported and formatted. This is especially true for string data and working with text. `ConversationAlign` uses a series of sequential functions to import, clean, and format your raw data. You **MUST** run each of these functions. They append important variable names and automatically reshape your data.
30 | 31 | # Prepping your data for import 32 | - `ConversationAlign` works **ONLY** on dyadic (i.e., two-person) conversation transcripts. 33 | - Each transcript must nominally contain two columns: one column should delineate the interlocutor (person who produced the text), and another column should contain the text itself. 34 | - `ConversationAlign` contains an import function called `read_dyads()` that will scan a target folder for text samples. 35 | - `read_dyads()` will import all of your transcripts into R and concatenate them into a single dataframe. 36 | - `read_dyads()` will append each transcript's filename as a unique identifier for that conversation. This is SUPER important to remember when analyzing your data. 37 | - Store each of your individual conversation transcripts (`.csv`, `.txt`, `.ai`) that you wish to concatenate into a corpus in a folder. `ConversationAlign` will search for a folder called `my_transcripts` in the same directory as your script. However, feel free to name your folder anything you like. You can specify a custom path as an argument to `read_dyads()`. 38 | - Each transcript must nominally contain two columns of data (Participant and Text). All other columns (e.g., meta-data) will be retained. 39 | 40 | ## `read_dyads()` 41 | Here are some examples of `read_dyads()` in action. There is only one argument to `read_dyads()`, and that is `my_path`. This is for supplying a quoted directory path to the folder where your transcripts live. Remember to treat this folder as a staging area! Once you are finished with a set of transcripts and don't want them read into `ConversationAlign`, move them out of the folder or specify a new folder. Language data tends to proliferate quickly, and it is easy to forget what you are doing. Be a CAREFUL secretary, and record your steps. 42 | 43 | Arguments to `read_dyads` include:
    44 | 1. **my_path**: default is 'my_transcripts', change path to your folder name
45 | ```{r, eval=F, message=F, warning=F} 46 | #will search for folder 'my_transcripts' in your current directory 47 | MyConvos <- read_dyads() 48 | 49 | #will scan custom folder called 'MyStuff' in your current directory, concatenating all files in that folder into a single dataframe 50 | MyConvos2 <- read_dyads(my_path='/MyStuff') 51 | ``` 52 | 53 | ## `read_1file()` 54 | - Reads a single transcript already in your R environment. We will use `read_1file()` to prep the Marc Maron and Terry Gross transcript. Look at how the column headers have changed and how the object name (MaronGross_2013) is now the Event_ID (a document identifier).
    55 | 56 | Arguments to `read_1file` include:
57 | 1. **my_dat**: object already in your R environment containing text and speaker information. 58 | ```{r, eval=T, message=F, warning=F} 59 | MaryLittleLamb <- read_1file(MaronGross_2013) 60 | #print first fifteen rows of the formatted transcript 61 | knitr::kable(head(MaryLittleLamb, 15), format = "pipe") 62 | ``` 63 |
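Because `read_1file()` matches column names by pattern, a transcript with generic headers is standardized automatically. A small sketch with a hypothetical object named `toy`:

```{r, eval=FALSE}
toy <- data.frame(
  speaker = c("Mary", "Dave", "Mary"),
  text = c("How are you?", "Fine, thanks.", "Glad to hear it.")
)
toy_ready <- read_1file(toy)
# 'speaker' and 'text' are standardized to Participant_ID and RawText;
# Event_ID is set from the object name, here "toy"
colnames(toy_ready)
```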
    64 | -------------------------------------------------------------------------------- /docs/deps/bootstrap-5.3.1/font.css: -------------------------------------------------------------------------------- 1 | /* cyrillic-ext */ 2 | @font-face { 3 | font-family: 'Nunito Sans'; 4 | font-style: normal; 5 | font-weight: 400; 6 | font-stretch: 100%; 7 | font-display: swap; 8 | src: url(fonts/626330658504e338ee86aec8e957426b.woff2) format('woff2'); 9 | unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; 10 | } 11 | /* cyrillic */ 12 | @font-face { 13 | font-family: 'Nunito Sans'; 14 | font-style: normal; 15 | font-weight: 400; 16 | font-stretch: 100%; 17 | font-display: swap; 18 | src: url(fonts/07d40e985ad7c747025dabb9f22142c4.woff2) format('woff2'); 19 | unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; 20 | } 21 | /* vietnamese */ 22 | @font-face { 23 | font-family: 'Nunito Sans'; 24 | font-style: normal; 25 | font-weight: 400; 26 | font-stretch: 100%; 27 | font-display: swap; 28 | src: url(fonts/1f5e011d6aae0d98fc0518e1a303e99a.woff2) format('woff2'); 29 | unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; 30 | } 31 | /* latin-ext */ 32 | @font-face { 33 | font-family: 'Nunito Sans'; 34 | font-style: normal; 35 | font-weight: 400; 36 | font-stretch: 100%; 37 | font-display: swap; 38 | src: url(fonts/c2f002b3a87d3f9bfeebb23d32cfd9f8.woff2) format('woff2'); 39 | unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; 40 | } 41 | /* latin */ 42 | @font-face { 43 | font-family: 'Nunito Sans'; 44 | font-style: normal; 45 | font-weight: 400; 46 | font-stretch: 100%; 47 | font-display: swap; 48 | src: url(fonts/ee91700cdbf7ce16c054c2bb8946c736.woff2) format('woff2'); 49 | unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; 50 | } 51 | /* cyrillic-ext */ 52 | @font-face { 53 | font-family: 'Nunito Sans'; 54 | font-style: normal; 55 | font-weight: 600; 56 | font-stretch: 100%; 57 | font-display: swap; 58 | src: url(fonts/626330658504e338ee86aec8e957426b.woff2) format('woff2'); 59 | unicode-range: U+0460-052F, U+1C80-1C8A, U+20B4, U+2DE0-2DFF, U+A640-A69F, U+FE2E-FE2F; 60 | } 61 | /* cyrillic */ 62 | @font-face { 63 | font-family: 'Nunito Sans'; 64 | font-style: normal; 65 | font-weight: 600; 66 | font-stretch: 100%; 67 | font-display: swap; 68 | src: url(fonts/07d40e985ad7c747025dabb9f22142c4.woff2) format('woff2'); 69 | unicode-range: U+0301, U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; 70 | } 71 | /* vietnamese */ 72 | @font-face { 73 | font-family: 'Nunito Sans'; 74 | font-style: normal; 75 | font-weight: 600; 76 | font-stretch: 100%; 77 | font-display: swap; 78 | src: url(fonts/1f5e011d6aae0d98fc0518e1a303e99a.woff2) format('woff2'); 79 | unicode-range: U+0102-0103, U+0110-0111, U+0128-0129, U+0168-0169, U+01A0-01A1, U+01AF-01B0, U+0300-0301, U+0303-0304, U+0308-0309, U+0323, U+0329, U+1EA0-1EF9, U+20AB; 80 | } 81 | /* latin-ext */ 82 | @font-face { 83 | font-family: 'Nunito Sans'; 84 | font-style: normal; 85 | font-weight: 600; 86 | font-stretch: 100%; 87 | font-display: swap; 88 | src: url(fonts/c2f002b3a87d3f9bfeebb23d32cfd9f8.woff2) 
format('woff2'); 89 | unicode-range: U+0100-02BA, U+02BD-02C5, U+02C7-02CC, U+02CE-02D7, U+02DD-02FF, U+0304, U+0308, U+0329, U+1D00-1DBF, U+1E00-1E9F, U+1EF2-1EFF, U+2020, U+20A0-20AB, U+20AD-20C0, U+2113, U+2C60-2C7F, U+A720-A7FF; 90 | } 91 | /* latin */ 92 | @font-face { 93 | font-family: 'Nunito Sans'; 94 | font-style: normal; 95 | font-weight: 600; 96 | font-stretch: 100%; 97 | font-display: swap; 98 | src: url(fonts/ee91700cdbf7ce16c054c2bb8946c736.woff2) format('woff2'); 99 | unicode-range: U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+0304, U+0308, U+0329, U+2000-206F, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD; 100 | } 101 | -------------------------------------------------------------------------------- /docs/deps/headroom-0.11.0/headroom.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * headroom.js v0.11.0 - Give your page some headroom. Hide your header until you need it 3 | * Copyright (c) 2020 Nick Williams - http://wicky.nillia.ms/headroom.js 4 | * License: MIT 5 | */ 6 | 7 | !function(t,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(t=t||self).Headroom=n()}(this,function(){"use strict";function t(){return"undefined"!=typeof window}function d(t){return function(t){return t&&t.document&&function(t){return 9===t.nodeType}(t.document)}(t)?function(t){var n=t.document,o=n.body,s=n.documentElement;return{scrollHeight:function(){return Math.max(o.scrollHeight,s.scrollHeight,o.offsetHeight,s.offsetHeight,o.clientHeight,s.clientHeight)},height:function(){return t.innerHeight||s.clientHeight||o.clientHeight},scrollY:function(){return void 0!==t.pageYOffset?t.pageYOffset:(s||o.parentNode||o).scrollTop}}}(t):function(t){return{scrollHeight:function(){return Math.max(t.scrollHeight,t.offsetHeight,t.clientHeight)},height:function(){return Math.max(t.offsetHeight,t.clientHeight)},scrollY:function(){return t.scrollTop}}}(t)}function n(t,s,e){var n,o=function(){var n=!1;try{var t={get passive(){n=!0}};window.addEventListener("test",t,t),window.removeEventListener("test",t,t)}catch(t){n=!1}return n}(),i=!1,r=d(t),l=r.scrollY(),a={};function c(){var t=Math.round(r.scrollY()),n=r.height(),o=r.scrollHeight();a.scrollY=t,a.lastScrollY=l,a.direction=ls.tolerance[a.direction],e(a),l=t,i=!1}function h(){i||(i=!0,n=requestAnimationFrame(c))}var u=!!o&&{passive:!0,capture:!1};return t.addEventListener("scroll",h,u),c(),{destroy:function(){cancelAnimationFrame(n),t.removeEventListener("scroll",h,u)}}}function o(t,n){n=n||{},Object.assign(this,o.options,n),this.classes=Object.assign({},o.options.classes,n.classes),this.elem=t,this.tolerance=function(t){return t===Object(t)?t:{down:t,up:t}}(this.tolerance),this.initialised=!1,this.frozen=!1}return o.prototype={constructor:o,init:function(){return 
o.cutsTheMustard&&!this.initialised&&(this.addClass("initial"),this.initialised=!0,setTimeout(function(t){t.scrollTracker=n(t.scroller,{offset:t.offset,tolerance:t.tolerance},t.update.bind(t))},100,this)),this},destroy:function(){this.initialised=!1,Object.keys(this.classes).forEach(this.removeClass,this),this.scrollTracker.destroy()},unpin:function(){!this.hasClass("pinned")&&this.hasClass("unpinned")||(this.addClass("unpinned"),this.removeClass("pinned"),this.onUnpin&&this.onUnpin.call(this))},pin:function(){this.hasClass("unpinned")&&(this.addClass("pinned"),this.removeClass("unpinned"),this.onPin&&this.onPin.call(this))},freeze:function(){this.frozen=!0,this.addClass("frozen")},unfreeze:function(){this.frozen=!1,this.removeClass("frozen")},top:function(){this.hasClass("top")||(this.addClass("top"),this.removeClass("notTop"),this.onTop&&this.onTop.call(this))},notTop:function(){this.hasClass("notTop")||(this.addClass("notTop"),this.removeClass("top"),this.onNotTop&&this.onNotTop.call(this))},bottom:function(){this.hasClass("bottom")||(this.addClass("bottom"),this.removeClass("notBottom"),this.onBottom&&this.onBottom.call(this))},notBottom:function(){this.hasClass("notBottom")||(this.addClass("notBottom"),this.removeClass("bottom"),this.onNotBottom&&this.onNotBottom.call(this))},shouldUnpin:function(t){return"down"===t.direction&&!t.top&&t.toleranceExceeded},shouldPin:function(t){return"up"===t.direction&&t.toleranceExceeded||t.top},addClass:function(t){this.elem.classList.add.apply(this.elem.classList,this.classes[t].split(" "))},removeClass:function(t){this.elem.classList.remove.apply(this.elem.classList,this.classes[t].split(" "))},hasClass:function(t){return this.classes[t].split(" ").every(function(t){return this.classList.contains(t)},this.elem)},update:function(t){t.isOutOfBounds||!0!==this.frozen&&(t.top?this.top():this.notTop(),t.bottom?this.bottom():this.notBottom(),this.shouldUnpin(t)?this.unpin():this.shouldPin(t)&&this.pin())}},o.options={tolerance:{up:0,down:0},offset:0,scroller:t()?window:null,classes:{frozen:"headroom--frozen",pinned:"headroom--pinned",unpinned:"headroom--unpinned",top:"headroom--top",notTop:"headroom--not-top",bottom:"headroom--bottom",notBottom:"headroom--not-bottom",initial:"headroom"}},o.cutsTheMustard=!!(t()&&function(){}.bind&&"classList"in document.documentElement&&Object.assign&&Object.keys&&requestAnimationFrame),o}); -------------------------------------------------------------------------------- /inst/NEWS.md: -------------------------------------------------------------------------------- 1 | # ConversationAlign 0.4.0 2 | 3 | # - 2025-10-12 4 | 5 | ## Breaking Changes 6 | 7 | This is a major release including several new features and small changes. 8 | 9 | ### Added 10 | - New function ``generate_shams`` to shuffle the order of interlocutor turns within each conversation. This creates a group of control conversations. 11 | - New argument ``remove_backchannel`` in ``prep_dyads`` allows users to remove turns composed entirely of stopwords, preventing them from being filled in later stages. 12 | 13 | ### Modified 14 | - ``summarize_dyads`` now computes dAUC for a lagged time series in addition to the unchanged. This is designed to account for both ways that interlocutors can be paired in each exchange. 15 | - Updated names of standardized dAUC columns to reflect that they are standardized to 50 turns. 16 | - Updates to documentation and website for new and modified features. 
17 | 18 | 19 | 20 | # ConversationAlign 0.3.1 21 | 22 | # - 2025-7-29 23 | 24 | ## Breaking changes 25 | 26 | This is a minor release involving one bug repair. 27 | 28 | 29 | ### Modified 30 | - prep_dyads() added several missing cleaning functions that had been inadvertently omitted during an earlier merge in preparation for CRAN 31 | 32 | 33 | 34 | # ConversationAlign 0.3.0 35 | 36 | # - 2025-7-21 37 | 38 | ## Breaking changes 39 | 40 | This is a minor release involving mostly optimization of functions. 41 | 42 | ### Added 43 | - Software license now LGPL 3.0 44 | 45 | ### Modified 46 | - Omitted prior GPL software license 47 | - Path to Journal of Open Source Software (JOSS) now points to main 48 | 49 | 50 | 51 | # ConversationAlign 0.2.0 52 | 53 | # - 2025-7-15 54 | 55 | ## Breaking changes 56 | 57 | This is a major release. We have included many new features and fixed numerous bugs. 58 | 59 | ### Added 60 | - Added news (you're reading it now), discussions, and issues/bug tracker to Github repo 61 | - ``prep_dyads()`` that combines two previous preparation steps `clean_dyads()` and `align_dyads()` 62 | - New function `read_1file()` that formats a conversation transcript already in your R environment 63 | - Improved handling of contractions with error detection/substitution of non-ASCII character apostrophes. Contraction expansion (e.g., 'they're' to 'they are') now handled by a separate internal function call using custom regex. 64 | - Added argument ``omit_stops`` to ``prep_dyads()`` specifying optional stopword removal 65 | - Added argument ``which_stoplist`` to ``prep_dyads()`` specifying one of four stopword list options (SMART_stops, MIT_stops, Temple_stops25, CA_orig_stops). 66 | - Added pdf manual for ``ConversationAlign`` 67 | - Validation checks to `read_dyads()` will throw an error and warning message if any transcript has more or fewer than two participants for any individual conversation (Event_ID) 68 | - Added `corpus_analytics()` function that produces a variety of descriptive summary statistics (e.g., n-conversations, type-token ratio, average turn length) in a table format. 69 | 70 | ### Modified 71 | - `summarize_dyads()` restructured in significant ways, including the addition of three new arguments: `custom_lags` (default NULL): lags for correlation default to -2, 0, and 2, and users are free to specify additional lags; `sumdat_only` produces a summary dataframe with values averaged to two rows per conversation (one for each participant); `corr_type` specifies the correlation to apply to lagged data 72 | - Resampling of AUC in `summarize_dyads()` has been omitted in favor of proportionally rescaling dAUC to a standardized/fixed number of turns (100) 73 | - Modified internal lookup database: ``lookup_db`` replaced with ``lookup_Jul25``, which includes many more dimensions and options for scaled vs. raw versions of variables 74 | - Moved all internal data to a different repository (ConversationAlign_Data) in order to get the package within CRAN file size constraints. Package now loads data from an external HTTPS source 75 | - Omitted outside calls to libraries from functions 76 | 77 | ### Fixed 78 | - Warnings and checks (variable names, etc.) to read, clean, align functions 79 | 80 | ### Deprecated 81 | - We are in the process of phasing out our original stopword list from earlier versions of ConversationAlign (pre 2025). We have included the old list as an optional stopword-list argument value, ``CA_OriginalStops``.
However, the default stopword list is new: ``Temple_Stops25`` 82 | - Eliminated several dimensions from the lookup database: prevalence, hope, stress, politeness, empathy, closeness, encouragement, doubt, hostility, surprise 83 | - `clean_dyads()` and `align_dyads()` have been replaced by `prep_dyads()` 84 | -------------------------------------------------------------------------------- /vignettes/ConversationAlign_Step3_Summarize.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: ConversationAlign_Step3_Summarize 3 | subtitle: summarize_dyads() 4 | author: Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion 5 | date: "`r format(Sys.Date(), '%B %d, %Y')`" 6 | show_toc: true 7 | output: 8 | rmarkdown::html_vignette: 9 | toc: yes 10 | vignette: | 11 | %\VignetteEngine{knitr::rmarkdown} 12 | %\VignetteIndexEntry{ConversationAlign_Step3_Summarize} 13 | %\VignetteEncoding{UTF-8} 14 | --- 15 | 16 | ```{r, include = FALSE} 17 | knitr::opts_chunk$set( 18 | collapse = TRUE, 19 | comment = "#>" 20 | ) 21 | ``` 22 | 23 | ```{r, message=FALSE, warning=F, echo=F} 24 | # Load ConversationAlign 25 | # library(devtools) 26 | # devtools::install_github("https://github.com/Reilly-ConceptsCognitionLab/ConversationAlign.git") 27 | library(ConversationAlign) 28 | ``` 29 | 30 | This is the final step, where `ConversationAlign` computes summary statistics, including main effects and alignment statistics, for the vectorized dataframe you produced using `prep_dyads()`. Users have several options for how to output their data, and these choices should be guided by your analysis strategy. For example, a linear mixed effects approach might involve modeling the rise and fall of values across turns. In contrast, a standard ANOVA would work on grouped summary data. 31 | 32 | - Main effects for each dimension of interest aggregated by Conversation (Event_ID) and Person (Participant_ID). For example, main effects of concreteness would involve aggregated means of all the concreteness values for words produced by Mary vs. words produced by Dave in an individual conversation. 33 | - dAUC_raw: difference area under the curve, reflecting the difference between interlocutors at each turn on each dimension, uncorrected for conversation length. For example, if Mary's concreteness at Exchange 1 was 8 (on a scale of 0-9) and Dave's concreteness on Exchange 1 was 4, the difference between Mary and Dave in this one-turn conversation would be 4. dAUC reflects the area across all turns estimated using the trapezoidal rule. 34 | - dAUC_scaled100: AUC normalized to 100 turns using proportional scaling, i.e., (Observed AUC / Raw Turns) = (Normalized AUC / 100). 35 | - Lead_Corr2: Pearson or Spearman correlation reflecting turn-by-turn covariance across partners for each specified dimension, computed with one partner's series led by two turns 36 | - Lag_Corr2: the corresponding correlation computed with a lag of two turns 37 | - Who_Talked_First: Interlocutor who started the conversation (needed for interpreting lead/lag stats) 38 | 39 | 40 | Arguments to ``summarize_dyads()`` include:
41 | 1) **df_prep**= dataframe created by the ``prep_dyads()`` function
42 | 2) **custom_lags**= default is NULL; any additional user-specified lagged correlations. The function automatically produces a lead of 2 turns, the immediate response, and a lag of 2 turns for each dimension of interest.
43 | 3) **sumdat_only**= boolean, default is TRUE; produces a grouped summary dataframe with averages by conversation and participant for each alignment dimension. FALSE retains all of the original rows, filling down empty rows of summary statistics for the conversation (e.g., AUC)
    44 | 4) **corr_type**= default='Pearson', other option 'Spearman' for computing turn-by-turn correlations across interlocutors for each dimension of interest. 45 | 46 | ```{r, eval=T, warning=F, message=F, options(digits=3)} 47 | MarySumDat <- summarize_dyads(df_prep = NurseryRhymes_Prepped, custom_lags=NULL, sumdat_only = TRUE, corr_type='Pearson') 48 | colnames(MarySumDat) 49 | knitr::kable(head(MarySumDat, 15), format = "simple", digits = 3) 50 | ``` 51 | 52 | ## Generating sham conversations 53 | Some research questions would benefit from the use of conversations that control for some temporal effects. The function ``generate_shams()`` accepts the output of ``prep_dyads()`` and returns a data frame in the same structure with each interlocutor's time series randomly shuffled. Since the output has the same format as ``prep_dyads()`` output, it can easily be supplied to ``summarize_dyads()`` and compared to the real conversations. 54 | 55 | Arguments to ``generate_shams()`` include:
56 | 1) **df_prep**= dataframe created by the ``prep_dyads()`` function
    57 | 2) **seed**= numeric, a number to supply as a seed. This allows for reproducible results.
    58 | 59 | ```{r, eval=T, warning=F, message=F} 60 | MaryShams <- generate_shams(df_prep = NurseryRhymes_Prepped, seed = 10) 61 | MarySumDatShams <- summarize_dyads(df_prep = MaryShams, custom_lags=NULL, sumdat_only = TRUE, corr_type='Pearson') 62 | knitr::kable(head(MarySumDatShams, 15), format = "simple", digits = 3) 63 | ``` 64 | -------------------------------------------------------------------------------- /vignettes/ConversationAlign_Step2_Prep.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: ConversationAlign_Step2_Prep 3 | subtitle: prep_dyads() 4 | author: Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion 5 | date: "`r format(Sys.Date(), '%B %d, %Y')`" 6 | show_toc: true 7 | output: 8 | rmarkdown::html_vignette: 9 | toc: yes 10 | vignette: | 11 | %\VignetteEngine{knitr::rmarkdown} 12 | %\VignetteIndexEntry{ConversationAlign_Step2_Prep} 13 | %\VignetteEncoding{UTF-8} 14 | --- 15 | 16 | ```{r, message=FALSE, warning=F, echo=F} 17 | # Load SemanticDistance 18 | library(ConversationAlign) 19 | ``` 20 | 21 | # Cleaning, Formatting, Aligning Norms to Your Data 22 | Lots of wild operations happen in this next step that transform your unstructured text to numeric time series objects aggregated by conversation and interlocutor. It is important that you have a handle on what `prep_dyads()` does and what processes such as lemmatization and stopword removal mean.
23 | 24 | ``prep_dyads()`` uses numerous regular expressions to clean and format the data you just read into R in the previous step. ``ConversationAlign`` applies an ordered sequence of cleaning steps on the road toward vectorizing your original text into a one-word-per-row format. These steps include: converting all text to lowercase, expanding contractions, and omitting all non-alphabetic characters (e.g., numbers, punctuation, line breaks). In addition to text cleaning, users control options for stopword removal and lemmatization. During formatting, ``prep_dyads()`` will prompt you to select up to three variables on which to compute alignment. This works by joining values from a large internal lookup database to each word in your language transcript. ``prep_dyads()`` is customizable via the following arguments. 25 | 26 | ### Stopword removal 27 | There are two important arguments regarding stopword removal. ``omit_stops`` specifies whether or not to remove stopwords. ``which_stoplist`` specifies which stopword list you would like to apply, with the default being ``Temple_stops25``. The full list of choices is: ``none``, ``SMART_stops``, ``CA_orig_stops``, ``MIT_stops``, and ``Temple_stops25``. Stopword removal is an important yet controversial step in text cleaning.
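For example (an `eval=FALSE` sketch of the two stopword arguments using the bundled `NurseryRhymes` data):

```{r, eval=FALSE}
# keep all stopwords
NoStops <- prep_dyads(dat_read = NurseryRhymes, omit_stops = FALSE)
# remove stopwords using the SMART list rather than the default Temple_stops25
SmartStops <- prep_dyads(dat_read = NurseryRhymes, omit_stops = TRUE,
                         which_stoplist = "SMART_stops")
```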
28 | 29 | ### Lemmatization 30 | ``ConversationAlign`` calls the ``textstem`` package as a dependency to lemmatize your language transcript. This converts morphological derivatives to their root forms. The default is `lemmatize=TRUE`. Sometimes you want to retain language output in its native form. If this is the case, set `lemmatize=FALSE` in ``prep_dyads()``. ``prep_dyads()`` outputs word count metrics pre/post cleaning by dyad and interlocutor. This can be useful if you are interested in whether one person just doesn't produce many words or produces a great deal of empty utterances.
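For example, to keep inflected forms intact (a sketch using the bundled `NurseryRhymes` data):

```{r, eval=FALSE}
# retain words in their original inflected forms
RawForms <- prep_dyads(dat_read = NurseryRhymes, lemmatize = FALSE)
```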
31 | 32 | ### Dimension Selection 33 | This is where the magic happens. ``prep_dyads()`` will yoke published norms for >40 possible dimensions to every content word in your transcript (up to 3 at a time). This join is executed by merging your vectorized conversation transcript with a large internal lexical database with norms spanning over 100k English words. ``prep_dyads()`` will prompt you to select anywhere from 1 to 3 target dimensions at a time. Enter the number corresponding to each dimension of interest separated by spaces and then hit enter (e.g., 10 14 19). ``ConversationAlign`` will append a published norm if available (e.g., concreteness, word length) to every running word in your transcript. These quantitative values are used in the subsequent ``summarize_dyads()`` step to compute alignment.
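Conceptually, the join works like the sketch below (a toy example with made-up norm values, not the package's internal database):

```{r, eval=FALSE}
library(dplyr)
transcript <- data.frame(word = c("dog", "run", "happy"))
toy_norms  <- data.frame(word = c("dog", "happy"),
                         concreteness = c(4.9, 1.8))  # hypothetical values
left_join(transcript, toy_norms, by = "word")
# words absent from the norms (here "run") receive NA for that dimension
```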
34 | 35 | # `prep_dyads()` 36 | - Cleans, formats, and vectorizes conversation transcripts to a one-word-per-row format 37 | - Yokes psycholinguistic norms for up to three dimensions at a time (from >40 possible dimensions) to each content word 38 | - Retains metadata
    39 | 40 | Arguments to `prep_dyads`:
    41 | 1) **dat_read**= name of the dataframe created during `read_dyads()`
    42 | 2) **omit_stops**= T/F (default=T) option to remove stopwords 43 | 3) **lemmatize**= lemmatize strings converting each entry to its dictionary form, default is `lemmatize=TRUE`
44 | 4) **which_stoplist**= quoted argument specifying the stopword list; options include ``none``, ``MIT_stops``, ``SMART_stops``, ``CA_orig_stops``, or ``Temple_stops25``. Default is ``Temple_stops25``
45 | 5) **remove_backchannel**= logical, should turns comprised entirely of stopwords be removed or preserved as NAs. NAs are filled in future steps. Default is ``FALSE``. 46 | 47 | ```{r, eval=F, message=F, warning=F} 48 | # Example of running the function 49 | NurseryRhymes_Prepped <- prep_dyads(dat_read=NurseryRhymes, lemmatize=TRUE, omit_stops=T, which_stoplist="Temple_stops25") 50 | ``` 51 | 52 | ## Example of a prepped dataset 53 | This is embedded as external data in the package, with 'anger' values yoked to each word. 54 | ```{r} 55 | knitr::kable(head(NurseryRhymes_Prepped, 20), format = "simple", digits=2) 56 | ``` 57 | 58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /docs/pkgdown.js: -------------------------------------------------------------------------------- 1 | /* http://gregfranko.com/blog/jquery-best-practices/ */ 2 | (function($) { 3 | $(function() { 4 | 5 | $('nav.navbar').headroom(); 6 | 7 | Toc.init({ 8 | $nav: $("#toc"), 9 | $scope: $("main h2, main h3, main h4, main h5, main h6") 10 | }); 11 | 12 | if ($('#toc').length) { 13 | $('body').scrollspy({ 14 | target: '#toc', 15 | offset: $("nav.navbar").outerHeight() + 1 16 | }); 17 | } 18 | 19 | // Activate popovers 20 | $('[data-bs-toggle="popover"]').popover({ 21 | container: 'body', 22 | html: true, 23 | trigger: 'focus', 24 | placement: "top", 25 | sanitize: false, 26 | }); 27 | 28 | $('[data-bs-toggle="tooltip"]').tooltip(); 29 | 30 | /* Clipboard --------------------------*/ 31 | 32 | function changeTooltipMessage(element, msg) { 33 | var tooltipOriginalTitle=element.getAttribute('data-bs-original-title'); 34 | element.setAttribute('data-bs-original-title', msg); 35 | $(element).tooltip('show'); 36 | element.setAttribute('data-bs-original-title', tooltipOriginalTitle); 37 | } 38 | 39 | if(ClipboardJS.isSupported()) { 40 | $(document).ready(function() { 41 | var copyButton = ""; 42 | 43 | $("div.sourceCode").addClass("hasCopyButton"); 44 | 45 | // Insert copy buttons: 46 | $(copyButton).prependTo(".hasCopyButton"); 47 | 48 | // Initialize tooltips: 49 | $('.btn-copy-ex').tooltip({container: 'body'}); 50 | 51 | // Initialize clipboard: 52 | var clipboard = new ClipboardJS('[data-clipboard-copy]', { 53 | text: function(trigger) { 54 | return trigger.parentNode.textContent.replace(/\n#>[^\n]*/g, ""); 55 | } 56 | }); 57 | 58 | clipboard.on('success', function(e) { 59 | changeTooltipMessage(e.trigger, 'Copied!'); 60 | e.clearSelection(); 61 | }); 62 | 63 | clipboard.on('error', function(e) { 64 | changeTooltipMessage(e.trigger,'Press Ctrl+C or Command+C to copy'); 65 | }); 66 | 67 | }); 68 | } 69 | 70 | /* Search marking --------------------------*/ 71 | var url = new URL(window.location.href); 72 | var toMark = url.searchParams.get("q"); 73 | var mark = new Mark("main#main"); 74 | if (toMark) { 75 | mark.mark(toMark, { 76 | accuracy: { 77 | value: "complementary", 78 | limiters: [",", ".", ":", "/"], 79 | } 80 | }); 81 | } 82 | 83 | /* Search --------------------------*/ 84 | /* Adapted from https://github.com/rstudio/bookdown/blob/2d692ba4b61f1e466c92e78fd712b0ab08c11d31/inst/resources/bs4_book/bs4_book.js#L25 */ 85 | // Initialise search index on focus 86 | var fuse; 87 | $("#search-input").focus(async function(e) { 88 | if (fuse) { 89 | return; 90 | } 91 | 92 | $(e.target).addClass("loading"); 93 | var response = await fetch($("#search-input").data("search-index")); 94 | var data = await response.json(); 95 | 96 | var options = { 97 | keys: ["what", "text",
"code"], 98 | ignoreLocation: true, 99 | threshold: 0.1, 100 | includeMatches: true, 101 | includeScore: true, 102 | }; 103 | fuse = new Fuse(data, options); 104 | 105 | $(e.target).removeClass("loading"); 106 | }); 107 | 108 | // Use algolia autocomplete 109 | var options = { 110 | autoselect: true, 111 | debug: true, 112 | hint: false, 113 | minLength: 2, 114 | }; 115 | var q; 116 | async function searchFuse(query, callback) { 117 | await fuse; 118 | 119 | var items; 120 | if (!fuse) { 121 | items = []; 122 | } else { 123 | q = query; 124 | var results = fuse.search(query, { limit: 20 }); 125 | items = results 126 | .filter((x) => x.score <= 0.75) 127 | .map((x) => x.item); 128 | if (items.length === 0) { 129 | items = [{dir:"Sorry 😿",previous_headings:"",title:"No results found.",what:"No results found.",path:window.location.href}]; 130 | } 131 | } 132 | callback(items); 133 | } 134 | $("#search-input").autocomplete(options, [ 135 | { 136 | name: "content", 137 | source: searchFuse, 138 | templates: { 139 | suggestion: (s) => { 140 | if (s.title == s.what) { 141 | return `${s.dir} >
    ${s.title}
    `; 142 | } else if (s.previous_headings == "") { 143 | return `${s.dir} >
    ${s.title}
    > ${s.what}`; 144 | } else { 145 | return `${s.dir} >
    ${s.title}
    > ${s.previous_headings} > ${s.what}`; 146 | } 147 | }, 148 | }, 149 | }, 150 | ]).on('autocomplete:selected', function(event, s) { 151 | window.location.href = s.path + "?q=" + q + "#" + s.id; 152 | }); 153 | }); 154 | })(window.jQuery || window.$) 155 | 156 | document.addEventListener('keydown', function(event) { 157 | // Check if the pressed key is '/' 158 | if (event.key === '/') { 159 | event.preventDefault(); // Prevent any default action associated with the '/' key 160 | document.getElementById('search-input').focus(); // Set focus to the search input 161 | } 162 | }); 163 | -------------------------------------------------------------------------------- /docs/articles/index.html: -------------------------------------------------------------------------------- 1 | 2 | Articles • ConversationAlign 3 | Skip to contents 4 | 5 | 6 |
    67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | -------------------------------------------------------------------------------- /R/replacements_25.R: -------------------------------------------------------------------------------- 1 | #' replacements_25 2 | #' 3 | #' String replacement for pattern matching and expanding lots of contractions 4 | #' @name replacements_25 5 | #' @returns 6 | #' nothing, internal function that applies a target list of contractions (e.g., it's) for replacement to prep_dyads 7 | #' @importFrom dplyr mutate 8 | #' @importFrom magrittr %>% 9 | #' @importFrom rlang := 10 | #' @keywords internal 11 | #' @noRd 12 | 13 | replacements_25 <- function(dat, wordcol) { 14 | # Apply all replacements in sequence 15 | dat %>% 16 | # Contractions starting with a/i 17 | dplyr::mutate({{wordcol}} := gsub("\\baren't\\b", "are not", {{wordcol}}, ignore.case = TRUE)) %>% 18 | dplyr::mutate({{wordcol}} := gsub("\\bcan't\\b", "cannot", {{wordcol}}, ignore.case = TRUE)) %>% 19 | dplyr::mutate({{wordcol}} := gsub("\\bcould've\\b", "could have", {{wordcol}}, ignore.case = TRUE)) %>% 20 | dplyr::mutate({{wordcol}} := gsub("\\bcouldn't\\b", "could not", {{wordcol}}, ignore.case = TRUE)) %>% 21 | dplyr::mutate({{wordcol}} := gsub("\\bdidn't\\b", "did not", {{wordcol}}, ignore.case = TRUE)) %>% 22 | dplyr::mutate({{wordcol}} := gsub("\\bdoesn't\\b", "does not", {{wordcol}}, ignore.case = TRUE)) %>% 23 | dplyr::mutate({{wordcol}} := gsub("\\bdon't\\b", "do not", {{wordcol}}, ignore.case = TRUE)) %>% 24 | dplyr::mutate({{wordcol}} := gsub("\\bdunno\\b", "do not know", {{wordcol}}, ignore.case = TRUE)) %>% 25 | dplyr::mutate({{wordcol}} := gsub("\\bgimme\\b", "give me", {{wordcol}}, ignore.case = TRUE)) %>% 26 | dplyr::mutate({{wordcol}} := gsub("\\bgonna\\b", "going to", {{wordcol}}, ignore.case = TRUE)) %>% 27 | dplyr::mutate({{wordcol}} := gsub("\\bgotta\\b", "got to", {{wordcol}}, ignore.case = TRUE)) %>% 28 | dplyr::mutate({{wordcol}} := gsub("\\bhadn't\\b", "had not", {{wordcol}}, ignore.case = TRUE)) %>% 29 | dplyr::mutate({{wordcol}} := gsub("\\bhasn't\\b", "has not", {{wordcol}}, ignore.case = TRUE)) %>% 30 | dplyr::mutate({{wordcol}} := gsub("\\bhaven't\\b", "have not", {{wordcol}}, ignore.case = TRUE)) %>% 31 | dplyr::mutate({{wordcol}} := gsub("\\bhe'll\\b", "he will", {{wordcol}}, ignore.case = TRUE)) %>% 32 | dplyr::mutate({{wordcol}} := gsub("\\bhe's\\b", "he is", {{wordcol}}, ignore.case = TRUE)) %>% 33 | dplyr::mutate({{wordcol}} := gsub("\\bi'd\\b", "i would", {{wordcol}}, ignore.case = TRUE)) %>% 34 | dplyr::mutate({{wordcol}} := gsub("\\bi'll\\b", "i will", {{wordcol}}, ignore.case = TRUE)) %>% 35 | dplyr::mutate({{wordcol}} := gsub("\\bi'm\\b", "i am", {{wordcol}}, ignore.case = TRUE)) %>% 36 | dplyr::mutate({{wordcol}} := gsub("\\bi've\\b", "i have", {{wordcol}}, ignore.case = TRUE)) %>% 37 | dplyr::mutate({{wordcol}} := gsub("\\bisn't\\b", "is not", {{wordcol}}, ignore.case = TRUE)) %>% 38 | dplyr::mutate({{wordcol}} := gsub("\\bit'll\\b", "it will", {{wordcol}}, ignore.case = TRUE)) %>% 39 | dplyr::mutate({{wordcol}} := gsub("\\bit's\\b", "it is", {{wordcol}}, ignore.case = TRUE)) %>% 40 | dplyr::mutate({{wordcol}} := gsub("\\bkinda\\b", "kind of", {{wordcol}}, ignore.case = TRUE)) %>% 41 | dplyr::mutate({{wordcol}} := gsub("\\blemme\\b", "let me", {{wordcol}}, ignore.case = TRUE)) %>% 42 | dplyr::mutate({{wordcol}} := gsub("\\bmight've\\b", "might have", {{wordcol}}, ignore.case = TRUE)) %>% 43 | dplyr::mutate({{wordcol}} := gsub("\\bmightn't\\b", "might not", 
{{wordcol}}, ignore.case = TRUE)) %>% 44 | dplyr::mutate({{wordcol}} := gsub("\\bmust've\\b", "must have", {{wordcol}}, ignore.case = TRUE)) %>% 45 | dplyr::mutate({{wordcol}} := gsub("\\bmustn't\\b", "must not", {{wordcol}}, ignore.case = TRUE)) %>% 46 | dplyr::mutate({{wordcol}} := gsub("\\bneedn't\\b", "need not", {{wordcol}}, ignore.case = TRUE)) %>% 47 | dplyr::mutate({{wordcol}} := gsub("\\bshe'll\\b", "she will", {{wordcol}}, ignore.case = TRUE)) %>% 48 | dplyr::mutate({{wordcol}} := gsub("\\bshe's\\b", "she is", {{wordcol}}, ignore.case = TRUE)) %>% 49 | dplyr::mutate({{wordcol}} := gsub("\\bshould've\\b", "should have", {{wordcol}}, ignore.case = TRUE)) %>% 50 | dplyr::mutate({{wordcol}} := gsub("\\bshouldn't\\b", "should not", {{wordcol}}, ignore.case = TRUE)) %>% 51 | dplyr::mutate({{wordcol}} := gsub("\\bsorta\\b", "sort of", {{wordcol}}, ignore.case = TRUE)) %>% 52 | dplyr::mutate({{wordcol}} := gsub("\\bthat's\\b", "that is", {{wordcol}}, ignore.case = TRUE)) %>% 53 | dplyr::mutate({{wordcol}} := gsub("\\bthere'd\\b", "there would", {{wordcol}}, ignore.case = TRUE)) %>% 54 | dplyr::mutate({{wordcol}} := gsub("\\bthey'll\\b", "they will", {{wordcol}}, ignore.case = TRUE)) %>% 55 | dplyr::mutate({{wordcol}} := gsub("\\bthey're\\b", "they are", {{wordcol}}, ignore.case = TRUE)) %>% 56 | dplyr::mutate({{wordcol}} := gsub("\\bthey've\\b", "they have", {{wordcol}}, ignore.case = TRUE)) %>% 57 | dplyr::mutate({{wordcol}} := gsub("\\bwanna\\b", "want to", {{wordcol}}, ignore.case = TRUE)) %>% 58 | dplyr::mutate({{wordcol}} := gsub("\\bwasn't\\b", "was not", {{wordcol}}, ignore.case = TRUE)) %>% 59 | dplyr::mutate({{wordcol}} := gsub("\\bwe'll\\b", "we will", {{wordcol}}, ignore.case = TRUE)) %>% 60 | dplyr::mutate({{wordcol}} := gsub("\\bwe're\\b", "we are", {{wordcol}}, ignore.case = TRUE)) %>% 61 | dplyr::mutate({{wordcol}} := gsub("\\bwe've\\b", "we have", {{wordcol}}, ignore.case = TRUE)) %>% 62 | dplyr::mutate({{wordcol}} := gsub("\\bweren't\\b", "were not", {{wordcol}}, ignore.case = TRUE)) %>% 63 | dplyr::mutate({{wordcol}} := gsub("\\bwon't\\b", "will not", {{wordcol}}, ignore.case = TRUE)) %>% 64 | dplyr::mutate({{wordcol}} := gsub("\\bwould've\\b", "would have", {{wordcol}}, ignore.case = TRUE)) %>% 65 | dplyr::mutate({{wordcol}} := gsub("\\bwouldn't\\b", "would not", {{wordcol}}, ignore.case = TRUE)) %>% 66 | dplyr::mutate({{wordcol}} := gsub("\\byou'd\\b", "you would", {{wordcol}}, ignore.case = TRUE)) %>% 67 | dplyr::mutate({{wordcol}} := gsub("\\byou'll\\b", "you will", {{wordcol}}, ignore.case = TRUE)) %>% 68 | dplyr::mutate({{wordcol}} := gsub("\\byou're\\b", "you are", {{wordcol}}, ignore.case = TRUE)) %>% 69 | dplyr::mutate({{wordcol}} := gsub("\\byou've\\b", "you have", {{wordcol}}, ignore.case = TRUE)) 70 | } 71 | -------------------------------------------------------------------------------- /docs/reference/MaronGross_2013.html: -------------------------------------------------------------------------------- 1 | 2 | Sample Dyadic Interview Transcript: Marc Maron and Terry Gross Radio Interview 2013 — MaronGross_2013 • ConversationAlign 3 | Skip to contents 4 | 5 | 6 |
Description: Text and talker information delineated, raw transcript, multiple lines per talker
Usage: MaronGross_2013
Format: "MaronGross_2013", a data.frame with 546 obs. of 2 variables:
text: text from interview
speaker: speaker identity
    77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /docs/404.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Page not found (404) • ConversationAlign 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | Skip to contents 23 | 24 | 25 |
Content not found. Please use links in the navbar.
-------------------------------------------------------------------------------- /docs/reference/read_1file.html: --------------------------------------------------------------------------------

read_1file — read_1file • ConversationAlign

Reads a pre-formatted dyadic (two-interlocutor) conversation transcript already imported into your R environment.

Usage

    read_1file(my_dat)

Arguments

    my_dat: one conversation transcript already in the R environment

Value

    A dataframe formatted with 'Event_ID', 'Participant_ID', and 'Text_Raw' fields, ready for clean_dyads()
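A minimal usage sketch based on the signature above. The input column names below are assumptions for illustration only; consult the Step 1 vignette for the exact expected input format:

    library(ConversationAlign)
    # a toy two-speaker transcript already in the R environment
    my_dat <- data.frame(speaker = c("A", "B", "A"),
                         text = c("hello there", "hi, how are you", "pretty good"))
    dyad <- read_1file(my_dat)
    str(dyad)  # expect Event_ID, Participant_ID, Text_Raw fields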
-------------------------------------------------------------------------------- /doc/CA_Step3_Summarize.html: --------------------------------------------------------------------------------

CA Step 3 Summarize Dyads

summarize_dyads()

Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion

July 08, 2025
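The rendered vignette body did not survive extraction, so the following is only a hedged sketch of where summarize_dyads() sits in the package's four-step pipeline; the argument name df_prep is borrowed from the generate_shams() reference page (the output of prep_dyads()), and any further parameters are not documented in this excerpt:

    library(ConversationAlign)
    # df_prep: output of the Step 2 prep stage, assumed available here
    summaries <- summarize_dyads(df_prep)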
-------------------------------------------------------------------------------- /doc/CA_Step4_Analytics.html: --------------------------------------------------------------------------------

CA Step 4 Corpus Analytics

corpus_analytics()

Jamie Reilly, Ben Sacks, Ginny Ulichney, Gus Cooney, Chelsea Helion

July 08, 2025

Generate corpus analytics
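As with Step 3, the vignette body is not recoverable here, so this is only a hedged sketch of the final pipeline step; the exact signature of corpus_analytics() is not documented in this excerpt and df_prep is again an assumed Step 2 output:

    library(ConversationAlign)
    # df_prep: output of the Step 2 prep stage, assumed available here
    analytics <- corpus_analytics(df_prep)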
-------------------------------------------------------------------------------- /paper/paper.bib: --------------------------------------------------------------------------------

@book{pickering_understanding_2021,
  title = {Understanding Dialogue: Language Use and Social Interaction},
  shorttitle = {Understanding Dialogue},
  url = {https://books.google.com/books?hl=en&lr=&id=3RgXEAAAQBAJ&oi=fnd&pg=PR7&dq=pickering+garrod+understanding+dialogue&ots=0qe68OV8Xs&sig=ulM_ibE3lLJewbmVg-UvQvfEHhM},
  urldate = {2025-06-26},
  publisher = {Cambridge University Press},
  author = {Pickering, Martin J. and Garrod, Simon},
  year = {2021}
}

@article{benoit_quanteda_2018,
  title = {quanteda: An R Package for the Quantitative Analysis of Textual Data},
  volume = {3},
  copyright = {http://creativecommons.org/licenses/by/4.0/},
  issn = {2475-9066},
  shorttitle = {quanteda},
  url = {https://doi.org/10.21105/joss.00774},
  doi = {10.21105/joss.00774},
  number = {30},
  urldate = {2025-01-05},
  journal = {Journal of Open Source Software},
  author = {Benoit, Kenneth and Watanabe, Kohei and Wang, Haiyan and Nulty, Paul and Obeng, Adam and Müller, Stefan and Matsuo, Akitaka},
  month = oct,
  year = {2018},
  pages = {774}
}

@misc{michalke_korpus_2018,
  title = {{koRpus}: An R Package for Text Analysis},
  copyright = {GPL (≥ 3)},
  shorttitle = {{koRpus}},
  url = {https://CRAN.R-project.org/package=koRpus},
  abstract = {A set of tools to analyze texts. Includes, amongst others, functions for automatic language detection, hyphenation, several indices of lexical diversity (e.g., type token ratio, HD-D/vocd-D, MTLD) and readability (e.g., Flesch, SMOG, LIX, Dale-Chall). Basic import functions for language corpora are also provided, to enable frequency analyses (supports Celex and Leipzig Corpora Collection file formats) and measures like tf-idf. Note: For full functionality a local installation of TreeTagger is recommended. It is also recommended to not load this package directly, but by loading one of the available language support packages from the 'l10n' repository {\textless}https://undocumeantit.github.io/repos/l10n{\textgreater}. 'koRpus' also includes a plugin for the R GUI and IDE RKWard, providing graphical dialogs for its basic features. The respective R package 'rkward' cannot be installed directly from a repository, as it is a part of RKWard. To make full use of this feature, please install RKWard from {\textless}https://rkward.kde.org{\textgreater} (plugins are detected automatically). Due to some restrictions on CRAN, the full package sources are only available from the project homepage. To ask for help, report bugs, request features, or discuss the development of the package, please subscribe to the koRpus-dev mailing list ({\textless}http://korpusml.reaktanz.de{\textgreater}).},
  urldate = {2019-12-23},
  author = {Michalke, Meik and Brown, Earl and Mirisola, Alberto and Brulet, Alexandre and Hauser, Laura},
  month = oct,
  year = {2018},
  keywords = {NaturalLanguageProcessing}
}

@article{duran_align_2019,
  title = {{ALIGN}: Analyzing Linguistic Interactions with Generalizable techNiques—A Python Library},
  volume = {24},
  issn = {1939-1463},
  shorttitle = {{ALIGN}},
  url = {https://doi.org/10.1037/met0000206},
  doi = {10.1037/met0000206},
  abstract = {Linguistic alignment (LA) is the tendency during a conversation to reuse each other’s linguistic expressions, including lexical, conceptual, or syntactic structures. LA is often argued to be a crucial driver in reciprocal understanding and interpersonal rapport, though its precise dynamics and effects are still controversial. One barrier to more systematic investigation of these effects lies in the diversity in the methods employed to analyze LA, which makes it difficult to integrate and compare results of individual studies. To overcome this issue, we have developed ALIGN (Analyzing Linguistic Interactions with Generalizable techNiques), an open-source Python package to measure LA in conversation (https://pypi.python.org/pypi/align) along with in-depth open-source tutorials hosted on ALIGN’s GitHub repository (https://github.com/nickduran/align-linguistic-alignment). Here, we first describe the challenges in the study of LA and outline how ALIGN can address them. We then demonstrate how our analytical protocol can be applied to theory-driven questions using a complex corpus of dialogue (the Devil’s Advocate corpus; Duran \& Fusaroli, 2017). We close by identifying further challenges and point to future developments of the field. (PsycInfo Database Record (c) 2025 APA, all rights reserved)},
  number = {4},
  journal = {Psychological Methods},
  author = {Duran, Nicholas D. and Paxton, Alexandra and Fusaroli, Riccardo},
  year = {2019},
  note = {Place: US Publisher: American Psychological Association},
  keywords = {Conflict, Conversation, Cooperation, Deception, Interpersonal Interaction, Linguistics, Text Analysis},
  pages = {419--438}
}

@article{reece_candor_2023,
  title = {The {CANDOR} corpus: {Insights} from a large multimodal dataset of naturalistic conversation},
  volume = {9},
  shorttitle = {The {CANDOR} corpus},
  url = {https://www.science.org/doi/10.1126/sciadv.adf3197},
  doi = {10.1126/sciadv.adf3197},
  abstract = {People spend a substantial portion of their lives engaged in conversation, and yet, our scientific understanding of conversation is still in its infancy. Here, we introduce a large, novel, and multimodal corpus of 1656 conversations recorded in spoken English. This 7+ million word, 850-hour corpus totals more than 1 terabyte of audio, video, and transcripts, with moment-to-moment measures of vocal, facial, and semantic expression, together with an extensive survey of speakers’ postconversation reflections. By taking advantage of the considerable scope of the corpus, we explore many examples of how this large-scale public dataset may catalyze future research, particularly across disciplinary boundaries, as scholars from a variety of fields appear increasingly interested in the study of conversation.},
  number = {13},
  urldate = {2023-08-08},
  journal = {Science Advances},
  author = {Reece, Andrew and Cooney, Gus and Bull, Peter and Chung, Christine and Dawson, Bryn and Fitzpatrick, Casey and Glazer, Tamara and Knox, Dean and Liebscher, Alex and Marin, Sebastian},
  month = mar,
  year = {2023},
  note = {Publisher: American Association for the Advancement of Science},
  pages = {eadf3197},
  file = {Reece et al_2023_The CANDOR corpus.pdf:/Users/Jamie/Zotero/storage/F9ZU6WB5/Reece et al_2023_The CANDOR corpus.pdf:application/pdf},
}

-------------------------------------------------------------------------------- /docs/reference/NurseryRhymes.html: --------------------------------------------------------------------------------

Text and talker information delineated, 3 separate nursery rhymes, good for computing analytics and word counts — NurseryRhymes • ConversationAlign
Usage

    NurseryRhymes

Format

    A data.frame with 100 observations of 3 variables:

    Event_ID: factor, 3 different simulated conversations
    Participant_ID: fictional speaker names, 2 per conversation
    Text_Raw: simulated language production (looped phrases from nursery rhymes)
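A quick look at this bundled toy corpus (a minimal sketch using base R only; field names follow the Format section above):

    library(ConversationAlign)
    data(NurseryRhymes)
    str(NurseryRhymes)             # 100 obs. of 3 variables
    table(NurseryRhymes$Event_ID)  # rows per simulated conversation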
-------------------------------------------------------------------------------- /docs/reference/generate_shams.html: --------------------------------------------------------------------------------

generate_shams — generate_shams • ConversationAlign

Generates a permutation of each individual dyad. Shuffled dyads may act as controls to their originals.

Usage

    generate_shams(df_prep, seed = NULL)

Arguments

    df_prep: output dataframe of prep_dyads()
    seed: (optional) a seed for reproducibility in random sampling

Value

    A dataframe similar to the prepped dyads, with each participant's time series randomly shuffled.
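A minimal usage sketch based on the signature above; df_prep stands for the output of prep_dyads(), which is produced earlier in the pipeline and not shown in this excerpt:

    library(ConversationAlign)
    # df_prep <- prep_dyads(...)   # Step 2 output, assumed available
    shams <- generate_shams(df_prep, seed = 42)
    # shams mirrors the structure of df_prep, with each participant's
    # time series randomly shuffled within dyad, for use as a control

Setting seed makes the permutation reproducible; leaving it NULL draws a fresh shuffle each call.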
-------------------------------------------------------------------------------- /docs/authors.html: --------------------------------------------------------------------------------

Authors and Citation • ConversationAlign

Authors

    • Jamie Reilly. Author, maintainer.
    • Virginia Ulichney. Author.
    • Ben Sacks. Author.
    • Sarah Weinstein. Contributor.
    • Chelsea Helion. Contributor.
    • Gus Cooney. Contributor.

Citation

    Source: DESCRIPTION

    Reilly J, Ulichney V, Sacks B (2025).
    ConversationAlign: Process Text and Compute Linguistic Alignment in Conversation Transcripts.
    R package version 0.4.0, https://github.com/Reilly-ConceptsCognitionLab/ConversationAlign.

    @Manual{,
      title = {ConversationAlign: Process Text and Compute Linguistic Alignment in Conversation Transcripts},
      author = {Jamie Reilly and Virginia Ulichney and Ben Sacks},
      year = {2025},
      note = {R package version 0.4.0},
      url = {https://github.com/Reilly-ConceptsCognitionLab/ConversationAlign},
    }

--------------------------------------------------------------------------------