├── .Rbuildignore ├── .gitattributes ├── .gitignore ├── DESCRIPTION ├── LICENSE.md ├── NAMESPACE ├── NEWS.md ├── R ├── RcppExports.R ├── buildReference.R ├── buildReferenceFromHarmonyObj.R ├── confidenceScores.R ├── data.R ├── findVariableGenes.R ├── globals.R ├── knncorr.R ├── mapQuery.R ├── plotReference.R ├── symphony-package.R └── utils.R ├── README.Rmd ├── README.md ├── cran-comments.md ├── data ├── pbmcs_exprs_small.rda └── pbmcs_meta_small.rda ├── man ├── buildReference.Rd ├── buildReferenceFromHarmonyObj.Rd ├── calcPerCellMappingMetric.Rd ├── calcPerClusterMappingMetric.Rd ├── calcknncorr.Rd ├── calcknncorrWithinQuery.Rd ├── evaluate.Rd ├── figures │ ├── ._symphony_logo.png │ ├── README-pressure-1.png │ └── symphony_logo.png ├── findVariableGenes.Rd ├── knnPredict.Rd ├── mapQuery.Rd ├── pbmcs_exprs_small.Rd ├── pbmcs_meta_small.Rd ├── plotReference.Rd ├── rowSDs.Rd ├── runPCAQueryAlone.Rd ├── scaleDataWithStats.Rd ├── symphony.Rd └── vargenes_vst.Rd ├── pre-built_references ├── README.md └── colors.R ├── src ├── .gitignore ├── Makevars ├── Makevars.win ├── RcppExports.cpp ├── singlecellmethods.cpp └── utils.cpp └── vignettes ├── .gitignore ├── Seurat.ipynb ├── data ├── baron-mouse.rds ├── exprs_norm_all.rds ├── fetal_liver_exprs_5p.rds ├── fetal_liver_meta_5p.rds ├── meta_data_subtypes.csv ├── pancreas_baron_human_exp.rds ├── pancreas_baron_human_metadata.rds ├── pancreas_baron_mouse_biomart_homologene_exp.rds └── pancreas_baron_mouse_metadata.rds ├── libs.R ├── pbmcs_tutorial.ipynb ├── prebuilt_references_tutorial.ipynb ├── quickstart_tutorial.Rmd ├── utils.R └── utils_seurat.R /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^LICENSE\.md$ 2 | ^README\.Rmd$ 3 | ^\.ipynb_checkpoints$ 4 | ^R/\.ipynb_checkpoints$ 5 | ^src/\.ipynb_checkpoints$ 6 | ^vignettes/data 7 | ^vignettes/OLD 8 | ^vignettes/libs.R$ 9 | ^vignettes/utils.R$ 10 | ^vignettes/utils_seurat.R$ 11 | ^vignettes/testing* 12 | ^vignettes/.*\.ipynb$ 13 | ^vignettes/\.ipynb_checkpoints 14 | ^man/\.ipynb_checkpoints$ 15 | ^man/figures/\.ipynb_checkpoints$ 16 | ^tests/\.ipynb_checkpoints$ 17 | ^pre-built_references/*$ 18 | ^pre-built_references/tbru_additions/*$ 19 | ^cran-comments\.md$ 20 | ^NEWS.md$ 21 | ^data/\.ipynb_checkpoints$ 22 | ^vignettes/cache_symphony.uwot$ 23 | ^vignettes/cache_symphony_sct.uwot$ 24 | ^src/*.o$ 25 | ^src/*.so$ 26 | ^\.git$ 27 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | pre-built_references/fetal_liver_reference_3p.rds filter=lfs diff=lfs merge=lfs -text 2 | pre-built_references/pancreas_plate-based_reference.rds filter=lfs diff=lfs merge=lfs -text 3 | pre-built_references/pbmcs_10x_reference.rds filter=lfs diff=lfs merge=lfs -text 4 | pre-built_references/fetal_liver_uwot_model filter=lfs diff=lfs merge=lfs -text 5 | pre-built_references/pancreas_plate-based_uwot_model filter=lfs diff=lfs merge=lfs -text 6 | pre-built_references/pbmcs_10x_uwot_model filter=lfs diff=lfs merge=lfs -text 7 | pre-built_references/kidney_healthy_fetal_reference.rds filter=lfs diff=lfs merge=lfs -text 8 | pre-built_references/kidney_healthy_fetal_uwot_model filter=lfs diff=lfs merge=lfs -text 9 | pre_built_references/tbru_ref.rds filter=lfs diff=lfs merge=lfs -text 10 | pre_built_references/tbru_uwot_model filter=lfs diff=lfs merge=lfs -text 11 | pre_built_references/TMS_facs_reference.rds filter=lfs diff=lfs merge=lfs -text 12 | 
pre_built_references/TMS_facs_uwot_model filter=lfs diff=lfs merge=lfs -text 13 | pre_built_references/zhang_reference.rds filter=lfs diff=lfs merge=lfs -text 14 | pre_built_references/zhang_uwot_model filter=lfs diff=lfs merge=lfs -text 15 | vignettes/data/pancreas_baron_human_exp.rds filter=lfs diff=lfs merge=lfs -text 16 | vignettes/data/pancreas_baron_human_metadata.rds filter=lfs diff=lfs merge=lfs -text 17 | vignettes/data/pancreas_baron_mouse_biomart_homologene_exp.rds filter=lfs diff=lfs merge=lfs -text 18 | vignettes/data/pancreas_baron_mouse_metadata.rds filter=lfs diff=lfs merge=lfs -text 19 | vignettes/data/exprs_norm_all.rds filter=lfs diff=lfs merge=lfs -text 20 | vignettes/data/fetal_liver_meta_5p.rds filter=lfs diff=lfs merge=lfs -text 21 | vignettes/data/meta_data_subtypes.csv filter=lfs diff=lfs merge=lfs -text 22 | vignettes/data/fetal_liver_exprs_5p.rds filter=lfs diff=lfs merge=lfs -text 23 | vignettes/data/baron-mouse.rds filter=lfs diff=lfs merge=lfs -text 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | R/.ipynb_checkpoints 2 | src/.ipynb_checkpoints 3 | .ipynb_checkpoints 4 | .RData 5 | .Rhistory 6 | R/.RData 7 | R/.Rhistory 8 | man/.ipynb_checkpoints 9 | man/figures/.ipynb_checkpoints 10 | vignettes/testing_* 11 | inst/doc 12 | pre-built_references/*.rds 13 | pre-built_references/*uwot* 14 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: symphony 2 | Title: Efficient and Precise Single-Cell Reference Atlas Mapping 3 | Version: 0.1.1 4 | Authors@R: c( 5 | person(given = "Joyce", family = "Kang", 6 | role = c("aut", "cre"), comment = c(ORCID = "0000-0002-1962-1291"), 7 | email = "joyce_kang@hms.harvard.edu"), 8 | person("Ilya", "Korsunsky", email = "ilya.korsunsky@gmail.com", 9 | role = c("aut"), comment = c(ORCID = "0000-0003-4848-3948")), 10 | person("Soumya", "Raychaudhuri", 11 | role = c("aut"), comment = c(ORCID = "0000-0002-1901-8265")) 12 | ) 13 | Description: Implements the Symphony single-cell reference building and query mapping algorithms and additional functions described in Kang et al . 14 | License: GPL (>= 3) 15 | Encoding: UTF-8 16 | LazyData: true 17 | Roxygen: list(markdown = TRUE) 18 | RoxygenNote: 7.2.3 19 | Suggests: knitr, rmarkdown, testthat, ggthemes, ggrastr, ggrepel 20 | LinkingTo: Rcpp, RcppArmadillo 21 | Imports: methods, Rcpp, harmony, uwot, irlba, class, purrr, dplyr, 22 | ggplot2, stats, utils, magrittr, data.table, tibble, Matrix, 23 | tidyr, rlang, RColorBrewer, RANN 24 | VignetteBuilder: knitr 25 | Depends: R (>= 3.5) 26 | NeedsCompilation: yes 27 | Packaged: 2023-01-16 17:25:13 UTC; jbk37 28 | Author: Joyce Kang [aut, cre] (), 29 | Ilya Korsunsky [aut] (), 30 | Soumya Raychaudhuri [aut] () 31 | Maintainer: Joyce Kang 32 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | GNU General Public License 2 | ========================== 3 | 4 | _Version 3, 29 June 2007_ 5 | _Copyright © 2007 Free Software Foundation, Inc. <>_ 6 | 7 | Everyone is permitted to copy and distribute verbatim copies of this license 8 | document, but changing it is not allowed. 
9 | 10 | ## Preamble 11 | 12 | The GNU General Public License is a free, copyleft license for software and other 13 | kinds of works. 14 | 15 | The licenses for most software and other practical works are designed to take away 16 | your freedom to share and change the works. By contrast, the GNU General Public 17 | License is intended to guarantee your freedom to share and change all versions of a 18 | program--to make sure it remains free software for all its users. We, the Free 19 | Software Foundation, use the GNU General Public License for most of our software; it 20 | applies also to any other work released this way by its authors. You can apply it to 21 | your programs, too. 22 | 23 | When we speak of free software, we are referring to freedom, not price. Our General 24 | Public Licenses are designed to make sure that you have the freedom to distribute 25 | copies of free software (and charge for them if you wish), that you receive source 26 | code or can get it if you want it, that you can change the software or use pieces of 27 | it in new free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you these rights or 30 | asking you to surrender the rights. Therefore, you have certain responsibilities if 31 | you distribute copies of the software, or if you modify it: responsibilities to 32 | respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether gratis or for a fee, 35 | you must pass on to the recipients the same freedoms that you received. You must make 36 | sure that they, too, receive or can get the source code. And you must show them these 37 | terms so they know their rights. 38 | 39 | Developers that use the GNU GPL protect your rights with two steps: **(1)** assert 40 | copyright on the software, and **(2)** offer you this License giving you legal permission 41 | to copy, distribute and/or modify it. 42 | 43 | For the developers' and authors' protection, the GPL clearly explains that there is 44 | no warranty for this free software. For both users' and authors' sake, the GPL 45 | requires that modified versions be marked as changed, so that their problems will not 46 | be attributed erroneously to authors of previous versions. 47 | 48 | Some devices are designed to deny users access to install or run modified versions of 49 | the software inside them, although the manufacturer can do so. This is fundamentally 50 | incompatible with the aim of protecting users' freedom to change the software. The 51 | systematic pattern of such abuse occurs in the area of products for individuals to 52 | use, which is precisely where it is most unacceptable. Therefore, we have designed 53 | this version of the GPL to prohibit the practice for those products. If such problems 54 | arise substantially in other domains, we stand ready to extend this provision to 55 | those domains in future versions of the GPL, as needed to protect the freedom of 56 | users. 57 | 58 | Finally, every program is threatened constantly by software patents. States should 59 | not allow patents to restrict development and use of software on general-purpose 60 | computers, but in those that do, we wish to avoid the special danger that patents 61 | applied to a free program could make it effectively proprietary. To prevent this, the 62 | GPL assures that patents cannot be used to render the program non-free. 
63 | 64 | The precise terms and conditions for copying, distribution and modification follow. 65 | 66 | ## TERMS AND CONDITIONS 67 | 68 | ### 0. Definitions 69 | 70 | “This License” refers to version 3 of the GNU General Public License. 71 | 72 | “Copyright” also means copyright-like laws that apply to other kinds of 73 | works, such as semiconductor masks. 74 | 75 | “The Program” refers to any copyrightable work licensed under this 76 | License. Each licensee is addressed as “you”. “Licensees” and 77 | “recipients” may be individuals or organizations. 78 | 79 | To “modify” a work means to copy from or adapt all or part of the work in 80 | a fashion requiring copyright permission, other than the making of an exact copy. The 81 | resulting work is called a “modified version” of the earlier work or a 82 | work “based on” the earlier work. 83 | 84 | A “covered work” means either the unmodified Program or a work based on 85 | the Program. 86 | 87 | To “propagate” a work means to do anything with it that, without 88 | permission, would make you directly or secondarily liable for infringement under 89 | applicable copyright law, except executing it on a computer or modifying a private 90 | copy. Propagation includes copying, distribution (with or without modification), 91 | making available to the public, and in some countries other activities as well. 92 | 93 | To “convey” a work means any kind of propagation that enables other 94 | parties to make or receive copies. Mere interaction with a user through a computer 95 | network, with no transfer of a copy, is not conveying. 96 | 97 | An interactive user interface displays “Appropriate Legal Notices” to the 98 | extent that it includes a convenient and prominently visible feature that **(1)** 99 | displays an appropriate copyright notice, and **(2)** tells the user that there is no 100 | warranty for the work (except to the extent that warranties are provided), that 101 | licensees may convey the work under this License, and how to view a copy of this 102 | License. If the interface presents a list of user commands or options, such as a 103 | menu, a prominent item in the list meets this criterion. 104 | 105 | ### 1. Source Code 106 | 107 | The “source code” for a work means the preferred form of the work for 108 | making modifications to it. “Object code” means any non-source form of a 109 | work. 110 | 111 | A “Standard Interface” means an interface that either is an official 112 | standard defined by a recognized standards body, or, in the case of interfaces 113 | specified for a particular programming language, one that is widely used among 114 | developers working in that language. 115 | 116 | The “System Libraries” of an executable work include anything, other than 117 | the work as a whole, that **(a)** is included in the normal form of packaging a Major 118 | Component, but which is not part of that Major Component, and **(b)** serves only to 119 | enable use of the work with that Major Component, or to implement a Standard 120 | Interface for which an implementation is available to the public in source code form. 121 | A “Major Component”, in this context, means a major essential component 122 | (kernel, window system, and so on) of the specific operating system (if any) on which 123 | the executable work runs, or a compiler used to produce the work, or an object code 124 | interpreter used to run it. 
125 | 126 | The “Corresponding Source” for a work in object code form means all the 127 | source code needed to generate, install, and (for an executable work) run the object 128 | code and to modify the work, including scripts to control those activities. However, 129 | it does not include the work's System Libraries, or general-purpose tools or 130 | generally available free programs which are used unmodified in performing those 131 | activities but which are not part of the work. For example, Corresponding Source 132 | includes interface definition files associated with source files for the work, and 133 | the source code for shared libraries and dynamically linked subprograms that the work 134 | is specifically designed to require, such as by intimate data communication or 135 | control flow between those subprograms and other parts of the work. 136 | 137 | The Corresponding Source need not include anything that users can regenerate 138 | automatically from other parts of the Corresponding Source. 139 | 140 | The Corresponding Source for a work in source code form is that same work. 141 | 142 | ### 2. Basic Permissions 143 | 144 | All rights granted under this License are granted for the term of copyright on the 145 | Program, and are irrevocable provided the stated conditions are met. This License 146 | explicitly affirms your unlimited permission to run the unmodified Program. The 147 | output from running a covered work is covered by this License only if the output, 148 | given its content, constitutes a covered work. This License acknowledges your rights 149 | of fair use or other equivalent, as provided by copyright law. 150 | 151 | You may make, run and propagate covered works that you do not convey, without 152 | conditions so long as your license otherwise remains in force. You may convey covered 153 | works to others for the sole purpose of having them make modifications exclusively 154 | for you, or provide you with facilities for running those works, provided that you 155 | comply with the terms of this License in conveying all material for which you do not 156 | control copyright. Those thus making or running the covered works for you must do so 157 | exclusively on your behalf, under your direction and control, on terms that prohibit 158 | them from making any copies of your copyrighted material outside their relationship 159 | with you. 160 | 161 | Conveying under any other circumstances is permitted solely under the conditions 162 | stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 163 | 164 | ### 3. Protecting Users' Legal Rights From Anti-Circumvention Law 165 | 166 | No covered work shall be deemed part of an effective technological measure under any 167 | applicable law fulfilling obligations under article 11 of the WIPO copyright treaty 168 | adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention 169 | of such measures. 170 | 171 | When you convey a covered work, you waive any legal power to forbid circumvention of 172 | technological measures to the extent such circumvention is effected by exercising 173 | rights under this License with respect to the covered work, and you disclaim any 174 | intention to limit operation or modification of the work as a means of enforcing, 175 | against the work's users, your or third parties' legal rights to forbid circumvention 176 | of technological measures. 177 | 178 | ### 4. 
Conveying Verbatim Copies 179 | 180 | You may convey verbatim copies of the Program's source code as you receive it, in any 181 | medium, provided that you conspicuously and appropriately publish on each copy an 182 | appropriate copyright notice; keep intact all notices stating that this License and 183 | any non-permissive terms added in accord with section 7 apply to the code; keep 184 | intact all notices of the absence of any warranty; and give all recipients a copy of 185 | this License along with the Program. 186 | 187 | You may charge any price or no price for each copy that you convey, and you may offer 188 | support or warranty protection for a fee. 189 | 190 | ### 5. Conveying Modified Source Versions 191 | 192 | You may convey a work based on the Program, or the modifications to produce it from 193 | the Program, in the form of source code under the terms of section 4, provided that 194 | you also meet all of these conditions: 195 | 196 | * **a)** The work must carry prominent notices stating that you modified it, and giving a 197 | relevant date. 198 | * **b)** The work must carry prominent notices stating that it is released under this 199 | License and any conditions added under section 7. This requirement modifies the 200 | requirement in section 4 to “keep intact all notices”. 201 | * **c)** You must license the entire work, as a whole, under this License to anyone who 202 | comes into possession of a copy. This License will therefore apply, along with any 203 | applicable section 7 additional terms, to the whole of the work, and all its parts, 204 | regardless of how they are packaged. This License gives no permission to license the 205 | work in any other way, but it does not invalidate such permission if you have 206 | separately received it. 207 | * **d)** If the work has interactive user interfaces, each must display Appropriate Legal 208 | Notices; however, if the Program has interactive interfaces that do not display 209 | Appropriate Legal Notices, your work need not make them do so. 210 | 211 | A compilation of a covered work with other separate and independent works, which are 212 | not by their nature extensions of the covered work, and which are not combined with 213 | it such as to form a larger program, in or on a volume of a storage or distribution 214 | medium, is called an “aggregate” if the compilation and its resulting 215 | copyright are not used to limit the access or legal rights of the compilation's users 216 | beyond what the individual works permit. Inclusion of a covered work in an aggregate 217 | does not cause this License to apply to the other parts of the aggregate. 218 | 219 | ### 6. Conveying Non-Source Forms 220 | 221 | You may convey a covered work in object code form under the terms of sections 4 and 222 | 5, provided that you also convey the machine-readable Corresponding Source under the 223 | terms of this License, in one of these ways: 224 | 225 | * **a)** Convey the object code in, or embodied in, a physical product (including a 226 | physical distribution medium), accompanied by the Corresponding Source fixed on a 227 | durable physical medium customarily used for software interchange. 
228 | * **b)** Convey the object code in, or embodied in, a physical product (including a 229 | physical distribution medium), accompanied by a written offer, valid for at least 230 | three years and valid for as long as you offer spare parts or customer support for 231 | that product model, to give anyone who possesses the object code either **(1)** a copy of 232 | the Corresponding Source for all the software in the product that is covered by this 233 | License, on a durable physical medium customarily used for software interchange, for 234 | a price no more than your reasonable cost of physically performing this conveying of 235 | source, or **(2)** access to copy the Corresponding Source from a network server at no 236 | charge. 237 | * **c)** Convey individual copies of the object code with a copy of the written offer to 238 | provide the Corresponding Source. This alternative is allowed only occasionally and 239 | noncommercially, and only if you received the object code with such an offer, in 240 | accord with subsection 6b. 241 | * **d)** Convey the object code by offering access from a designated place (gratis or for 242 | a charge), and offer equivalent access to the Corresponding Source in the same way 243 | through the same place at no further charge. You need not require recipients to copy 244 | the Corresponding Source along with the object code. If the place to copy the object 245 | code is a network server, the Corresponding Source may be on a different server 246 | (operated by you or a third party) that supports equivalent copying facilities, 247 | provided you maintain clear directions next to the object code saying where to find 248 | the Corresponding Source. Regardless of what server hosts the Corresponding Source, 249 | you remain obligated to ensure that it is available for as long as needed to satisfy 250 | these requirements. 251 | * **e)** Convey the object code using peer-to-peer transmission, provided you inform 252 | other peers where the object code and Corresponding Source of the work are being 253 | offered to the general public at no charge under subsection 6d. 254 | 255 | A separable portion of the object code, whose source code is excluded from the 256 | Corresponding Source as a System Library, need not be included in conveying the 257 | object code work. 258 | 259 | A “User Product” is either **(1)** a “consumer product”, which 260 | means any tangible personal property which is normally used for personal, family, or 261 | household purposes, or **(2)** anything designed or sold for incorporation into a 262 | dwelling. In determining whether a product is a consumer product, doubtful cases 263 | shall be resolved in favor of coverage. For a particular product received by a 264 | particular user, “normally used” refers to a typical or common use of 265 | that class of product, regardless of the status of the particular user or of the way 266 | in which the particular user actually uses, or expects or is expected to use, the 267 | product. A product is a consumer product regardless of whether the product has 268 | substantial commercial, industrial or non-consumer uses, unless such uses represent 269 | the only significant mode of use of the product. 270 | 271 | “Installation Information” for a User Product means any methods, 272 | procedures, authorization keys, or other information required to install and execute 273 | modified versions of a covered work in that User Product from a modified version of 274 | its Corresponding Source. 
The information must suffice to ensure that the continued 275 | functioning of the modified object code is in no case prevented or interfered with 276 | solely because modification has been made. 277 | 278 | If you convey an object code work under this section in, or with, or specifically for 279 | use in, a User Product, and the conveying occurs as part of a transaction in which 280 | the right of possession and use of the User Product is transferred to the recipient 281 | in perpetuity or for a fixed term (regardless of how the transaction is 282 | characterized), the Corresponding Source conveyed under this section must be 283 | accompanied by the Installation Information. But this requirement does not apply if 284 | neither you nor any third party retains the ability to install modified object code 285 | on the User Product (for example, the work has been installed in ROM). 286 | 287 | The requirement to provide Installation Information does not include a requirement to 288 | continue to provide support service, warranty, or updates for a work that has been 289 | modified or installed by the recipient, or for the User Product in which it has been 290 | modified or installed. Access to a network may be denied when the modification itself 291 | materially and adversely affects the operation of the network or violates the rules 292 | and protocols for communication across the network. 293 | 294 | Corresponding Source conveyed, and Installation Information provided, in accord with 295 | this section must be in a format that is publicly documented (and with an 296 | implementation available to the public in source code form), and must require no 297 | special password or key for unpacking, reading or copying. 298 | 299 | ### 7. Additional Terms 300 | 301 | “Additional permissions” are terms that supplement the terms of this 302 | License by making exceptions from one or more of its conditions. Additional 303 | permissions that are applicable to the entire Program shall be treated as though they 304 | were included in this License, to the extent that they are valid under applicable 305 | law. If additional permissions apply only to part of the Program, that part may be 306 | used separately under those permissions, but the entire Program remains governed by 307 | this License without regard to the additional permissions. 308 | 309 | When you convey a copy of a covered work, you may at your option remove any 310 | additional permissions from that copy, or from any part of it. (Additional 311 | permissions may be written to require their own removal in certain cases when you 312 | modify the work.) You may place additional permissions on material, added by you to a 313 | covered work, for which you have or can give appropriate copyright permission. 
314 | 315 | Notwithstanding any other provision of this License, for material you add to a 316 | covered work, you may (if authorized by the copyright holders of that material) 317 | supplement the terms of this License with terms: 318 | 319 | * **a)** Disclaiming warranty or limiting liability differently from the terms of 320 | sections 15 and 16 of this License; or 321 | * **b)** Requiring preservation of specified reasonable legal notices or author 322 | attributions in that material or in the Appropriate Legal Notices displayed by works 323 | containing it; or 324 | * **c)** Prohibiting misrepresentation of the origin of that material, or requiring that 325 | modified versions of such material be marked in reasonable ways as different from the 326 | original version; or 327 | * **d)** Limiting the use for publicity purposes of names of licensors or authors of the 328 | material; or 329 | * **e)** Declining to grant rights under trademark law for use of some trade names, 330 | trademarks, or service marks; or 331 | * **f)** Requiring indemnification of licensors and authors of that material by anyone 332 | who conveys the material (or modified versions of it) with contractual assumptions of 333 | liability to the recipient, for any liability that these contractual assumptions 334 | directly impose on those licensors and authors. 335 | 336 | All other non-permissive additional terms are considered “further 337 | restrictions” within the meaning of section 10. If the Program as you received 338 | it, or any part of it, contains a notice stating that it is governed by this License 339 | along with a term that is a further restriction, you may remove that term. If a 340 | license document contains a further restriction but permits relicensing or conveying 341 | under this License, you may add to a covered work material governed by the terms of 342 | that license document, provided that the further restriction does not survive such 343 | relicensing or conveying. 344 | 345 | If you add terms to a covered work in accord with this section, you must place, in 346 | the relevant source files, a statement of the additional terms that apply to those 347 | files, or a notice indicating where to find the applicable terms. 348 | 349 | Additional terms, permissive or non-permissive, may be stated in the form of a 350 | separately written license, or stated as exceptions; the above requirements apply 351 | either way. 352 | 353 | ### 8. Termination 354 | 355 | You may not propagate or modify a covered work except as expressly provided under 356 | this License. Any attempt otherwise to propagate or modify it is void, and will 357 | automatically terminate your rights under this License (including any patent licenses 358 | granted under the third paragraph of section 11). 359 | 360 | However, if you cease all violation of this License, then your license from a 361 | particular copyright holder is reinstated **(a)** provisionally, unless and until the 362 | copyright holder explicitly and finally terminates your license, and **(b)** permanently, 363 | if the copyright holder fails to notify you of the violation by some reasonable means 364 | prior to 60 days after the cessation. 
365 | 366 | Moreover, your license from a particular copyright holder is reinstated permanently 367 | if the copyright holder notifies you of the violation by some reasonable means, this 368 | is the first time you have received notice of violation of this License (for any 369 | work) from that copyright holder, and you cure the violation prior to 30 days after 370 | your receipt of the notice. 371 | 372 | Termination of your rights under this section does not terminate the licenses of 373 | parties who have received copies or rights from you under this License. If your 374 | rights have been terminated and not permanently reinstated, you do not qualify to 375 | receive new licenses for the same material under section 10. 376 | 377 | ### 9. Acceptance Not Required for Having Copies 378 | 379 | You are not required to accept this License in order to receive or run a copy of the 380 | Program. Ancillary propagation of a covered work occurring solely as a consequence of 381 | using peer-to-peer transmission to receive a copy likewise does not require 382 | acceptance. However, nothing other than this License grants you permission to 383 | propagate or modify any covered work. These actions infringe copyright if you do not 384 | accept this License. Therefore, by modifying or propagating a covered work, you 385 | indicate your acceptance of this License to do so. 386 | 387 | ### 10. Automatic Licensing of Downstream Recipients 388 | 389 | Each time you convey a covered work, the recipient automatically receives a license 390 | from the original licensors, to run, modify and propagate that work, subject to this 391 | License. You are not responsible for enforcing compliance by third parties with this 392 | License. 393 | 394 | An “entity transaction” is a transaction transferring control of an 395 | organization, or substantially all assets of one, or subdividing an organization, or 396 | merging organizations. If propagation of a covered work results from an entity 397 | transaction, each party to that transaction who receives a copy of the work also 398 | receives whatever licenses to the work the party's predecessor in interest had or 399 | could give under the previous paragraph, plus a right to possession of the 400 | Corresponding Source of the work from the predecessor in interest, if the predecessor 401 | has it or can get it with reasonable efforts. 402 | 403 | You may not impose any further restrictions on the exercise of the rights granted or 404 | affirmed under this License. For example, you may not impose a license fee, royalty, 405 | or other charge for exercise of rights granted under this License, and you may not 406 | initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging 407 | that any patent claim is infringed by making, using, selling, offering for sale, or 408 | importing the Program or any portion of it. 409 | 410 | ### 11. Patents 411 | 412 | A “contributor” is a copyright holder who authorizes use under this 413 | License of the Program or a work on which the Program is based. The work thus 414 | licensed is called the contributor's “contributor version”. 
415 | 416 | A contributor's “essential patent claims” are all patent claims owned or 417 | controlled by the contributor, whether already acquired or hereafter acquired, that 418 | would be infringed by some manner, permitted by this License, of making, using, or 419 | selling its contributor version, but do not include claims that would be infringed 420 | only as a consequence of further modification of the contributor version. For 421 | purposes of this definition, “control” includes the right to grant patent 422 | sublicenses in a manner consistent with the requirements of this License. 423 | 424 | Each contributor grants you a non-exclusive, worldwide, royalty-free patent license 425 | under the contributor's essential patent claims, to make, use, sell, offer for sale, 426 | import and otherwise run, modify and propagate the contents of its contributor 427 | version. 428 | 429 | In the following three paragraphs, a “patent license” is any express 430 | agreement or commitment, however denominated, not to enforce a patent (such as an 431 | express permission to practice a patent or covenant not to sue for patent 432 | infringement). To “grant” such a patent license to a party means to make 433 | such an agreement or commitment not to enforce a patent against the party. 434 | 435 | If you convey a covered work, knowingly relying on a patent license, and the 436 | Corresponding Source of the work is not available for anyone to copy, free of charge 437 | and under the terms of this License, through a publicly available network server or 438 | other readily accessible means, then you must either **(1)** cause the Corresponding 439 | Source to be so available, or **(2)** arrange to deprive yourself of the benefit of the 440 | patent license for this particular work, or **(3)** arrange, in a manner consistent with 441 | the requirements of this License, to extend the patent license to downstream 442 | recipients. “Knowingly relying” means you have actual knowledge that, but 443 | for the patent license, your conveying the covered work in a country, or your 444 | recipient's use of the covered work in a country, would infringe one or more 445 | identifiable patents in that country that you have reason to believe are valid. 446 | 447 | If, pursuant to or in connection with a single transaction or arrangement, you 448 | convey, or propagate by procuring conveyance of, a covered work, and grant a patent 449 | license to some of the parties receiving the covered work authorizing them to use, 450 | propagate, modify or convey a specific copy of the covered work, then the patent 451 | license you grant is automatically extended to all recipients of the covered work and 452 | works based on it. 453 | 454 | A patent license is “discriminatory” if it does not include within the 455 | scope of its coverage, prohibits the exercise of, or is conditioned on the 456 | non-exercise of one or more of the rights that are specifically granted under this 457 | License. 
You may not convey a covered work if you are a party to an arrangement with 458 | a third party that is in the business of distributing software, under which you make 459 | payment to the third party based on the extent of your activity of conveying the 460 | work, and under which the third party grants, to any of the parties who would receive 461 | the covered work from you, a discriminatory patent license **(a)** in connection with 462 | copies of the covered work conveyed by you (or copies made from those copies), or **(b)** 463 | primarily for and in connection with specific products or compilations that contain 464 | the covered work, unless you entered into that arrangement, or that patent license 465 | was granted, prior to 28 March 2007. 466 | 467 | Nothing in this License shall be construed as excluding or limiting any implied 468 | license or other defenses to infringement that may otherwise be available to you 469 | under applicable patent law. 470 | 471 | ### 12. No Surrender of Others' Freedom 472 | 473 | If conditions are imposed on you (whether by court order, agreement or otherwise) 474 | that contradict the conditions of this License, they do not excuse you from the 475 | conditions of this License. If you cannot convey a covered work so as to satisfy 476 | simultaneously your obligations under this License and any other pertinent 477 | obligations, then as a consequence you may not convey it at all. For example, if you 478 | agree to terms that obligate you to collect a royalty for further conveying from 479 | those to whom you convey the Program, the only way you could satisfy both those terms 480 | and this License would be to refrain entirely from conveying the Program. 481 | 482 | ### 13. Use with the GNU Affero General Public License 483 | 484 | Notwithstanding any other provision of this License, you have permission to link or 485 | combine any covered work with a work licensed under version 3 of the GNU Affero 486 | General Public License into a single combined work, and to convey the resulting work. 487 | The terms of this License will continue to apply to the part which is the covered 488 | work, but the special requirements of the GNU Affero General Public License, section 489 | 13, concerning interaction through a network will apply to the combination as such. 490 | 491 | ### 14. Revised Versions of this License 492 | 493 | The Free Software Foundation may publish revised and/or new versions of the GNU 494 | General Public License from time to time. Such new versions will be similar in spirit 495 | to the present version, but may differ in detail to address new problems or concerns. 496 | 497 | Each version is given a distinguishing version number. If the Program specifies that 498 | a certain numbered version of the GNU General Public License “or any later 499 | version” applies to it, you have the option of following the terms and 500 | conditions either of that numbered version or of any later version published by the 501 | Free Software Foundation. If the Program does not specify a version number of the GNU 502 | General Public License, you may choose any version ever published by the Free 503 | Software Foundation. 504 | 505 | If the Program specifies that a proxy can decide which future versions of the GNU 506 | General Public License can be used, that proxy's public statement of acceptance of a 507 | version permanently authorizes you to choose that version for the Program. 
508 | 509 | Later license versions may give you additional or different permissions. However, no 510 | additional obligations are imposed on any author or copyright holder as a result of 511 | your choosing to follow a later version. 512 | 513 | ### 15. Disclaimer of Warranty 514 | 515 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 516 | EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 517 | PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER 518 | EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 519 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE 520 | QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE 521 | DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 522 | 523 | ### 16. Limitation of Liability 524 | 525 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY 526 | COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS 527 | PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, 528 | INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE 529 | PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE 530 | OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE 531 | WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 532 | POSSIBILITY OF SUCH DAMAGES. 533 | 534 | ### 17. Interpretation of Sections 15 and 16 535 | 536 | If the disclaimer of warranty and limitation of liability provided above cannot be 537 | given local legal effect according to their terms, reviewing courts shall apply local 538 | law that most closely approximates an absolute waiver of all civil liability in 539 | connection with the Program, unless a warranty or assumption of liability accompanies 540 | a copy of the Program in return for a fee. 541 | 542 | _END OF TERMS AND CONDITIONS_ 543 | 544 | ## How to Apply These Terms to Your New Programs 545 | 546 | If you develop a new program, and you want it to be of the greatest possible use to 547 | the public, the best way to achieve this is to make it free software which everyone 548 | can redistribute and change under these terms. 549 | 550 | To do so, attach the following notices to the program. It is safest to attach them 551 | to the start of each source file to most effectively state the exclusion of warranty; 552 | and each file should have at least the “copyright” line and a pointer to 553 | where the full notice is found. 554 | 555 | 556 | Copyright (C) 557 | 558 | This program is free software: you can redistribute it and/or modify 559 | it under the terms of the GNU General Public License as published by 560 | the Free Software Foundation, either version 3 of the License, or 561 | (at your option) any later version. 562 | 563 | This program is distributed in the hope that it will be useful, 564 | but WITHOUT ANY WARRANTY; without even the implied warranty of 565 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 566 | GNU General Public License for more details. 567 | 568 | You should have received a copy of the GNU General Public License 569 | along with this program. If not, see . 570 | 571 | Also add information on how to contact you by electronic and paper mail. 
572 | 573 | If the program does terminal interaction, make it output a short notice like this 574 | when it starts in an interactive mode: 575 | 576 | Copyright (C) 577 | This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'. 578 | This is free software, and you are welcome to redistribute it 579 | under certain conditions; type 'show c' for details. 580 | 581 | The hypothetical commands `show w` and `show c` should show the appropriate parts of 582 | the General Public License. Of course, your program's commands might be different; 583 | for a GUI interface, you would use an “about box”. 584 | 585 | You should also get your employer (if you work as a programmer) or school, if any, to 586 | sign a “copyright disclaimer” for the program, if necessary. For more 587 | information on this, and how to apply and follow the GNU GPL, see 588 | <>. 589 | 590 | The GNU General Public License does not permit incorporating your program into 591 | proprietary programs. If your program is a subroutine library, you may consider it 592 | more useful to permit linking proprietary applications with the library. If this is 593 | what you want to do, use the GNU Lesser General Public License instead of this 594 | License. But first, please read 595 | <>. 596 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | export(buildReference) 4 | export(buildReferenceFromHarmonyObj) 5 | export(calcPerCellMappingMetric) 6 | export(calcPerClusterMappingMetric) 7 | export(calcknncorr) 8 | export(calcknncorrWithinQuery) 9 | export(evaluate) 10 | export(findVariableGenes) 11 | export(knnPredict) 12 | export(mapQuery) 13 | export(plotReference) 14 | export(rowSDs) 15 | export(runPCAQueryAlone) 16 | export(scaleDataWithStats) 17 | export(vargenes_vst) 18 | import(RANN) 19 | import(RColorBrewer) 20 | import(data.table) 21 | import(ggplot2) 22 | import(irlba) 23 | import(stats) 24 | import(tibble) 25 | import(utils) 26 | import(uwot) 27 | importFrom(Matrix,Matrix) 28 | importFrom(Rcpp,sourceCpp) 29 | importFrom(magrittr,"%>%") 30 | importFrom(methods,as) 31 | importFrom(rlang,.data) 32 | importFrom(stats,loess) 33 | importFrom(stats,median) 34 | importFrom(stats,na.omit) 35 | importFrom(stats,quantile) 36 | useDynLib(symphony) 37 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | # symphony 0.1.0 2 | August 5, 2021 - Initial release on CRAN 3 | -------------------------------------------------------------------------------- /R/RcppExports.R: -------------------------------------------------------------------------------- 1 | # Generated by using Rcpp::compileAttributes() -> do not edit by hand 2 | # Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 3 | 4 | exp_mean <- function(x, p, i, ncol, nrow, groups, group_sizes) { 5 | .Call('_symphony_exp_mean', PACKAGE = 'symphony', x, p, i, ncol, nrow, groups, group_sizes) 6 | } 7 | 8 | log_vmr <- function(x, p, i, ncol, nrow, means, groups, group_sizes) { 9 | .Call('_symphony_log_vmr', PACKAGE = 'symphony', x, p, i, ncol, nrow, means, groups, group_sizes) 10 | } 11 | 12 | normalizeCLR_dgc <- function(x, p, i, ncol, nrow, margin) { 13 | .Call('_symphony_normalizeCLR_dgc', PACKAGE = 'symphony', x, p, i, ncol, nrow, margin) 14 | } 15 | 16 | scaleRowsWithStats_dgc 
<- function(x, p, i, mean_vec, sd_vec, ncol, nrow, thresh) { 17 | .Call('_symphony_scaleRowsWithStats_dgc', PACKAGE = 'symphony', x, p, i, mean_vec, sd_vec, ncol, nrow, thresh) 18 | } 19 | 20 | scaleRows_dgc <- function(x, p, i, ncol, nrow, thresh) { 21 | .Call('_symphony_scaleRows_dgc', PACKAGE = 'symphony', x, p, i, ncol, nrow, thresh) 22 | } 23 | 24 | rowMeansWeighted_dgc <- function(x, p, i, weights, ncol, nrow) { 25 | .Call('_symphony_rowMeansWeighted_dgc', PACKAGE = 'symphony', x, p, i, weights, ncol, nrow) 26 | } 27 | 28 | rowSDs_dgc <- function(x, p, i, mean_vec, ncol, nrow, do_sqrt) { 29 | .Call('_symphony_rowSDs_dgc', PACKAGE = 'symphony', x, p, i, mean_vec, ncol, nrow, do_sqrt) 30 | } 31 | 32 | rowVarSDs_dgc <- function(x, p, i, mean_vec, sd_vec, vmax, ncol, nrow, do_sqrt) { 33 | .Call('_symphony_rowVarSDs_dgc', PACKAGE = 'symphony', x, p, i, mean_vec, sd_vec, vmax, ncol, nrow, do_sqrt) 34 | } 35 | 36 | rowSDsWeighted_dgc <- function(x, p, i, mean_vec, weights, ncol, nrow, do_sqrt) { 37 | .Call('_symphony_rowSDsWeighted_dgc', PACKAGE = 'symphony', x, p, i, mean_vec, weights, ncol, nrow, do_sqrt) 38 | } 39 | 40 | cosine_normalize_cpp <- function(V, dim) { 41 | .Call('_symphony_cosine_normalize_cpp', PACKAGE = 'symphony', V, dim) 42 | } 43 | 44 | soft_kmeans_cpp <- function(Y, Z, max_iter, sigma) { 45 | .Call('_symphony_soft_kmeans_cpp', PACKAGE = 'symphony', Y, Z, max_iter, sigma) 46 | } 47 | 48 | soft_cluster <- function(Y, Z, sigma) { 49 | .Call('_symphony_soft_cluster', PACKAGE = 'symphony', Y, Z, sigma) 50 | } 51 | 52 | compute_ref_cache <- function(Rr, Zr) { 53 | .Call('_symphony_compute_ref_cache', PACKAGE = 'symphony', Rr, Zr) 54 | } 55 | 56 | moe_correct_ref <- function(Zq, Xq, Rq, Nr, RrZtr) { 57 | .Call('_symphony_moe_correct_ref', PACKAGE = 'symphony', Zq, Xq, Rq, Nr, RrZtr) 58 | } 59 | 60 | get_betas <- function(R, Z, lambda, design) { 61 | .Call('_symphony_get_betas', PACKAGE = 'symphony', R, Z, lambda, design) 62 | } 63 | 64 | -------------------------------------------------------------------------------- /R/buildReference.R: -------------------------------------------------------------------------------- 1 | #' Function for building a Symphony reference starting from expression matrix 2 | #' 3 | #' @param exp_ref Reference gene expression (genes by cells) 4 | #' @param metadata_ref Reference cell metadata (cells by attributes) 5 | #' @param vars Reference variables to Harmonize over e.g. c('donor', 'technology') 6 | #' @param K Number of soft cluster centroids in model 7 | #' @param verbose Verbose output 8 | #' @param do_umap Perform UMAP visualization on harmonized reference embedding 9 | #' @param do_normalize Perform log(CP10K+1) normalization 10 | #' @param vargenes_method Variable gene selection method (either 'vst' or 'mvp') 11 | #' @param vargenes_groups Name of metadata column specifying groups for variable gene selection. If not NULL, calculate topn variable genes in each group separately, then pool 12 | #' @param topn Number of variable genes to subset by 13 | #' @param tau Tau parameter for Harmony step 14 | #' @param theta Theta parameter(s) for Harmony step 15 | #' @param save_uwot_path Absolute path to save the uwot model (used if do_umap is TRUE) 16 | #' @param d Number of PC dimensions 17 | #' @param additional_genes Any custom genes (e.g. 
marker genes) to include in addition to variable genes 18 | #' @param umap_min_dist umap parameter (see uwot documentation for details) 19 | #' @param seed Random seed 20 | #' 21 | #' @import data.table 22 | #' @import tibble 23 | #' @import irlba 24 | #' @importFrom rlang .data 25 | #' @return Symphony reference object. Integrated embedding is stored in the $Z_corr slot. Other slots include 26 | #' cell-level metadata ($meta_data), variable genes means and standard deviations ($vargenes), 27 | #' loadings from PCA ($loadings), original PCA embedding ($Z_orig), reference compression terms ($cache), 28 | #' betas from Harmony integration ($betas), cosine normalized soft cluster centroids ($centroids), 29 | #' centroids in PC space ($centroids_pc), and optional umap coordinates ($umap$embedding). 30 | #' 31 | #' @export 32 | buildReference <- function(exp_ref, # genes x cells 33 | metadata_ref, # cells x metadata fields 34 | vars = NULL, # metadata variables to Harmonize over 35 | K = 100, # number of soft clusters for Harmony 36 | verbose = FALSE, # verbose output 37 | do_umap = TRUE, # run umap on reference cells 38 | do_normalize = TRUE, # run log(CP10K+1) normalization 39 | vargenes_method = 'vst', # vst or mvp 40 | vargenes_groups = NULL, # metadata column specifying groups for vargene selection 41 | topn = 2000, # number of variable genes (per group) 42 | tau = 0, # Harmony parameter 43 | theta = 2, # Harmony parameter 44 | save_uwot_path = NULL, # Path to save uwot model (use absolute path) 45 | d = 20, # number of dimensions for PCs 46 | additional_genes = NULL, # vector of any additional genes beyond vargenes to include 47 | umap_min_dist = 0.1, # umap parameter 48 | seed = 111) { 49 | 50 | set.seed(seed) # for reproducible soft k-means and UMAP 51 | 52 | res = list(meta_data = metadata_ref) 53 | 54 | if (do_normalize) { 55 | if (verbose) message('Normalizing') 56 | exp_ref = normalizeData(exp_ref, 1e4, 'log') 57 | } 58 | 59 | if (verbose) message('Finding variable genes using ', vargenes_method, ' method') 60 | if (vargenes_method == 'mvp') { 61 | if (is.null(vargenes_groups)) { 62 | vargenes_df = findVariableGenes(exp_ref, rep('A', ncol(exp_ref)), num.bin = 20) 63 | } else { # groups specified 64 | vargenes_df = findVariableGenes(exp_ref, groups = as.character(metadata_ref[[vargenes_groups]]), 65 | num.bin = 20) 66 | } 67 | var_genes = unique(data.table(vargenes_df)[, head(.SD[order(-.data$gene_dispersion_scaled)], topn), by = .data$group][, .data$symbol]) 68 | } else if (vargenes_method == 'vst') { 69 | if (is.null(vargenes_groups)) { 70 | var_genes = vargenes_vst(exp_ref, topn = topn) 71 | } else { # groups specified 72 | var_genes = vargenes_vst(exp_ref, groups = as.character(metadata_ref[[vargenes_groups]]), topn = topn) 73 | } 74 | } else { 75 | message("Invalid variable gene selection method. 
Options are 'vst' or 'mvp'.") 76 | } 77 | 78 | # Add in any additional genes 79 | if(!is.null(additional_genes)) { 80 | if (verbose) message('Adding ', length(additional_genes), ' additional genes') 81 | var_genes = union(var_genes, additional_genes) 82 | } 83 | if (verbose) message('Total ' , length(var_genes), ' genes for downstream steps') 84 | 85 | # Subset gene expression matrix by the desired genes 86 | exp_ref = exp_ref[var_genes, ] 87 | 88 | if (verbose) message('Scaling and PCA') 89 | vargenes_means_sds = tibble(symbol = var_genes, mean = Matrix::rowMeans(exp_ref)) 90 | vargenes_means_sds$stddev = rowSDs(exp_ref, vargenes_means_sds$mean) 91 | 92 | # Scale data 93 | exp_ref_scaled = scaleDataWithStats(exp_ref, vargenes_means_sds$mean, vargenes_means_sds$stddev, 1) 94 | 95 | # PCA 96 | s = irlba::irlba(exp_ref_scaled, nv = d) 97 | Z_pca_ref = diag(s$d) %*% t(s$v) # [PCs by cells] 98 | res$loadings = s$u 99 | res$vargenes = vargenes_means_sds 100 | 101 | # Run Harmony integration 102 | if (!is.null(vars)) { 103 | if (verbose) message('Running Harmony integration') 104 | 105 | # Run Harmony to harmonize the reference 106 | ref_harmObj = harmony::HarmonyMatrix( 107 | data_mat = t(Z_pca_ref), ## PCA embedding matrix of cells 108 | meta_data = metadata_ref, ## dataframe with cell labels 109 | theta = theta, ## cluster diversity enforcement 110 | tau = tau, 111 | vars_use = vars, ## variable to integrate over 112 | nclust = K, ## number of clusters in Harmony model 113 | max.iter.harmony = 20, 114 | return_object = TRUE, ## return the full Harmony model object 115 | do_pca = FALSE, ## do not recompute PCs 116 | verbose = verbose 117 | ) 118 | 119 | res$centroids <- t(cosine_normalize_cpp(ref_harmObj$R %*% t(ref_harmObj$Z_corr) , 1)) 120 | res$R <- ref_harmObj$R 121 | res$betas <- harmony::moe_ridge_get_betas(ref_harmObj) 122 | res$Z_orig <- Z_pca_ref 123 | res$Z_corr <- ref_harmObj$Z_corr 124 | res$K <- K 125 | res$d <- d 126 | } else { 127 | clust_res <- soft_kmeans(Z_pca_ref, K) 128 | res$centroids <- clust_res$Y 129 | res$R <- clust_res$R 130 | res$betas <- NULL 131 | res$Z_orig <- Z_pca_ref 132 | res$Z_corr <- Z_pca_ref 133 | } 134 | 135 | # Add row and column names 136 | colnames(res$Z_orig) = row.names(metadata_ref) 137 | rownames(res$Z_orig) = paste0("PC_", seq_len(nrow(res$Z_corr))) 138 | colnames(res$Z_corr) = row.names(metadata_ref) 139 | rownames(res$Z_corr) = paste0("harmony_", seq_len(nrow(res$Z_corr))) 140 | 141 | # Compute reference compression terms 142 | if (verbose) message('Computing reference compression terms') 143 | res$cache = compute_ref_cache(res$R, res$Z_corr) 144 | 145 | # Compute centroids in harmony PC space 146 | cluster_sizes = res$cache[[1]] %>% as.matrix() 147 | centroid_sums = t(res$Z_corr %*% t(res$R)) %>% as.data.frame() 148 | centroids_pc = sweep(centroid_sums, 1, cluster_sizes, "/") 149 | colnames(centroids_pc) = paste0("harmony_", seq_len(nrow(res$Z_corr))) 150 | rownames(centroids_pc) = paste0("centroid_", seq_len(nrow(res$R))) 151 | res$centroids_pc = centroids_pc 152 | 153 | if (do_umap) { 154 | if (verbose) message('Running UMAP') 155 | umap <- uwot::umap( 156 | t(res$Z_corr), n_neighbors = 30, learning_rate = 0.5, init = "laplacian", 157 | metric = 'cosine', fast_sgd = FALSE, n_sgd_threads = 1, # for reproducibility 158 | min_dist = umap_min_dist, n_threads = 4, ret_model = TRUE 159 | ) 160 | res$umap$embedding = umap$embedding 161 | colnames(res$umap$embedding) = c('UMAP1', 'UMAP2') 162 | 163 | # Since the nn-index component of the uwot model 
is not able to be saved as an
164 |         # object, we save the uwot model at a user-defined path.
165 |         if (!is.null(save_uwot_path)) {
166 | 
167 |             # If file already exists, delete it (otherwise will result in an error)
168 |             if (file.exists(save_uwot_path)) {
169 |                 if (verbose) message(paste('File already exists at that path... overwriting...'))
170 |                 file.remove(save_uwot_path)
171 |             }
172 | 
173 |             model = uwot::save_uwot(umap, file = save_uwot_path, unload = FALSE, verbose = FALSE)
174 |             res$save_uwot_path = save_uwot_path
175 |             if (verbose) message(paste('Saved uwot model'))
176 |         }
177 |     }
178 |     return(res)
179 | }
180 | 
--------------------------------------------------------------------------------
/R/buildReferenceFromHarmonyObj.R:
--------------------------------------------------------------------------------
1 | #' Function for building a Symphony reference from a Harmony object. Useful if you would like your
2 | #' code to be more modular. Note that you must have saved vargenes_means_sds and PCA loadings.
3 | #'
4 | #' @param harmony_obj Harmony object (output from HarmonyMatrix())
5 | #' @param metadata Reference cell metadata (cells by attributes)
6 | #' @param vargenes_means_sds Variable genes in dataframe with columns named ('symbol', 'mean', 'stddev')
7 | #' @param pca_loadings Gene loadings from PCA (e.g. irlba(ref_exp_scaled, nv = 20)$u)
8 | #' @param verbose Verbose output
9 | #' @param do_umap Perform UMAP visualization on harmonized reference embedding
10 | #' @param save_uwot_path Absolute path to save the uwot model (if do_umap is TRUE)
11 | #' @param umap_min_dist UMAP parameter (see uwot documentation for details)
12 | #' @param seed Random seed
13 | #' @return Symphony reference object. Integrated embedding is stored in the $Z_corr slot. Other slots include
14 | #' cell-level metadata ($meta_data), variable genes means and standard deviations ($vargenes),
15 | #' loadings from PCA or other dimensional reduction such as CCA ($loadings), original PCA embedding ($Z_orig),
16 | #' reference compression terms ($cache), betas from Harmony integration ($betas), cosine-normalized soft cluster centroids ($centroids),
17 | #' centroids in PC space ($centroids_pc), and optional umap coordinates ($umap$embedding).
18 | #' @export 19 | buildReferenceFromHarmonyObj <- function(harmony_obj, 20 | metadata, 21 | vargenes_means_sds, 22 | pca_loadings, # genes x PCs 23 | verbose = TRUE, 24 | do_umap = TRUE, 25 | save_uwot_path = NULL, 26 | umap_min_dist = 0.1, 27 | seed = 111) { 28 | 29 | set.seed(seed) # for reproducibility 30 | 31 | if (verbose) message('Save metadata, vargenes (S), and loadings (U)') 32 | res = list(meta_data = metadata) 33 | res$vargenes = vargenes_means_sds 34 | res$loadings = pca_loadings 35 | 36 | if(verbose) message('Save R, Z_orig, Z_corr, and betas from Harmony object') 37 | res$R = harmony_obj$R 38 | res$Z_orig = harmony_obj$Z_orig 39 | res$Z_corr = harmony_obj$Z_corr 40 | res$betas = harmony::moe_ridge_get_betas(harmony_obj) 41 | 42 | if(verbose) message('Calculate final L2 normalized reference centroids (Y_cos)') 43 | res$centroids = t(cosine_normalize_cpp(harmony_obj$R %*% t(harmony_obj$Z_corr), 1)) 44 | 45 | if(verbose) message('Calculate reference compression terms (Nr and C)') 46 | res$cache = compute_ref_cache(res$R, res$Z_corr) 47 | 48 | # Add row and column names 49 | colnames(res$Z_orig) = row.names(metadata) 50 | rownames(res$Z_orig) = paste0("PC_", seq_len(nrow(res$Z_corr))) 51 | colnames(res$Z_corr) = row.names(metadata) 52 | rownames(res$Z_corr) = paste0("harmony_", seq_len(nrow(res$Z_corr))) 53 | 54 | # Compute centroids in harmony PC space 55 | cluster_sizes = res$cache[[1]] %>% as.matrix() 56 | centroid_sums = t(res$Z_corr %*% t(res$R)) %>% as.data.frame() 57 | centroids_pc = sweep(centroid_sums, 1, cluster_sizes, "/") 58 | colnames(centroids_pc) = paste0("harmony_", seq_len(nrow(res$Z_corr))) 59 | rownames(centroids_pc) = paste0("centroid_", seq_len(nrow(res$R))) 60 | res$centroids_pc = centroids_pc 61 | 62 | if (do_umap) { 63 | if (verbose) message('UMAP') 64 | umap = uwot::umap( 65 | t(res$Z_corr), n_neighbors = 30, learning_rate = 0.5, init = "laplacian", 66 | metric = 'cosine', fast_sgd = FALSE, n_sgd_threads = 1, # for reproducibility 67 | min_dist = umap_min_dist, n_threads = 4, ret_model = TRUE 68 | ) 69 | res$umap$embedding = umap$embedding 70 | colnames(res$umap$embedding) = c('UMAP1', 'UMAP2') 71 | 72 | # Since the nn-index component of the uwot model is not able to be saved as an 73 | # object, we save the uwot model at a user-defined path. 74 | if (!is.null(save_uwot_path)) { 75 | 76 | # If file already exists, delete it (otherwise will result in an error) 77 | if (file.exists(save_uwot_path)) { 78 | if (verbose) message(paste('File already exists at that path... overwriting...')) 79 | file.remove(save_uwot_path) 80 | } 81 | 82 | model = uwot::save_uwot(umap, file = save_uwot_path, unload = FALSE, verbose = FALSE) 83 | res$save_uwot_path = save_uwot_path 84 | if (verbose) message(paste('Saved uwot model')) 85 | } 86 | } 87 | if (verbose) message('Finished nicely.') 88 | return(res) 89 | } 90 | -------------------------------------------------------------------------------- /R/confidenceScores.R: -------------------------------------------------------------------------------- 1 | #' Per-cell Confidence Score: 2 | #' Calculates the weighted Mahalanobis distance for the query cells to reference clusters. Returns a vector 3 | #' of distance scores, one per query cell. Higher distance metric indicates less confidence. 
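#'
#' Concretely, for each query cell the function computes the Mahalanobis distance from the
#' cell's pre-correction projection (query$Zq_pca) to the weighted mean and covariance of
#' each reference cluster (estimated from $Z_orig by default, or from $Z_corr if Z_orig = FALSE),
#' and returns the sum of these distances weighted by the cell's soft cluster assignments
#' (query$R).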
4 | #' 5 | #' @param reference Reference object as returned by Symphony buildReference() 6 | #' @param query Query object as returned by Symphony mapQuery() 7 | #' @param Z_orig Define reference distribution using original PCA embedding or harmonized PC embedding 8 | #' @param metric Uses Mahalanobis by default, but added as a parameter for potential future use 9 | #' 10 | #' @import utils 11 | #' @import stats 12 | #' @return A vector of per-cell mapping metric scores for each cell. 13 | #' @export 14 | calcPerCellMappingMetric = function(reference, query, Z_orig = TRUE, metric = 'mahalanobis') { 15 | 16 | ### Calculate (weighted) covariance matrix and centroid for all k soft clusters 17 | 18 | # initialize a d x k matrix 19 | center_ks = matrix(rep(0, len = ncol(reference$centroids) * nrow(reference$Z_corr)), nrow = nrow(reference$Z_corr)) 20 | # initialize k * (d * d) tensor 21 | cov_ks = list() 22 | 23 | # Calculate 24 | for (k in 1:ncol(reference$centroids)) { 25 | if (Z_orig) { 26 | cov_k = cov.wt(t(reference$Z_orig), wt = reference$R[k,]) 27 | } else { 28 | cov_k = cov.wt(t(reference$Z_corr), wt = reference$R[k,]) 29 | } 30 | cov_ks[[k]] = cov_k$cov # covariance matrix 31 | center_ks[, k] = cov_k$center # centroid in hPC space (d x 1) 32 | } 33 | 34 | # Calculate the Mahalanobis distance from each query cell to all centroids 35 | mah_dist_ks = matrix(rep(0, len = ncol(query$Zq_pca) * ncol(reference$centroids)), nrow = ncol(query$exp)) 36 | for (k in 1:ncol(reference$centroids)) { 37 | mah_dist_ks[, k] = sqrt(mahalanobis(x = t(query$Zq_pca), center = center_ks[, k], cov = cov_ks[[k]])) 38 | } 39 | 40 | # Return the per-cell score, which is the average of the distances weighted by the clusters the cell belongs to 41 | maha = rowSums(mah_dist_ks * t(query$R)) 42 | return(maha) 43 | } 44 | 45 | #' Per-cluster Confidence Score: 46 | #' Calculates the Mahalanobis distance from user-defined query clusters to their nearest 47 | #' reference centroid after initial projection into reference PCA space. 48 | #' All query cells in a cluster get the same score. Higher distance indicates less confidence. 49 | #' Due to the instability of estimating covariance with small numbers of cells, we do not assign a 50 | #' score to clusters smaller than u * d, where d is the dimensionality of the embedding and u is specified. 51 | #' 52 | #' @param reference Reference object as returned by Symphony buildReference() 53 | #' @param query Query object as returned by Symphony mapQuery() 54 | #' @param query_cluster_labels Vector of user-defined labels denoting clusters / putative novel cell type to calculate the score for 55 | #' @param metric Uses Mahalanobis by default, but added as a parameter for potential future use 56 | #' @param u Do not assign scores to clusters smaller than u * d (see above description) 57 | #' @param lambda Optional ridge parameter added to covariance diagonal to help stabilize numeric estimates 58 | #' 59 | #' @import utils 60 | #' @import stats 61 | #' @return A data.frame of per-cluster mapping metric scores for each user-specified query cluster. 
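#' @examples
#' \dontrun{
#' # Hypothetical sketch: 'cluster' is an assumed column of query$meta_data holding
#' # de novo query cluster labels; any vector of per-cell labels can be used.
#' cluster_scores = calcPerClusterMappingMetric(reference, query,
#'                                              query_cluster_labels = query$meta_data$cluster)
#' }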
62 | #' @export 63 | calcPerClusterMappingMetric = function(reference, query, query_cluster_labels, metric = 'mahalanobis', 64 | u = 2, lambda = 0) { 65 | 66 | query_cluster_labels = as.character(query_cluster_labels) 67 | query_cluster_labels_unique = unique(query_cluster_labels) 68 | num_clusters = length(query_cluster_labels_unique) 69 | message('Calculating mapping confidence for ', num_clusters, ' query clusters') 70 | 71 | ### Calculate the Mahalobinis distance between each query cluster and it's nearest reference centroid 72 | # c denotes the number of user-defined query clusters 73 | 74 | # initialize a d x k matrix 75 | center_cs = matrix(rep(0, len = nrow(query$Zq_pca) * num_clusters), nrow(query$Zq_pca)) # init 76 | # initialize c * (d * d) tensor 77 | cov_cs = list() 78 | colnames(center_cs) = query_cluster_labels_unique 79 | 80 | # Calculate query cluster centroid and covariances in PC space 81 | for (c in 1:num_clusters) { 82 | cluster_idx = which(query_cluster_labels == query_cluster_labels_unique[c]) 83 | cluster_Zq_pca = query$Zq_pca[, cluster_idx, drop = FALSE] 84 | cov_cs[[c]] = cov(t(cluster_Zq_pca)) 85 | center_cs[, c] = rowMeans(cluster_Zq_pca) 86 | } 87 | 88 | ## Find nearest reference cluster centroid 89 | nearest_centroid_idx = max.col(t(center_cs) %*% t(reference$centroids_pc)) 90 | centroid_closest = reference$centroids_pc[nearest_centroid_idx, ] 91 | 92 | # Calculate Mahalanobis distance from query cluster to nearest reference centroid 93 | mah_dist_cs = as.data.frame(matrix(rep(0, len = num_clusters * 2), nrow = num_clusters)) # init 94 | colnames(mah_dist_cs) = c('query_cluster', 'distance_score') # init 95 | mah_dist_cs$query_cluster = query_cluster_labels_unique 96 | 97 | for (c in 1:num_clusters) { 98 | # if the number of cells in a query cluster is less than d, we add a ridge to the diagonal 99 | cluster_size = length(which(query_cluster_labels == query_cluster_labels_unique[c])) 100 | if (cluster_size < u * nrow(query$Z)) { 101 | message('(Warning) cluster contains too few cells to estimate confidence: ', query_cluster_labels_unique[c]) 102 | mah_dist_cs$distance_score[c] = NA 103 | } else { 104 | cov = cov_cs[[c]] + lambda * diag(nrow(query$Z)) # optional ridge to help stabilize numerical estimates 105 | mah_dist_cs$distance_score[c] = sqrt(mahalanobis(x = centroid_closest[c,], center = center_cs[,c], cov = cov)) 106 | } 107 | } 108 | return(mah_dist_cs) 109 | } 110 | -------------------------------------------------------------------------------- /R/data.R: -------------------------------------------------------------------------------- 1 | #' Log(CP10k+1) normalized counts matrix (genes by cells) for 10x PBMCs dataset for vignette. 2 | #' 3 | #' @format: Sparse matrix (dgCMatrix): dimensions 1,764 genes by 1,200 cells 4 | "pbmcs_exprs_small" 5 | 6 | #' Metadata for 10x PBMCs dataset for vignette. 7 | #' 8 | #' @format: A data frame with 1,200 cells and 7 metadata fields. 
9 | #' \describe{ 10 | #' \item{cell_id}{unique cell ID} 11 | #' \item{donor}{dataset (3pv1, 3pv2, or 5p)} 12 | #' \item{nUMI}{number of UMIs} 13 | #' \item{nGene}{number of genes} 14 | #' \item{percent_mito}{percent mito genes} 15 | #' \item{cell_type}{cell type assigned in Symphony publication} 16 | #' \item{cell_type_broad}{cell subtype assigned in Symphony publication} 17 | #' 18 | #' } 19 | "pbmcs_meta_small" 20 | -------------------------------------------------------------------------------- /R/findVariableGenes.R: -------------------------------------------------------------------------------- 1 | #' Function to find variable genes using mean variance relationship method 2 | #' 3 | #' @importFrom methods as 4 | #' @importFrom stats loess median na.omit quantile 5 | #' @importFrom rlang .data 6 | #' 7 | #' @param X expression matrix 8 | #' @param groups vector of groups 9 | #' @param min_expr min expression cutoff 10 | #' @param max_expr max expression cutoff 11 | #' @param min_dispersion min dispersion cutoff 12 | #' @param max_dispersion max dispersion cutoff 13 | #' @param num.bin number of bins to use for scaled analysis 14 | #' @param binning.method how bins are computed 15 | #' @param return_top_n returns top n genes 16 | #' @return A data.frame of variable genes 17 | #' @export 18 | findVariableGenes <- function(X, groups, min_expr = .1, max_expr = Inf, 19 | min_dispersion = 0, max_dispersion = Inf, 20 | num.bin = 20, binning.method = "equal_width", return_top_n = 0) { 21 | #https://www.r-bloggers.com/2019/08/no-visible-binding-for-global-variable/ 22 | group <- gene_mean <- symbol <- gene_dispersion <- NULL # prevents R CMD check note 23 | 24 | ## TODO: check that groups are 0 indexed 25 | groups <- factor(groups) 26 | groups_int <- as.integer(factor(groups)) - 1 27 | groups_table <- table(groups_int) 28 | 29 | ## initially compute means in non-log space, to use in vmr function below 30 | means_nonlog <- exp_mean(X@x, X@p, X@i, ncol(X), nrow(X), groups_int, groups_table) 31 | colnames(means_nonlog) <- levels(groups) 32 | 33 | vmr <- log_vmr(X@x, X@p, X@i, ncol(X), nrow(X), means_nonlog, groups_int, groups_table) 34 | colnames(vmr) <- levels(groups) 35 | 36 | ## transform means to logspace and join means and VMR 37 | vargenes_df <- dplyr::inner_join( 38 | means_nonlog %>% log1p %>% as_tibble() %>% 39 | cbind(symbol = row.names(X)) %>% 40 | tidyr::gather(group, gene_mean, -symbol), 41 | vmr %>% as_tibble() %>% 42 | cbind(symbol = row.names(X)) %>% 43 | tidyr::gather(group, gene_dispersion, -symbol), 44 | by = c("symbol", "group") 45 | ) 46 | 47 | if (num.bin > 0) { 48 | if (binning.method == "equal_width") { 49 | .breaks <- num.bin 50 | } 51 | else if (binning.method == "equal_frequency") { 52 | .breaks <- c(-1, quantile(vargenes_df$gene_mean[vargenes_df$gene_mean > 0], probs = seq(0, 1, length.out = num.bin))) 53 | } 54 | else { 55 | stop(paste0("Invalid selection: '", binning.method, "' for 'binning.method'.")) 56 | } 57 | 58 | vargenes_df <- data.table(vargenes_df)[ 59 | , .data$the_bin := cut(.data$gene_mean, .breaks), by = .data$group 60 | ][] 61 | 62 | vargenes_df <- data.table(vargenes_df)[ 63 | , .data$gene_dispersion_scaled := scale(.data$gene_dispersion), by = c('the_bin', 'group') 64 | ][] 65 | 66 | vargenes_df <- data.table(vargenes_df)[, .data$the_bin := NULL][] 67 | } 68 | 69 | vargenes_df <- vargenes_df %>% 70 | dplyr::arrange(-.data$gene_dispersion) %>% 71 | subset(.data$gene_mean >= min_expr & .data$gene_mean <= max_expr) %>% 72 | subset(.data$gene_dispersion 
>= min_dispersion & .data$gene_dispersion <= max_dispersion) 73 | 74 | return(vargenes_df) 75 | # if (return_top_n > 0) { 76 | # vargenes_union <- unique(data.table(vargenes_df)[, head(.SD, return_top_n), by = group][, symbol]) 77 | # return(vargenes_union) 78 | # } else { 79 | # return(vargenes_df) 80 | # } 81 | 82 | } 83 | 84 | #' Function to find variable genes using variance stabilizing transform (vst) method 85 | #' 86 | #' @param object expression matrix 87 | #' @param groups finds variable genes within each group then pools 88 | #' @param topn Return top n genes 89 | #' @param loess.span Loess span parameter used when fitting the variance-mean relationship 90 | #' @return A data.frame of variable genes, with means and standard deviations. 91 | #' @export 92 | vargenes_vst <- function(object, groups, topn, loess.span = 0.3) { 93 | clip.max <- sqrt(ncol(object)) 94 | 95 | N <- ncol(object) 96 | if (missing(groups)) { 97 | groups <- rep('A', N) 98 | } 99 | 100 | res <- split(seq_len(N), groups) %>% lapply(function(idx) { 101 | object_group <- object[, idx] 102 | ## row means 103 | hvf.info <- data.frame(mean = Matrix::rowMeans(object_group)) 104 | 105 | ## row vars 106 | hvf.info$variance <- rowVars(object_group, hvf.info$mean) 107 | 108 | ## initialize 109 | hvf.info$variance.expected <- 0 110 | hvf.info$variance.standardized <- 0 111 | 112 | not.const <- hvf.info$variance > 0 113 | 114 | ## loess curve fit 115 | suppressWarnings({ 116 | fit <- loess(formula = log10(variance) ~ log10(mean), 117 | data = hvf.info[not.const, ], span = loess.span) 118 | }) 119 | 120 | ## extract fitted variance 121 | hvf.info$variance.expected[not.const] <- 10^fit$fitted 122 | 123 | ## get row standard deviations after clipping 124 | hvf.info$variance.standardized <- rowVarsStd( 125 | object_group, 126 | hvf.info$mean, 127 | sqrt(hvf.info$variance.expected), 128 | clip.max 129 | ) 130 | 131 | hvf.info <- hvf.info %>% 132 | tibble::rownames_to_column('symbol') %>% 133 | dplyr::arrange(-.data$variance.standardized) %>% 134 | tibble::rowid_to_column('rank') %>% 135 | transform(group = unique(groups[idx])) 136 | 137 | return(hvf.info) 138 | }) 139 | 140 | 141 | if (missing(topn)) { 142 | ## MODE 1: return table 143 | res <- Reduce(rbind, res) %>% 144 | dplyr::select(.data$group, .data$symbol, .data$rank, .data$everything()) 145 | 146 | if (length(unique(res$group)) == 1) { 147 | res$group <- NULL 148 | } 149 | } else { 150 | ## MODE 2: return genes 151 | res <- Reduce(union, lapply(res, function(x) head(x, topn)$symbol)) 152 | } 153 | return(res) 154 | } 155 | -------------------------------------------------------------------------------- /R/globals.R: -------------------------------------------------------------------------------- 1 | utils::globalVariables(c('..level..', 'UMAP1', 'UMAP2', 'geom_point_rast', 'geom_text_repel')) 2 | -------------------------------------------------------------------------------- /R/knncorr.R: -------------------------------------------------------------------------------- 1 | #' Calculates the k-NN correlation, which measures how well the sorted ordering of k nearest reference 2 | #' neighbors in a gold standard embedding correlate with the ordering for the same reference cells in 3 | #' an alternative embedding (i.e. from reference mapping). 4 | #' NOTE: it is very important for the order of reference cells (cols) in gold_ref matches that of alt_ref 5 | #' (same for matching columns of gold_query and alt_query). 
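#' @examples
#' \dontrun{
#' # Hypothetical sketch: gold_ref and gold_query are PCs-by-cells matrices from a
#' # gold-standard (e.g. de novo) embedding of the same cells; the alternative
#' # embedding here is the Symphony reference ($Z_corr) and mapped query ($Z).
#' # Column order must match between the gold and alternative embeddings.
#' knn_corrs = calcknncorr(gold_ref, reference$Z_corr, gold_query, query$Z, k = 500)
#' }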
6 | #' 7 | #' @param gold_ref Reference cells in gold standard embedding (PCs by cells) 8 | #' @param alt_ref Reference cells in alternative embedding (PCs by cells) 9 | #' @param gold_query Query cells in gold standard embedding (PCs by cells) 10 | #' @param alt_query Query cells in alternative embedding (PCs by cells) 11 | #' @param k Number of reference neighbors to use for kNN-correlation calculation 12 | #' 13 | #' @import RANN 14 | #' @return Vector of k-NN correlations for query cells 15 | #' @export 16 | calcknncorr = function(gold_ref, alt_ref, gold_query, alt_query, k = 500) { 17 | message('Note: This function assumes that ordering of cells (cols) between gold and alt embeddings match') 18 | # Calculate the query cells' k nearest reference neighbors in the gold standard embedding 19 | nn_in_gold = nn2(t(gold_ref), t(gold_query), k = k) 20 | 21 | corrs = numeric(ncol(gold_query)) # initialize results 22 | for (i in 1:nrow(nn_in_gold$nn.idx)) { # for each query cell 23 | neighbors_idx = nn_in_gold$nn.idx[i, ] 24 | 25 | # Get position of anchor cell in gold embedding 26 | query_anchor_gold = gold_query[, i] %>% # duplicate k times 27 | matrix(nrow = nrow(gold_query), ncol = k, byrow = FALSE) 28 | 29 | # Get position of anchor cell in alternate embedding 30 | query_anchor_alt = alt_query[, i] %>% # duplicate k times 31 | matrix(nrow = nrow(gold_query), ncol = k, byrow = FALSE) 32 | 33 | # Get positions for its nearest neighbors in the gold embedding 34 | query_neighbors_gold = gold_ref[, neighbors_idx] 35 | 36 | # Get positions for its nearest neighbors in the alt embedding 37 | query_neighbors_alt = alt_ref[, neighbors_idx] 38 | 39 | # Calculate distance between anchor cell and each neighbor in gold embedding 40 | distances_gold = sqrt(colSums((query_anchor_gold - query_neighbors_gold)**2)) 41 | 42 | # Calculate distance between anchor cell and each neighbor in alt embedding 43 | distances_alt = sqrt(colSums((query_anchor_alt - query_neighbors_alt)**2)) 44 | 45 | # Calculate Spearman correlation between the two distance vectors 46 | corrs[i] = cor(distances_gold, distances_alt, method = 'spearman') 47 | } 48 | return(corrs) 49 | } 50 | 51 | #' Calculates the k-NN correlation within the query cells only, which measures how well the sorted 52 | #' ordering of k nearest query neighbors in a query de novo PCA embedding correlate with the ordering 53 | #' for the cells in the reference mapping embedding. 
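#' @examples
#' \dontrun{
#' # Hypothetical sketch: query is the object returned by mapQuery(), and 'donor'
#' # is an assumed batch column in query$meta_data.
#' corrs = calcknncorrWithinQuery(query, var = 'donor', k = 100)
#' }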
54 | #' 55 | #' @param query Query object (returned from mapQuery) 56 | #' @param var Query metadata batch variable (PCA is calculated within each batch separately); if NULL, do not split by batch 57 | #' @param k Number of neighbors to use for kNN-correlation calculation 58 | #' @param topn number of variable genes to calculate within each query batch for query PCA 59 | #' @param d number of dimensions for query PCA within each query batch 60 | #' @param distance either 'euclidean' or 'cosine' 61 | #' 62 | #' @import RANN 63 | #' @return Vector of within-query k-NN correlations for query cells 64 | #' @export 65 | calcknncorrWithinQuery = function(query, var = NULL, k = 100, topn = 2000, d = 20, distance = 'euclidean') { 66 | corrs = numeric(nrow(query$meta_data)) # initialize results 67 | 68 | if (!is.null(var)) { 69 | for (batch in unique(query$meta_data[[var]])) { # for each batch 70 | message(paste0('Calculating k-NN correlation within query batch ', batch)) 71 | 72 | batch_idx = which(query$meta_data[[var]] == batch) 73 | query$exp = Matrix(query$exp, sparse = TRUE) 74 | query_exp_batch = query$exp[, batch_idx] 75 | 76 | Z_pca = runPCAQueryAlone(query_exp_batch, topn = topn, d = d) 77 | Z_mapping = query$Z[, batch_idx] 78 | 79 | # Calculate correlation & save results 80 | corrs[batch_idx] = calcknncorrWithinQueryBatch(Z_pca, Z_mapping, k, distance = distance) 81 | } 82 | 83 | } else { 84 | message('No batch var specified. Treating query as 1 batch.') 85 | Z_pca = runPCAQueryAlone(query$exp, topn = topn, d = d) 86 | Z_mapping = query$Z 87 | 88 | # Calculate correlation & save results 89 | corrs = calcknncorrWithinQueryBatch(Z_pca, Z_mapping, k, distance = distance) 90 | } 91 | return(corrs) 92 | } 93 | 94 | # Non-exported (called by calcknncorrWithinQuery above) 95 | # Calculates the k-NN correlation within a query batch. 96 | calcknncorrWithinQueryBatch = function(Z_pca, Z_mapping, k, distance) { 97 | if (!identical(dim(Z_pca), dim(Z_mapping))) { 98 | stop('Error: PCA and mapping embeddings have different dimensions') 99 | } 100 | 101 | if (distance == 'cosine') { # L2 normalize 102 | Z_pca = Z_pca %>% cosine_normalize_cpp(2) 103 | Z_mapping = Z_mapping %>% cosine_normalize_cpp(2) 104 | } 105 | 106 | # Calculate nearest neighbors in query PCA space 107 | nn_in_query_pca = nn2(t(Z_pca), t(Z_pca), k = k + 1) # k+1 because do not count itself 108 | rownames(nn_in_query_pca$nn.idx) = nn_in_query_pca$nn.idx[, 1] 109 | nn_in_query_pca$nn.idx = nn_in_query_pca$nn.idx[, -1] # do not count itself 110 | 111 | corrs_batch = numeric(nrow(nn_in_query_pca$nn.idx)) 112 | 113 | for (i in 1:ncol(Z_pca)) { # For each anchor cell, calculate k-NN-corr 114 | if(ncol(Z_pca) < k + 1) { 115 | k = ncol(Z_pca) - 1 116 | message(paste('Warning: Batch has too few cells. 
Using k =', k , 'instead.')) 117 | } 118 | 119 | neighbors_idx = nn_in_query_pca$nn.idx[i, ] # neighbors are defined in query PC space within each batch 120 | 121 | # Get position of anchor cell in query PCA embedding 122 | query_anchor_pca = Z_pca[, i] %>% 123 | matrix(nrow = length(Z_pca[, i]), ncol = k, byrow = FALSE) 124 | 125 | # Get position of anchor cell in mapping embedding 126 | query_anchor_mapping = Z_mapping[, i] %>% matrix(nrow = length(Z_mapping[, i]), ncol = k, byrow = FALSE) 127 | 128 | # Get positions for its nearest neighbors in the query PCA embedding 129 | query_neighbors_pca = Z_pca[, neighbors_idx] # [20 x k] 130 | 131 | # Get positions for its nearest neighbors in the mapping embedding 132 | query_neighbors_mapping = Z_mapping[, neighbors_idx] # [20 x k] 133 | 134 | # Calculate Euclidean distance between anchor cell and each neighbor in query PCA embedding 135 | distances_pca = sqrt(colSums((query_anchor_pca - query_neighbors_pca)**2)) 136 | 137 | # Calculate Euclidean distance between anchor cell and each neighbor in mapping embedding 138 | distances_mapping = sqrt(colSums((query_anchor_mapping - query_neighbors_mapping)**2)) 139 | 140 | # Calculate Spearman correlation between the two distance vectors 141 | corrs_batch[i] = cor(distances_mapping, distances_pca, method = 'spearman') 142 | } 143 | return(corrs_batch) 144 | } 145 | 146 | #' Runs a standard PCA pipeline on query (1 batch). Assumes query_exp is already normalized. 147 | #' 148 | #' @param query_exp Query expression matrix (genes x cells) 149 | #' @param topn Number of variable genes to use 150 | #' @param d Number of dimensions 151 | #' @param seed random seed 152 | #' 153 | #' @import irlba 154 | #' @return A matrix of PCs by cells 155 | #' @export 156 | runPCAQueryAlone = function(query_exp, topn = 2000, d = 20, seed = 1) { 157 | # Subset by variable genes 158 | vargenes = vargenes_vst(query_exp, topn = topn) 159 | vargenes_exp = query_exp[vargenes, ] 160 | 161 | vargenes_means_sds = tibble(symbol = vargenes, mean = Matrix::rowMeans(vargenes_exp)) 162 | vargenes_means_sds$stddev <- rowSDs(vargenes_exp, vargenes_means_sds$mean) 163 | 164 | # Scale data 165 | exp_scaled <- scaleDataWithStats(vargenes_exp, vargenes_means_sds$mean, vargenes_means_sds$stddev, 1) 166 | 167 | # Run SVD, save loadings 168 | set.seed(seed) 169 | s = irlba(exp_scaled, nv = d) 170 | Z_pca = diag(s$d) %*% t(s$v) # [PCs by cells] 171 | return(Z_pca) 172 | } 173 | -------------------------------------------------------------------------------- /R/mapQuery.R: -------------------------------------------------------------------------------- 1 | #' Function for mapping query cells to a Symphony reference 2 | #' 3 | #' @param exp_query Query gene expression (genes by cells) 4 | #' @param metadata_query Query metadata (cells by attributes) 5 | #' @param ref_obj Reference object as returned by Symphony buildReference() 6 | #' @param vars Query batch variable(s) to integrate over (column names in metadata) 7 | #' @param verbose Verbose output 8 | #' @param do_normalize Perform log(CP10K+1) normalization on query expression 9 | #' @param do_umap Perform umap projection into reference UMAP (if reference includes a uwot model) 10 | #' @param sigma Fuzziness parameter for soft clustering (sigma = 1 is hard clustering) 11 | #' 12 | #' @import utils 13 | #' @importFrom magrittr "%>%" 14 | #' @importFrom Matrix Matrix 15 | #' @return Symphony query object. Mapping embedding is in the $Z slot. 
Other slots include 16 | #' query expression matrix ($exp), query cell-level metadata ($meta_data), 17 | #' query cell embedding in pre-Harmonized reference PCs ($Zq_pca), query cell soft cluster 18 | #' assignments ($R), and query cells in reference UMAP coordinates ($umap). 19 | #' 20 | #' @export 21 | mapQuery = function(exp_query, 22 | metadata_query, 23 | ref_obj, # From Symphony reference building 24 | vars = NULL, # Query batch variables to harmonize over 25 | verbose = TRUE, 26 | do_normalize = TRUE, 27 | do_umap = TRUE, 28 | sigma = 0.1) { 29 | 30 | if (do_normalize) { 31 | if (verbose) message('Normalizing') 32 | exp_query = normalizeData(exp_query, 1e4, 'log') 33 | } 34 | 35 | ## Synchronize and scale query genes 36 | if (verbose) message('Scaling and synchronizing query gene expression') 37 | 38 | # Find shared genes between reference and query 39 | idx_shared_genes = which(ref_obj$vargenes$symbol %in% rownames(exp_query)) 40 | shared_genes = ref_obj$vargenes$symbol[idx_shared_genes] 41 | if (verbose) message('Found ', length(shared_genes), ' out of ', length(ref_obj$vargenes$symbol),' reference variable genes in query dataset') 42 | 43 | # Subset and scale the query cells by reference means and standard deviations 44 | exp_query_scaled = scaleDataWithStats(exp_query[shared_genes, ], 45 | ref_obj$vargenes$mean[idx_shared_genes], 46 | ref_obj$vargenes$stddev[idx_shared_genes], 1) 47 | 48 | # To add rows of zeros for missing genes, start with full matrix of zeroes 49 | exp_query_scaled_sync = matrix(0, nrow = length(ref_obj$vargenes$symbol), ncol = ncol(exp_query)) 50 | 51 | # Rows get filled with exp_query_scaled values, leaving rows of 0s where appropriate 52 | exp_query_scaled_sync[idx_shared_genes, ] = exp_query_scaled 53 | rownames(exp_query_scaled_sync) = ref_obj$vargenes$symbol 54 | colnames(exp_query_scaled_sync) = colnames(exp_query) 55 | 56 | if (verbose) message('Project query cells using reference gene loadings') 57 | ### 1. Project into PCs using reference loadings 58 | Z_pca_query = t(ref_obj$loadings) %*% exp_query_scaled_sync 59 | 60 | if (verbose) message('Clustering query cells to reference centroids') 61 | ### 2. Soft cluster assignment 62 | Z_pca_query_cos = cosine_normalize_cpp(Z_pca_query, 2) 63 | R_query = soft_cluster(ref_obj$centroids, Z_pca_query_cos, sigma) 64 | 65 | if (verbose) message('Correcting query batch effects') 66 | ### 3. 
Correction step with ridge regression 67 | 68 | # Make query design matrix 69 | if (!is.null(vars)) { 70 | design = droplevels(metadata_query)[,vars] %>% as.data.frame() 71 | 72 | onehot = design %>% 73 | purrr::map(function(.x) { 74 | if (length(unique(.x)) == 1) { # Special case if factor only has 1 level 75 | rep(1, length(.x)) 76 | } else { 77 | stats::model.matrix(~0 + .x) 78 | } 79 | }) %>% purrr::reduce(cbind) 80 | 81 | Xq = cbind(1, intercept = onehot) %>% t() 82 | } else { 83 | # If no batches specified, treat all query cells as single batch 84 | Xq = Matrix(rbind(rep(1, ncol(Z_pca_query)), rep(1, ncol(Z_pca_query))), sparse = TRUE) 85 | } 86 | 87 | # Mixture of experts correction (calls cpp code) 88 | Zq_corr = moe_correct_ref(as.matrix(Z_pca_query), 89 | as.matrix(Xq), 90 | as.matrix(R_query), 91 | as.matrix(ref_obj$cache[[1]]), 92 | as.matrix(ref_obj$cache[[2]])) ## TODO: add lambda parameter 93 | 94 | # Add row and column names 95 | colnames(Z_pca_query) = row.names(metadata_query) 96 | rownames(Z_pca_query) = paste0("PC_", seq_len(nrow(Zq_corr))) 97 | colnames(Zq_corr) = row.names(metadata_query) 98 | rownames(Zq_corr) = paste0("harmony_", seq_len(nrow(Zq_corr))) 99 | 100 | ## UMAP projection of query if the reference uwot model is present 101 | umap_query = NULL 102 | 103 | if (do_umap & !is.null(ref_obj$save_uwot_path)) { 104 | if (verbose) message('UMAP') 105 | ref_umap_model = uwot::load_uwot(ref_obj$save_uwot_path, verbose = FALSE) 106 | umap_query = uwot::umap_transform(t(Zq_corr), ref_umap_model) 107 | colnames(umap_query) = c('UMAP1', 'UMAP2') 108 | } 109 | 110 | if (verbose) message('All done!') 111 | return(list(exp = exp_query, meta_data = metadata_query, Z = Zq_corr, Zq_pca = Z_pca_query, 112 | R = R_query, Xq = Xq, umap = umap_query)) 113 | } 114 | -------------------------------------------------------------------------------- /R/plotReference.R: -------------------------------------------------------------------------------- 1 | #' Function to plot reference, colored by cell type 2 | #' 3 | #' @param reference Symphony reference object (must have UMAP stored) 4 | #' @param as.density if TRUE, plot as density; if FALSE, plot as individual cells 5 | #' @param bins for density, nbins parameter for stat_density_2d 6 | #' @param bandwidth for density, bandwidth parameter for stat_density_2d 7 | #' @param title Plot title 8 | #' @param color.by metadata column name for phenotype labels 9 | #' @param celltype.colors custom color mapping 10 | #' @param show.legend Show cell type legend 11 | #' @param show.labels Show cell type labels 12 | #' @param show.centroids Plot soft cluster centroid locations 13 | #' @import ggplot2 14 | #' @import RColorBrewer 15 | #' @import uwot 16 | #' @return A ggplot object. 17 | #' @export 18 | plotReference = function(reference, # Symphony reference object 19 | as.density = TRUE, # if FALSE, plot as individual cells 20 | bins = 10, # for density, nbins parameter for stat_density_2d 21 | bandwidth = 1.5, # for density, bandwidth parameter for stat_density_2d 22 | title = 'Reference', # Plot title 23 | color.by = 'cell_type', # metadata column name for cell type labels 24 | celltype.colors = NULL, # custom color mapping 25 | show.legend = TRUE, # Show cell type legend 26 | show.labels = TRUE, # Show cell type labels 27 | show.centroids = FALSE) { # Plot soft cluster centroid locations 28 | 29 | if (is.null(reference$umap)) { 30 | stop('Error: umap slot is empty. 
UMAP was not saved for this reference!') 31 | } 32 | 33 | umap_labels = cbind(reference$meta_data, reference$umap$embedding) 34 | 35 | p = umap_labels %>% 36 | dplyr::sample_frac(1L) %>% # permute rows randomly 37 | ggplot(aes(x = UMAP1, y = UMAP2)) 38 | 39 | if (as.density) { 40 | # Plot as density 41 | p = p + stat_density_2d(geom = 'polygon', aes(alpha = ..level.., fill = get(color.by)), 42 | contour_var = "ndensity", bins = bins, h = bandwidth) 43 | if (!is.null(celltype.colors)) { p = p + scale_fill_manual(values = celltype.colors) + 44 | labs(fill = color.by)} 45 | } else { 46 | if(requireNamespace('ggrastr')){ 47 | # Plot as individual points 48 | p = p + ggrastr::geom_point_rast(aes(col = get(color.by)), size = 0.3, stroke = 0.2, shape = 16) 49 | } else{ 50 | message('Install ggrastr to plot cell as raster. Useful if there are many cells.') 51 | p = p + geom_point(aes(col = get(color.by)), size = 0.3, stroke = 0.2, shape = 16) 52 | } 53 | if (!is.null(celltype.colors)) { p = p + scale_color_manual(values = celltype.colors) + labs(color = color.by)} 54 | } 55 | 56 | # Default formatting 57 | p = p + theme_bw() + 58 | labs(title = title) + 59 | theme(plot.title = element_text(hjust = 0.5)) + 60 | theme(legend.position="bottom") + 61 | theme(legend.text = element_text(size=8), legend.title=element_text(size=12)) + 62 | guides(colour = guide_legend(override.aes = list(size = 4))) + guides(alpha = 'none') 63 | 64 | if (show.centroids) { 65 | # Add centroid locations 66 | centroids = reference$Z_corr %*% t(reference$R) 67 | ref_umap_model = uwot::load_uwot(reference$save_uwot_path, verbose = FALSE) 68 | umap_centroids = uwot::umap_transform(t(centroids), ref_umap_model) %>% as.data.frame() 69 | colnames(umap_centroids) = c('UMAP1', 'UMAP2') 70 | 71 | p = p + geom_point(data = umap_centroids, aes(x = UMAP1, y = UMAP2, fill = 'centroid'), size = 0.3) 72 | } 73 | 74 | if (show.labels) { 75 | # Add cell type labels (at median coordinate per cell type) 76 | labels.cent = umap_labels %>% 77 | dplyr::group_by_at(color.by) %>% #group_by_at takes variable column name 78 | dplyr::select(UMAP1, UMAP2) %>% 79 | dplyr::summarize_all(median) 80 | if(requireNamespace('ggrepel')){ 81 | p = p + geom_text_repel(data = labels.cent, aes(x= UMAP1, y = UMAP2, label = get(color.by)), 82 | segment.alpha = 0.5, segment.size = 0.2, box.padding = 0.01, color = 'black') 83 | } else{ 84 | message('Install ggrepel to layout labels nicely.') 85 | p = p + ggplot2::geom_text(data = labels.cent, aes(x= UMAP1, y = UMAP2, label = get(color.by)), color = 'black') 86 | } 87 | } 88 | 89 | if (!show.legend) { 90 | p = p + theme(legend.position="none") 91 | } 92 | return(p) 93 | } 94 | -------------------------------------------------------------------------------- /R/symphony-package.R: -------------------------------------------------------------------------------- 1 | #' symphony 2 | #' 3 | #' Efficient single-cell reference atlas mapping (Kang et al.) 
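#'
#' The main entry points are buildReference() and buildReferenceFromHarmonyObj() for
#' reference building, mapQuery() for mapping query cells onto a reference, and
#' knnPredict() for transferring reference labels to mapped query cells.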
4 | #' 5 | #' @name symphony 6 | #' @docType package 7 | #' @useDynLib symphony 8 | #' @importFrom Rcpp sourceCpp 9 | NULL -------------------------------------------------------------------------------- /R/utils.R: -------------------------------------------------------------------------------- 1 | # Methods from singlecellmethods --------------------------------- 2 | 3 | normalizeData <- function(A, scaling_factor = 1e4, method) { 4 | if(!'dgCMatrix' %in% class(A)) A <- as(A, "dgCMatrix") 5 | 6 | if (method == "log") { 7 | A@x <- A@x / rep.int(Matrix::colSums(A), diff(A@p)) 8 | A@x <- scaling_factor * A@x 9 | A@x <- log(1 + A@x) 10 | } else if (method == "fft") { 11 | A@x <- A@x / rep.int(Matrix::colSums(A), diff(A@p)) 12 | A@x <- scaling_factor * A@x 13 | A@x <- sqrt(A@x) + sqrt(1 + A@x) 14 | } else if (method == "geneCLR") { 15 | A@x <- as.numeric(normalizeCLR_dgc(A@x, A@p, A@i, ncol(A), nrow(A), 1)) 16 | } else if (method == "cellCLR") { 17 | A@x <- as.numeric(normalizeCLR_dgc(A@x, A@p, A@i, ncol(A), nrow(A), 2)) 18 | } else { 19 | stop(sprintf("ERROR: method %s not implemented", method)) 20 | } 21 | 22 | return(A) 23 | } 24 | 25 | scaleData <- function(A, margin = 1, thresh = 10) { 26 | A <- as(A, "dgCMatrix") 27 | 28 | if (margin != 1) A <- t(A) 29 | 30 | res <- scaleRows_dgc(A@x, A@p, A@i, ncol(A), nrow(A), thresh) 31 | if (margin != 1) res <- t(res) 32 | row.names(res) <- row.names(A) 33 | colnames(res) <- colnames(A) 34 | return(res) 35 | } 36 | 37 | 38 | #' Scale data with given mean and standard deviations 39 | #' 40 | #' @param A expression matrix (genes by cells) 41 | #' @param mean_vec vector of mean values 42 | #' @param sd_vec vector of standard deviation values 43 | #' @param margin 1 for row-wise calculation 44 | #' @param thresh threshold to clip max values 45 | #' @return A matrix of scaled expression values. 
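#' @examples
#' \dontrun{
#' # Hypothetical sketch: scale query expression with the reference's stored gene
#' # means and standard deviations, as mapQuery() does for shared variable genes
#' # (assumes all reference variable genes are present in exp_query).
#' exp_scaled = scaleDataWithStats(exp_query[reference$vargenes$symbol, ],
#'                                 reference$vargenes$mean,
#'                                 reference$vargenes$stddev, 1)
#' }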
46 | #' @export 47 | scaleDataWithStats <- function(A, mean_vec, sd_vec, margin = 1, thresh = 10) { 48 | if (!"dgCMatrix" %in% class(A)) 49 | A <- as(A, "dgCMatrix") 50 | 51 | if (margin != 1) A <- t(A) 52 | 53 | res <- scaleRowsWithStats_dgc(A@x, A@p, A@i, mean_vec, sd_vec, 54 | ncol(A), nrow(A), thresh) 55 | if (margin != 1) res <- t(res) 56 | row.names(res) <- row.names(A) 57 | colnames(res) <- colnames(A) 58 | return(res) 59 | } 60 | 61 | #' Calculate standard deviations by row 62 | #' 63 | #' @param A expression matrix (genes by cells) 64 | #' @param row_means row means 65 | #' @param weights weights for weighted standard dev calculation 66 | #' @return A vector of row standard deviations 67 | #' @export 68 | rowSDs <- function(A, row_means=NULL, weights=NULL) { 69 | if (is.null(row_means)) { 70 | row_means <- rowMeans(A, weights) 71 | } 72 | if (is.null(weights)) { 73 | res <- as.numeric(rowSDs_dgc(A@x, A@p, A@i, row_means, ncol(A), nrow(A), TRUE)) 74 | } else { 75 | res <- as.numeric(rowSDsWeighted_dgc(A@x, A@p, A@i, row_means, weights, ncol(A), nrow(A), TRUE)) 76 | } 77 | names(res) <- row.names(A) 78 | return(res) 79 | } 80 | 81 | rowMeans <- function(A, weights=NULL) { 82 | if (is.null(weights)) { 83 | res <- Matrix::rowMeans(A) 84 | } else { 85 | res <- as.numeric(rowMeansWeighted_dgc(A@x, A@p, A@i, weights, ncol(A), nrow(A))) 86 | } 87 | names(res) <- row.names(A) 88 | return(res) 89 | } 90 | 91 | rowVarsStd <- function(A, row_means, row_sds, vmax, weights=NULL) { 92 | if (is.null(weights)) { 93 | res <- as.numeric(rowVarSDs_dgc(A@x, A@p, A@i, row_means, row_sds, vmax, ncol(A), nrow(A), FALSE)) 94 | } 95 | # else { 96 | # res <- as.numeric(rowSDsWeighted_dgc(A@x, A@p, A@i, row_means, weights, ncol(A), nrow(A), TRUE)) 97 | # } 98 | names(res) <- row.names(A) 99 | return(res) 100 | } 101 | 102 | rowVars <- function(A, row_means=NULL, weights=NULL) { 103 | if (is.null(row_means)) { 104 | row_means <- rowMeans(A, weights) 105 | } 106 | if (is.null(weights)) { 107 | res <- as.numeric(rowSDs_dgc(A@x, A@p, A@i, row_means, ncol(A), nrow(A), FALSE)) 108 | } else { 109 | res <- as.numeric(rowSDsWeighted_dgc(A@x, A@p, A@i, row_means, weights, ncol(A), nrow(A), FALSE)) 110 | } 111 | names(res) <- row.names(A) 112 | return(res) 113 | } 114 | 115 | ## columns are observations 116 | soft_kmeans <- function(X, k, w, max_iter=20, sigma=0.1) { 117 | message('WARNING: soft_kmeans fxn uses cosine distance only') 118 | Z <- cosine_normalize_cpp(X, 2) 119 | if (missing(w)) 120 | Y <- stats::kmeans(t(Z), centers = k, iter.max = 25, nstart = 10)$centers %>% t() ## D x K 121 | res <- soft_kmeans_cpp(Y, Z, max_iter, sigma) 122 | return(res) 123 | } 124 | 125 | # Symphony utils --------------------------------- 126 | 127 | #' Predict annotations of query cells from the reference using k-NN method 128 | #' 129 | #' @param query_obj Symphony query object 130 | #' @param ref_obj Symphony reference object 131 | #' @param train_labels vector of labels to train 132 | #' @param k number of neighbors 133 | #' @param save_as string that result column will be named in query metadata 134 | #' @param confidence return k-NN confidence scores (proportion of neighbors voting for the predicted annotation) 135 | #' @param seed random seed (k-NN has some stochasticity in the case of ties) 136 | #' @return Symphony query object, with predicted reference labels stored in the 'save_as' slot of the query$meta_data 137 | #' @export 138 | knnPredict <- function(query_obj, ref_obj, 139 | train_labels, # cell labels for k-NN 
classification 140 | k = 5, # number of reference neighbors 141 | save_as = 'cell_type_pred_knn', # metadata column to save result 142 | confidence = TRUE, # return prediction confidence 143 | seed = 0) { # random seed 144 | set.seed(seed) 145 | if (confidence) { 146 | knn_pred = class::knn(t(ref_obj$Z_corr), t(query_obj$Z), train_labels, k = k, prob = TRUE) 147 | knn_prob = attributes(knn_pred)$prob 148 | query_obj$meta_data[save_as] = knn_pred 149 | query_obj$meta_data[paste0(save_as, '_prob')] = knn_prob 150 | } else { 151 | knn_pred = class::knn(t(ref_obj$Z_corr), t(query_obj$Z), train_labels, k = k, prob = FALSE) 152 | query_obj$meta_data[save_as] = knn_pred 153 | } 154 | return(query_obj) 155 | } 156 | 157 | ## Predict cell type using k-NN method with cos distance 158 | knnPredictCos <- function(query_obj, ref_obj, 159 | train_labels, # cell labels for knn classification 160 | k = 5) { 161 | Z_ref_cos = cosine_normalize_cpp(ref_obj$Z_corr, 2) 162 | Z_query_cos = cosine_normalize_cpp(query_obj$Z, 2) 163 | knn_pred = class::knn(t(Z_ref_cos), t(Z_query_cos), train_labels, k = k) 164 | query_obj$meta_data$cell_type_pred_knn_cos = knn_pred 165 | return(query_obj) 166 | } 167 | 168 | 169 | #' Function for evaluating F1 by cell type, 170 | #' adapted from automated cell type identifiaction benchmarking paper (Abdelaal et al. Genome Biology, 2019) 171 | #' @param true vector of true labels 172 | #' @param predicted vector of predicted labels 173 | #' @return A list of results with confusion matrix ($Conf), median F1-score ($MedF1), F1 scores per class ($F1), and accuracy ($Acc). 174 | #' @export 175 | evaluate <- function(true, predicted) { 176 | " 177 | Returns 178 | ------- 179 | Conf: confusion matrix 180 | MedF1 : median F1-score 181 | F1 : F1-score per class 182 | Acc : accuracy 183 | PercUnl : percentage of unlabeled cells 184 | PopSize : number of cells per cell type 185 | " 186 | 187 | true_lab <- unlist(true) 188 | pred_lab <- unlist(predicted) 189 | 190 | unique_true <- unlist(unique(true_lab)) 191 | unique_pred <- unlist(unique(pred_lab)) 192 | 193 | unique_all <- unique(c(unique_true,unique_pred)) 194 | conf <- table(true_lab,pred_lab) 195 | pop_size <- rowSums(conf) 196 | 197 | #pred_lab = gsub('Node..','Node',pred_lab) 198 | 199 | conf_F1 <- table(true_lab,pred_lab,exclude = c('unassigned','Unassigned','Unknown','rand','Node','ambiguous','unknown')) 200 | 201 | F1 <- vector() 202 | sum_acc <- 0 203 | 204 | for (i in c(1:length(row.names(conf_F1)))){ 205 | if(pop_size[row.names(conf_F1)[i]] == 0) { 206 | F1[i] = NA # F1 score is N/A 207 | next 208 | } 209 | 210 | findLabel = colnames(conf_F1) == row.names(conf_F1)[i] 211 | 212 | if(sum(findLabel)){ 213 | prec <- conf_F1[i,findLabel] / colSums(conf_F1)[findLabel] 214 | rec <- conf_F1[i,findLabel] / rowSums(conf_F1)[i] 215 | if (prec == 0 | rec == 0){ 216 | F1[i] = 0 217 | } else{ 218 | F1[i] <- (2*prec*rec) / (prec + rec) 219 | } 220 | sum_acc <- sum_acc + conf_F1[i,findLabel] 221 | } else { 222 | F1[i] = 0 223 | } 224 | #print(paste(row.names(conf_F1)[i], ' --> ', sum(findLabel), 'F1 =', F1[i])) 225 | } 226 | 227 | #pop_size <- pop_size[pop_size > 0] 228 | 229 | #names(F1) <- names(pop_size) 230 | names(F1) = row.names(conf_F1) 231 | 232 | med_F1 <- median(na.omit(F1)) 233 | 234 | total <- length(pred_lab) 235 | num_unlab <- sum(pred_lab == 'unassigned') + sum(pred_lab == 'Unassigned') + sum(pred_lab == 'rand') + sum(pred_lab == 'Unknown') + sum(pred_lab == 'unknown') + sum(pred_lab == 'Node') + sum(pred_lab == 'ambiguous') 236 
| per_unlab <- num_unlab / total 237 | 238 | acc <- sum_acc/sum(conf_F1) 239 | 240 | result <- list(Conf = conf, MedF1 = med_F1, F1 = F1, Acc = acc, PercUnl = per_unlab, PopSize = pop_size) 241 | 242 | return(result) 243 | } 244 | -------------------------------------------------------------------------------- /README.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | output: github_document 3 | --- 4 | 5 | 6 | 7 | ```{r, include = FALSE} 8 | knitr::opts_chunk$set( 9 | collapse = TRUE, 10 | comment = "#>", 11 | fig.path = "man/figures/README-", 12 | fig.align = 'right', 13 | out.width = "100%" 14 | ) 15 | ``` 16 | 17 | # Symphony logo 18 | 19 | 20 | 21 | 22 | Efficient and precise single-cell reference atlas mapping with Symphony 23 | 24 | [Kang et al. (Nat Comm, 2021)](https://www.nature.com/articles/s41467-021-25957-x) 25 | 26 | For Python users, check out the [symphonypy](https://github.com/potulabe/symphonypy) package by Kseniya Petrova and Sergey Isaev. 27 | 28 | 29 | # Installation 30 | 31 | Symphony is available on CRAN: 32 | ``` r 33 | install.packages("symphony") 34 | ``` 35 | 36 | Install the development version of Symphony from [GitHub](https://github.com/) use: 37 | 38 | ``` r 39 | # install.packages("devtools") 40 | devtools::install_github("immunogenomics/symphony") 41 | ``` 42 | Install should take <10 mins (pending no major issues). See installation notes below. 43 | 44 | 45 | # Usage/Demos 46 | ## Tutorials 47 | 48 | * Check out the [quick start (<5 min) PBMCs tutorial](https://github.com/immunogenomics/symphony/blob/main/vignettes/pbmcs_tutorial.ipynb/) for an example of how to build a custom reference and map to it. 49 | 50 | * Check out the [pre-built references tutorial](https://github.com/immunogenomics/symphony/blob/main/vignettes/prebuilt_references_tutorial.ipynb) for examples of how to map to provided Symphony references pre-built from the datasets featured in the manuscript. 51 | 52 | ## Downloading pre-built references: 53 | 54 | * You can download pre-built references from [Zenodo](https://zenodo.org/record/5090425). 55 | 56 | 57 | ## Reference building 58 | 59 | ### Option 1: Starting from existing Harmony object 60 | 61 | This function compresses an existing Harmony object into a Symphony reference that enables query mapping. We recommend this option for most users since it allows your code to be more modular and flexible. 62 | 63 | ```{r eval=FALSE} 64 | 65 | # Run Harmony to integrate the reference cells 66 | ref_harmObj = harmony::HarmonyMatrix( 67 | data_mat = t(Z_pca_ref), # starting embedding (e.g. PCA, CCA) of cells 68 | meta_data = ref_metadata, # dataframe with cell metadata 69 | theta = c(2), # cluster diversity enforcement 70 | vars_use = c('donor'), # variable to integrate out 71 | nclust = 100, # number of clusters in Harmony model 72 | max.iter.harmony = 10, # max iterations of Harmony 73 | return_object = TRUE, # set to TRUE to return the full Harmony object 74 | do_pca = FALSE # do not recompute PCs 75 | ) 76 | 77 | # Build Symphony reference 78 | reference = buildReferenceFromHarmonyObj( 79 | ref_harmObj, # output object from HarmonyMatrix() 80 | ref_metadata, # dataframe with cell metadata 81 | vargenes_means_sds, # gene names, means, and std devs for scaling 82 | loadings, # genes x PCs 83 | verbose = TRUE, # display output? 84 | do_umap = TRUE, # run UMAP and save UMAP model to file? 
85 | save_uwot_path = '/absolute/path/uwot_model_1' # filepath to save UMAP model) 86 | ``` 87 | Note that `vargenes_means_sds` requires column names `c('symbol', 'mean', 'stddev')` (see [tutorial example](https://github.com/immunogenomics/symphony/blob/main/vignettes/pbmcs_tutorial.ipynb/)). 88 | 89 | ### Option 2: Starting from reference genes by cells matrix 90 | 91 | This function performs all steps of the reference building pipeline including variable gene selection, scaling, PCA, Harmony, and Symphony compression. 92 | 93 | ```{r eval=FALSE} 94 | # Build reference 95 | reference = symphony::buildReference( 96 | ref_exp, # reference expression (genes by cells) 97 | ref_metadata, # reference metadata (cells x attributes) 98 | vars = c('donor'), # variable(s) to integrate over 99 | K = 100, # number of Harmony soft clusters 100 | verbose = TRUE, # display verbose output 101 | do_umap = TRUE, # run UMAP and save UMAP model to file 102 | do_normalize = FALSE, # perform log(CP10k) normalization on reference expression 103 | vargenes_method = 'vst', # variable gene selection method: 'vst' or 'mvp' 104 | vargenes_groups = 'donor', # metadata column specifying groups for variable gene selection within each group 105 | topn = 2000, # number of variable genes (per group) 106 | theta = 2, # Harmony parameter(s) for diversity term 107 | d = 20, # number of dimensions for PCA 108 | save_uwot_path = 'path/to/uwot_model_1', # file path to save uwot UMAP model 109 | additional_genes = NULL # vector of any additional genes to force include 110 | ) 111 | 112 | ``` 113 | 114 | ## Query mapping 115 | Once you have a prebuilt reference (e.g. loaded from a saved .rds R object), you can directly map cells from a new query dataset onto it starting from query gene expression. 116 | 117 | ```{r eval=FALSE} 118 | # Map query 119 | query = mapQuery(query_exp, # query gene expression (genes x cells) 120 | query_metadata, # query metadata (cells x attributes) 121 | reference, # Symphony reference object 122 | vars = NULL, # Query batch variables to harmonize over (NULL treats query as one batch) 123 | do_normalize = FALSE, # perform log(CP10k) normalization on query (set to FALSE if already normalized) 124 | do_umap = TRUE) # project query cells into reference UMAP 125 | ``` 126 | 127 | `query$Z` contains the harmonized query feature embedding. 128 | 129 | If your query itself has multiple sources of batch variation you would like to integrate over (e.g. technology, donors, species), you can specify them in the `vars` parameter: e.g. `vars = c('donor', 'technology')` 130 | 131 | # Installation notes 132 | ## System requirements: 133 | 134 | Symphony has been successfully installed on Linux and Mac OS X using the devtools package to install from GitHub. 135 | 136 | Dependencies: 137 | 138 | * R>=3.6.x 139 | * RANN 140 | * data.table 141 | * irlba 142 | * stats 143 | * tibble 144 | * utils 145 | * uwot 146 | * Matrix 147 | * Rcpp 148 | * magrittr 149 | * methods 150 | * rlang 151 | * ggplot2 152 | * RColorBrewer 153 | * ggrastr 154 | * ggrepel 155 | 156 | 157 | ## Troubleshooting: 158 | 159 | * You may need to install the latest version of devtools (because of the recent GitHub change from "master" to "main" terminology, which can cause `install_github` to fail). 
160 | * You may also need to install the lastest version of Harmony: 161 | 162 | ``` r 163 | devtools::install_github("immunogenomics/harmony") 164 | ``` 165 | 166 | We have been notified of the following installation errors regarding `systemfonts`, `textshaping`, and `ragg` (which are all required by `ggrastr`): 167 | ``` 168 | # error when installing systemfonts 169 | ft_cache.h:9:10: fatal error: ft2build.h: No such file or directory 170 | 171 | # error when installing textshaping 172 | Configuration failed to find the harfbuzz freetype2 fribidi library 173 | 174 | # error when installing ragg 175 | :1:10: fatal error: ft2build.h: No such file or directory 176 | ``` 177 | 178 | These errors are not inherent to the Symphony package and we cannot fix them directly. However, as a workaround, you can install `systemfonts`, `textshaping`, and `ragg` separately using `install.packages()` and specify the path to the required files (replacing `/path/to` below with the path to the appropriate `include` directory containing the files). 179 | 180 | ``` 181 | # fix to install systemfonts 182 | withr::with_makevars(c(CPPFLAGS="-I/path/to/include/freetype2/"), install.packages("systemfonts")) 183 | 184 | # fix to install textshaping 185 | withr::with_makevars(c(CPPFLAGS="-I/path/to/include/harfbuzz/ -I/path/to/include/fribidi/ -I/path/to/include/freetype2/"), install.packages("textshaping")) 186 | 187 | # fix to install ragg 188 | withr::with_makevars(c(CPPFLAGS="-I/path/to/include/freetype2/"), install.packages("ragg")) 189 | 190 | ``` 191 | 192 | # Reproducing results from manuscript 193 | Code to reproduce Symphony results from the Kang et al. manuscript is available on [github.com/immunogenomics/symphony_reproducibility](https://github.com/immunogenomics/symphony_reproducibility). 194 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # Symphony logo 5 | 6 | 7 | 8 | 9 | Efficient and precise single-cell reference atlas mapping with Symphony 10 | 11 | [Kang et al. (Nat Comm, 12 | 2021)](https://www.nature.com/articles/s41467-021-25957-x) 13 | 14 | For Python users, check out the 15 | [symphonypy](https://github.com/potulabe/symphonypy) package by Kseniya 16 | Petrova and Sergey Isaev. 17 | 18 | # Installation 19 | 20 | Symphony is available on CRAN: 21 | 22 | ``` r 23 | install.packages("symphony") 24 | ``` 25 | 26 | Install the development version of Symphony from 27 | [GitHub](https://github.com/) use: 28 | 29 | ``` r 30 | # install.packages("devtools") 31 | devtools::install_github("immunogenomics/symphony") 32 | ``` 33 | 34 | Install should take \<10 mins (pending no major issues). See 35 | installation notes below. 36 | 37 | # Usage/Demos 38 | 39 | ## Tutorials 40 | 41 | - Check out the [quick start (\<5 min) PBMCs 42 | tutorial](https://github.com/immunogenomics/symphony/blob/main/vignettes/pbmcs_tutorial.ipynb/) 43 | for an example of how to build a custom reference and map to it. 44 | 45 | - Check out the [pre-built references 46 | tutorial](https://github.com/immunogenomics/symphony/blob/main/vignettes/prebuilt_references_tutorial.ipynb) 47 | for examples of how to map to provided Symphony references pre-built 48 | from the datasets featured in the manuscript. 49 | 50 | ## Downloading pre-built references: 51 | 52 | - You can download pre-built references from 53 | [Zenodo](https://zenodo.org/record/5090425). 
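
Once downloaded, a pre-built reference can be loaded with `readRDS()`. The
snippet below is an illustrative sketch (file names are placeholders); if you
plan to project query cells into the reference UMAP, also point the reference
at the downloaded uwot model file before calling `mapQuery()`:

``` r
# Load a downloaded Symphony reference (placeholder file name)
reference = readRDS('pbmcs_10x_reference.rds')

# Update the uwot model path to its location on your machine so that
# mapQuery(..., do_umap = TRUE) can project query cells into the reference UMAP
reference$save_uwot_path = '/absolute/path/to/pbmcs_10x_uwot_model'
```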
54 | 55 | ## Reference building 56 | 57 | ### Option 1: Starting from existing Harmony object 58 | 59 | This function compresses an existing Harmony object into a Symphony 60 | reference that enables query mapping. We recommend this option for most 61 | users since it allows your code to be more modular and flexible. 62 | 63 | ``` r 64 | 65 | # Run Harmony to integrate the reference cells 66 | ref_harmObj = harmony::HarmonyMatrix( 67 | data_mat = t(Z_pca_ref), # starting embedding (e.g. PCA, CCA) of cells 68 | meta_data = ref_metadata, # dataframe with cell metadata 69 | theta = c(2), # cluster diversity enforcement 70 | vars_use = c('donor'), # variable to integrate out 71 | nclust = 100, # number of clusters in Harmony model 72 | max.iter.harmony = 10, # max iterations of Harmony 73 | return_object = TRUE, # set to TRUE to return the full Harmony object 74 | do_pca = FALSE # do not recompute PCs 75 | ) 76 | 77 | # Build Symphony reference 78 | reference = buildReferenceFromHarmonyObj( 79 | ref_harmObj, # output object from HarmonyMatrix() 80 | ref_metadata, # dataframe with cell metadata 81 | vargenes_means_sds, # gene names, means, and std devs for scaling 82 | loadings, # genes x PCs 83 | verbose = TRUE, # display output? 84 | do_umap = TRUE, # run UMAP and save UMAP model to file? 85 | save_uwot_path = '/absolute/path/uwot_model_1' # filepath to save UMAP model) 86 | ``` 87 | 88 | Note that `vargenes_means_sds` requires column names 89 | `c('symbol', 'mean', 'stddev')` (see [tutorial 90 | example](https://github.com/immunogenomics/symphony/blob/main/vignettes/pbmcs_tutorial.ipynb/)). 91 | 92 | ### Option 2: Starting from reference genes by cells matrix 93 | 94 | This function performs all steps of the reference building pipeline 95 | including variable gene selection, scaling, PCA, Harmony, and Symphony 96 | compression. 97 | 98 | ``` r 99 | # Build reference 100 | reference = symphony::buildReference( 101 | ref_exp, # reference expression (genes by cells) 102 | ref_metadata, # reference metadata (cells x attributes) 103 | vars = c('donor'), # variable(s) to integrate over 104 | K = 100, # number of Harmony soft clusters 105 | verbose = TRUE, # display verbose output 106 | do_umap = TRUE, # run UMAP and save UMAP model to file 107 | do_normalize = FALSE, # perform log(CP10k) normalization on reference expression 108 | vargenes_method = 'vst', # variable gene selection method: 'vst' or 'mvp' 109 | vargenes_groups = 'donor', # metadata column specifying groups for variable gene selection within each group 110 | topn = 2000, # number of variable genes (per group) 111 | theta = 2, # Harmony parameter(s) for diversity term 112 | d = 20, # number of dimensions for PCA 113 | save_uwot_path = 'path/to/uwot_model_1', # file path to save uwot UMAP model 114 | additional_genes = NULL # vector of any additional genes to force include 115 | ) 116 | ``` 117 | 118 | ## Query mapping 119 | 120 | Once you have a prebuilt reference (e.g. loaded from a saved .rds R 121 | object), you can directly map cells from a new query dataset onto it 122 | starting from query gene expression. 
123 | 124 | ``` r 125 | # Map query 126 | query = mapQuery(query_exp, # query gene expression (genes x cells) 127 | query_metadata, # query metadata (cells x attributes) 128 | reference, # Symphony reference object 129 | vars = NULL, # Query batch variables to harmonize over (NULL treats query as one batch) 130 | do_normalize = FALSE, # perform log(CP10k) normalization on query (set to FALSE if already normalized) 131 | do_umap = TRUE) # project query cells into reference UMAP 132 | ``` 133 | 134 | `query$Z` contains the harmonized query feature embedding. 135 | 136 | If your query itself has multiple sources of batch variation you would 137 | like to integrate over (e.g. technology, donors, species), you can 138 | specify them in the `vars` parameter: 139 | e.g. `vars = c('donor', 'technology')` 140 | 141 | # Installation notes 142 | 143 | ## System requirements: 144 | 145 | Symphony has been successfully installed on Linux and Mac OS X using the 146 | devtools package to install from GitHub. 147 | 148 | Dependencies: 149 | 150 | - R\>=3.6.x 151 | - RANN 152 | - data.table 153 | - irlba 154 | - stats 155 | - tibble 156 | - utils 157 | - uwot 158 | - Matrix 159 | - Rcpp 160 | - magrittr 161 | - methods 162 | - rlang 163 | - ggplot2 164 | - RColorBrewer 165 | - ggrastr 166 | - ggrepel 167 | 168 | ## Troubleshooting: 169 | 170 | - You may need to install the latest version of devtools (because of the 171 | recent GitHub change from “master” to “main” terminology, which can 172 | cause `install_github` to fail). 173 | - You may also need to install the lastest version of Harmony: 174 | 175 | ``` r 176 | devtools::install_github("immunogenomics/harmony") 177 | ``` 178 | 179 | We have been notified of the following installation errors regarding 180 | `systemfonts`, `textshaping`, and `ragg` (which are all required by 181 | `ggrastr`): 182 | 183 | # error when installing systemfonts 184 | ft_cache.h:9:10: fatal error: ft2build.h: No such file or directory 185 | 186 | # error when installing textshaping 187 | Configuration failed to find the harfbuzz freetype2 fribidi library 188 | 189 | # error when installing ragg 190 | :1:10: fatal error: ft2build.h: No such file or directory 191 | 192 | These errors are not inherent to the Symphony package and we cannot fix 193 | them directly. However, as a workaround, you can install `systemfonts`, 194 | `textshaping`, and `ragg` separately using `install.packages()` and 195 | specify the path to the required files (replacing `/path/to` below with 196 | the path to the appropriate `include` directory containing the files). 197 | 198 | # fix to install systemfonts 199 | withr::with_makevars(c(CPPFLAGS="-I/path/to/include/freetype2/"), install.packages("systemfonts")) 200 | 201 | # fix to install textshaping 202 | withr::with_makevars(c(CPPFLAGS="-I/path/to/include/harfbuzz/ -I/path/to/include/fribidi/ -I/path/to/include/freetype2/"), install.packages("textshaping")) 203 | 204 | # fix to install ragg 205 | withr::with_makevars(c(CPPFLAGS="-I/path/to/include/freetype2/"), install.packages("ragg")) 206 | 207 | # Reproducing results from manuscript 208 | 209 | Code to reproduce Symphony results from the Kang et al. manuscript is 210 | available on 211 | [github.com/immunogenomics/symphony_reproducibility](https://github.com/immunogenomics/symphony_reproducibility). 
212 | -------------------------------------------------------------------------------- /cran-comments.md: -------------------------------------------------------------------------------- 1 | ## Test environments 2 | Linux, R 4.0.5 3 | local OS X install, R 3.6.3 4 | win-builder (release) 5 | 6 | ## R CMD check results 7 | There were no ERRORs, WARNINGs or NOTEs. This is the first submission. 8 | 9 | ## Downstream dependencies 10 | There are currently no downstream dependencies for this package. -------------------------------------------------------------------------------- /data/pbmcs_exprs_small.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/immunogenomics/symphony/bf7aaf9a2919f660d028f7ec3a775f040238a829/data/pbmcs_exprs_small.rda -------------------------------------------------------------------------------- /data/pbmcs_meta_small.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/immunogenomics/symphony/bf7aaf9a2919f660d028f7ec3a775f040238a829/data/pbmcs_meta_small.rda -------------------------------------------------------------------------------- /man/buildReference.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/buildReference.R 3 | \name{buildReference} 4 | \alias{buildReference} 5 | \title{Function for building a Symphony reference starting from expression matrix} 6 | \usage{ 7 | buildReference( 8 | exp_ref, 9 | metadata_ref, 10 | vars = NULL, 11 | K = 100, 12 | verbose = FALSE, 13 | do_umap = TRUE, 14 | do_normalize = TRUE, 15 | vargenes_method = "vst", 16 | vargenes_groups = NULL, 17 | topn = 2000, 18 | tau = 0, 19 | theta = 2, 20 | save_uwot_path = NULL, 21 | d = 20, 22 | additional_genes = NULL, 23 | umap_min_dist = 0.1, 24 | seed = 111 25 | ) 26 | } 27 | \arguments{ 28 | \item{exp_ref}{Reference gene expression (genes by cells)} 29 | 30 | \item{metadata_ref}{Reference cell metadata (cells by attributes)} 31 | 32 | \item{vars}{Reference variables to Harmonize over e.g. c('donor', 'technology')} 33 | 34 | \item{K}{Number of soft cluster centroids in model} 35 | 36 | \item{verbose}{Verbose output} 37 | 38 | \item{do_umap}{Perform UMAP visualization on harmonized reference embedding} 39 | 40 | \item{do_normalize}{Perform log(CP10K+1) normalization} 41 | 42 | \item{vargenes_method}{Variable gene selection method (either 'vst' or 'mvp')} 43 | 44 | \item{vargenes_groups}{Name of metadata column specifying groups for variable gene selection. If not NULL, calculate topn variable genes in each group separately, then pool} 45 | 46 | \item{topn}{Number of variable genes to subset by} 47 | 48 | \item{tau}{Tau parameter for Harmony step} 49 | 50 | \item{theta}{Theta parameter(s) for Harmony step} 51 | 52 | \item{save_uwot_path}{Absolute path to save the uwot model (used if do_umap is TRUE)} 53 | 54 | \item{d}{Number of PC dimensions} 55 | 56 | \item{additional_genes}{Any custom genes (e.g. marker genes) to include in addition to variable genes} 57 | 58 | \item{umap_min_dist}{umap parameter (see uwot documentation for details)} 59 | 60 | \item{seed}{Random seed} 61 | } 62 | \value{ 63 | Symphony reference object. Integrated embedding is stored in the $Z_corr slot. 
Other slots include 64 | cell-level metadata ($meta_data), variable genes means and standard deviations ($vargenes), 65 | loadings from PCA ($loadings), original PCA embedding ($Z_orig), reference compression terms ($cache), 66 | betas from Harmony integration ($betas), cosine normalized soft cluster centroids ($centroids), 67 | centroids in PC space ($centroids_pc), and optional umap coordinates ($umap$embedding). 68 | } 69 | \description{ 70 | Function for building a Symphony reference starting from expression matrix 71 | } 72 | -------------------------------------------------------------------------------- /man/buildReferenceFromHarmonyObj.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/buildReferenceFromHarmonyObj.R 3 | \name{buildReferenceFromHarmonyObj} 4 | \alias{buildReferenceFromHarmonyObj} 5 | \title{Function for building a Symphony reference from a Harmony object. Useful if you would like your 6 | code to be more modular. Note that you must have saved vargenes_means_sds and PCA loadings.} 7 | \usage{ 8 | buildReferenceFromHarmonyObj( 9 | harmony_obj, 10 | metadata, 11 | vargenes_means_sds, 12 | pca_loadings, 13 | verbose = TRUE, 14 | do_umap = TRUE, 15 | save_uwot_path = NULL, 16 | umap_min_dist = 0.1, 17 | seed = 111 18 | ) 19 | } 20 | \arguments{ 21 | \item{harmony_obj}{Harmony object (output from HarmonyMatrix())} 22 | 23 | \item{metadata}{Reference cell metadata (cells by attributes)} 24 | 25 | \item{vargenes_means_sds}{Variable genes in dataframe with columns named ('symbol', 'mean', 'stddev')} 26 | 27 | \item{pca_loadings}{Gene loadings from PCA (e.g. irlba(ref_exp_scaled, nv = 20)$u)} 28 | 29 | \item{verbose}{Verbose output} 30 | 31 | \item{do_umap}{Perform UMAP visualization on harmonized reference embedding} 32 | 33 | \item{save_uwot_path}{Absolute path to save the uwot model (if do_umap is TRUE)} 34 | 35 | \item{umap_min_dist}{UMAP parameter (see uwot documentation for details)} 36 | 37 | \item{seed}{Random seed} 38 | } 39 | \value{ 40 | Symphony reference object. Integrated embedding is stored in the $Z_corr slot. Other slots include 41 | cell-level metadata ($meta_data), variable genes means and standard deviations ($vargenes), 42 | loadings from PCA or other dimensional reduction such as CCA ($loadings), original PCA embedding ($Z_orig), 43 | reference compression terms ($cache), betas from Harmony integration ($betas), cosine-normalized soft cluster centroids ($centroids), 44 | centroids in PC space ($centroids_pc), and optional umap coordinates ($umap$embedding). 45 | } 46 | \description{ 47 | Function for building a Symphony reference from a Harmony object. Useful if you would like your 48 | code to be more modular. Note that you must have saved vargenes_means_sds and PCA loadings. 49 | } 50 | -------------------------------------------------------------------------------- /man/calcPerCellMappingMetric.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/confidenceScores.R 3 | \name{calcPerCellMappingMetric} 4 | \alias{calcPerCellMappingMetric} 5 | \title{Per-cell Confidence Score: 6 | Calculates the weighted Mahalanobis distance for the query cells to reference clusters. Returns a vector 7 | of distance scores, one per query cell. 
Higher distance metric indicates less confidence.} 8 | \usage{ 9 | calcPerCellMappingMetric( 10 | reference, 11 | query, 12 | Z_orig = TRUE, 13 | metric = "mahalanobis" 14 | ) 15 | } 16 | \arguments{ 17 | \item{reference}{Reference object as returned by Symphony buildReference()} 18 | 19 | \item{query}{Query object as returned by Symphony mapQuery()} 20 | 21 | \item{Z_orig}{Define reference distribution using original PCA embedding or harmonized PC embedding} 22 | 23 | \item{metric}{Uses Mahalanobis by default, but added as a parameter for potential future use} 24 | } 25 | \value{ 26 | A vector of per-cell mapping metric scores for each cell. 27 | } 28 | \description{ 29 | Per-cell Confidence Score: 30 | Calculates the weighted Mahalanobis distance for the query cells to reference clusters. Returns a vector 31 | of distance scores, one per query cell. Higher distance metric indicates less confidence. 32 | } 33 | -------------------------------------------------------------------------------- /man/calcPerClusterMappingMetric.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/confidenceScores.R 3 | \name{calcPerClusterMappingMetric} 4 | \alias{calcPerClusterMappingMetric} 5 | \title{Per-cluster Confidence Score: 6 | Calculates the Mahalanobis distance from user-defined query clusters to their nearest 7 | reference centroid after initial projection into reference PCA space. 8 | All query cells in a cluster get the same score. Higher distance indicates less confidence. 9 | Due to the instability of estimating covariance with small numbers of cells, we do not assign a 10 | score to clusters smaller than u * d, where d is the dimensionality of the embedding and u is specified.} 11 | \usage{ 12 | calcPerClusterMappingMetric( 13 | reference, 14 | query, 15 | query_cluster_labels, 16 | metric = "mahalanobis", 17 | u = 2, 18 | lambda = 0 19 | ) 20 | } 21 | \arguments{ 22 | \item{reference}{Reference object as returned by Symphony buildReference()} 23 | 24 | \item{query}{Query object as returned by Symphony mapQuery()} 25 | 26 | \item{query_cluster_labels}{Vector of user-defined labels denoting clusters / putative novel cell type to calculate the score for} 27 | 28 | \item{metric}{Uses Mahalanobis by default, but added as a parameter for potential future use} 29 | 30 | \item{u}{Do not assign scores to clusters smaller than u * d (see above description)} 31 | 32 | \item{lambda}{Optional ridge parameter added to covariance diagonal to help stabilize numeric estimates} 33 | } 34 | \value{ 35 | A data.frame of per-cluster mapping metric scores for each user-specified query cluster. 36 | } 37 | \description{ 38 | Per-cluster Confidence Score: 39 | Calculates the Mahalanobis distance from user-defined query clusters to their nearest 40 | reference centroid after initial projection into reference PCA space. 41 | All query cells in a cluster get the same score. Higher distance indicates less confidence. 42 | Due to the instability of estimating covariance with small numbers of cells, we do not assign a 43 | score to clusters smaller than u * d, where d is the dimensionality of the embedding and u is specified. 
44 | } 45 | -------------------------------------------------------------------------------- /man/calcknncorr.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/knncorr.R 3 | \name{calcknncorr} 4 | \alias{calcknncorr} 5 | \title{Calculates the k-NN correlation, which measures how well the sorted ordering of k nearest reference 6 | neighbors in a gold standard embedding correlates with the ordering for the same reference cells in 7 | an alternative embedding (i.e. from reference mapping). 8 | NOTE: it is very important that the order of reference cells (cols) in gold_ref matches that of alt_ref 9 | (same for matching columns of gold_query and alt_query).} 10 | \usage{ 11 | calcknncorr(gold_ref, alt_ref, gold_query, alt_query, k = 500) 12 | } 13 | \arguments{ 14 | \item{gold_ref}{Reference cells in gold standard embedding (PCs by cells)} 15 | 16 | \item{alt_ref}{Reference cells in alternative embedding (PCs by cells)} 17 | 18 | \item{gold_query}{Query cells in gold standard embedding (PCs by cells)} 19 | 20 | \item{alt_query}{Query cells in alternative embedding (PCs by cells)} 21 | 22 | \item{k}{Number of reference neighbors to use for kNN-correlation calculation} 23 | } 24 | \value{ 25 | Vector of k-NN correlations for query cells 26 | } 27 | \description{ 28 | Calculates the k-NN correlation, which measures how well the sorted ordering of k nearest reference 29 | neighbors in a gold standard embedding correlates with the ordering for the same reference cells in 30 | an alternative embedding (i.e. from reference mapping). 31 | NOTE: it is very important that the order of reference cells (cols) in gold_ref matches that of alt_ref 32 | (same for matching columns of gold_query and alt_query). 33 | } 34 | -------------------------------------------------------------------------------- /man/calcknncorrWithinQuery.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/knncorr.R 3 | \name{calcknncorrWithinQuery} 4 | \alias{calcknncorrWithinQuery} 5 | \title{Calculates the k-NN correlation within the query cells only, which measures how well the sorted 6 | ordering of k nearest query neighbors in a query de novo PCA embedding correlates with the ordering 7 | for the cells in the reference mapping embedding.} 8 | \usage{ 9 | calcknncorrWithinQuery( 10 | query, 11 | var = NULL, 12 | k = 100, 13 | topn = 2000, 14 | d = 20, 15 | distance = "euclidean" 16 | ) 17 | } 18 | \arguments{ 19 | \item{query}{Query object (returned from mapQuery)} 20 | 21 | \item{var}{Query metadata batch variable (PCA is calculated within each batch separately); if NULL, do not split by batch} 22 | 23 | \item{k}{Number of neighbors to use for kNN-correlation calculation} 24 | 25 | \item{topn}{number of variable genes to calculate within each query batch for query PCA} 26 | 27 | \item{d}{number of dimensions for query PCA within each query batch} 28 | 29 | \item{distance}{either 'euclidean' or 'cosine'} 30 | } 31 | \value{ 32 | Vector of within-query k-NN correlations for query cells 33 | } 34 | \description{ 35 | Calculates the k-NN correlation within the query cells only, which measures how well the sorted 36 | ordering of k nearest query neighbors in a query de novo PCA embedding correlates with the ordering 37 | for the cells in the reference mapping embedding.
38 | } 39 | -------------------------------------------------------------------------------- /man/evaluate.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/utils.R 3 | \name{evaluate} 4 | \alias{evaluate} 5 | \title{Function for evaluating F1 by cell type, 6 | adapted from the automated cell type identification benchmarking paper (Abdelaal et al. Genome Biology, 2019)} 7 | \usage{ 8 | evaluate(true, predicted) 9 | } 10 | \arguments{ 11 | \item{true}{vector of true labels} 12 | 13 | \item{predicted}{vector of predicted labels} 14 | } 15 | \value{ 16 | A list of results with confusion matrix ($Conf), median F1-score ($MedF1), F1 scores per class ($F1), and accuracy ($Acc). 17 | } 18 | \description{ 19 | Function for evaluating F1 by cell type, 20 | adapted from the automated cell type identification benchmarking paper (Abdelaal et al. Genome Biology, 2019) 21 | } 22 | -------------------------------------------------------------------------------- /man/figures/._symphony_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/immunogenomics/symphony/bf7aaf9a2919f660d028f7ec3a775f040238a829/man/figures/._symphony_logo.png -------------------------------------------------------------------------------- /man/figures/README-pressure-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/immunogenomics/symphony/bf7aaf9a2919f660d028f7ec3a775f040238a829/man/figures/README-pressure-1.png -------------------------------------------------------------------------------- /man/figures/symphony_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/immunogenomics/symphony/bf7aaf9a2919f660d028f7ec3a775f040238a829/man/figures/symphony_logo.png -------------------------------------------------------------------------------- /man/findVariableGenes.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/findVariableGenes.R 3 | \name{findVariableGenes} 4 | \alias{findVariableGenes} 5 | \title{Function to find variable genes using mean variance relationship method} 6 | \usage{ 7 | findVariableGenes( 8 | X, 9 | groups, 10 | min_expr = 0.1, 11 | max_expr = Inf, 12 | min_dispersion = 0, 13 | max_dispersion = Inf, 14 | num.bin = 20, 15 | binning.method = "equal_width", 16 | return_top_n = 0 17 | ) 18 | } 19 | \arguments{ 20 | \item{X}{expression matrix} 21 | 22 | \item{groups}{vector of groups} 23 | 24 | \item{min_expr}{min expression cutoff} 25 | 26 | \item{max_expr}{max expression cutoff} 27 | 28 | \item{min_dispersion}{min dispersion cutoff} 29 | 30 | \item{max_dispersion}{max dispersion cutoff} 31 | 32 | \item{num.bin}{number of bins to use for scaled analysis} 33 | 34 | \item{binning.method}{how bins are computed} 35 | 36 | \item{return_top_n}{returns top n genes} 37 | } 38 | \value{ 39 | A data.frame of variable genes 40 | } 41 | \description{ 42 | Function to find variable genes using mean variance relationship method 43 | } 44 | -------------------------------------------------------------------------------- /man/knnPredict.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit
documentation in R/utils.R 3 | \name{knnPredict} 4 | \alias{knnPredict} 5 | \title{Predict annotations of query cells from the reference using k-NN method} 6 | \usage{ 7 | knnPredict( 8 | query_obj, 9 | ref_obj, 10 | train_labels, 11 | k = 5, 12 | save_as = "cell_type_pred_knn", 13 | confidence = TRUE, 14 | seed = 0 15 | ) 16 | } 17 | \arguments{ 18 | \item{query_obj}{Symphony query object} 19 | 20 | \item{ref_obj}{Symphony reference object} 21 | 22 | \item{train_labels}{vector of labels to train} 23 | 24 | \item{k}{number of neighbors} 25 | 26 | \item{save_as}{string that result column will be named in query metadata} 27 | 28 | \item{confidence}{return k-NN confidence scores (proportion of neighbors voting for the predicted annotation)} 29 | 30 | \item{seed}{random seed (k-NN has some stochasticity in the case of ties)} 31 | } 32 | \value{ 33 | Symphony query object, with predicted reference labels stored in the 'save_as' slot of the query$meta_data 34 | } 35 | \description{ 36 | Predict annotations of query cells from the reference using k-NN method 37 | } 38 | -------------------------------------------------------------------------------- /man/mapQuery.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mapQuery.R 3 | \name{mapQuery} 4 | \alias{mapQuery} 5 | \title{Function for mapping query cells to a Symphony reference} 6 | \usage{ 7 | mapQuery( 8 | exp_query, 9 | metadata_query, 10 | ref_obj, 11 | vars = NULL, 12 | verbose = TRUE, 13 | do_normalize = TRUE, 14 | do_umap = TRUE, 15 | sigma = 0.1 16 | ) 17 | } 18 | \arguments{ 19 | \item{exp_query}{Query gene expression (genes by cells)} 20 | 21 | \item{metadata_query}{Query metadata (cells by attributes)} 22 | 23 | \item{ref_obj}{Reference object as returned by Symphony buildReference()} 24 | 25 | \item{vars}{Query batch variable(s) to integrate over (column names in metadata)} 26 | 27 | \item{verbose}{Verbose output} 28 | 29 | \item{do_normalize}{Perform log(CP10K+1) normalization on query expression} 30 | 31 | \item{do_umap}{Perform umap projection into reference UMAP (if reference includes a uwot model)} 32 | 33 | \item{sigma}{Fuzziness parameter for soft clustering (sigma = 1 is hard clustering)} 34 | } 35 | \value{ 36 | Symphony query object. Mapping embedding is in the $Z slot. Other slots include 37 | query expression matrix ($exp), query cell-level metadata ($meta_data), 38 | query cell embedding in pre-Harmonized reference PCs ($Zq_pca), query cell soft cluster 39 | assignments ($R), and query cells in reference UMAP coordinates ($umap). 40 | } 41 | \description{ 42 | Function for mapping query cells to a Symphony reference 43 | } 44 | -------------------------------------------------------------------------------- /man/pbmcs_exprs_small.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{pbmcs_exprs_small} 5 | \alias{pbmcs_exprs_small} 6 | \title{Log(CP10k+1) normalized counts matrix (genes by cells) for 10x PBMCs dataset for vignette.} 7 | \format{ 8 | : Sparse matrix (dgCMatrix): dimensions 1,764 genes by 1,200 cells 9 | } 10 | \usage{ 11 | pbmcs_exprs_small 12 | } 13 | \description{ 14 | Log(CP10k+1) normalized counts matrix (genes by cells) for 10x PBMCs dataset for vignette. 
15 | } 16 | \keyword{datasets} 17 | -------------------------------------------------------------------------------- /man/pbmcs_meta_small.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{pbmcs_meta_small} 5 | \alias{pbmcs_meta_small} 6 | \title{Metadata for 10x PBMCs dataset for vignette.} 7 | \format{ 8 | : A data frame with 1,200 cells and 7 metadata fields. 9 | \describe{ 10 | \item{cell_id}{unique cell ID} 11 | \item{donor}{dataset (3pv1, 3pv2, or 5p)} 12 | \item{nUMI}{number of UMIs} 13 | \item{nGene}{number of genes} 14 | \item{percent_mito}{percent mito genes} 15 | \item{cell_type}{cell type assigned in Symphony publication} 16 | \item{cell_type_broad}{cell subtype assigned in Symphony publication} 17 | 18 | } 19 | } 20 | \usage{ 21 | pbmcs_meta_small 22 | } 23 | \description{ 24 | Metadata for 10x PBMCs dataset for vignette. 25 | } 26 | \keyword{datasets} 27 | -------------------------------------------------------------------------------- /man/plotReference.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/plotReference.R 3 | \name{plotReference} 4 | \alias{plotReference} 5 | \title{Function to plot reference, colored by cell type} 6 | \usage{ 7 | plotReference( 8 | reference, 9 | as.density = TRUE, 10 | bins = 10, 11 | bandwidth = 1.5, 12 | title = "Reference", 13 | color.by = "cell_type", 14 | celltype.colors = NULL, 15 | show.legend = TRUE, 16 | show.labels = TRUE, 17 | show.centroids = FALSE 18 | ) 19 | } 20 | \arguments{ 21 | \item{reference}{Symphony reference object (must have UMAP stored)} 22 | 23 | \item{as.density}{if TRUE, plot as density; if FALSE, plot as individual cells} 24 | 25 | \item{bins}{for density, nbins parameter for stat_density_2d} 26 | 27 | \item{bandwidth}{for density, bandwidth parameter for stat_density_2d} 28 | 29 | \item{title}{Plot title} 30 | 31 | \item{color.by}{metadata column name for phenotype labels} 32 | 33 | \item{celltype.colors}{custom color mapping} 34 | 35 | \item{show.legend}{Show cell type legend} 36 | 37 | \item{show.labels}{Show cell type labels} 38 | 39 | \item{show.centroids}{Plot soft cluster centroid locations} 40 | } 41 | \value{ 42 | A ggplot object. 
43 | } 44 | \description{ 45 | Function to plot reference, colored by cell type 46 | } 47 | -------------------------------------------------------------------------------- /man/rowSDs.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/utils.R 3 | \name{rowSDs} 4 | \alias{rowSDs} 5 | \title{Calculate standard deviations by row} 6 | \usage{ 7 | rowSDs(A, row_means = NULL, weights = NULL) 8 | } 9 | \arguments{ 10 | \item{A}{expression matrix (genes by cells)} 11 | 12 | \item{row_means}{row means} 13 | 14 | \item{weights}{weights for weighted standard dev calculation} 15 | } 16 | \value{ 17 | A vector of row standard deviations 18 | } 19 | \description{ 20 | Calculate standard deviations by row 21 | } 22 | -------------------------------------------------------------------------------- /man/runPCAQueryAlone.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/knncorr.R 3 | \name{runPCAQueryAlone} 4 | \alias{runPCAQueryAlone} 5 | \title{Runs a standard PCA pipeline on query (1 batch). Assumes query_exp is already normalized.} 6 | \usage{ 7 | runPCAQueryAlone(query_exp, topn = 2000, d = 20, seed = 1) 8 | } 9 | \arguments{ 10 | \item{query_exp}{Query expression matrix (genes x cells)} 11 | 12 | \item{topn}{Number of variable genes to use} 13 | 14 | \item{d}{Number of dimensions} 15 | 16 | \item{seed}{random seed} 17 | } 18 | \value{ 19 | A matrix of PCs by cells 20 | } 21 | \description{ 22 | Runs a standard PCA pipeline on query (1 batch). Assumes query_exp is already normalized. 23 | } 24 | -------------------------------------------------------------------------------- /man/scaleDataWithStats.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/utils.R 3 | \name{scaleDataWithStats} 4 | \alias{scaleDataWithStats} 5 | \title{Scale data with given mean and standard deviations} 6 | \usage{ 7 | scaleDataWithStats(A, mean_vec, sd_vec, margin = 1, thresh = 10) 8 | } 9 | \arguments{ 10 | \item{A}{expression matrix (genes by cells)} 11 | 12 | \item{mean_vec}{vector of mean values} 13 | 14 | \item{sd_vec}{vector of standard deviation values} 15 | 16 | \item{margin}{1 for row-wise calculation} 17 | 18 | \item{thresh}{threshold to clip max values} 19 | } 20 | \value{ 21 | A matrix of scaled expression values. 22 | } 23 | \description{ 24 | Scale data with given mean and standard deviations 25 | } 26 | -------------------------------------------------------------------------------- /man/symphony.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/symphony-package.R 3 | \docType{package} 4 | \name{symphony} 5 | \alias{symphony} 6 | \title{symphony} 7 | \description{ 8 | Efficient single-cell reference atlas mapping (Kang et al.) 
9 | } 10 | -------------------------------------------------------------------------------- /man/vargenes_vst.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/findVariableGenes.R 3 | \name{vargenes_vst} 4 | \alias{vargenes_vst} 5 | \title{Function to find variable genes using variance stabilizing transform (vst) method} 6 | \usage{ 7 | vargenes_vst(object, groups, topn, loess.span = 0.3) 8 | } 9 | \arguments{ 10 | \item{object}{expression matrix} 11 | 12 | \item{groups}{finds variable genes within each group, then pools} 13 | 14 | \item{topn}{Return top n genes} 15 | 16 | \item{loess.span}{Loess span parameter used when fitting the variance-mean relationship} 17 | } 18 | \value{ 19 | A data.frame of variable genes, with means and standard deviations. 20 | } 21 | \description{ 22 | Function to find variable genes using variance stabilizing transform (vst) method 23 | } 24 | -------------------------------------------------------------------------------- /pre-built_references/README.md: -------------------------------------------------------------------------------- 1 | Pre-built Symphony reference objects can be used to map new query datasets. 2 | 3 | To avoid GitHub file size limits and for clearer versioning, we use Zenodo to house Symphony references. 4 | Download the reference of interest from Zenodo and place it in this directory. 5 | 6 | Link to Zenodo: [https://zenodo.org/record/5090425](https://zenodo.org/record/5090425) 7 | 8 | References available for download: 9 | 10 | | Reference atlas | Filename | Num cells | Description 11 | | --- | ----------- | --------- | ---- 12 | | **10x PBMCs Atlas** | pbmcs_10x_reference.rds | 20,571 cells | Healthy human PBMCs sequenced with three 10x protocols (3'v1, 3'v2, and 5') 13 | | **Pancreatic Islet Cells Atlas** | pancreas_plate-based_reference.rds | 5,887 cells from 32 donors | Human pancreatic islet cells from four separate studies (Segerstolpe et al., Lawlor et al., Grun et al., Muraro et al.) 14 | | **Fetal Liver Hematopoiesis Atlas** | fetal_liver_reference_3p.rds | 113,063 cells from 14 donors | Human fetal liver cells (from Popescu et al., 2019), sequenced with 10x (3') 15 | | **Healthy Fetal Kidney Atlas** | kidney_healthy_fetal_reference.rds | 27,203 cells from 6 samples | Healthy human fetal kidney cells (from Stewart et al., 2019).
16 | | **Memory T Cell (CITE-seq) Atlas** | tbru_ref.rds | 500,089 cells from 259 donors | Memory T cells from a tuberculosis cohort assayed with CITE-seq (Nathan et al., 2021) 17 | | **Cross-tissue Fibroblast Atlas** | fibroblast_atlas.rds | 79,148 cells from 74 samples | Human fibroblasts across inflammatory diseases in the lung, gut, synovium, and salivary gland (Korsunsky et al., 2021) 18 | | **Cross-tissue Inflammatory Immune Atlas** | zhang_reference.rds | 307,084 immune cells from 125 donors | Human immune cells across 6 inflammatory diseases (from Zhang et al., 2021) 19 | | **Tabula Muris Senis (FACS) Atlas** | TMS_facs_reference.rds | 110,824 cells from 19 mice | Mouse cells across 23 tissues and organs 20 | 21 | To read in a reference into R, simply execute: `reference = readRDS('path/to/reference_name.rds')` 22 | 23 | Note: To be able to map query datasets into the reference UMAP coordinates, you must also download the corresponding 'uwot_model' file and set the `reference$save_uwot_path` 24 | -------------------------------------------------------------------------------- /pre-built_references/colors.R: -------------------------------------------------------------------------------- 1 | ## Defines colors used for reference cell types plotting 2 | 3 | pbmc_colors = c("B" = "#66C2A5", 4 | "DC" = "#FC8D62", 5 | "HSC" = "#8DA0CB", 6 | "MK" = "#E78AC3", 7 | "Mono_CD14" = "#A6D854", 8 | "Mono_CD16" = "#f2ec72", 9 | "NK" = "#62AAEA", 10 | "T_CD4" = "#D1C656", 11 | "T_CD8" = "#968763") 12 | 13 | pancreas_colors = c('alpha'="#ed2bb1", 14 | 'beta'="#239eb3", 15 | 'gamma'="#d1bfec", 16 | 'delta'= "#FF6347", 17 | 'stellate'="#11e38c", 18 | 'immune'="#812050", 19 | 'ductal'="#b2d27a", 20 | 'endothelial'="#4e2da6", 21 | 'acinar'="#f6bb86", 22 | 'schwann'="#115d52", 23 | 'epsilon'="#a1def0") 24 | 25 | group.colors = c( 'B cell'='#f2bd80', 26 | 'DC precursor'='#1d6d1f', 27 | 'DC1'='#8c3ba0', 28 | 'DC2'='#6533ed', 29 | 'Early Erythroid'='#83e3f0', 30 | 'Early lymphoid/T'='#fd5917', 31 | 'Endothelial cell'='#4f8c9d', 32 | 'Fibroblast'='#eb1fcb', 33 | 'Hepatocyte'='#f5cdaf', 34 | 'HSC_MPP'='#9698dc', 35 | 'ILC precursor'='#20f53d', 36 | 'Kupffer Cell'='#f283e3', 37 | 'Late Erythroid'='#ffb2be', 38 | 'Mast cell'='#f3d426', 39 | 'Megakaryocyte'='#5ebf72', 40 | 'MEMP'='#a67649', 41 | 'Mid Erythroid'='#2f5bb1', 42 | 'Mono-Mac'='#90a479', 43 | 'Monocyte'='#f6932e', 44 | 'Monocyte precursor'='#d59e9a', 45 | 'Neut-myeloid prog.'='#caf243', 46 | 'NK'='#38b5fc', 47 | 'pDC precursor'='#c82565', 48 | 'Pre pro B cell'='#d6061a', 49 | 'pre-B cell'='#e36f6f', 50 | 'pro-B cell'='#1dfee1', 51 | 'VCAM1+ EI macro.'='#506356') 52 | 53 | # Assorted colors 54 | assort.colors = c(brewer.pal(name="Set1", n = 8), brewer.pal(name="Set2", n = 8), 55 | brewer.pal(name='Set3', n = 12), 56 | brewer.pal(name="Dark2", n = 8), 57 | brewer.pal(name='Accent', n = 7), 58 | brewer.pal(name="Paired", n = 12), 59 | brewer.pal(name='Greens', n=4)) 60 | 61 | tms.colors = c(brewer.pal(name="Dark2", n = 8), brewer.pal(name="Paired", n = 12), 62 | brewer.pal(name="Set1", n = 9), brewer.pal(name="Set2", n = 8), 63 | brewer.pal(name='Accent', n = 7), brewer.pal(name='Set3', n = 12)) -------------------------------------------------------------------------------- /src/.gitignore: -------------------------------------------------------------------------------- 1 | *.o 2 | *.so 3 | *.dll 4 | -------------------------------------------------------------------------------- /src/Makevars: 
-------------------------------------------------------------------------------- 1 | CXX_STD = CXX11 2 | PKG_LIBS = $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) 3 | -------------------------------------------------------------------------------- /src/Makevars.win: -------------------------------------------------------------------------------- 1 | CXX_STD = CXX11 2 | PKG_LIBS = $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) 3 | PKG_CXXFLAGS="-DUSE_FLOAT_TYPES=0" 4 | 5 | -------------------------------------------------------------------------------- /src/RcppExports.cpp: -------------------------------------------------------------------------------- 1 | // Generated by using Rcpp::compileAttributes() -> do not edit by hand 2 | // Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 3 | 4 | #include 5 | #include 6 | 7 | using namespace Rcpp; 8 | 9 | #ifdef RCPP_USE_GLOBAL_ROSTREAM 10 | Rcpp::Rostream& Rcpp::Rcout = Rcpp::Rcpp_cout_get(); 11 | Rcpp::Rostream& Rcpp::Rcerr = Rcpp::Rcpp_cerr_get(); 12 | #endif 13 | 14 | // exp_mean 15 | arma::mat exp_mean(const arma::vec& x, const arma::vec& p, const arma::vec& i, int ncol, int nrow, const arma::uvec& groups, const arma::uvec& group_sizes); 16 | RcppExport SEXP _symphony_exp_mean(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP ncolSEXP, SEXP nrowSEXP, SEXP groupsSEXP, SEXP group_sizesSEXP) { 17 | BEGIN_RCPP 18 | Rcpp::RObject rcpp_result_gen; 19 | Rcpp::RNGScope rcpp_rngScope_gen; 20 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 21 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 22 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 23 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 24 | Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 25 | Rcpp::traits::input_parameter< const arma::uvec& >::type groups(groupsSEXP); 26 | Rcpp::traits::input_parameter< const arma::uvec& >::type group_sizes(group_sizesSEXP); 27 | rcpp_result_gen = Rcpp::wrap(exp_mean(x, p, i, ncol, nrow, groups, group_sizes)); 28 | return rcpp_result_gen; 29 | END_RCPP 30 | } 31 | // log_vmr 32 | arma::mat log_vmr(const arma::vec& x, const arma::vec& p, const arma::vec& i, int ncol, int nrow, const arma::mat& means, const arma::uvec& groups, const arma::uvec& group_sizes); 33 | RcppExport SEXP _symphony_log_vmr(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP ncolSEXP, SEXP nrowSEXP, SEXP meansSEXP, SEXP groupsSEXP, SEXP group_sizesSEXP) { 34 | BEGIN_RCPP 35 | Rcpp::RObject rcpp_result_gen; 36 | Rcpp::RNGScope rcpp_rngScope_gen; 37 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 38 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 39 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 40 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 41 | Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 42 | Rcpp::traits::input_parameter< const arma::mat& >::type means(meansSEXP); 43 | Rcpp::traits::input_parameter< const arma::uvec& >::type groups(groupsSEXP); 44 | Rcpp::traits::input_parameter< const arma::uvec& >::type group_sizes(group_sizesSEXP); 45 | rcpp_result_gen = Rcpp::wrap(log_vmr(x, p, i, ncol, nrow, means, groups, group_sizes)); 46 | return rcpp_result_gen; 47 | END_RCPP 48 | } 49 | // normalizeCLR_dgc 50 | arma::vec normalizeCLR_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, int ncol, int nrow, int margin); 51 | RcppExport SEXP _symphony_normalizeCLR_dgc(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP ncolSEXP, SEXP nrowSEXP, SEXP marginSEXP) { 
52 | BEGIN_RCPP 53 | Rcpp::RObject rcpp_result_gen; 54 | Rcpp::RNGScope rcpp_rngScope_gen; 55 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 56 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 57 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 58 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 59 | Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 60 | Rcpp::traits::input_parameter< int >::type margin(marginSEXP); 61 | rcpp_result_gen = Rcpp::wrap(normalizeCLR_dgc(x, p, i, ncol, nrow, margin)); 62 | return rcpp_result_gen; 63 | END_RCPP 64 | } 65 | // scaleRowsWithStats_dgc 66 | arma::mat scaleRowsWithStats_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, const arma::vec& mean_vec, const arma::vec& sd_vec, int ncol, int nrow, float thresh); 67 | RcppExport SEXP _symphony_scaleRowsWithStats_dgc(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP mean_vecSEXP, SEXP sd_vecSEXP, SEXP ncolSEXP, SEXP nrowSEXP, SEXP threshSEXP) { 68 | BEGIN_RCPP 69 | Rcpp::RObject rcpp_result_gen; 70 | Rcpp::RNGScope rcpp_rngScope_gen; 71 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 72 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 73 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 74 | Rcpp::traits::input_parameter< const arma::vec& >::type mean_vec(mean_vecSEXP); 75 | Rcpp::traits::input_parameter< const arma::vec& >::type sd_vec(sd_vecSEXP); 76 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 77 | Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 78 | Rcpp::traits::input_parameter< float >::type thresh(threshSEXP); 79 | rcpp_result_gen = Rcpp::wrap(scaleRowsWithStats_dgc(x, p, i, mean_vec, sd_vec, ncol, nrow, thresh)); 80 | return rcpp_result_gen; 81 | END_RCPP 82 | } 83 | // scaleRows_dgc 84 | arma::mat scaleRows_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, int ncol, int nrow, float thresh); 85 | RcppExport SEXP _symphony_scaleRows_dgc(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP ncolSEXP, SEXP nrowSEXP, SEXP threshSEXP) { 86 | BEGIN_RCPP 87 | Rcpp::RObject rcpp_result_gen; 88 | Rcpp::RNGScope rcpp_rngScope_gen; 89 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 90 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 91 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 92 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 93 | Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 94 | Rcpp::traits::input_parameter< float >::type thresh(threshSEXP); 95 | rcpp_result_gen = Rcpp::wrap(scaleRows_dgc(x, p, i, ncol, nrow, thresh)); 96 | return rcpp_result_gen; 97 | END_RCPP 98 | } 99 | // rowMeansWeighted_dgc 100 | arma::vec rowMeansWeighted_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, const arma::vec& weights, int ncol, int nrow); 101 | RcppExport SEXP _symphony_rowMeansWeighted_dgc(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP weightsSEXP, SEXP ncolSEXP, SEXP nrowSEXP) { 102 | BEGIN_RCPP 103 | Rcpp::RObject rcpp_result_gen; 104 | Rcpp::RNGScope rcpp_rngScope_gen; 105 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 106 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 107 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 108 | Rcpp::traits::input_parameter< const arma::vec& >::type weights(weightsSEXP); 109 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 110 | 
Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 111 | rcpp_result_gen = Rcpp::wrap(rowMeansWeighted_dgc(x, p, i, weights, ncol, nrow)); 112 | return rcpp_result_gen; 113 | END_RCPP 114 | } 115 | // rowSDs_dgc 116 | arma::vec rowSDs_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, const arma::vec& mean_vec, int ncol, int nrow, bool do_sqrt); 117 | RcppExport SEXP _symphony_rowSDs_dgc(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP mean_vecSEXP, SEXP ncolSEXP, SEXP nrowSEXP, SEXP do_sqrtSEXP) { 118 | BEGIN_RCPP 119 | Rcpp::RObject rcpp_result_gen; 120 | Rcpp::RNGScope rcpp_rngScope_gen; 121 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 122 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 123 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 124 | Rcpp::traits::input_parameter< const arma::vec& >::type mean_vec(mean_vecSEXP); 125 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 126 | Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 127 | Rcpp::traits::input_parameter< bool >::type do_sqrt(do_sqrtSEXP); 128 | rcpp_result_gen = Rcpp::wrap(rowSDs_dgc(x, p, i, mean_vec, ncol, nrow, do_sqrt)); 129 | return rcpp_result_gen; 130 | END_RCPP 131 | } 132 | // rowVarSDs_dgc 133 | arma::vec rowVarSDs_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, const arma::vec& mean_vec, const arma::vec& sd_vec, double vmax, int ncol, int nrow, bool do_sqrt); 134 | RcppExport SEXP _symphony_rowVarSDs_dgc(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP mean_vecSEXP, SEXP sd_vecSEXP, SEXP vmaxSEXP, SEXP ncolSEXP, SEXP nrowSEXP, SEXP do_sqrtSEXP) { 135 | BEGIN_RCPP 136 | Rcpp::RObject rcpp_result_gen; 137 | Rcpp::RNGScope rcpp_rngScope_gen; 138 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 139 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 140 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 141 | Rcpp::traits::input_parameter< const arma::vec& >::type mean_vec(mean_vecSEXP); 142 | Rcpp::traits::input_parameter< const arma::vec& >::type sd_vec(sd_vecSEXP); 143 | Rcpp::traits::input_parameter< double >::type vmax(vmaxSEXP); 144 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 145 | Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 146 | Rcpp::traits::input_parameter< bool >::type do_sqrt(do_sqrtSEXP); 147 | rcpp_result_gen = Rcpp::wrap(rowVarSDs_dgc(x, p, i, mean_vec, sd_vec, vmax, ncol, nrow, do_sqrt)); 148 | return rcpp_result_gen; 149 | END_RCPP 150 | } 151 | // rowSDsWeighted_dgc 152 | arma::vec rowSDsWeighted_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, const arma::vec& mean_vec, const arma::vec& weights, int ncol, int nrow, bool do_sqrt); 153 | RcppExport SEXP _symphony_rowSDsWeighted_dgc(SEXP xSEXP, SEXP pSEXP, SEXP iSEXP, SEXP mean_vecSEXP, SEXP weightsSEXP, SEXP ncolSEXP, SEXP nrowSEXP, SEXP do_sqrtSEXP) { 154 | BEGIN_RCPP 155 | Rcpp::RObject rcpp_result_gen; 156 | Rcpp::RNGScope rcpp_rngScope_gen; 157 | Rcpp::traits::input_parameter< const arma::vec& >::type x(xSEXP); 158 | Rcpp::traits::input_parameter< const arma::vec& >::type p(pSEXP); 159 | Rcpp::traits::input_parameter< const arma::vec& >::type i(iSEXP); 160 | Rcpp::traits::input_parameter< const arma::vec& >::type mean_vec(mean_vecSEXP); 161 | Rcpp::traits::input_parameter< const arma::vec& >::type weights(weightsSEXP); 162 | Rcpp::traits::input_parameter< int >::type ncol(ncolSEXP); 163 | Rcpp::traits::input_parameter< int >::type nrow(nrowSEXP); 
164 | Rcpp::traits::input_parameter< bool >::type do_sqrt(do_sqrtSEXP); 165 | rcpp_result_gen = Rcpp::wrap(rowSDsWeighted_dgc(x, p, i, mean_vec, weights, ncol, nrow, do_sqrt)); 166 | return rcpp_result_gen; 167 | END_RCPP 168 | } 169 | // cosine_normalize_cpp 170 | arma::mat cosine_normalize_cpp(arma::mat& V, int dim); 171 | RcppExport SEXP _symphony_cosine_normalize_cpp(SEXP VSEXP, SEXP dimSEXP) { 172 | BEGIN_RCPP 173 | Rcpp::RObject rcpp_result_gen; 174 | Rcpp::RNGScope rcpp_rngScope_gen; 175 | Rcpp::traits::input_parameter< arma::mat& >::type V(VSEXP); 176 | Rcpp::traits::input_parameter< int >::type dim(dimSEXP); 177 | rcpp_result_gen = Rcpp::wrap(cosine_normalize_cpp(V, dim)); 178 | return rcpp_result_gen; 179 | END_RCPP 180 | } 181 | // soft_kmeans_cpp 182 | List soft_kmeans_cpp(arma::mat Y, arma::mat Z, unsigned max_iter, float sigma); 183 | RcppExport SEXP _symphony_soft_kmeans_cpp(SEXP YSEXP, SEXP ZSEXP, SEXP max_iterSEXP, SEXP sigmaSEXP) { 184 | BEGIN_RCPP 185 | Rcpp::RObject rcpp_result_gen; 186 | Rcpp::RNGScope rcpp_rngScope_gen; 187 | Rcpp::traits::input_parameter< arma::mat >::type Y(YSEXP); 188 | Rcpp::traits::input_parameter< arma::mat >::type Z(ZSEXP); 189 | Rcpp::traits::input_parameter< unsigned >::type max_iter(max_iterSEXP); 190 | Rcpp::traits::input_parameter< float >::type sigma(sigmaSEXP); 191 | rcpp_result_gen = Rcpp::wrap(soft_kmeans_cpp(Y, Z, max_iter, sigma)); 192 | return rcpp_result_gen; 193 | END_RCPP 194 | } 195 | // soft_cluster 196 | arma::mat soft_cluster(const arma::mat& Y, const arma::mat& Z, float sigma); 197 | RcppExport SEXP _symphony_soft_cluster(SEXP YSEXP, SEXP ZSEXP, SEXP sigmaSEXP) { 198 | BEGIN_RCPP 199 | Rcpp::RObject rcpp_result_gen; 200 | Rcpp::RNGScope rcpp_rngScope_gen; 201 | Rcpp::traits::input_parameter< const arma::mat& >::type Y(YSEXP); 202 | Rcpp::traits::input_parameter< const arma::mat& >::type Z(ZSEXP); 203 | Rcpp::traits::input_parameter< float >::type sigma(sigmaSEXP); 204 | rcpp_result_gen = Rcpp::wrap(soft_cluster(Y, Z, sigma)); 205 | return rcpp_result_gen; 206 | END_RCPP 207 | } 208 | // compute_ref_cache 209 | List compute_ref_cache(const arma::mat& Rr, const arma::mat& Zr); 210 | RcppExport SEXP _symphony_compute_ref_cache(SEXP RrSEXP, SEXP ZrSEXP) { 211 | BEGIN_RCPP 212 | Rcpp::RObject rcpp_result_gen; 213 | Rcpp::RNGScope rcpp_rngScope_gen; 214 | Rcpp::traits::input_parameter< const arma::mat& >::type Rr(RrSEXP); 215 | Rcpp::traits::input_parameter< const arma::mat& >::type Zr(ZrSEXP); 216 | rcpp_result_gen = Rcpp::wrap(compute_ref_cache(Rr, Zr)); 217 | return rcpp_result_gen; 218 | END_RCPP 219 | } 220 | // moe_correct_ref 221 | arma::mat moe_correct_ref(const arma::mat& Zq, const arma::mat& Xq, const arma::mat& Rq, const arma::vec& Nr, const arma::mat& RrZtr); 222 | RcppExport SEXP _symphony_moe_correct_ref(SEXP ZqSEXP, SEXP XqSEXP, SEXP RqSEXP, SEXP NrSEXP, SEXP RrZtrSEXP) { 223 | BEGIN_RCPP 224 | Rcpp::RObject rcpp_result_gen; 225 | Rcpp::RNGScope rcpp_rngScope_gen; 226 | Rcpp::traits::input_parameter< const arma::mat& >::type Zq(ZqSEXP); 227 | Rcpp::traits::input_parameter< const arma::mat& >::type Xq(XqSEXP); 228 | Rcpp::traits::input_parameter< const arma::mat& >::type Rq(RqSEXP); 229 | Rcpp::traits::input_parameter< const arma::vec& >::type Nr(NrSEXP); 230 | Rcpp::traits::input_parameter< const arma::mat& >::type RrZtr(RrZtrSEXP); 231 | rcpp_result_gen = Rcpp::wrap(moe_correct_ref(Zq, Xq, Rq, Nr, RrZtr)); 232 | return rcpp_result_gen; 233 | END_RCPP 234 | } 235 | // get_betas 236 | arma::cube get_betas(const 
arma::mat& R, const arma::mat& Z, const arma::mat& lambda, const arma::mat& design); 237 | RcppExport SEXP _symphony_get_betas(SEXP RSEXP, SEXP ZSEXP, SEXP lambdaSEXP, SEXP designSEXP) { 238 | BEGIN_RCPP 239 | Rcpp::RObject rcpp_result_gen; 240 | Rcpp::RNGScope rcpp_rngScope_gen; 241 | Rcpp::traits::input_parameter< const arma::mat& >::type R(RSEXP); 242 | Rcpp::traits::input_parameter< const arma::mat& >::type Z(ZSEXP); 243 | Rcpp::traits::input_parameter< const arma::mat& >::type lambda(lambdaSEXP); 244 | Rcpp::traits::input_parameter< const arma::mat& >::type design(designSEXP); 245 | rcpp_result_gen = Rcpp::wrap(get_betas(R, Z, lambda, design)); 246 | return rcpp_result_gen; 247 | END_RCPP 248 | } 249 | 250 | static const R_CallMethodDef CallEntries[] = { 251 | {"_symphony_exp_mean", (DL_FUNC) &_symphony_exp_mean, 7}, 252 | {"_symphony_log_vmr", (DL_FUNC) &_symphony_log_vmr, 8}, 253 | {"_symphony_normalizeCLR_dgc", (DL_FUNC) &_symphony_normalizeCLR_dgc, 6}, 254 | {"_symphony_scaleRowsWithStats_dgc", (DL_FUNC) &_symphony_scaleRowsWithStats_dgc, 8}, 255 | {"_symphony_scaleRows_dgc", (DL_FUNC) &_symphony_scaleRows_dgc, 6}, 256 | {"_symphony_rowMeansWeighted_dgc", (DL_FUNC) &_symphony_rowMeansWeighted_dgc, 6}, 257 | {"_symphony_rowSDs_dgc", (DL_FUNC) &_symphony_rowSDs_dgc, 7}, 258 | {"_symphony_rowVarSDs_dgc", (DL_FUNC) &_symphony_rowVarSDs_dgc, 9}, 259 | {"_symphony_rowSDsWeighted_dgc", (DL_FUNC) &_symphony_rowSDsWeighted_dgc, 8}, 260 | {"_symphony_cosine_normalize_cpp", (DL_FUNC) &_symphony_cosine_normalize_cpp, 2}, 261 | {"_symphony_soft_kmeans_cpp", (DL_FUNC) &_symphony_soft_kmeans_cpp, 4}, 262 | {"_symphony_soft_cluster", (DL_FUNC) &_symphony_soft_cluster, 3}, 263 | {"_symphony_compute_ref_cache", (DL_FUNC) &_symphony_compute_ref_cache, 2}, 264 | {"_symphony_moe_correct_ref", (DL_FUNC) &_symphony_moe_correct_ref, 5}, 265 | {"_symphony_get_betas", (DL_FUNC) &_symphony_get_betas, 4}, 266 | {NULL, NULL, 0} 267 | }; 268 | 269 | RcppExport void R_init_symphony(DllInfo *dll) { 270 | R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); 271 | R_useDynamicSymbols(dll, FALSE); 272 | } 273 | -------------------------------------------------------------------------------- /src/singlecellmethods.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | using namespace Rcpp; 8 | 9 | // [[Rcpp::depends(RcppArmadillo)]] 10 | 11 | //typedef arma::mat MATTYPE; 12 | //typedef arma::vec VECTYPE; 13 | //typedef arma::fmat MATTYPE; 14 | //typedef arma::fvec VECTYPE; 15 | 16 | 17 | // [[Rcpp::export]] 18 | arma::mat exp_mean(const arma::vec& x, const arma::vec& p, const arma::vec& i, int ncol, int nrow, const arma::uvec& groups, const arma::uvec& group_sizes) { 19 | int ngroups = group_sizes.n_elem; 20 | arma::mat res = arma::zeros(nrow, ngroups); 21 | for (int c = 0; c < ncol; c++) { 22 | for (int j = p[c]; j < p[c + 1]; j++) { 23 | // i[j] gives the row num 24 | res(i[j], groups[c]) += std::expm1(x[j]); 25 | } 26 | } 27 | 28 | for (int c = 0; c < ngroups; c++) { 29 | for (int r = 0; r < nrow; r++) { 30 | res(r, c) /= group_sizes[c]; 31 | } 32 | } 33 | 34 | return(res); 35 | } 36 | 37 | 38 | 39 | // [[Rcpp::export]] 40 | arma::mat log_vmr(const arma::vec& x, const arma::vec& p, const arma::vec& i, 41 | int ncol, int nrow, const arma::mat& means, 42 | const arma::uvec& groups, const arma::uvec& group_sizes) { 43 | 44 | int ngroups = group_sizes.n_elem; 45 | arma::mat res = arma::zeros(nrow, 
ngroups); 46 | arma::mat nnzero = arma::zeros(nrow, ngroups); 47 | double tmp; 48 | for (int c = 0; c < ncol; c++) { 49 | for (int j = p[c]; j < p[c + 1]; j++) { 50 | // i[j] gives the row num 51 | tmp = std::expm1(x[j]) - means(i[j], groups(c)); 52 | res(i[j], groups[c]) += tmp * tmp; 53 | nnzero(i[j], groups(c))++; 54 | } 55 | } 56 | 57 | for (int c = 0; c < ngroups; c++) { 58 | for (int r = 0; r < nrow; r++) { 59 | res(r, c) += (group_sizes[c] - nnzero(r, c)) * means(r, c) * means(r, c); 60 | res(r, c) /= (group_sizes[c] - 1); 61 | } 62 | } 63 | 64 | res = log(res / means); 65 | res.replace(arma::datum::nan, 0); 66 | 67 | return(res); 68 | } 69 | 70 | // [[Rcpp::export]] 71 | arma::vec normalizeCLR_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, int ncol, int nrow, int margin) { 72 | arma::vec res = x; 73 | if (margin == 1) { 74 | // first compute scaling factors for each row 75 | arma::vec geo_mean = arma::zeros(nrow); 76 | for (int c = 0; c < ncol; c++) { 77 | for (int j = p[c]; j < p[c + 1]; j++) { 78 | // i[j] gives the row num 79 | geo_mean(i[j]) += std::log1p(x[j]); 80 | } 81 | } 82 | for (int i = 0; i < nrow; i++) { 83 | // geo_mean(i) = (geo_mean(i) / (1 + ncol)); 84 | geo_mean(i) = std::exp(geo_mean(i) / ncol); 85 | } 86 | // then scale data 87 | for (int c = 0; c < ncol; c++) { 88 | for (int j = p[c]; j < p[c + 1]; j++) { 89 | res(j) = std::log1p(res(j) / geo_mean(i[j])); 90 | } 91 | } 92 | } else { 93 | // first compute scaling factors for each column 94 | arma::vec geo_mean = arma::zeros(ncol); 95 | for (int c = 0; c < ncol; c++) { 96 | for (int j = p[c]; j < p[c + 1]; j++) { 97 | geo_mean(c) += std::log1p(x[j]); 98 | } 99 | geo_mean(c) = std::exp(geo_mean(c) / nrow); 100 | } 101 | 102 | // then scale data 103 | for (int c = 0; c < ncol; c++) { 104 | for (int j = p[c]; j < p[c + 1]; j++) { 105 | res(j) = std::log1p(res(j) / geo_mean(c)); 106 | } 107 | } 108 | 109 | } 110 | 111 | return res; 112 | } 113 | 114 | 115 | 116 | // [[Rcpp::export]] 117 | arma::mat scaleRowsWithStats_dgc(const arma::vec& x, const arma::vec& p, 118 | const arma::vec& i, const arma::vec& mean_vec, 119 | const arma::vec& sd_vec, int ncol, int nrow, 120 | float thresh) { 121 | // fill in non-zero elements 122 | arma::mat res = arma::zeros(nrow, ncol); 123 | for (int c = 0; c < ncol; c++) { 124 | for (int j = p[c]; j < p[c + 1]; j++) { 125 | res(i[j], c) = x(j); 126 | } 127 | } 128 | // scale rows with given means and SDs 129 | res.each_col() -= mean_vec; 130 | res.each_col() /= sd_vec; 131 | res.elem(find(res > thresh)).fill(thresh); 132 | res.elem(find(res < -thresh)).fill(-thresh); 133 | return res; 134 | } 135 | 136 | 137 | // [[Rcpp::export]] 138 | arma::mat scaleRows_dgc(const arma::vec& x, const arma::vec& p, const arma::vec& i, 139 | int ncol, int nrow, float thresh) { 140 | // (0) fill in non-zero elements 141 | arma::mat res = arma::zeros(nrow, ncol); 142 | for (int c = 0; c < ncol; c++) { 143 | for (int j = p[c]; j < p[c + 1]; j++) { 144 | res(i[j], c) = x(j); 145 | } 146 | } 147 | 148 | // (1) compute means 149 | arma::vec mean_vec = arma::zeros(nrow); 150 | for (int c = 0; c < ncol; c++) { 151 | for (int j = p[c]; j < p[c + 1]; j++) { 152 | mean_vec(i[j]) += x[j]; 153 | } 154 | } 155 | mean_vec /= ncol; 156 | 157 | // (2) compute SDs 158 | arma::vec sd_vec = arma::zeros(nrow); 159 | arma::uvec nz = arma::zeros(nrow); 160 | nz.fill(ncol); 161 | for (int c = 0; c < ncol; c++) { 162 | for (int j = p[c]; j < p[c + 1]; j++) { 163 | sd_vec(i[j]) += (x[j] - mean_vec(i[j])) * (x[j] - 
mean_vec(i[j])); // (x - mu)^2 164 | nz(i[j])--; 165 | } 166 | } 167 | 168 | // count for the zeros 169 | for (int r = 0; r < nrow; r++) { 170 | sd_vec(r) += nz(r) * mean_vec(r) * mean_vec(r); 171 | } 172 | 173 | sd_vec = arma::sqrt(sd_vec / (ncol - 1)); 174 | 175 | // (3) scale values 176 | res.each_col() -= mean_vec; 177 | res.each_col() /= sd_vec; 178 | res.elem(find(res > thresh)).fill(thresh); 179 | res.elem(find(res < -thresh)).fill(-thresh); 180 | return res; 181 | } 182 | 183 | 184 | // [[Rcpp::export]] 185 | arma::vec rowMeansWeighted_dgc(const arma::vec& x, const arma::vec& p, 186 | const arma::vec& i, const arma::vec& weights, 187 | int ncol, int nrow) { 188 | 189 | arma::vec res = arma::zeros(nrow); 190 | for (int c = 0; c < ncol; c++) { 191 | for (int j = p[c]; j < p[c + 1]; j++) { 192 | res[i[j]] += x[j] * weights[c]; 193 | } 194 | } 195 | 196 | res /= arma::accu(weights); 197 | return res; 198 | } 199 | 200 | // [[Rcpp::export]] 201 | arma::vec rowSDs_dgc(const arma::vec& x, const arma::vec& p, 202 | const arma::vec& i, const arma::vec& mean_vec, 203 | int ncol, int nrow, bool do_sqrt) { 204 | 205 | arma::vec sd_vec = arma::zeros(nrow); 206 | arma::uvec nz = arma::zeros(nrow); 207 | nz.fill(ncol); 208 | for (int c = 0; c < ncol; c++) { 209 | for (int j = p[c]; j < p[c + 1]; j++) { 210 | sd_vec(i[j]) += (x[j] - mean_vec(i[j])) * (x[j] - mean_vec(i[j])); // (x - mu)^2 211 | nz(i[j])--; 212 | } 213 | } 214 | 215 | // count for the zeros 216 | for (int r = 0; r < nrow; r++) { 217 | sd_vec(r) += nz(r) * mean_vec(r) * mean_vec(r); 218 | } 219 | 220 | sd_vec = sd_vec / (ncol - 1); 221 | if (do_sqrt) { 222 | sd_vec = arma::sqrt(sd_vec); 223 | } 224 | 225 | return sd_vec; 226 | } 227 | 228 | 229 | // [[Rcpp::export]] 230 | arma::vec rowVarSDs_dgc( 231 | const arma::vec& x, const arma::vec& p, 232 | const arma::vec& i, const arma::vec& mean_vec, const arma::vec& sd_vec, 233 | double vmax, int ncol, int nrow, bool do_sqrt) { 234 | 235 | arma::vec res = arma::zeros(nrow); 236 | arma::uvec nz = arma::zeros(nrow); 237 | nz.fill(ncol); 238 | double val; 239 | for (int c = 0; c < ncol; c++) { 240 | for (int j = p[c]; j < p[c + 1]; j++) { 241 | val = std::min(vmax, (x[j] - mean_vec(i[j])) / sd_vec(i[j])); 242 | res(i[j]) += val * val; // [(x - mu)/sig]^2 243 | nz(i[j])--; 244 | } 245 | } 246 | 247 | // count for the zeros 248 | for (int r = 0; r < nrow; r++) { 249 | res(r) += nz(r) * mean_vec(r) * mean_vec(r) / (sd_vec(r) * sd_vec(r)); 250 | } 251 | 252 | res = res / (ncol - 1); 253 | if (do_sqrt) { 254 | res = arma::sqrt(res); 255 | } 256 | 257 | return res; 258 | } 259 | 260 | 261 | // [[Rcpp::export]] 262 | arma::vec rowSDsWeighted_dgc(const arma::vec& x, const arma::vec& p, 263 | const arma::vec& i, const arma::vec& mean_vec, 264 | const arma::vec& weights, 265 | int ncol, int nrow, bool do_sqrt) { 266 | 267 | arma::vec sd_vec = arma::zeros(nrow); 268 | double sum_weights = arma::accu(weights); 269 | arma::vec nz = arma::zeros(nrow); 270 | nz.fill(sum_weights); 271 | for (int c = 0; c < ncol; c++) { 272 | for (int j = p[c]; j < p[c + 1]; j++) { 273 | sd_vec(i[j]) += weights[c] * (x[j] - mean_vec(i[j])) * (x[j] - mean_vec(i[j])); // w * (x - mu)^2 274 | nz(i[j]) -= weights[c]; 275 | } 276 | } 277 | 278 | // count for the zeros 279 | for (int r = 0; r < nrow; r++) { 280 | sd_vec(r) += nz(r) * mean_vec(r) * mean_vec(r); 281 | } 282 | 283 | sd_vec *= sum_weights / (sum_weights * sum_weights - arma::accu(weights % weights)); 284 | if (do_sqrt) { 285 | sd_vec = arma::sqrt(sd_vec); 286 | } 287 
| return sd_vec; 288 | } 289 | 290 | 291 | // [[Rcpp::export]] 292 | arma::mat cosine_normalize_cpp(arma::mat & V, int dim) { 293 | // norm rows: dim=1 294 | // norm cols: dim=0 or dim=2 295 | if (dim == 2) dim = 0; 296 | return arma::normalise(V, 2, dim); 297 | } 298 | 299 | // [[Rcpp::export]] 300 | List soft_kmeans_cpp(arma::mat Y, arma::mat Z, unsigned max_iter, float sigma) { 301 | Y = arma::normalise(Y, 2, 0); // L2 normalize the columns 302 | Z = arma::normalise(Z, 2, 0); // L2 normalize the columns 303 | arma::mat R;// = -2 * (1 - Y.t() * Z) / sigma; // dist_mat 304 | for (unsigned i = 0; i < max_iter; i++) { 305 | R = -2 * (1 - Y.t() * Z) / sigma; // dist_mat 306 | R.each_row() -= arma::max(R, 0); 307 | R = exp(R); 308 | R.each_row() /= arma::sum(R, 0); 309 | Y = arma::normalise(Z * R.t(), 2, 0); 310 | } 311 | 312 | List result = List::create(Named("R") = R , _["Y"] = Y); 313 | return result; 314 | 315 | } 316 | -------------------------------------------------------------------------------- /src/utils.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | using namespace Rcpp; 8 | 9 | 10 | // Symphony utils---------------------------------- 11 | 12 | // [[Rcpp::depends(RcppArmadillo)]] 13 | 14 | // Computes the soft cluster assignments of query cells across reference clusters. 15 | // 16 | // Y: Reference cluster centroid locations 17 | // Z: Query cells projected into PC space (d x m) 18 | // sigma: Soft k-means "fuzziness" parameter, sigma = 0 is hard clustering 19 | // [[Rcpp::export]] 20 | arma::mat soft_cluster(const arma::mat& Y, const arma::mat& Z, float sigma) { 21 | arma::mat Y_cos = arma::normalise(Y, 2, 0); // L2 normalize the columns 22 | arma::mat Z_cos = arma::normalise(Z, 2, 0); // L2 normalize the columns 23 | arma::mat R = -2 * (1 - Y_cos.t() * Z_cos) / sigma; // dist_mat 24 | 25 | R.each_row() -= arma::max(R, 0); 26 | R = exp(R); 27 | R.each_row() /= arma::sum(R, 0); 28 | return R; 29 | } 30 | 31 | 32 | // Computes the Symphony reference compression terms, Nr and C. 33 | // 34 | // Rr: Soft cluster assignments of reference cells (cols) across clusters (rows). 35 | // Zr: Corrected embedding for reference cells (cols) in harmonized PCs (rows). 36 | // [[Rcpp::export]] 37 | List compute_ref_cache( 38 | const arma::mat& Rr, 39 | const arma::mat& Zr 40 | ) { 41 | List result(2); 42 | 43 | result[0] = arma::sum(Rr, 1); // Nr (k x 1) 44 | result[1] = Rr * Zr.t(); // C (k x d) 45 | return result; 46 | } 47 | 48 | // Computes the corrected query cell embedding. 
49 | // 50 | // Zq: Query cells projected into PC space (d x m) 51 | // Xq: Query design matrix ((c + 1) x m) 52 | // Rq: Query soft cluster assignments across reference clusters (k x m) 53 | // Nr: Reference cluster sizes (first compression term) (length k) 54 | // RrZtr: Second reference compression term (C) (k x d) 55 | // [[Rcpp::export]] 56 | arma::mat moe_correct_ref( 57 | const arma::mat& Zq, // query cells projected into PC space 58 | const arma::mat& Xq, // query design matrix 59 | const arma::mat& Rq, // query soft cluster assignments 60 | const arma::vec& Nr, // ref cluster sizes 61 | const arma::mat& RrZtr // ref matrix cached 62 | ) { 63 | unsigned K = Rq.n_rows; 64 | arma::mat Zq_corr = Zq; 65 | arma::mat Xq_Rk, beta; 66 | arma::mat mat1, mat2, lambda_I; 67 | for (unsigned k = 0; k < K; k++) { 68 | Xq_Rk = Xq * arma::diagmat(Rq.row(k)); 69 | 70 | // (B+1) x (B+1) 71 | mat1 = Xq_Rk * Xq.t(); 72 | mat1(0,0) += Nr[k]; 73 | 74 | // ridge 75 | unsigned nrows = Xq.n_rows; 76 | lambda_I = arma::eye(nrows, nrows); 77 | lambda_I(0,0) -= 1; //do not penalize the intercept 78 | mat1 += lambda_I; 79 | 80 | // (B+1) x d 81 | mat2 = Xq_Rk * Zq.t(); 82 | mat2.row(0) += RrZtr.row(k); 83 | 84 | beta = arma::inv(mat1) * (mat2); 85 | beta.row(0).zeros(); //do not correct the intercept terms 86 | Zq_corr -= beta.t() * Xq_Rk; 87 | } 88 | return Zq_corr; //(d x m) 89 | } 90 | 91 | // Returns the batch coefficients of the linear mixture model as a 3D tensor. 92 | // [[Rcpp::export]] 93 | arma::cube get_betas(const arma::mat& R, const arma::mat& Z, const arma::mat& lambda, const arma::mat& design) { 94 | unsigned K = R.n_rows; 95 | unsigned B = design.n_rows; 96 | unsigned D = Z.n_rows; 97 | unsigned N = Z.n_cols; 98 | arma::cube W_cube(B, D, K); // rows, cols, slices 99 | arma::mat Phi_Rk(B, N); 100 | for (unsigned k = 0; k < K; k++) { 101 | Phi_Rk = design * arma::diagmat(R.row(k)); 102 | W_cube.slice(k) = arma::inv(Phi_Rk * design.t() + lambda) * Phi_Rk * Z.t(); 103 | } 104 | return W_cube; 105 | } 106 | -------------------------------------------------------------------------------- /vignettes/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | *.R 3 | -------------------------------------------------------------------------------- /vignettes/data/baron-mouse.rds: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:a78cfeab4d5dd9035fb9da7c71ac929f67d3ff2413da5ef26fe31a800366a393 3 | size 12315736 4 | -------------------------------------------------------------------------------- /vignettes/data/exprs_norm_all.rds: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c901a379195339261498e9b6517d04d0e965760c7627d2e0674fed420b0d9c7a 3 | size 72611871 4 | -------------------------------------------------------------------------------- /vignettes/data/fetal_liver_exprs_5p.rds: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c8a025e36fe491523b02e0e1024fe067751eb3e35e67b6cb46ab782ae3ae2de9 3 | size 162222504 4 | -------------------------------------------------------------------------------- /vignettes/data/fetal_liver_meta_5p.rds: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid 
sha256:cd6ab6015a5b91edd54d70846e18eb83766cf5226671fe9c837e25ce74fce6a2 3 | size 889278 4 | -------------------------------------------------------------------------------- /vignettes/data/meta_data_subtypes.csv: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:d6c5e89066005b911c903d6e2b6817e0085a88ba33a16f6a30a1f7f8b5d61520 3 | size 2346758 4 | -------------------------------------------------------------------------------- /vignettes/data/pancreas_baron_human_exp.rds: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:07b3c78e018033babd8477701ff70520c162f5f94b9009a470f70b1cd63a8365 3 | size 38369732 4 | -------------------------------------------------------------------------------- /vignettes/data/pancreas_baron_human_metadata.rds: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:1482863fccc6ccaf6ddcf3d784fe10605d0126dce078ae21569e5959dbbc103f 3 | size 57879 4 | -------------------------------------------------------------------------------- /vignettes/data/pancreas_baron_mouse_biomart_homologene_exp.rds: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:530859f3265f65bdc36460c9d28ea9fbc7028e2ea37a97ad90b83990b0fa1b81 3 | size 7366219 4 | -------------------------------------------------------------------------------- /vignettes/data/pancreas_baron_mouse_metadata.rds: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:aa8441b474d03c30a9fc74428c4e380f75f71ead5db09cee6563aee5e1e4ae58 3 | size 13234 4 | -------------------------------------------------------------------------------- /vignettes/libs.R: -------------------------------------------------------------------------------- 1 | # Analysis 2 | library(symphony) 3 | library(harmony) 4 | library(singlecellmethods) 5 | library(irlba) 6 | library(tidyverse) 7 | library(data.table) 8 | library(matrixStats) 9 | library(Matrix) 10 | library(plyr) 11 | library(dplyr) 12 | 13 | # Plotting 14 | library(ggplot2) 15 | library(ggthemes) 16 | library(ggrastr) 17 | library(RColorBrewer) 18 | library(patchwork) 19 | library(ggpubr) -------------------------------------------------------------------------------- /vignettes/quickstart_tutorial.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Quickstart Tutorial" 3 | output: rmarkdown::html_vignette 4 | fig_width: 6 5 | fig_height: 4 6 | vignette: > 7 | %\VignetteIndexEntry{Quickstart PBMCs Tutorial} 8 | %\VignetteEngine{knitr::rmarkdown} 9 | %\VignetteEncoding{UTF-8} 10 | --- 11 | 12 | In this tutorial, we will construct a Symphony reference from two PBMC datasets from 2 technologies (10x 3'v1 and 3'v2), then map a third dataset from a new technology (10x 5') with Symphony. The analysis follows from Fig. 2 of the paper (but with downsampled datasets to fit within CRAN limits on subdirectory size). 13 | 14 | # Installation 15 | 16 | Install Symphony with standard commands. 17 | ```{r eval=FALSE} 18 | install.packages('symphony') 19 | ``` 20 | 21 | Once Symphony is installed, load it up! 
22 | ```{r} 23 | library(symphony) 24 | 25 | # Other packages for this tutorial 26 | suppressPackageStartupMessages({ 27 | # Analysis 28 | library(harmony) 29 | library(irlba) 30 | library(data.table) 31 | library(dplyr) 32 | 33 | # Plotting 34 | library(ggplot2) 35 | library(ggthemes) 36 | library(ggrastr) 37 | library(RColorBrewer) 38 | }) 39 | ``` 40 | 41 | ```{r echo=FALSE} 42 | plotBasic = function(umap_labels, # metadata, with UMAP labels in UMAP1 and UMAP2 slots 43 | title = 'Query', # Plot title 44 | color.by = 'cell_type', # metadata column name for coloring 45 | facet.by = NULL, # (optional) metadata column name for faceting 46 | color.mapping = NULL, # custom color mapping 47 | legend.position = 'right') { # Show cell type legend 48 | 49 | p = umap_labels %>% 50 | dplyr::sample_frac(1L) %>% # permute rows randomly 51 | ggplot(aes(x = UMAP1, y = UMAP2)) + 52 | geom_point_rast(aes(col = get(color.by)), size = 1, stroke = 0.4, shape = 16) 53 | if (!is.null(color.mapping)) { p = p + scale_color_manual(values = color.mapping) } 54 | 55 | # Default formatting 56 | p = p + theme_bw() + 57 | labs(title = title, color = color.by) + 58 | theme(plot.title = element_text(hjust = 0.5)) + 59 | theme(legend.position=legend.position) + 60 | theme(legend.text = element_text(size=8), legend.title=element_text(size=12)) + 61 | guides(colour = guide_legend(override.aes = list(size = 4))) + guides(alpha = 'none') 62 | 63 | if(!is.null(facet.by)) { 64 | p = p + facet_wrap(~get(facet.by)) + 65 | theme(strip.text.x = element_text(size = 12)) } 66 | return(p) 67 | } 68 | 69 | # Colors for PBMCs 70 | pbmc_colors = c("B" = "#66C2A5", "DC" = "#FC8D62", "HSC" = "#8DA0CB", "MK" = "#E78AC3", 71 | "Mono_CD14" = "#A6D854", "Mono_CD16" = "#f2ec72", "NK" = "#62AAEA", 72 | "T_CD4" = "#D1C656", "T_CD8" = "#968763") 73 | ``` 74 | 75 | # Load the data 76 | 77 | Get the expression and metadata. 78 | ```{r} 79 | load('../data/pbmcs_exprs_small.rda') 80 | load('../data/pbmcs_meta_small.rda') 81 | 82 | dim(pbmcs_exprs_small) 83 | dim(pbmcs_meta_small) 84 | 85 | pbmcs_meta_small %>% head(4) 86 | ``` 87 | 88 | Subset the dataset into reference and query. 89 | ```{r} 90 | idx_query = which(pbmcs_meta_small$donor == "5p") # use 5' dataset as the query 91 | ref_exp_full = pbmcs_exprs_small[, -idx_query] 92 | ref_metadata = pbmcs_meta_small[-idx_query, ] 93 | query_exp = pbmcs_exprs_small[, idx_query] 94 | query_metadata = pbmcs_meta_small[idx_query, ] 95 | ``` 96 | 97 | # Build Symphony reference 98 | 99 | There are two options for how to build a Symphony reference. Option 1 (`buildReferenceFromHarmonyObj`) is the more modular option, meaning that the user has more control over the preprocessing steps prior to reference compression. Option 2 (`buildReference`) builds a reference starting from expression, automating the procedure more but offering less flexibility. 100 | 101 | We'll demonstrate both options below. 102 | 103 | 104 | ## Option 1: Build from Harmony object (preferred method) 105 | 106 | This option consists of more steps than Option 2 but allows your code to be more modular and flexible if you want to do your own preprocessing steps before the Harmony integration step. We recommend this option for most users. 107 | 108 | It is important to generate `vargenes_means_sds` (containing variable gene means and standard deviations used to scale the genes) as well as save the loadings for the PCA step. 
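As a quick, optional sanity check (an illustrative aside; the objects referenced here are created in the chunks that follow), you can confirm that the scaling statistics and loadings line up before compressing the reference:

```{r eval=FALSE}
# Each variable gene should have one row of scaling statistics and one row of loadings,
# and the loadings should have one column per PC.
stopifnot(all(c('symbol', 'mean', 'stddev') %in% colnames(vargenes_means_sds)))
stopifnot(nrow(loadings) == nrow(vargenes_means_sds))
stopifnot(ncol(loadings) == nrow(Z_pca_ref))
```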
109 | 110 | Starting with the reference expression, 111 | ```{r} 112 | ref_exp_full[1:5, 1:2] # Sparse matrix with the normalized genes x cells matrix 113 | ``` 114 | 115 | Select variable genes and subset reference expression by variable genes (the command below will select the top 1,000 genes per batch, then pool them) 116 | ```{r} 117 | var_genes = vargenes_vst(ref_exp_full, groups = as.character(ref_metadata[['donor']]), topn = 1000) 118 | ref_exp = ref_exp_full[var_genes, ] 119 | dim(ref_exp) 120 | ``` 121 | 122 | Calculate and save the mean and standard deviations for each gene 123 | ```{r} 124 | vargenes_means_sds = tibble(symbol = var_genes, mean = Matrix::rowMeans(ref_exp)) 125 | vargenes_means_sds$stddev = rowSDs(ref_exp, vargenes_means_sds$mean) 126 | head(vargenes_means_sds) 127 | ``` 128 | 129 | Scale data using calculated gene means and standard deviations 130 | ```{r} 131 | ref_exp_scaled = scaleDataWithStats(ref_exp, vargenes_means_sds$mean, vargenes_means_sds$stddev, 1) 132 | ``` 133 | 134 | Run PCA (using SVD), save gene loadings (`s$u`) 135 | ```{r} 136 | set.seed(0) 137 | s = irlba(ref_exp_scaled, nv = 20) 138 | Z_pca_ref = diag(s$d) %*% t(s$v) # [pcs by cells] 139 | loadings = s$u 140 | ``` 141 | 142 | Run Harmony integration. It is important to set `return_object = TRUE`. 143 | ```{r} 144 | set.seed(0) 145 | ref_harmObj = harmony::HarmonyMatrix( 146 | data_mat = t(Z_pca_ref), ## PCA embedding matrix of cells 147 | meta_data = ref_metadata, ## dataframe with cell labels 148 | theta = c(2), ## cluster diversity enforcement 149 | vars_use = c('donor'), ## variable to integrate out 150 | nclust = 100, ## number of clusters in Harmony model 151 | max.iter.harmony = 20, ## max number of iterations 152 | return_object = TRUE, ## return the full Harmony model object 153 | do_pca = FALSE ## don't recompute PCs 154 | ) 155 | ``` 156 | 157 | To run the next function `buildReferenceFromHarmonyObj()`, you need to input the saved gene loadings (`loadings`) and `vargenes_means_sds`. 158 | ```{r} 159 | # Compress a Harmony object into a Symphony reference 160 | reference = buildReferenceFromHarmonyObj( 161 | ref_harmObj, # output object from HarmonyMatrix() 162 | ref_metadata, # reference cell metadata 163 | vargenes_means_sds, # gene names, means, and std devs for scaling 164 | loadings, # genes x PCs matrix 165 | verbose = TRUE, # verbose output 166 | do_umap = TRUE, # set to TRUE to run UMAP 167 | save_uwot_path = './testing_uwot_model_1') # file path to save uwot model 168 | ``` 169 | 170 | Save Symphony reference for later mapping (modify with your desired output path) 171 | ```{r} 172 | saveRDS(reference, './testing_reference1.rds') 173 | ``` 174 | 175 | Let's take a look at what the reference object contains: 176 | * meta_data: metadata 177 | * vargenes: variable genes, means, and standard deviations used for scaling 178 | * loadings: gene loadings for projection into pre-Harmony PC space 179 | * R: Soft cluster assignments 180 | * Z_orig: Pre-Harmony PC embedding 181 | * Z_corr: Harmonized PC embedding 182 | * centroids: locations of final Harmony soft cluster centroids 183 | * cache: pre-calculated reference-dependent portions of the mixture model 184 | * umap: UMAP coordinates 185 | * save_uwot_path: path to saved uwot model (for query UMAP projection into reference UMAP coordinates) 186 | ```{r} 187 | str(reference) 188 | ``` 189 | 190 | The harmonized embedding is located in the `Z_corr` slot of the reference object. 
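If you want to work with the harmonized embedding outside of Symphony (an illustrative aside; the row names of `reference$meta_data` are assumed to hold the cell identifiers), transpose it into the usual cells-by-dimensions orientation first:

```{r eval=FALSE}
# Cells x harmonized PCs, e.g. for custom clustering or plotting
Z_ref = t(as.matrix(reference$Z_corr))
rownames(Z_ref) = rownames(reference$meta_data)
dim(Z_ref)
```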
191 | ```{r} 192 | dim(reference$Z_corr) 193 | reference$Z_corr[1:5, 1:5] 194 | ``` 195 | 196 | Visualize reference UMAP 197 | ```{r, fig.width = 5.5, fig.height = 4} 198 | reference = readRDS('./testing_reference1.rds') 199 | umap_labels = cbind(ref_metadata, reference$umap$embedding) 200 | plotBasic(umap_labels, title = 'Reference', color.mapping = pbmc_colors) 201 | ``` 202 | 203 | ## Option 2: Build from scratch (starting with expression) 204 | 205 | This option computes a reference object starting from expression in a unified pipeline, automating the preprocessing steps. 206 | ```{r} 207 | # Build reference 208 | set.seed(0) 209 | reference = symphony::buildReference( 210 | ref_exp_full, 211 | ref_metadata, 212 | vars = c('donor'), # variables to integrate over 213 | K = 100, # number of Harmony clusters 214 | verbose = TRUE, # verbose output 215 | do_umap = TRUE, # can set to FALSE if want to run umap separately later 216 | do_normalize = FALSE, # set to TRUE if input counts are not normalized yet 217 | vargenes_method = 'vst', # method for variable gene selection ('vst' or 'mvp') 218 | vargenes_groups = 'donor', # metadata column specifying groups for variable gene selection 219 | topn = 1000, # number of variable genes to choose per group 220 | d = 20, # number of PCs 221 | save_uwot_path = './testing_uwot_model_2' # file path to save uwot model 222 | ) 223 | 224 | # Save reference (modify with your desired output path) 225 | saveRDS(reference, './testing_reference2.rds') 226 | ``` 227 | 228 | Visualize reference UMAP 229 | ```{r, fig.width = 5.5, fig.height = 4} 230 | reference = readRDS('./testing_reference2.rds') 231 | umap_labels = cbind(ref_metadata, reference$umap$embedding) 232 | plotBasic(umap_labels, title = 'Reference', color.mapping = pbmc_colors) 233 | ``` 234 | 235 | # Map query 236 | 237 | In order to map a new query dataset onto the reference, you will need a reference object saved from the steps above, as well as query cell expression and metadata. 238 | 239 | The query dataset is assumed to have been normalized in the same manner as the reference cells (here, default is log(CP10k+1) normalization). 240 | 241 | ```{r} 242 | # Read in Symphony reference to map to 243 | reference = readRDS('./testing_reference1.rds') 244 | # Map query 245 | query = mapQuery(query_exp, # query gene expression (genes x cells) 246 | query_metadata, # query metadata (cells x attributes) 247 | reference, # Symphony reference object 248 | do_normalize = FALSE, # perform log(CP10k) normalization on query 249 | do_umap = TRUE) # project query cells into reference UMAP 250 | ``` 251 | 252 | Note: Symphony assumes that the query is normalized in the same manner as the reference. Our default implementation currently uses log(CP10k+1) normalization. 253 | 254 | Let's take a look at what the query object contains: 255 | * Z: query cells in reference Harmonized embedding 256 | * Zq_pca: query cells in pre-Harmony reference PC embedding (prior to correction) 257 | * R: query cell soft cluster assignments 258 | * Xq: query cell design matrix for correction step 259 | * umap: query cells projected into reference UMAP coordinates (using uwot) 260 | * meta_data: metadata 261 | ```{r} 262 | str(query) 263 | ``` 264 | 265 | Predict query cell types using k-NN. Setting confidence = TRUE also returns the prediction confidence scores (proportion of neighbors with winning vote). 
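For example, with `k = 5`, a label supported by 4 of the 5 nearest reference neighbors is assigned with a confidence of 0.8. After running the chunk below, you could optionally flag low-confidence calls (an illustrative step with a hypothetical cutoff of 0.6, not part of the original workflow):

```{r eval=FALSE}
query$meta_data$cell_type_pred_filtered = ifelse(
    query$meta_data$cell_type_pred_knn_prob >= 0.6,
    as.character(query$meta_data$cell_type_pred_knn),
    'unassigned')
table(query$meta_data$cell_type_pred_filtered)
```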
266 | ```{r} 267 | query = knnPredict(query, # query object 268 | reference, # reference object 269 | reference$meta_data$cell_type, # reference cell labels for training 270 | k = 5, # number of reference neighbors to use for prediction 271 | confidence = TRUE) 272 | ``` 273 | 274 | Query cell type predictions are now in the cell_type_pred_knn column. 275 | ```{r} 276 | head(query$meta_data) 277 | ``` 278 | 279 | ## Visualization of mapping 280 | ```{r, fig.width = 5.5, fig.height = 4} 281 | # Sync the column names for both data frames 282 | reference$meta_data$cell_type_pred_knn = NA 283 | reference$meta_data$cell_type_pred_knn_prob = NA 284 | reference$meta_data$ref_query = 'reference' 285 | query$meta_data$ref_query = 'query' 286 | 287 | # Add the UMAP coordinates to the metadata 288 | meta_data_combined = rbind(query$meta_data, reference$meta_data) 289 | umap_combined = rbind(query$umap, reference$umap$embedding) 290 | umap_combined_labels = cbind(meta_data_combined, umap_combined) 291 | 292 | # Plot UMAP visualization of all cells 293 | plotBasic(umap_combined_labels, title = 'Reference and query cells', color.by = 'ref_query') 294 | ``` 295 | 296 | Plot the reference and query side by side. 297 | ```{r, fig.width = 7, fig.height = 4} 298 | plotBasic(umap_combined_labels, title = 'Reference and query cells', 299 | color.mapping = pbmc_colors, facet.by = 'ref_query') 300 | ``` 301 | 302 | And that's a wrap! If you run into issues or have questions about Symphony or this tutorial, please open an issue on GitHub. 303 | ```{r} 304 | sessionInfo() 305 | ``` -------------------------------------------------------------------------------- /vignettes/utils.R: -------------------------------------------------------------------------------- 1 | fig.size <- function (height, width) { 2 | options(repr.plot.height = height, repr.plot.width = width) 3 | } 4 | 5 | # Colors for PBMCs 6 | pbmc_colors = c("B" = "#66C2A5", 7 | "DC" = "#FC8D62", 8 | "HSC" = "#8DA0CB", 9 | "MK" = "#E78AC3", 10 | "Mono_CD14" = "#A6D854", 11 | "Mono_CD16" = "#f2ec72", 12 | "NK" = "#62AAEA", 13 | "T_CD4" = "#D1C656", 14 | "T_CD8" = "#968763") 15 | 16 | # Colors for pancreas 17 | celltype.colors = c('alpha'="#ed2bb1", 18 | 'beta'="#239eb3", 19 | 'gamma'="#d1bfec", 20 | 'delta'= "#FF6347", 21 | 'stellate'="#11e38c", 22 | 'immune'="#812050", 23 | 'ductal'="#b2d27a", 24 | 'endothelial'="#4e2da6", 25 | 'acinar'="#f6bb86", 26 | 'schwann'="#115d52", 27 | 'epsilon'="#a1def0", 28 | 'mast'="#8fec2f") 29 | 30 | # Colors for pancreas query donors (Baron et al., 2016) 31 | querydonor.colors = c('human1' = '#b9dbf0', 32 | 'human2' = '#77a1ba', 33 | 'human3' = '#6c7ca8', 34 | 'human4' = '#364261', 35 | 'mouse1' = '#e68c8c', 36 | 'mouse2' = '#b35757') 37 | 38 | # Colors for fetal liver hematopoeisis 39 | group.colors = c( 'B cell'='#f2bd80', 40 | 'DC precursor'='#1d6d1f', 41 | 'DC1'='#8c3ba0', 42 | 'DC2'='#6533ed', 43 | 'Early Erythroid'='#83e3f0', 44 | 'Early lymphoid/T'='#fd5917', 45 | 'Endothelial cell'='#4f8c9d', 46 | 'Fibroblast'='#eb1fcb', 47 | 'Hepatocyte'='#f5cdaf', 48 | 'HSC_MPP'='#9698dc', 49 | 'ILC precursor'='#20f53d', 50 | 'Kupffer Cell'='#f283e3', 51 | 'Late Erythroid'='#ffb2be', 52 | 'Mast cell'='#f3d426', 53 | 'Megakaryocyte'='#5ebf72', 54 | 'MEMP'='#a67649', 55 | 'Mid Erythroid'='#2f5bb1', 56 | 'Mono-Mac'='#90a479', 57 | 'Monocyte'='#f6932e', 58 | 'Monocyte precursor'='#d59e9a', 59 | 'Neut-myeloid prog.'='#caf243', 60 | 'NK'='#38b5fc', 61 | 'pDC precursor'='#c82565', 62 | 'Pre pro B cell'='#d6061a', 63 | 'pre-B 
cell'='#e36f6f', 64 | 'pro-B cell'='#1dfee1', 65 | 'VCAM1+ EI macro.'='#506356', 66 | 'centroid' ='black') 67 | 68 | # Custom ordering to match original author publication ordering of states 69 | group.ordering = c("HSC_MPP", "Pre pro B cell", 'pro-B cell', 'pre-B cell', 'B cell', 70 | 'ILC precursor', 'Early lymphoid/T', 'NK', 'Neut-myeloid prog.', 71 | 'pDC precursor','DC precursor', 'DC1', 'DC2', 'Monocyte precursor', 'Monocyte', 72 | 'Mono-Mac', 'Kupffer Cell', 'VCAM1+ EI macro.', 'MEMP', 'Mast cell', 73 | 'Megakaryocyte', 'Early Erythroid', 'Mid Erythroid', 'Late Erythroid', 74 | 'Endothelial cell', 'Fibroblast', 'Hepatocyte') 75 | 76 | 77 | #' Basic function to plot cells, colored and faceted by metadata variables 78 | #' 79 | #' @param metadata metadata, with UMAP labels in UMAP1 and UMAP2 slots 80 | #' @param title Plot title 81 | #' @param color.by metadata column name for phenotype labels 82 | #' @param facet.by metadata column name for faceting 83 | #' @param color.mapping custom color mapping 84 | #' @param show.legend Show cell type legend 85 | 86 | plotBasic = function(umap_labels, # metadata, with UMAP labels in UMAP1 and UMAP2 slots 87 | title = 'Query', # Plot title 88 | color.by = 'cell_type', # metadata column name for coloring 89 | facet.by = NULL, # (optional) metadata column name for faceting 90 | color.mapping = NULL, # custom color mapping 91 | legend.position = 'right') { # Show cell type legend 92 | 93 | p = umap_labels %>% 94 | dplyr::sample_frac(1L) %>% # permute rows randomly 95 | ggplot(aes(x = UMAP1, y = UMAP2)) + 96 | geom_point_rast(aes(col = get(color.by)), size = 0.3, stroke = 0.2, shape = 16) 97 | if (!is.null(color.mapping)) { p = p + scale_color_manual(values = color.mapping) } 98 | 99 | # Default formatting 100 | p = p + theme_bw() + 101 | labs(title = title, color = color.by) + 102 | theme(plot.title = element_text(hjust = 0.5)) + 103 | theme(legend.position=legend.position) + 104 | theme(legend.text = element_text(size=8), legend.title=element_text(size=12)) + 105 | guides(colour = guide_legend(override.aes = list(size = 4))) + guides(alpha = 'none') 106 | 107 | if(!is.null(facet.by)) { 108 | p = p + facet_wrap(~get(facet.by)) + 109 | theme(strip.text.x = element_text(size = 12)) } 110 | return(p) 111 | } 112 | -------------------------------------------------------------------------------- /vignettes/utils_seurat.R: -------------------------------------------------------------------------------- 1 | buildReferenceFromSeurat <- function( 2 | obj, assay = 'RNA', verbose = TRUE, save_umap = TRUE, save_uwot_path = NULL 3 | ) { 4 | if(!assay %in% c('RNA', 'SCT')) { 5 | stop('Only supported assays are RNA or SCT.') 6 | } 7 | res <- list() 8 | ## TODO: check that these objects are all correctly initialized 9 | res$Z_corr <- t(obj@reductions$harmony@cell.embeddings) 10 | res$Z_orig <- t(obj@reductions$pca@cell.embeddings) 11 | message('Saved embeddings') 12 | 13 | res$R <- t(obj@reductions$harmony@misc$R[colnames(obj),]) # seurat does not subset misc slot 14 | message('Saved soft cluster assignments') 15 | 16 | if (assay == 'RNA') { 17 | vargenes_means_sds <- tibble( 18 | symbol = obj@assays[[assay]]@var.features, 19 | mean = Matrix::rowMeans(obj@assays[[assay]]@data[obj@assays[[assay]]@var.features, ]) 20 | ) 21 | vargenes_means_sds$stddev <- rowSDs( 22 | obj@assays[[assay]]@data[obj@assays[[assay]]@var.features, ], 23 | vargenes_means_sds$mean 24 | ) 25 | } else if (assay == 'SCT') { 26 | vargenes_means_sds <- tibble( 27 | symbol = 
obj@assays[[assay]]@var.features, 28 | mean = Matrix::rowMeans(obj@assays[[assay]]@scale.data[obj@assays[[assay]]@var.features, ]) 29 | ) 30 | asdgc = Matrix(obj@assays[[assay]]@scale.data[obj@assays[[assay]]@var.features, ], sparse = TRUE) 31 | vargenes_means_sds$stddev <- rowSDs( 32 | asdgc, 33 | vargenes_means_sds$mean 34 | ) 35 | } 36 | 37 | res$vargenes_means_sds <- vargenes_means_sds 38 | message('Saved variable gene information for ', nrow(vargenes_means_sds), ' genes.') 39 | 40 | res$loadings <- obj@reductions$pca@feature.loadings 41 | message('Saved PCA loadings.') 42 | 43 | res$meta_data <- obj@meta.data 44 | message('Saved metadata.') 45 | 46 | ## Check UMAP 47 | if (save_umap) { 48 | if (is.null(save_uwot_path)) { 49 | stop('Please provide a valid path to save_uwot_path in order to save uwot model.') 50 | } 51 | if (is.null(obj@reductions$umap@misc$model)) { 52 | stop('uwot model not initialized in Seurat object. Please do RunUMAP with umap.method=\'uwot\', return.model=TRUE first.') 53 | } 54 | res$umap <- obj@reductions$umap@misc$model 55 | res$save_uwot_path <- save_uwot_path 56 | if (file.exists(res$save_uwot_path)) { 57 | file.remove(res$save_uwot_path) 58 | } 59 | uwot::save_uwot(res$umap, save_uwot_path) 60 | } 61 | 62 | ## Build Reference! 63 | if (verbose) 64 | message("Calculate final L2 normalized reference centroids (Y_cos)") 65 | res$centroids = t(cosine_normalize_cpp(res$R %*% t(res$Z_corr), 1)) 66 | if (verbose) 67 | message("Calculate reference compression terms (Nr and C)") 68 | res$cache = compute_ref_cache(res$R, res$Z_corr) 69 | colnames(res$Z_orig) = row.names(res$meta_data) 70 | rownames(res$Z_orig) = paste0("PC_", seq_len(nrow(res$Z_corr))) 71 | colnames(res$Z_corr) = row.names(res$meta_data) 72 | rownames(res$Z_corr) = paste0("harmony_", seq_len(nrow(res$Z_corr))) 73 | 74 | if (verbose) 75 | message("Finished nicely.") 76 | return(res) 77 | } 78 | 79 | environment(buildReferenceFromSeurat) <- environment(symphony::buildReference) 80 | 81 | RunHarmony.Seurat <- function( 82 | object, 83 | group.by.vars, 84 | reduction = 'pca', 85 | dims.use = NULL, 86 | theta = NULL, 87 | lambda = NULL, 88 | sigma = 0.1, 89 | nclust = NULL, 90 | tau = 0, 91 | block.size = 0.05, 92 | max.iter.harmony = 10, 93 | max.iter.cluster = 20, 94 | epsilon.cluster = 1e-5, 95 | epsilon.harmony = 1e-4, 96 | plot_convergence = FALSE, 97 | verbose = TRUE, 98 | reference_values = NULL, 99 | reduction.save = "harmony", 100 | assay.use = 'RNA', 101 | project.dim = TRUE, 102 | ... 103 | ) { 104 | if (reduction == "pca") { 105 | tryCatch( 106 | embedding <- Seurat::Embeddings(object, reduction = "pca"), 107 | error = function(e) { 108 | if (verbose) { 109 | message("Harmony needs PCA. Trying to run PCA now.") 110 | } 111 | tryCatch( 112 | object <- Seurat::RunPCA( 113 | object, 114 | assay = assay.use, verbose = verbose 115 | ), 116 | error = function(e) { 117 | stop("Harmony needs PCA. Tried to run PCA and failed.") 118 | } 119 | ) 120 | } 121 | ) 122 | } else { 123 | available.dimreduc <- names(methods::slot(object = object, name = "reductions")) 124 | if (!(reduction %in% available.dimreduc)) { 125 | stop("Requested dimension reduction is not present in the Seurat object") 126 | } 127 | embedding <- Seurat::Embeddings(object, reduction = reduction) 128 | } 129 | if (is.null(dims.use)) { 130 | dims.use <- seq_len(ncol(embedding)) 131 | } 132 | dims_avail <- seq_len(ncol(embedding)) 133 | if (!all(dims.use %in% dims_avail)) { 134 | stop("trying to use more dimensions than computed. 
Rereun dimension reduction 135 | with more dimensions or run Harmony with fewer dimensions") 136 | } 137 | if (length(dims.use) == 1) { 138 | stop("only specified one dimension in dims.use") 139 | } 140 | metavars_df <- Seurat::FetchData(object, group.by.vars) 141 | 142 | harmonyObject <- HarmonyMatrix( 143 | embedding, 144 | metavars_df, 145 | group.by.vars, 146 | FALSE, 147 | 0, 148 | theta, 149 | lambda, 150 | sigma, 151 | nclust, 152 | tau, 153 | block.size, 154 | max.iter.harmony, 155 | max.iter.cluster, 156 | epsilon.cluster, 157 | epsilon.harmony, 158 | plot_convergence, 159 | TRUE, 160 | verbose, 161 | reference_values 162 | ) 163 | 164 | harmonyEmbed <- t(as.matrix(harmonyObject$Z_corr)) 165 | rownames(harmonyEmbed) <- row.names(embedding) 166 | colnames(harmonyEmbed) <- paste0(reduction.save, "_", seq_len(ncol(harmonyEmbed))) 167 | 168 | harmonyClusters <- t(harmonyObject$R) 169 | rownames(harmonyClusters) <- row.names(embedding) 170 | colnames(harmonyClusters) <- paste0('R', seq_len(ncol(harmonyClusters))) 171 | 172 | suppressWarnings({ 173 | harmonydata <- Seurat::CreateDimReducObject( 174 | embeddings = harmonyEmbed, 175 | stdev = as.numeric(apply(harmonyEmbed, 2, stats::sd)), 176 | assay = assay.use, 177 | key = reduction.save, 178 | misc=list(R=harmonyClusters) 179 | ) 180 | }) 181 | 182 | object[[reduction.save]] <- harmonydata 183 | if (project.dim) { 184 | object <- Seurat::ProjectDim( 185 | object, 186 | reduction = reduction.save, 187 | overwrite = TRUE, 188 | verbose = FALSE 189 | ) 190 | } 191 | return(object) 192 | } 193 | 194 | environment(RunHarmony.Seurat) <- environment(harmony::HarmonyMatrix) 195 | 196 | RunUMAP2 <- function (object, reduction.key = "UMAP_", assay = NULL, reduction.model = NULL, 197 | return.model = FALSE, umap.method = "uwot", n.neighbors = 30L, 198 | n.components = 2L, metric = "cosine", n.epochs = NULL, learning.rate = 1, 199 | min.dist = 0.3, spread = 1, set.op.mix.ratio = 1, local.connectivity = 1L, 200 | repulsion.strength = 1, negative.sample.rate = 5, a = NULL, 201 | b = NULL, uwot.sgd = FALSE, seed.use = 42, metric.kwds = NULL, 202 | angular.rp.forest = FALSE, verbose = TRUE, ...) 203 | { 204 | CheckDots(...) 205 | if (!is.null(x = seed.use)) { 206 | set.seed(seed = seed.use) 207 | } 208 | if (umap.method != "umap-learn" && getOption("Seurat.warn.umap.uwot", 209 | TRUE)) { 210 | warning("The default method for RunUMAP has changed from calling Python UMAP via reticulate to the R-native UWOT using the cosine metric", 211 | "\nTo use Python UMAP via reticulate, set umap.method to 'umap-learn' and metric to 'correlation'", 212 | "\nThis message will be shown once per session", 213 | call. = FALSE, immediate. = TRUE) 214 | options(Seurat.warn.umap.uwot = FALSE) 215 | } 216 | if (umap.method == "uwot-learn") { 217 | warning("'uwot-learn' is deprecated. 
Set umap.method = 'uwot' and return.model = TRUE") 218 | umap.method <- "uwot" 219 | return.model <- TRUE 220 | } 221 | if (return.model) { 222 | if (verbose) { 223 | message("UMAP will return its model") 224 | } 225 | umap.method = "uwot" 226 | } 227 | if (inherits(x = object, what = "Neighbor")) { 228 | object <- list(idx = Indices(object), dist = Distances(object)) 229 | } 230 | if (!is.null(x = reduction.model)) { 231 | if (verbose) { 232 | message("Running UMAP projection") 233 | } 234 | umap.method <- "uwot-predict" 235 | } 236 | umap.output <- switch(EXPR = umap.method, `umap-learn` = { 237 | if (!py_module_available(module = "umap")) { 238 | stop("Cannot find UMAP, please install through pip (e.g. pip install umap-learn).") 239 | } 240 | if (!is.null(x = seed.use)) { 241 | py_set_seed(seed = seed.use) 242 | } 243 | if (typeof(x = n.epochs) == "double") { 244 | n.epochs <- as.integer(x = n.epochs) 245 | } 246 | umap_import <- import(module = "umap", delay_load = TRUE) 247 | umap <- umap_import$UMAP(n_neighbors = as.integer(x = n.neighbors), 248 | n_components = as.integer(x = n.components), metric = metric, 249 | n_epochs = n.epochs, learning_rate = learning.rate, 250 | min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, 251 | local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, 252 | negative_sample_rate = negative.sample.rate, a = a, 253 | b = b, metric_kwds = metric.kwds, angular_rp_forest = angular.rp.forest, 254 | verbose = verbose) 255 | umap$fit_transform(as.matrix(x = object)) 256 | }, uwot = { 257 | if (metric == "correlation") { 258 | warning("UWOT does not implement the correlation metric, using cosine instead", 259 | call. = FALSE, immediate. = TRUE) 260 | metric <- "cosine" 261 | } 262 | if (is.list(x = object)) { 263 | umap(X = NULL, nn_method = object, n_threads = nbrOfWorkers(), 264 | n_components = as.integer(x = n.components), 265 | metric = metric, n_epochs = n.epochs, learning_rate = learning.rate, 266 | min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, 267 | local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, 268 | negative_sample_rate = negative.sample.rate, 269 | a = a, b = b, fast_sgd = uwot.sgd, verbose = verbose, 270 | ret_model = return.model) 271 | } else { 272 | umap(X = object, n_threads = nbrOfWorkers(), n_neighbors = as.integer(x = n.neighbors), 273 | n_components = as.integer(x = n.components), 274 | metric = metric, n_epochs = n.epochs, learning_rate = learning.rate, 275 | min_dist = min.dist, spread = spread, set_op_mix_ratio = set.op.mix.ratio, 276 | local_connectivity = local.connectivity, repulsion_strength = repulsion.strength, 277 | negative_sample_rate = negative.sample.rate, 278 | a = a, b = b, fast_sgd = uwot.sgd, verbose = verbose, 279 | ret_model = return.model) 280 | } 281 | }, `uwot-predict` = { 282 | if (metric == "correlation") { 283 | warning("UWOT does not implement the correlation metric, using cosine instead", 284 | call. = FALSE, immediate. = TRUE) 285 | metric <- "cosine" 286 | } 287 | if (is.null(x = reduction.model) || !inherits(x = reduction.model, 288 | what = "DimReduc")) { 289 | stop("If running projection UMAP, please pass a DimReduc object with the model stored to reduction.model.", 290 | call. = FALSE) 291 | } 292 | model <- Misc(object = reduction.model, slot = "model") 293 | if (length(x = model) == 0) { 294 | stop("The provided reduction.model does not have a model stored. 
Please try running umot-learn on the object first", 295 | call. = FALSE) 296 | } 297 | if (is.list(x = object)) { 298 | uwot::umap_transform(X = NULL, nn_method = object, 299 | model = model, n_threads = nbrOfWorkers(), n_epochs = n.epochs, 300 | verbose = verbose) 301 | } else { 302 | umap_transform(X = object, model = model, n_threads = nbrOfWorkers(), 303 | n_epochs = n.epochs, verbose = verbose) 304 | } 305 | }, stop("Unknown umap method: ", umap.method, call. = FALSE)) 306 | if (return.model) { 307 | # umap.output$nn_index <- NULL 308 | umap.model <- umap.output 309 | umap.output <- umap.output$embedding 310 | } 311 | colnames(x = umap.output) <- paste0(reduction.key, 1:ncol(x = umap.output)) 312 | if (inherits(x = object, what = "dist")) { 313 | rownames(x = umap.output) <- attr(x = object, "Labels") 314 | } 315 | else if (is.list(x = object)) { 316 | rownames(x = umap.output) <- rownames(x = object$idx) 317 | } 318 | else { 319 | rownames(x = umap.output) <- rownames(x = object) 320 | } 321 | umap.reduction <- CreateDimReducObject(embeddings = umap.output, 322 | key = reduction.key, assay = assay, global = TRUE) 323 | if (return.model) { 324 | Misc(umap.reduction, slot = "model") <- umap.model 325 | } 326 | return(umap.reduction) 327 | } 328 | 329 | 330 | environment(RunUMAP2) <- environment(Seurat:::RunUMAP.default) 331 | 332 | mapQuery <- function (exp_query, metadata_query, ref_obj, vars = NULL, verbose = TRUE, 333 | do_normalize = TRUE, do_umap = TRUE, sigma = 0.1, return_type = c('symphony', 'Seurat')) 334 | { 335 | if (return_type == 'Seurat') { 336 | que <- Seurat::CreateSeuratObject( 337 | counts=exp_query, 338 | meta.data=metadata_query, 339 | assay='SymphonyQuery' 340 | ) 341 | } 342 | 343 | if (do_normalize) { 344 | if (verbose) 345 | message("Normalizing") 346 | exp_query = normalizeData(exp_query, 10000, "log") 347 | } 348 | if (verbose) 349 | message("Scaling and synchronizing query gene expression") 350 | idx_shared_genes = which(ref_obj$vargenes$symbol %in% rownames(exp_query)) 351 | shared_genes = ref_obj$vargenes$symbol[idx_shared_genes] 352 | if (verbose) 353 | message("Found ", length(shared_genes), " reference variable genes in query dataset") 354 | exp_query_scaled = scaleDataWithStats(exp_query[shared_genes, 355 | ], ref_obj$vargenes$mean[idx_shared_genes], ref_obj$vargenes$stddev[idx_shared_genes], 356 | 1) 357 | exp_query_scaled_sync = matrix(0, nrow = length(ref_obj$vargenes$symbol), 358 | ncol = ncol(exp_query)) 359 | exp_query_scaled_sync[idx_shared_genes, ] = exp_query_scaled 360 | rownames(exp_query_scaled_sync) = ref_obj$vargenes$symbol 361 | colnames(exp_query_scaled_sync) = colnames(exp_query) 362 | if (verbose) 363 | message("Project query cells using reference gene loadings") 364 | Z_pca_query = t(ref_obj$loadings) %*% exp_query_scaled_sync 365 | if (verbose) 366 | message("Clustering query cells to reference centroids") 367 | Z_pca_query_cos = cosine_normalize_cpp(Z_pca_query, 2) 368 | R_query = soft_cluster(ref_obj$centroids, Z_pca_query_cos, 369 | sigma) 370 | if (verbose) 371 | message("Correcting query batch effects") 372 | if (!is.null(vars)) { 373 | design = droplevels(metadata_query)[, vars] %>% as.data.frame() 374 | onehot = design %>% purrr::map(function(.x) { 375 | if (length(unique(.x)) == 1) { 376 | rep(1, length(.x)) 377 | } 378 | else { 379 | stats::model.matrix(~0 + .x) 380 | } 381 | }) %>% purrr::reduce(cbind) 382 | Xq = cbind(1, intercept = onehot) %>% t() 383 | } 384 | else { 385 | Xq = Matrix(rbind(rep(1, ncol(Z_pca_query)), 
rep(1, ncol(Z_pca_query))), 386 | sparse = TRUE) 387 | } 388 | Zq_corr = moe_correct_ref(as.matrix(Z_pca_query), as.matrix(Xq), 389 | as.matrix(R_query), as.matrix(ref_obj$cache[[1]]), as.matrix(ref_obj$cache[[2]])) 390 | colnames(Z_pca_query) = row.names(metadata_query) 391 | rownames(Z_pca_query) = paste0("PC_", seq_len(nrow(Zq_corr))) 392 | colnames(Zq_corr) = row.names(metadata_query) 393 | rownames(Zq_corr) = paste0("harmony_", seq_len(nrow(Zq_corr))) 394 | umap_query = NULL 395 | if (do_umap & !is.null(ref_obj$save_uwot_path)) { 396 | if (verbose) 397 | message("UMAP") 398 | ref_umap_model = uwot::load_uwot(ref_obj$save_uwot_path, 399 | verbose = FALSE) 400 | 401 | ## UMAP may have been learned on subset of columns 402 | umap_query = uwot::umap_transform(t(Zq_corr)[, 1:ref_umap_model$norig_col], ref_umap_model) 403 | # umap_query = uwot::umap_transform(t(Zq_corr), ref_umap_model) 404 | colnames(umap_query) = c("UMAP1", "UMAP2") 405 | rownames(umap_query) <- row.names(metadata_query) 406 | } 407 | if (verbose) 408 | message("All done!") 409 | 410 | if (return_type == 'Seurat') { 411 | que@assays$SymphonyQuery@data <- exp_query 412 | que@assays$SymphonyQuery@scale.data <- exp_query_scaled_sync 413 | que[['pca']] <- Seurat::CreateDimReducObject( 414 | embeddings = t(Z_pca_query), 415 | loadings = ref_obj$loadings, 416 | stdev = as.numeric(apply(Z_pca_query, 1, stats::sd)), 417 | assay = 'SymphonyQuery', 418 | key = 'pca_' 419 | ) 420 | que[['harmony']] <- Seurat::CreateDimReducObject( 421 | embeddings = t(Zq_corr), 422 | stdev = as.numeric(apply(Zq_corr, 1, stats::sd)), 423 | assay = 'SymphonyQuery', 424 | key = 'harmony_', 425 | misc=list(R=R_query) 426 | ) 427 | que <- Seurat::ProjectDim(que, reduction = 'harmony', overwrite = TRUE, verbose = FALSE) 428 | if (do_umap) { 429 | que[['umap']] <- Seurat::CreateDimReducObject( 430 | embeddings = umap_query, 431 | assay = 'SymphonyQuery', 432 | key = 'umap_' 433 | ) 434 | } 435 | return(que) 436 | } else if (return_type == 'symphony') { 437 | return(list(Z = Zq_corr, Zq_pca = Z_pca_query, R = R_query, 438 | Xq = Xq, umap = umap_query, meta_data = metadata_query)) 439 | } else { 440 | stop(glue('The return type = \"{return_type}\" is not available.')) 441 | } 442 | 443 | } 444 | 445 | environment(mapQuery) <- environment(symphony::mapQuery) 446 | 447 | knnPredict.Seurat <- function(query_obj, ref_obj, label_transfer, k = 5, confidence = TRUE, seed = 0) 448 | { 449 | set.seed(seed) 450 | if (!label_transfer %in% colnames(ref_obj$meta_data)) { 451 | stop('Label \"{label_transfer}\" is not available in the reference metadata.') 452 | } 453 | 454 | if (confidence) { 455 | knn_pred <- class::knn(t(ref_obj$Z_corr), Embeddings(query_obj, 'harmony'), 456 | ref_obj$meta_data[[label_transfer]], k = k, prob = TRUE) 457 | knn_prob = attributes(knn_pred)$prob 458 | query_obj@meta.data[[label_transfer]] <- knn_pred 459 | query_obj@meta.data[paste0(label_transfer, '_prob')] = knn_prob 460 | } else { 461 | knn_pred <- class::knn(t(ref_obj$Z_corr), Embeddings(query_obj, 'harmony'), 462 | ref_obj$meta_data[[label_transfer]], k = k, prob = FALSE) 463 | query_obj@meta.data[[label_transfer]] <- knn_pred 464 | } 465 | return(query_obj) 466 | } 467 | --------------------------------------------------------------------------------
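# Illustrative end-to-end use of the helpers defined in vignettes/utils_seurat.R above.
# This is a sketch, not shipped code: the objects `ref` (a Seurat object that has already
# been normalized, had variable features selected, and been scaled), `query_exp`,
# `query_metadata`, the 'donor' batch variable, the './uwot_model' path, and the
# 'cell_type' label column are all assumptions for the example.
library(Seurat)
library(symphony)
library(harmony)
source('vignettes/utils_seurat.R')

# Reference: PCA, Harmony integration, then UMAP on the harmonized embedding
# (keeping the uwot model so query cells can be projected later)
ref <- Seurat::RunPCA(ref, npcs = 20, verbose = FALSE)
ref <- RunHarmony.Seurat(ref, group.by.vars = 'donor')
ref[['umap']] <- RunUMAP2(Seurat::Embeddings(ref, 'harmony'), assay = 'RNA', return.model = TRUE)

# Compress the Seurat object into a Symphony reference
reference <- buildReferenceFromSeurat(ref, assay = 'RNA',
                                      save_umap = TRUE, save_uwot_path = './uwot_model')

# Map a query and transfer labels with k-NN
query <- mapQuery(query_exp, query_metadata, reference,
                  do_normalize = TRUE, do_umap = TRUE, return_type = 'Seurat')
query <- knnPredict.Seurat(query, reference, label_transfer = 'cell_type')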