├── README.md
├── LICENSE
├── single_cell_analysis_environment.yml
├── spatial_analysis_environment.yml
├── spatial-c2l-cell-type-reference-mapping.py
├── spatial-dotplot-ar-basal-club-markers.py
├── spatial-dotplot-chemokine-expression.py
├── spatial-gene-set-scoring.py
├── spatial-qc-and-normalization.ipynb
├── spatial-region-ligand-receptor-signaling-analysis.py
├── spatial-to-pseudobulk.py
└── single-cell
    ├── single-cell-generate-c2l-celltype-reference.py
    ├── single-cell-nmf-analysis.py
    ├── single-cell-preprocessing.ipynb
    ├── single-cell-quality-control.ipynb
    └── single-cell-scvi-integrate.py
/spatial-to-pseudobulk.py:
--------------------------------------------------------------------------------
1 | # Author: Antti Kiviaho
2 | # Date: 14.2.2024
3 | #
4 | # Updated 1.3.2024
5 | # Concatenating Visium sections into a pseudobulk for later testing
6 |
7 | import os
8 | os.chdir('/lustre/scratch/kiviaho/prostate_spatial/')
9 |
10 | import scanpy as sc
11 | import numpy as np
12 | import pandas as pd
13 | from tqdm import tqdm
14 |
15 | from utils import get_sample_ids_reorder
16 |
17 |
18 | samples = get_sample_ids_reorder()
19 | # Create a dataframe to collect the per-sample pseudobulk counts
20 | pseudobulk_df = pd.DataFrame()
21 |
22 | for s in tqdm(samples,desc='Processing samples'):
23 |
24 | # Load the slide from disk
25 | slide = sc.read_h5ad('./data/visium_with_regions/'+s+'_with_regions.h5ad')
26 |
27 | # Calculate a sum over the spots to get to a pseudobulk
28 | s_pseudobulk_df = pd.DataFrame(slide.layers['counts'].sum(axis=0).T,index=slide.var.index,columns=[s])
29 |
30 | # Merge to create a dataframe with samples as columns, genes as index
31 | pseudobulk_df = pd.merge(pseudobulk_df,s_pseudobulk_df,left_index=True,right_index=True,how='outer')
32 |
33 | # Fill empty
34 | pseudobulk_df = pseudobulk_df.fillna(0)
35 |
36 | pseudobulk_df.to_csv('data/spatial_pseudobulk_unnormalized.csv')
37 |
--------------------------------------------------------------------------------
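
Note: the scripts in this repository import `save_to_pickle` / `load_from_pickle` from a local `utils` module that is not included in this listing. `single-cell-nmf-analysis.py` defines `save_to_pickle` inline; a minimal sketch of both helpers, assuming the same pickle protocol, is:

```python
import pickle

def save_to_pickle(obj, filename):
    # Serialize an arbitrary Python object with the highest available protocol
    with open(filename, 'wb') as handle:
        pickle.dump(obj, handle, protocol=pickle.HIGHEST_PROTOCOL)

def load_from_pickle(filename):
    # Inverse of save_to_pickle: read the object back from disk
    with open(filename, 'rb') as handle:
        return pickle.load(handle)
```
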
/spatial-gene-set-scoring.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.chdir('/lustre/scratch/kiviaho/prostate_spatial/')
3 |
4 | import scanpy as sc
5 | import numpy as np
6 | import pandas as pd
7 |
8 | from tqdm import tqdm
9 | from utils import get_sample_ids_reorder, save_to_pickle
10 | samples = get_sample_ids_reorder()
11 |
12 | import warnings
13 | warnings.filterwarnings("ignore")
14 |
15 | if __name__ == '__main__':
16 |
17 | ######### Scanpy scoring method ###########
18 |
19 | gene_set_df = pd.read_excel('gene_sets_of_interest.xlsx',header=None).drop(columns=0).set_index(1).T
20 | custom_gene_set_scanpy_scores = {}
21 |
22 | # Calculate the scanpy scores for custom gene sets
23 | for sample in tqdm(samples, desc="Processing sample", unit="sample"):
24 | slide = sc.read_h5ad('./data/visium_with_regions/'+sample+'_with_regions.h5ad')
25 |
26 | for col in gene_set_df.columns:
27 | sc.tl.score_genes(slide,gene_set_df[col].dropna(),score_name=col,random_state=2531035)
28 |
29 | custom_gene_set_scanpy_scores[sample] = slide.obs[gene_set_df.columns].copy()
30 |
31 | # Save the dict object
32 | save_to_pickle(custom_gene_set_scanpy_scores,'./data/spatial_scanpy_score_results.pkl')
33 |
34 | print('scoring done!')
--------------------------------------------------------------------------------
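
Note: the `read_excel(...).drop(columns=0).set_index(1).T` chain above implies a fixed layout for `gene_sets_of_interest.xlsx`: no header row, a discarded first column, the gene set name in the second column, and member genes in the remaining columns. A minimal sketch with hypothetical gene sets illustrating the parse:

```python
import pandas as pd

# Hypothetical rows mirroring the expected sheet layout:
# [discarded label, gene set name, gene 1, gene 2, ...]
rows = [
    ['ref1', 'club_markers', 'MMP7', 'PIGR', 'LTF', 'SCGB1A1'],
    ['ref2', 'basal_markers', 'KRT5', 'KRT15', 'TP63', None],
]
raw = pd.DataFrame(rows)

# Same transformation as in spatial-gene-set-scoring.py:
# gene sets become columns, genes fill the rows (NaN-padded)
gene_set_df = raw.drop(columns=0).set_index(1).T
print(gene_set_df['club_markers'].dropna().tolist())
# ['MMP7', 'PIGR', 'LTF', 'SCGB1A1']
```
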
/single-cell/single-cell-scvi-integrate.py:
--------------------------------------------------------------------------------
1 | from utils import load_from_pickle, save_to_pickle
2 | import scanpy as sc
3 | import anndata as ad
4 | import scib
5 | import scvi
6 | import seaborn as sns
7 | import torch
8 | import os
9 | from matplotlib import pyplot as plt
10 | from datetime import datetime
11 |
12 | # Author: Antti Kiviaho
13 | # Date: 27.2.2023
14 | #
15 | # A script for running scvi integration on single cell datasets
16 | # Dong 2020, Chen 2021, Cheng 2022, Chen 2022, Song 2022, Wong 2022, Hirz 2023
17 |
18 | if __name__ == "__main__":
19 |
20 | current_date = datetime.today().strftime('%Y%m%d')
21 |
22 | ###############
23 |
24 | # Load the data
25 | adata = load_from_pickle('./sc-reference/normalized_sc_7_datasets.pickle')
26 | adata = ad.concat(adata)
27 |     adata.obs_names_make_unique() # Some duplicate indices persist
28 |
29 | # Preprocess and scale
30 | adata.obs.dataset = adata.obs.dataset.astype('category')
31 |     adata = scib.preprocessing.scale_batch(adata,batch='dataset') # scale_batch returns the scaled object
32 | print('Scaling done...')
33 |
34 | adata.raw = adata
35 |
36 | adata = scib.preprocessing.hvg_batch(adata,batch_key='dataset',target_genes=2000,flavor='seurat',adataOut=True)
37 | print('HVGs calculated...')
38 |
39 | print('CUDA is available: ' + str(torch.cuda.is_available()))
40 | print('GPUs available: ' + str(torch.cuda.device_count()))
41 |
42 | print('Initiating training on GPU ...')
43 | scvi.model.SCVI.setup_anndata(adata, layer="counts", batch_key="dataset")
44 | vae = scvi.model.SCVI(adata, n_layers=2, n_latent=30, gene_likelihood="nb")
45 |
46 | vae.train(use_gpu=True)
47 |
48 | vae.save('scvi_model_'+current_date)
49 | adata.obsm["X_scVI"] = vae.get_latent_representation()
50 |
51 | sc.pp.neighbors(adata, use_rep="X_scVI",random_state=745634)
52 | sc.tl.umap(adata,random_state=745634)
53 | sc.tl.leiden(adata, key_added="VI_clusters")
54 | print('NN graph, UMAP & Leiden ready...')
55 |
56 | save_to_pickle(adata,'scvi_integrated_7_sc_datasets_'+current_date+'.pkl')
57 | print('SCVI integration saved...')
58 |
--------------------------------------------------------------------------------
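
Note: the saved model directory can be reloaded later without retraining. A minimal sketch, assuming a matching scvi-tools version and a hypothetical run date in the file names:

```python
import scvi
from utils import load_from_pickle

# Reload the integrated AnnData and the trained model (date suffix is hypothetical)
adata = load_from_pickle('scvi_integrated_7_sc_datasets_20230227.pkl')
vae = scvi.model.SCVI.load('scvi_model_20230227', adata=adata)

# The latent representation can be recomputed instead of relying on the stored one
adata.obsm['X_scVI'] = vae.get_latent_representation()
```
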
/single-cell/single-cell-generate-c2l-celltype-reference.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.chdir('/lustre/scratch/kiviaho/prostate_spatial')
3 |
4 | import scanpy as sc
5 | import numpy as np
6 | import matplotlib.pyplot as plt
7 | import matplotlib as mpl
8 |
9 |
10 | os.environ["THEANO_FLAGS"] = 'device=cuda,floatX=float32,force_device=True'
11 | import cell2location
12 |
13 | from matplotlib import rcParams
14 | rcParams['pdf.fonttype'] = 42 # enables correct plotting of text for PDFs
15 |
16 |
17 | if __name__ == '__main__':
18 |
19 | #########
20 |
21 |     # NB: adjust the model setup (layer & labels) before re-running!
22 |
23 | # Change these paths when re-running
24 | results_folder = './c2l-results/cell2location_map_20230908'
25 | adata_ref_path = './single_cell_reference_with_nmf_derived_annotations_20230908.h5ad'
26 | ########
27 |
28 | # create paths and names to results folders for reference regression and cell2location models
29 |     ref_run_name = results_folder + '/reference_signatures/'
30 |
31 |
32 | # Check if paths exist, and create them if not
33 | if not os.path.exists(results_folder):
34 | os.makedirs(results_folder)
35 | if not os.path.exists(ref_run_name):
36 | os.makedirs(ref_run_name)
37 |
38 | adata_ref = sc.read_h5ad(adata_ref_path)
39 |
40 | # prepare anndata for the regression model
41 | cell2location.models.RegressionModel.setup_anndata(adata=adata_ref,
42 | # layer='counts',
43 | # 10X reaction / sample / batch
44 | batch_key='dataset',
45 | # cell type, covariate used for constructing signatures
46 | labels_key='final_annotation',
47 | categorical_covariate_keys=['sample']
48 | )
49 |
50 |
51 | # create the regression model
52 | from cell2location.models import RegressionModel
53 | mod = RegressionModel(adata_ref)
54 |
55 | # view anndata_setup as a sanity check
56 | mod.view_anndata_setup()
57 | mod.train(max_epochs=250, use_gpu=True)
58 |
59 | # In this section, we export the estimated cell abundance (summary of the posterior distribution).
60 | adata_ref = mod.export_posterior(
61 | adata_ref, sample_kwargs={'num_samples': 1000, 'batch_size': 2500, 'use_gpu': True}
62 | )
63 |
64 | # Save model
65 | mod.save(ref_run_name, overwrite=True)
66 |
67 | # Save anndata object with results
68 | adata_file = ref_run_name + 'sc_reference_signatures.h5ad'
69 | adata_ref.write(adata_file)
70 |     print(adata_file)
71 |
72 | mod.plot_history(20)
73 | plt.savefig(results_folder+'c2l_single_cell_reference_training_history.png')
74 | plt.clf()
75 |
--------------------------------------------------------------------------------
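
Note: downstream mapping (see spatial-c2l-cell-type-reference-mapping.py) only needs the exported `sc_reference_signatures.h5ad`. If the regression model itself must be revisited, a minimal reload sketch, assuming the paths defined above and the standard scvi-tools load signature:

```python
import scanpy as sc
import cell2location

ref_run_name = './c2l-results/cell2location_map_20230908/reference_signatures/'

# Reload the exported reference AnnData and the trained regression model
adata_ref = sc.read_h5ad(ref_run_name + 'sc_reference_signatures.h5ad')
mod = cell2location.models.RegressionModel.load(ref_run_name, adata_ref)
```
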
/README.md:
--------------------------------------------------------------------------------
1 | ### Single cell and spatial transcriptomics highlight the interaction of club-like cells with immunosuppressive myeloid cells in prostate cancer
2 |
3 | [Link to the study](https://doi.org/10.1038/s41467-024-54364-1)
4 |
5 | **Author:** Antti Kiviaho
6 |
7 | **Email:** antti.kiviaho@tuni.fi
8 |
9 | **Last modified:** 29.11.2024
10 |
11 | This repository contains the code necessary to reproduce the results presented in the manuscript.
12 |
13 | ### Package versions:
14 | - **single_cell_analysis_environment.yml** – Contains package versions used in the analysis of single-cell data.
15 | - **spatial_analysis_environment.yml** – Contains package versions used in the analysis of spatial transcriptomics data.
16 |
17 | ## Spatial Transcriptomics data
18 |
19 | Files related to spatial transcriptomics (ST) data analysis.
20 |
21 | ### Data preprocessing and computations
22 |
23 | - **spatial-qc-and-normalization.ipynb** – Quality control and preprocessing of spatial transcriptomics data.
24 |
25 | - **spatial-c2l-cell-type-reference-mapping.py** – Spatial transcriptomics data deconvolution using the cell type reference created from single-cell data.
26 |
27 | - **spatial-post-c2l-cell-type-mapping.ipynb** – Division of spatial transcriptomics data into single-cell mapping-based (SCM) regions.
28 |
29 | - **spatial-gene-set-scoring.py** – Gene set scoring on spatial data.
30 |
31 | - **spatial-region-ligand-receptor-signaling-analysis.py** – Ligand-receptor interaction analysis.
32 |
33 | - **spatial-to-pseudobulk.py** – Generating pseudobulk expression data from spatial transcriptomics data.
34 |
35 | ### Data analysis and plotting results
36 |
37 | - **spatial-gene-expression-analysis.ipynb** – Gene expression analysis and plots of spatial transcriptomics data (Figures 1c, 2d).
38 |
39 | - **spatial-gene-set-score-analysis.ipynb** – Gene set scoring-based plotting (Figures 3a, 3b, 3c, 4a, 4b, 4c, 4d).
40 |
41 | - **spatial-dotplot-ar-basal-club-markers.py** – Dotplot generation (Figure 2c).
42 |
43 | - **spatial-dotplot-chemokine-expression.py** – Dotplot generation (Figure 3d).
44 |
45 | - **multiplex_ihc_staining_analysis.ipynb** – Plots from mIHC cell classifier results (Figures 4h, 4i).
46 |
47 | - **spatial-mapping-based-clusters-receptor-ligand-analyses.ipynb** – Ligand-receptor interaction analysis-based plotting (Supplementary Figure 8).
48 |
49 | - **spatial-metastatic-tumor-sample-analysis.ipynb** – Analysis of metastatic prostate cancer spatial transcriptomics samples (Figures 5d, 5e).
50 |
51 | - **spatial-pseudobulk-data-analysis.ipynb** – Pseudobulk spatial transcriptomics data analysis and plots (Figures 5f, 5i).
52 |
53 | - **public-sc-and-bulk-data-analysis.ipynb** – Analysis of public single-cell data (He et al. 2021) and of TCGA and SU2C bulk gene expression data (Figures 5a, 5b, 5g, 5h).
54 |
55 | ## Single-cell data analysis
56 |
57 | Files related to single-cell data used in the article.
58 |
59 | - **single-cell-preprocessing.ipynb** – Preprocessing of single-cell datasets into a uniform format.
60 |
61 | - **single-cell-quality-control.ipynb** – Gene filtering, doublet removal, and normalization steps carried out on each dataset individually.
62 |
63 | - **single-cell-scvi-integrate.py** – scVI-based integration of 7 preprocessed single-cell datasets to find a common embedding.
64 |
65 | - **single-cell-post-integration.ipynb** – Gene marker-based cell type annotation on the integrated dataset. Removal of sample-specific clusters.
66 |
67 | - **single-cell-nmf-analysis.py** – Non-negative matrix factorization-based annotation of cell type-specific gene expression modules.
68 |
69 | - **single-cell-immune-celltypist.ipynb** – CellTypist-based annotation of cells labeled as immune in the previous, broad annotation round.
70 |
71 | - **single-cell-gene-module-based-annotation.ipynb** – Non-immune cell subtype annotation based on the NMF gene expression modules.
72 |
73 | - **single-cell-generate-c2l-celltype-reference.py** – Annotation-based regression of cell2location-compatible cell type signatures.
74 |
--------------------------------------------------------------------------------
/spatial-c2l-cell-type-reference-mapping.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.chdir('/lustre/scratch/kiviaho/prostate_spatial')
3 |
4 | import scanpy as sc
5 | import numpy as np
6 | import matplotlib.pyplot as plt
7 | import matplotlib as mpl
8 |
9 |
10 | os.environ["THEANO_FLAGS"] = 'device=cuda,floatX=float32,force_device=True'
11 | import cell2location
12 |
13 | from matplotlib import rcParams
14 | rcParams['pdf.fonttype'] = 42 # enables correct plotting of text for PDFs
15 |
16 | from utils import load_from_pickle
17 |
18 | results_folder = './c2l-results/'
19 | date = '20240125'
20 | # create paths and names to results folders for reference regression and cell2location models
21 | run_name = results_folder + 'cell2location_map_'+ date + '/'
22 |
23 |
24 |
25 | if __name__ == '__main__':
26 |
27 | # Load the single-cell cell type reference: export estimated expression in 'cell type'
28 | # cell2location_map_20230908/reference_signatures is the correct one
29 | adata_ref = sc.read_h5ad('c2l-results/cell2location_map_20230908/reference_signatures/sc_reference_signatures.h5ad')
30 |
31 | if 'means_per_cluster_mu_fg' in adata_ref.varm.keys():
32 | inf_aver = adata_ref.varm['means_per_cluster_mu_fg'][[f'means_per_cluster_mu_fg_{i}'
33 | for i in adata_ref.uns['mod']['factor_names']]].copy()
34 | else:
35 | inf_aver = adata_ref.var[[f'means_per_cluster_mu_fg_{i}'
36 | for i in adata_ref.uns['mod']['factor_names']]].copy()
37 | inf_aver.columns = adata_ref.uns['mod']['factor_names']
38 |
39 | del adata_ref
40 |
41 |
42 | # Load visium data and set it up properly (raw, unnormalized counts)
43 | adata_vis_individually = load_from_pickle('./data/normalized_no_pathology_filter_visium_data.pkl')
44 |
45 | # ADDED 25.1.2024 – concatenate also the available ARNEO samples
46 | adata_vis_arneo = load_from_pickle('./arneo/data/normalized_arneo_visium_data.pkl')
47 | adata_vis_individually.update(adata_vis_arneo)
48 |
49 | adata_vis = sc.concat(adata_vis_individually)
50 | del adata_vis_individually
51 |
52 | # find shared genes and subset both anndata and reference signatures
53 | intersect = np.intersect1d(adata_vis.var_names, inf_aver.index)
54 | adata_vis = adata_vis[:, intersect].copy()
55 | inf_aver = inf_aver.loc[intersect, :].copy()
56 |
57 | # prepare anndata for cell2location model
58 | cell2location.models.Cell2location.setup_anndata(adata=adata_vis,
59 | batch_key='sample_id',
60 | layer='counts')
61 |
62 | print('adata_vis prior to training:')
63 |     print(adata_vis)
64 |
65 | # create and train the model
66 | mod = cell2location.models.Cell2location(
67 | adata_vis, cell_state_df=inf_aver,
68 | # the expected average cell abundance: tissue-dependent
69 | # hyper-prior which can be estimated from paired histology:
70 | N_cells_per_location=21,
71 | # hyperparameter controlling normalisation of
72 | # within-experiment variation in RNA detection:
73 | detection_alpha=20
74 | )
75 | mod.view_anndata_setup()
76 |
77 | # Train the model
78 | mod.train(max_epochs=30000,
79 | # train using full data (batch_size=None)
80 | batch_size=30851, # this number is for Tampere + ARNEO ADT/APA 4 batches, three batches Tampere only 34000
81 | # use all data points in training because
82 | # we need to estimate cell abundance at all locations
83 | train_size=1,
84 | use_gpu=True,
85 | )
86 |
87 | # In this section, we export the estimated cell abundance (summary of the posterior distribution).
88 | adata_vis = mod.export_posterior(
89 | adata_vis, sample_kwargs={'num_samples': 1000, 'batch_size': mod.adata.n_obs, 'use_gpu': True}
90 | )
91 |
92 | # Save anndata object with results
93 | adata_file = 'visium_adata_with_c2l_mapping_'+date+'.h5ad'
94 | adata_vis.write(adata_file)
95 |     print(adata_file)
96 |
97 | # Save model
98 | mod.save(run_name, overwrite=True)
99 |
100 | ## PLOTTING ##
101 |     # plot ELBO loss history during training, removing the first 1000 epochs from the plot
102 | mod.plot_history(1000)
103 | plt.legend(labels=['full data training'])
104 | plt.savefig('c2l_mapping_model_training_ELBO_'+date+'.png')
105 | plt.clf()
106 |
107 |
--------------------------------------------------------------------------------
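
Note: after `export_posterior`, the per-spot cell type abundances live in `adata_vis.obsm`. The cell2location tutorials copy the 5% posterior quantile into `.obs` as a conservative estimate before plotting, along these lines:

```python
import scanpy as sc

# Hypothetical follow-up: reload the mapped object and expose abundances for plotting
adata_vis = sc.read_h5ad('visium_adata_with_c2l_mapping_20240125.h5ad')

# Column order follows the factor (cell type) names stored by the model
adata_vis.obs[adata_vis.uns['mod']['factor_names']] = adata_vis.obsm['q05_cell_abundance_w_sf']
```
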
/single-cell/single-cell-quality-control.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "# Author: Antti Kiviaho\n",
10 | "# Date: 22.2.2023\n",
11 |     "# Notebook for running dataset QC and normalization\n",
12 | "# Uses the scib integration environment and pipeline:\n",
13 | "#\n",
14 | "# 1. Cell and gene QC filtering\n",
15 | "# 2. scran normalization through scib\n",
16 | "# 3. batch-aware scaling with scib (implemented in single-cell-scvi-integrate.py)\n",
17 | "# 4. batch-aware HVGs with scib (implemented in single-cell-scvi-integrate.py)\n",
18 | "# 5. scvi-integration to find a shared latent space"
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": null,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "import os\n",
28 | "os.chdir('/lustre/scratch/kiviaho/prostate_spatial/')\n",
29 | "import numpy as np\n",
30 | "import anndata as ad\n",
31 | "import scanpy as sc\n",
32 | "import pandas as pd\n",
33 | "import seaborn as sns\n",
34 | "import scib\n",
35 | "import matplotlib.pyplot as plt\n",
36 | "from scipy import sparse\n",
37 | "from pathlib import Path\n",
38 | "from scripts.utils import load_from_pickle, save_to_pickle\n",
39 | "import warnings\n",
40 | "warnings.filterwarnings('ignore')"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": null,
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "datasets = ['dong_2020','chen_2021','cheng_2022','chen_2022','song_2022','wong_2022','hirz_2023']\n",
50 | "adata_dict = {}\n",
51 | "for dataset_id in datasets:\n",
52 | " adata = sc.read_h5ad('./sc-reference/'+dataset_id+'/adata_obj.h5ad')\n",
53 | " adata_dict[dataset_id] = adata"
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": null,
59 | "metadata": {},
60 | "outputs": [],
61 | "source": [
62 | "def qc_filters(adata, remove_doublets=True):\n",
63 |     "    # requires the scib-pipeline-R4.0 conda environment!\n",
64 | " # import scib\n",
65 | " # Filter out cells by using a hybrid of the original publications thresholds\n",
66 | " sc.pp.filter_cells(adata, min_counts=600)\n",
67 | " sc.pp.filter_cells(adata, min_genes = 300)\n",
68 | " sc.pp.filter_genes(adata, min_counts= 10)\n",
69 | " # Leave out cells with > 20% mitochondrial reads\n",
70 | " adata = adata[adata.obs.pct_counts_mt < 20, :]\n",
71 | " if remove_doublets:\n",
72 | " sc.external.pp.scrublet(adata)\n",
73 | " adata = adata[adata.obs['predicted_doublet']==False]\n",
74 | " \n",
75 | "\n",
76 | " return adata"
77 | ]
78 | },
79 | {
80 | "cell_type": "code",
81 | "execution_count": null,
82 | "metadata": {},
83 | "outputs": [],
84 | "source": [
85 | "for dset in datasets:\n",
86 | " adata = adata_dict[dset].copy()\n",
87 | " if not sparse.issparse(adata.X):\n",
88 | " adata.X = sparse.csr_matrix(adata.X)\n",
89 | " \n",
90 | " adata = qc_filters(adata)\n",
91 | " \n",
92 | " scib.preprocessing.normalize(adata,precluster=False, sparsify=False)\n",
93 | " # add ids to the data for use after data concatenation\n",
94 | " adata.obs['dataset'] = dset\n",
95 | " adata_dict[dset] = adata\n",
96 | " del adata"
97 | ]
98 | },
99 | {
100 | "cell_type": "code",
101 | "execution_count": null,
102 | "metadata": {},
103 | "outputs": [],
104 | "source": [
105 | "save_to_pickle(adata_dict,'./sc-reference/normalized_sc_7_datasets.pickle')"
106 | ]
107 | }
108 | ],
109 | "metadata": {
110 | "kernelspec": {
111 | "display_name": "Python 3 (ipykernel)",
112 | "language": "python",
113 | "name": "python3"
114 | },
115 | "language_info": {
116 | "codemirror_mode": {
117 | "name": "ipython",
118 | "version": 3
119 | },
120 | "file_extension": ".py",
121 | "mimetype": "text/x-python",
122 | "name": "python",
123 | "nbconvert_exporter": "python",
124 | "pygments_lexer": "ipython3",
125 | "version": "3.7.12"
126 | },
127 | "orig_nbformat": 4,
128 | "vscode": {
129 | "interpreter": {
130 | "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
131 | }
132 | }
133 | },
134 | "nbformat": 4,
135 | "nbformat_minor": 2
136 | }
137 |
--------------------------------------------------------------------------------
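
Note: `qc_filters` above assumes `adata.obs['pct_counts_mt']` already exists; it is presumably computed upstream. A minimal sketch of the standard scanpy way to derive it, assuming human mitochondrial genes carry the 'MT-' prefix:

```python
import scanpy as sc

def add_mito_qc_metrics(adata):
    # Flag mitochondrial genes and compute per-cell QC metrics,
    # including the pct_counts_mt column used by qc_filters
    adata.var['mt'] = adata.var_names.str.startswith('MT-')
    sc.pp.calculate_qc_metrics(adata, qc_vars=['mt'], percent_top=None,
                               log1p=False, inplace=True)
    return adata
```
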
/spatial-qc-and-normalization.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "# Author: Antti Kiviaho\n",
10 | "# Date: 20.1.2023\n",
11 |     "# A notebook for running QC and normalization on the Visium samples.\n",
12 |     "# Uses the scib integration environment and pipeline:\n",
13 |     "#\n",
14 |     "#\n",
15 |     "# 1. Spot and gene filtering\n",
16 |     "# 2. scran normalization through scib's R interface\n"
17 | ]
18 | },
19 | {
20 | "cell_type": "code",
21 | "execution_count": null,
22 | "metadata": {},
23 | "outputs": [],
24 | "source": [
25 | "import os\n",
26 | "os.chdir('/lustre/scratch/kiviaho/prostate_spatial/')\n",
27 | "import numpy as np\n",
28 | "import anndata as ad\n",
29 | "import scanpy as sc\n",
30 | "import pandas as pd\n",
31 | "import seaborn as sns\n",
32 | "import scib\n",
33 | "\n",
34 | "import matplotlib.pyplot as plt\n",
35 | "\n",
36 | "# Added spot exclusion information 28.2.2024\n",
37 | "from scripts.utils import get_sample_ids_reorder, save_to_pickle, get_include_exclude_info"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "def qc_and_normalize(adata):\n",
47 | " # QC and normalize\n",
48 | " sc.pp.filter_genes(adata, min_cells=5)\n",
49 | " sc.pp.filter_cells(adata, min_counts=500)\n",
50 | " scib.preprocessing.normalize(adata,precluster=False)\n",
51 | " return adata\n"
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "execution_count": null,
57 | "metadata": {},
58 | "outputs": [],
59 | "source": [
60 | "# This non-pathology-filtered dictionary was used for cell2location\n",
61 | "# As the annotation wasn't available at the time of mapping\n",
62 | "\n",
63 | "samples = get_sample_ids_reorder(['BPH','untreated','bicalutamide','goserelin','CRPC'])\n",
64 | "samples_dict = {} # A data structure for saving data\n",
65 | "for sample_id in samples:\n",
66 | " adata_sample = sc.read_visium('./results/'+sample_id+'/outs/',library_id=sample_id)\n",
67 | " adata_sample.var_names_make_unique()\n",
68 | " adata_sample.obs_names = sample_id + '_' + adata_sample.obs_names # add ids to the data for use after data concatenation\n",
69 | " adata_sample = qc_and_normalize(adata_sample) # QC and normalize – this filters out more spots\n",
70 | " samples_dict[sample_id] = adata_sample.copy()\n",
71 | " print(sample_id + ' done')\n",
72 | " del adata_sample\n",
73 | "\n",
74 | "save_to_pickle(samples_dict,'./data/normalized_no_pathology_filter_visium_data.pkl')\n",
75 | "\n"
76 | ]
77 | },
78 | {
79 | "cell_type": "code",
80 | "execution_count": null,
81 | "metadata": {},
82 | "outputs": [],
83 | "source": [
84 | "# Revised on 28.2.2024:\n",
85 | "# Remove regions with exclude/include annotation (exclude_info)\n",
86 | "# Save each sample separately for better memory management\n",
87 | "\n",
88 | "samples = get_sample_ids_reorder()\n",
89 | "exclude_info = get_include_exclude_info()\n",
90 | "\n",
91 | "\n",
92 | "obs_data_list = [] # A data structure for saving info on valid spots\n",
93 | "for sample_id in samples:\n",
94 | " \n",
95 | " adata_sample = sc.read_visium('./results/'+sample_id+'/outs/',library_id=sample_id)\n",
96 | " adata_sample.var_names_make_unique()\n",
97 | "\n",
98 | " # add ids to the data for use after data concatenation\n",
99 | " adata_sample.obs_names = sample_id + '_' + adata_sample.obs_names\n",
100 | "\n",
101 | " # Only use this with Tampere cohort samples\n",
102 | " # Subset spots from a single sample\n",
103 | " sample_exclude_info = exclude_info.loc[adata_sample.obs_names].copy()\n",
104 | " sample_spots_to_keep = sample_exclude_info[~sample_exclude_info['Pathology'].isin(['Exclude','Lumen'])].index\n",
105 | " # Subset the sample with spots to keep\n",
106 | " adata_sample = adata_sample[sample_spots_to_keep]\n",
107 | " \n",
108 | " # QC and normalize – this filters out more spots\n",
109 | " adata_sample = qc_and_normalize(adata_sample)\n",
110 | "\n",
111 | " # Save the object\n",
112 | " adata_sample.write_h5ad('data/normalized_visium/'+sample_id+'_normalized.h5ad')\n",
113 | " \n",
114 | " # Save the obs data to a list\n",
115 | " obs_data_list.append(adata_sample.obs)\n",
116 | "\n",
117 | " print(sample_id + ' done')\n",
118 | " del adata_sample\n",
119 | "\n",
120 | "# Save the ids of all spots that passed the qc (110681)\n",
121 | "pd.DataFrame(index=pd.concat(obs_data_list).index).to_csv('./data/post_qc_and_pathology_annot_valid_spots.csv')\n",
122 | "\n"
123 | ]
124 | }
125 | ],
126 | "metadata": {
127 | "kernelspec": {
128 | "display_name": "Python 3",
129 | "language": "python",
130 | "name": "python3"
131 | },
132 | "language_info": {
133 | "codemirror_mode": {
134 | "name": "ipython",
135 | "version": 3
136 | },
137 | "file_extension": ".py",
138 | "mimetype": "text/x-python",
139 | "name": "python",
140 | "nbconvert_exporter": "python",
141 | "pygments_lexer": "ipython3",
142 | "version": "3.7.12"
143 | },
144 | "orig_nbformat": 4
145 | },
146 | "nbformat": 4,
147 | "nbformat_minor": 2
148 | }
149 |
--------------------------------------------------------------------------------
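
Note: `get_include_exclude_info` lives in the unshown `scripts/utils.py`. From its usage above it must return a DataFrame indexed by prefixed spot barcode (`<sample_id>_<barcode>`) with at least a 'Pathology' column. A hypothetical sketch of a compatible loader (the CSV path is an assumption):

```python
import pandas as pd

def get_include_exclude_info(path='./data/pathology_annotations.csv'):
    # Hypothetical path; one row per spot, indexed by '<sample_id>_<barcode>'.
    # The 'Pathology' column carries labels such as 'Exclude' or 'Lumen'
    # that are used to drop spots before QC and normalization.
    return pd.read_csv(path, index_col=0)
```
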
/spatial-region-ligand-receptor-signaling-analysis.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.chdir('/lustre/scratch/kiviaho/prostate_spatial/')
3 |
4 | import scanpy as sc
5 | import numpy as np
6 | import pandas as pd
7 | import anndata as ad
8 | import squidpy as sq
9 |
10 | from tqdm import tqdm
11 |
12 | import matplotlib.pyplot as plt
13 | from utils import load_from_pickle, get_sample_ids_reorder, get_sample_crop_coords, save_to_pickle
14 |
15 | import seaborn as sns
16 | sns.set_theme(style='white')
17 |
18 | import warnings
19 | warnings.filterwarnings("ignore")
20 |
21 | samples = get_sample_ids_reorder()
22 | sample_crop_coord = get_sample_crop_coords()
23 |
24 | color_dict = {
25 | 'Tumor': '#fc8d62',
26 | 'Luminal epithelium': '#8da0cb',
27 | 'Basal epithelium': '#66c2a5',
28 | 'Club epithelium': '#ffd92f',
29 | 'Immune': '#a6d854',
30 | 'Endothelium': '#e78ac3',
31 | 'Fibroblast': '#e5c494',
32 | 'Muscle': '#b3b3b3'
33 | }
34 |
35 | regions = list(color_dict.keys())
36 | region_colors = list(color_dict.values())
37 |
38 | ## Define source and target regions
39 | source = 'Club epithelium'
40 | target = 'Muscle' # Iterate over the other regions: 'Tumor', 'Luminal epithelium', 'Basal epithelium', 'Immune', 'Endothelium', 'Fibroblast', 'Muscle'
41 |
42 | # Define functions
43 |
44 | def get_spot_interfaces(dat, cluster_of_interest, interaction_cluster, annotation_key='predicted_region', added_key='proximity_analysis'):
45 |
46 | # Create an observation column for spatial segmentation
47 | dat.obs[added_key] = np.nan
48 | distance_mat = dat.obsp['spatial_distances'].todense()
49 |
50 | for idx, obs_name in enumerate(dat.obs_names):
51 | cl = dat.obs[annotation_key][idx]
52 |
53 | if cl in [cluster_of_interest, interaction_cluster]:
54 |             first_nhbor_idxs = np.where(distance_mat[:, idx] == 1.0)[0] # Get first-order neighbor indices
55 |
56 | try:
57 |                 n_cl_neighbors = dat[first_nhbor_idxs].obs[annotation_key].value_counts()[cl] # count first-order neighbors sharing this annotation
58 | all_nhbor_indices = np.where(distance_mat[:, idx] != 0)[0]
59 |
60 | if cl == cluster_of_interest:
61 | if (n_cl_neighbors >= 0) & (sum(dat.obs[annotation_key][all_nhbor_indices] == interaction_cluster) >= 2):
62 | dat.obs.at[obs_name, added_key] = cl
63 |
64 | elif cl == interaction_cluster:
65 | if (n_cl_neighbors >= 0) & (sum(dat.obs[annotation_key][all_nhbor_indices] == cluster_of_interest) >= 2):
66 | dat.obs.at[obs_name, added_key] = cl
67 |
68 |             except KeyError: # cl is absent from the neighbors' annotations
69 | continue
70 |
71 | # Modify the colors to maintain the original cluster color
72 | dat.obs[added_key] = dat.obs[added_key].astype('category')
73 |
74 | return(dat)
75 |
76 | if __name__ == '__main__':
77 |
78 |
79 |     # Load all samples into a dict structure
80 | adata_slides = {}
81 | for sample in tqdm(samples, unit='sample'):
82 | adata_slides[sample] = sc.read_h5ad('./data/visium_with_regions/'+sample+'_with_regions.h5ad')
83 |
84 | it=0
85 | valid_samples = []
86 |
87 | fig, axs = plt.subplots(8, 6, figsize=(18, 24),dpi=120)
88 |
89 | for i in range(8):
90 | for j in range(6):
91 |
92 | if it < len(samples) :
93 |
94 | sample_name = samples[it]
95 |
96 | slide = adata_slides[sample_name].copy()
97 | slide = get_spot_interfaces(slide, source, target)
98 |
99 | # Qualify sample only if there are 10 or more of both source and target spots
100 | if not (slide.obs['proximity_analysis'].isna().all()):
101 | if (slide.obs['proximity_analysis'].str.contains(source).sum() >= 10) & (slide.obs['proximity_analysis'].str.contains(target).sum() >= 10):
102 |
103 | ## Plotting ##
104 | slide.uns['proximity_analysis_colors'] = [color_dict[cat] for cat in slide.obs['proximity_analysis'].cat.categories]
105 |
106 | # create spatial plot
107 | if 'P320' not in sample_name:
108 | sc.pl.spatial(slide,color='proximity_analysis',title=sample_name,
109 | crop_coord=sample_crop_coord[sample_name],
110 | size=1.5, alpha_img=0, legend_loc=None,na_color='whitesmoke',
111 | ax=axs[i,j],show=False
112 | )
113 |
114 | else:
115 | sc.pl.spatial(slide,color='proximity_analysis',title=sample_name,
116 | size=1.5, alpha_img=0, legend_loc=None,na_color='whitesmoke',
117 | ax=axs[i,j],show=False
118 | )
119 |
120 | # Remove labels
121 | axs[i,j].set_xlabel(None)
122 | axs[i,j].set_ylabel(None)
123 |
124 | # Append this sample to the list
125 | valid_samples.append(sample_name)
126 | else:
127 | axs[i,j].set_visible(False)
128 | else:
129 | axs[i,j].set_visible(False)
130 | else:
131 | axs[i,j].set_visible(False)
132 |
133 | it+=1
134 |
135 |
136 | plt.tight_layout()
137 | plt.savefig('./plots/receptor_ligand_interaction_analysis/'+source+'_'+target+'_proximity_regions.pdf')
138 | plt.clf()
139 |
140 | #### Second part, using valid_samples to do ligrec #####
141 |
142 | ligrec_dict = {}
143 | for sample_name in valid_samples:
144 |
145 | slide = adata_slides[sample_name].copy()
146 |
147 |         # Fill NaNs with a placeholder string, as ligrec won't run otherwise
148 | slide.obs['proximity_analysis'] = slide.obs['proximity_analysis'].cat.add_categories(['NA'])
149 | slide.obs['proximity_analysis'] = slide.obs['proximity_analysis'].fillna('NA')
150 |
151 | if (len(slide.obs['proximity_analysis'].cat.categories.tolist()) == 3):
152 |
153 | ligrec_res = sq.gr.ligrec(
154 | slide,
155 | cluster_key='proximity_analysis',
156 | clusters = [source,target],
157 | complex_policy='all',
158 | show_progress_bar = False,
159 | n_perms=1000,
160 | seed=4359345,
161 | copy=True,
162 | use_raw=False
163 | )
164 |
165 | ligrec_dict[sample_name] = ligrec_res
166 |
167 | # Save the proximal spot annotations
168 | proximity_spot_annots = pd.DataFrame()
169 | for s in valid_samples:
170 | proximity_spot_annots = pd.concat([
171 | proximity_spot_annots,
172 | adata_slides[s].obs.copy()],
173 | axis=0)
174 |
175 | # Format the results
176 | proximity_spot_annots = proximity_spot_annots[['sample_id','predicted_region','proximity_analysis']]
177 | proximity_spot_annots = proximity_spot_annots.rename(columns={'proximity_analysis':'{}_{}_proximity'.format(source,target)})
178 |
179 | # Save the results
180 | proximity_spot_annots.to_csv('./data/proximity_spot_ids/{}_to_{}_spot_annotation.csv'.format(source,target))
181 | save_to_pickle(ligrec_dict,'./data/region_ligrec_analysis/'+source+'_'+target+'_slides_with_ligrec.pkl')
182 | print('{} to {} ligand-receptor interaction analysis has been saved!'.format(source,target))
--------------------------------------------------------------------------------
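
Note: with `copy=True`, `squidpy.gr.ligrec` returns its result instead of writing to `adata.uns`; the `'means'` and `'pvalues'` frames are column-indexed by (source, target) cluster pairs. A minimal sketch of inspecting one saved sample, assuming the pickle written above:

```python
from utils import load_from_pickle

source, target = 'Club epithelium', 'Muscle'
ligrec_dict = load_from_pickle(
    './data/region_ligrec_analysis/' + source + '_' + target + '_slides_with_ligrec.pkl')

sample_name = next(iter(ligrec_dict))  # any qualifying sample
res = ligrec_dict[sample_name]

# Permutation p-values for source -> target interactions, smallest first
pvals = res['pvalues'][source][target].sort_values()
print(pvals.head())
```
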
/spatial-dotplot-ar-basal-club-markers.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.chdir('/lustre/scratch/kiviaho/prostate_spatial/')
3 |
4 | import scanpy as sc
5 | import numpy as np
6 | import pandas as pd
7 |
8 | import matplotlib.pyplot as plt
9 | import matplotlib.colors as colors
10 | from scripts.utils import get_sample_ids_reorder, get_sample_crop_coords, get_sample_id_mask
11 | from datetime import datetime
12 |
13 | import seaborn as sns
14 | sns.set_theme(style='white')
15 |
16 | #from statsmodels.stats.multitest import fdrcorrection
17 | from tqdm import tqdm
18 |
19 | from scipy.stats import zscore
20 |
21 | import warnings
22 | warnings.filterwarnings("ignore")
23 |
24 | if __name__ == '__main__':
25 |
26 | samples = get_sample_ids_reorder()
27 | sample_crop_coord = get_sample_crop_coords()
28 | sample_id_masks = get_sample_id_mask()
29 |
30 |
31 | color_dict = {
32 | 'Tumor': '#fc8d62',
33 | 'Luminal epithelium': '#8da0cb',
34 | 'Basal epithelium': '#66c2a5',
35 | 'Club epithelium': '#ffd92f',
36 | 'Immune': '#a6d854',
37 | 'Endothelium': '#e78ac3',
38 | 'Fibroblast': '#e5c494',
39 | 'Muscle': '#b3b3b3'
40 | }
41 |
42 | regions = list(color_dict.keys())
43 | region_colors = list(color_dict.values())
44 |
45 |
46 | # Fetch normalized expression and percentage of spots that express any given gene
47 |
48 | def fetch_region_normalized_expression_and_percentage(gene_markers, sample_list, group_id, regions_list = regions, use_unnormalized = True):
49 | '''
50 |     Returns, for each region in regions_list, the mean expression of the marker genes across all qualifying
51 |     spots pooled from the samples in sample_list, together with the fraction of spots expressing each gene.
52 | '''
53 |
54 | final_expression_df = pd.DataFrame()
55 | final_percentage_df = pd.DataFrame()
56 | for region in regions_list:
57 |
58 | print('Region: ' + region)
59 | region_expression_df = pd.DataFrame()
60 | #region_pct_df = pd.DataFrame()
61 |
62 | for sample in tqdm(sample_list, desc="Processing samples", unit="sample"):
63 |
64 | slide = sc.read_h5ad('./data/visium_with_regions/'+sample+'_with_regions.h5ad')
65 | slide_subs = slide[slide.obs['predicted_region']==region].copy()
66 |
67 | # Control for the number of data points belonging to a class
68 | if slide_subs.shape[0] >= 10:
69 |
70 | present_genes = [g for g in gene_markers if g in slide_subs.var_names]
71 | missing_genes = [g for g in gene_markers if g not in slide_subs.var_names]
72 |
73 | genes_all_arr_order_match = present_genes + missing_genes
74 |
75 | #if use_unnormalized:
76 | expr_without_missing_genes = slide_subs[:,present_genes].layers['counts'].copy().todense()
77 | #else:
78 | #expr_without_missing_genes = slide_subs[:,present_genes].X.copy()
79 |
80 |
81 | expr_all_spots = np.concatenate((expr_without_missing_genes,np.full((expr_without_missing_genes.shape[0],len(missing_genes)), np.nan)),axis=1)
82 |
83 | expr_as_df = pd.DataFrame(data=expr_all_spots.T,index=genes_all_arr_order_match,columns=slide_subs.obs_names)
84 | expr_as_df = expr_as_df.loc[gene_markers]
85 |
86 | # Concatenate the counts from a single sample into a dataframe with all the spots
87 | region_expression_df = pd.concat([region_expression_df,expr_as_df],axis=1)
88 |
89 | del slide, slide_subs
90 |
91 | # Added on 4.3.2024 prior to plotting the whole thing
92 | # Previously NaN's inflated the percentage, as they were non-zero
93 | region_expression_df = region_expression_df.fillna(0)
94 | # Put the "percentage of spots expressed in" information into a dataframe
95 | region_pct_df = pd.DataFrame((region_expression_df != 0).sum(axis=1)/region_expression_df.shape[1],columns=[region])
96 |
97 |
98 | # Here you concatenate the mean of all valid spots into a dataframe
99 | final_expression_df = pd.concat([final_expression_df,region_expression_df.mean(axis=1)],axis=1)
100 | final_percentage_df = pd.concat([final_percentage_df,region_pct_df],axis=1)
101 |
102 |
103 | final_expression_df.columns = [r + ' ' + group_id for r in regions_list]
104 | final_percentage_df.columns = [r + ' ' + group_id for r in regions_list]
105 | return(final_expression_df, final_percentage_df)
106 |
107 |
108 |
109 |
110 | # Get lists of samples in corresponding groupings
111 | #normal_samples = get_sample_ids_reorder(['BPH'])
112 | unt_samples = get_sample_ids_reorder(['untreated'])
113 | trt_samples = get_sample_ids_reorder(['bicalutamide','goserelin','degarelix','degarelix_apalutamide'])
114 | #crpc_samples = get_sample_ids_reorder(['CRPC'])
115 |
116 |     ar_signaling_genes = ['AR','ABCC4','FKBP5','KLK3','MAF','NKX3-1','PMEPA1','div1', # AR-regulated genes; 'divN' entries are spacer placeholders that create gaps in the dotplot
117 | 'KRT5','KRT15','TP63','div2', # Canonical basal markers
118 | 'MMP7','PIGR','LTF','SCGB1A1','SCGB3A1','div3', # Club-like markers
119 | #'KRT4','PSCA','WFDC2','CYP2F1','TSPAN8','CLU','KRT19','KLF5','ANXA3','PPP1R1B','S100A11','KRT8','ATP1B1', # All progenitor markers (Baures et al. Cancers 2022)
120 | 'S100A11','WFDC2','CLU','KRT19','KLF5','ATP1B1','KRT4', # Select representative progenitor markers for main plot
121 | 'EGFR','MET' # Putative stemness-type receptors #,'IGF1R','ERBB4','ERBB2'
122 | ]
123 |
124 | expr_unt, pct_unt = fetch_region_normalized_expression_and_percentage(ar_signaling_genes, unt_samples,group_id='(TRNA)',regions_list=regions[:4])
125 | expr_trt, pct_trt = fetch_region_normalized_expression_and_percentage(ar_signaling_genes, trt_samples,group_id='(NEADT)',regions_list=regions[:4])
126 |
127 |
128 |
129 | #plot_df = pd.concat([normal_ar,untreated_ar,treated_ar,crpc_ar],axis=1).T
130 | expr_df = pd.concat([expr_unt,expr_trt],axis=1).T
131 | pct_df = pd.concat([pct_unt,pct_trt],axis=1).T
132 | expr_df = expr_df.apply(lambda x: zscore(x, nan_policy='omit'))
133 | regions_mod = expr_df.index.tolist()
134 |
135 | # Format to long
136 | plot_df = expr_df.melt(ignore_index=False)
137 | plot_df.columns = ['gene','expression']
138 |
139 | # Format to long
140 | pct_df = pct_df.melt(ignore_index=False)
141 | pct_df.columns = ['gene','percentage']
142 |
143 | plot_df['percentage'] = pct_df['percentage'].copy()
144 |
145 | plot_df = plot_df.reset_index(names='region')
146 |
147 |
148 | # Create the dotplot
149 | sns.set_theme(style='white')
150 |
151 | width = 12
152 | height = 4
153 |
154 | fig, ax = plt.subplots(figsize=(width, height))
155 | yticks_list = list(np.arange(2,(len(regions_mod)*2)+2,2)[::-1])
156 |
157 |
158 | # Get control over interactions order and gap
159 | plot_df['region_y'] = plot_df['region'].map(dict(zip(regions_mod,yticks_list)))
160 | sns.scatterplot(x='gene', y='region_y', size='percentage', hue='expression',
161 | hue_norm=(-2,2),data=plot_df, sizes=(30, 300), palette='bwr', ax=ax,legend=True,
162 | )
163 |
164 | plt.ylim(0,yticks_list[0]+2)
165 | plt.yticks(yticks_list,regions_mod)
166 | plt.xticks(rotation=45)
167 | plt.legend(loc='center left',handlelength=1.5, handleheight=1.5, bbox_to_anchor=(1.05, 0.5))
168 | plt.tight_layout()
169 |
170 | #plt.savefig('plots/normalized_gene_expression_heatmaps/ar_club_markers_expression_nan_fixed.pdf')
171 |
172 | # Save the plot source data
173 | plot_df.to_excel('./source_data/figure_2c.xlsx',index=False)
174 |
--------------------------------------------------------------------------------
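
Note: the wide-to-long reshaping above (regions × genes, z-scored column-wise, then melted into one row per dot) is what feeds `sns.scatterplot`; a toy sketch of the same shaping:

```python
import pandas as pd
from scipy.stats import zscore

# Toy regions x genes matrix standing in for expr_df
expr_df = pd.DataFrame([[1.0, 5.0], [3.0, 2.0]],
                       index=['Tumor (TRNA)', 'Tumor (NEADT)'],
                       columns=['AR', 'KLK3'])

# Column-wise z-scores, then long format: one row per (region, gene) dot
expr_df = expr_df.apply(lambda x: zscore(x, nan_policy='omit'))
plot_df = expr_df.melt(ignore_index=False).reset_index(names='region')
plot_df.columns = ['region', 'gene', 'expression']
print(plot_df)
```
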
/spatial-dotplot-chemokine-expression.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.chdir('/lustre/scratch/kiviaho/prostate_spatial/')
3 |
4 | import scanpy as sc
5 | import numpy as np
6 | import pandas as pd
7 |
8 | import matplotlib.pyplot as plt
9 | import matplotlib.colors as colors
10 | from utils import get_sample_ids_reorder, get_sample_crop_coords, get_sample_id_mask
11 | from datetime import datetime
12 |
13 | import seaborn as sns
14 | sns.set_theme(style='white')
15 |
16 | #from statsmodels.stats.multitest import fdrcorrection
17 | from tqdm import tqdm
18 |
19 | from scipy.stats import zscore
20 |
21 | import warnings
22 | warnings.filterwarnings("ignore")
23 |
24 | if __name__ == '__main__':
25 |
26 | samples = get_sample_ids_reorder()
27 | sample_crop_coord = get_sample_crop_coords()
28 | sample_id_masks = get_sample_id_mask()
29 |
30 |
31 | color_dict = {
32 | 'Tumor': '#fc8d62',
33 | 'Luminal epithelium': '#8da0cb',
34 | 'Basal epithelium': '#66c2a5',
35 | 'Club epithelium': '#ffd92f',
36 | 'Immune': '#a6d854',
37 | 'Endothelium': '#e78ac3',
38 | 'Fibroblast': '#e5c494',
39 | 'Muscle': '#b3b3b3'
40 | }
41 |
42 | regions = list(color_dict.keys())
43 | region_colors = list(color_dict.values())
44 |
45 |
46 | # Fetch normalized expression and percentage of spots that express any given gene
47 |
48 | def fetch_region_normalized_expression_and_percentage(gene_markers, sample_list, group_id, regions_list = regions, use_unnormalized = True):
49 | '''
50 | Returns the per-sample mean expression of genes of interest across all samples that are defined in sample_list
51 | inside a region defined by region parameter.
52 | '''
53 |
54 | final_expression_df = pd.DataFrame()
55 | final_percentage_df = pd.DataFrame()
56 | for region in regions_list:
57 |
58 | print('Region: ' + region)
59 | region_expression_df = pd.DataFrame()
60 | #region_pct_df = pd.DataFrame()
61 |
62 | for sample in tqdm(sample_list, desc="Processing samples", unit="sample"):
63 |
64 | slide = sc.read_h5ad('./data/visium_with_regions/'+sample+'_with_regions.h5ad')
65 | slide_subs = slide[slide.obs['predicted_region']==region].copy()
66 |
67 | # Control for the number of data points belonging to a class
68 | if slide_subs.shape[0] >= 10:
69 |
70 | present_genes = [g for g in gene_markers if g in slide_subs.var_names]
71 | missing_genes = [g for g in gene_markers if g not in slide_subs.var_names]
72 |
73 | genes_all_arr_order_match = present_genes + missing_genes
74 |
75 | #if use_unnormalized:
76 | expr_without_missing_genes = slide_subs[:,present_genes].layers['counts'].copy().todense()
77 | #else:
78 | #expr_without_missing_genes = slide_subs[:,present_genes].X.copy()
79 |
80 |
81 | expr_all_spots = np.concatenate((expr_without_missing_genes,np.full((expr_without_missing_genes.shape[0],len(missing_genes)), np.nan)),axis=1)
82 |
83 | expr_as_df = pd.DataFrame(data=expr_all_spots.T,index=genes_all_arr_order_match,columns=slide_subs.obs_names)
84 | expr_as_df = expr_as_df.loc[gene_markers]
85 |
86 | # Concatenate the counts from a single sample into a dataframe with all the spots
87 | region_expression_df = pd.concat([region_expression_df,expr_as_df],axis=1)
88 |
89 | del slide, slide_subs
90 |
91 | # Added on 4.3.2024 prior to plotting the whole thing
92 | # Previously NaN's inflated the percentage, as they were non-zero
93 | region_expression_df = region_expression_df.fillna(0)
94 | # Put the "percentage of spots expressed in" information into a dataframe
95 | region_pct_df = pd.DataFrame((region_expression_df != 0).sum(axis=1)/region_expression_df.shape[1],columns=[region])
96 |
97 |
98 | # Here you concatenate the mean of all valid spots into a dataframe
99 | final_expression_df = pd.concat([final_expression_df,region_expression_df.mean(axis=1)],axis=1)
100 | final_percentage_df = pd.concat([final_percentage_df,region_pct_df],axis=1)
101 |
102 |
103 | final_expression_df.columns = [r + ' ' + group_id for r in regions_list]
104 | final_percentage_df.columns = [r + ' ' + group_id for r in regions_list]
105 | return(final_expression_df, final_percentage_df)
106 |
107 |
108 |
109 |
110 | # Get lists of samples in corresponding groupings
111 | unt_samples = get_sample_ids_reorder(['untreated'])
112 | trt_samples = get_sample_ids_reorder(['bicalutamide','goserelin','degarelix','degarelix_apalutamide'])
113 |
114 |
115 | chemokine_marker_genes = [
116 |         'CEBPB','NFKB1','IL1RN','CD68','PLAUR','div0', # 'divN' entries are spacer placeholders that create gaps in the dotplot
117 | 'CXCL1','CXCL2','CXCL3','CXCL5','CXCL6','CXCL8','CXCR2','div1', # Left out 'CXCR1',
118 | 'CXCL16','CXCR6','div2',
119 | 'CCL20','CCR6','div3',
120 | 'CCL2','CCL3','CCL4','CCL5','CCR2','CCR5','div4',
121 | 'CXCL9','CXCL10','CXCL11','CXCR3','div5',
122 | 'CCL17','CCL22','CCR4','div6',
123 | 'CCL19','CCL21','CCR7','div7',
124 | 'CXCL12','CXCR4'
125 | ]
126 |
127 |
128 | # Fetch the relevant expression
129 | expr_unt, pct_unt = fetch_region_normalized_expression_and_percentage(chemokine_marker_genes, unt_samples,group_id='(TRNA)',regions_list=regions)
130 | expr_trt, pct_trt = fetch_region_normalized_expression_and_percentage(chemokine_marker_genes, trt_samples,group_id='(NEADT)',regions_list=regions)
131 |
132 |
133 | #plot_df = pd.concat([normal_ar,untreated_ar,treated_ar,crpc_ar],axis=1).T
134 | expr_df = pd.concat([expr_unt,expr_trt],axis=1).T#.fillna(0) ## Added on 4.3.2024 as CXCR1 expression in treated was invisible
135 | pct_df = pd.concat([pct_unt,pct_trt],axis=1).T#.fillna(0) ## Added on 4.3.2024 as CXCR1 expression in treated was invisible
136 |
137 | expr_df.to_csv('expr_df.csv')
138 | pct_df.to_csv('pct_df.csv')
139 |
140 | expr_df = expr_df.apply(lambda x: zscore(x, nan_policy='omit'))
141 | regions_mod = expr_df.index.tolist()
142 |
143 | # Format to long
144 | plot_df = expr_df.melt(ignore_index=False)
145 | plot_df.columns = ['gene','expression']
146 |
147 | # Format to long
148 | pct_df = pct_df.melt(ignore_index=False)
149 | pct_df.columns = ['gene','percentage']
150 |
151 | plot_df['percentage'] = pct_df['percentage'].copy()
152 |
153 | plot_df = plot_df.reset_index(names='region')
154 |
155 |
156 | # Create the dotplot
157 | sns.set_theme(style='white')
158 |
159 | width = 15
160 | height = 5.5
161 |
162 | fig, ax = plt.subplots(figsize=(width, height))
163 | yticks_list = list(np.arange(2,(len(regions_mod)*2)+2,2)[::-1])
164 |
165 |
166 | # Get control over interactions order and gap
167 | plot_df['region_y'] = plot_df['region'].map(dict(zip(regions_mod,yticks_list)))
168 | sns.scatterplot(x='gene', y='region_y', size='percentage', hue='expression',
169 | hue_norm=(-2,2),data=plot_df, sizes=(30, 300), palette='bwr', ax=ax,legend=True,
170 | )
171 |
172 | plt.ylim(0,yticks_list[0]+2)
173 | plt.yticks(yticks_list,regions_mod)
174 | plt.xticks(rotation=45)
175 | plt.legend(loc='center left',handlelength=1.5, handleheight=1.5, bbox_to_anchor=(1.05, 0.5))
176 | plt.tight_layout()
177 |
178 | plot_df.to_excel('./supplementary_tables/source_data_chemokine_dotplot.xlsx')
179 |
180 | plt.savefig('plots/normalized_gene_expression_heatmaps/chemokine_markers_expression_dotplot.pdf',transparent=True)
181 |
182 | # Save the plot source data
183 | plot_df.to_excel('./source_data/figure_3d.xlsx',index=False)
184 |
--------------------------------------------------------------------------------
/single-cell/single-cell-nmf-analysis.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.chdir('/lustre/scratch/kiviaho/prostate_spatial')
3 | import numpy as np
4 | import pandas as pd
5 | import scanpy as sc
6 | from itertools import product
7 | import nimfa
8 | import argparse
9 |
10 |
11 | def save_to_pickle(obj,filename):
12 | import pickle
13 | with open(filename, 'wb') as handle:
14 | pickle.dump(obj, handle, protocol=pickle.HIGHEST_PROTOCOL)
15 |
16 | def nmf_preprocessing(dat,n_var_genes=2000):
17 |
18 | dat.X = dat.layers['counts'].copy()
19 | sc.pp.filter_genes(dat,min_counts=10)
20 | sc.pp.normalize_total(dat)
21 | sc.pp.scale(dat)
22 | sc.pp.highly_variable_genes(dat,n_top_genes=n_var_genes,flavor='seurat_v3',
23 | subset=True, layer='counts')
24 |
25 | # Replace negative entries with zero
26 | dat.X[dat.X < 0] = 0
27 |
28 | return(dat)
29 |
30 | def calculate_matrix_orders(arr):
31 | # get the indices that would sort each row of the array
32 | sort_indices = np.argsort(-arr, axis=1)
33 |
34 | # create an array to mark the sorted order
35 | sorted_order = np.empty_like(sort_indices)
36 | rows, cols = np.indices(arr.shape)
37 | sorted_order[rows, sort_indices] = cols
38 |
39 | # replace each entry in the original array with its index in the sorted order
40 | result = sorted_order.astype(int)
41 |
42 | return result
43 |
44 |
45 | # Helper function to calculate the Jaccard index of two lists
46 | def jaccard(list1, list2):
47 | set1 = set(list1)
48 | set2 = set(list2)
49 | intersection = set1.intersection(set2)
50 | union = set1.union(set2)
51 | return len(intersection)/len(union)
52 |
53 |
54 | ################# Params ########################
55 |
56 | parser = argparse.ArgumentParser()
57 | parser.add_argument('--filename', type=str, help='Name of the file to be used')
58 | parser.add_argument('--n_var_genes', type=int,default=2000, help='The target number of HVGs')
59 | parser.add_argument('--min_genes_in_initial_module', type=int,default=5, help='The number of genes required in the initial NMF module')
60 | parser.add_argument('--required_jaccard_index', type=float,default=0.05, help='The required Jaccard overlap for two gene modules')
61 | args = parser.parse_args()
62 |
63 | filename = args.filename
64 | n_hvgs = args.n_var_genes
65 | min_genes_in_initial_module = args.min_genes_in_initial_module
66 | required_jaccard_index = args.required_jaccard_index
67 | min_cells = 100
68 | nmf_comp_range = np.arange(5,11)[::-1] # Try n_comps from 10 down to 5; the first factorization whose modules are all large enough is kept
69 |
70 | if __name__ == '__main__':
71 |
72 | # Download data
73 | ctype_dat = sc.read_h5ad(filename)
74 |
75 | # Initialize dict objects
76 | nmf_sample_dict = {}
77 | dq_samples = {}
78 |
79 | ################# First part: calculating NMFs ########################
80 |
81 | for s in np.unique(ctype_dat.obs['sample']):
82 |
83 | if len(ctype_dat.obs[ctype_dat.obs['sample']==s]) >= min_cells:
84 |
85 | # Subset the data to only include a single sample
86 | dat = ctype_dat[ctype_dat.obs['sample'] == s]
87 |
88 | # Do a sample-specific preprocessing
89 | dat = nmf_preprocessing(dat,n_hvgs)
90 |
91 | for n_comps in nmf_comp_range:
92 |
93 | # Perform non-negative matrix factorization using nsNMF
94 | f = nimfa.Nsnmf(dat.X, rank=n_comps)
95 | f_fit = f()
96 |
97 | # Extract the resulting matrices into variables W and H
98 | W = np.array(f_fit.basis())
99 | H = np.array(f_fit.coef())
100 |
101 | res = {'samples':dat.obs,'genes':dat.var,'sample_weights':W,'gene_weights':H}
102 |
103 | gene_w = res['gene_weights'].T
104 | genes = list(res['genes'].index)
105 |
106 | mat_1 = calculate_matrix_orders(gene_w.T).T
107 | mat_2 = calculate_matrix_orders(gene_w)
108 |
109 | # Iterate through the factors (columns)
110 | genes_by_factors = {}
111 | for factor in range(gene_w.shape[1]):
112 | valid_genes = list()
113 | # Iterate through the genes, starting from the highest weighted gene of this factor
114 | for i in np.argsort(mat_1[:,factor]):
115 |                 if mat_2[i,factor] == 0: # Is this the factor the gene affects the most?
116 |
117 | # If yes, add it to the list of genes
118 | valid_genes.append(genes[i])
119 | else:
120 | # if not, stop adding genes into the list, move on to the next factor
121 | break
122 | # Append the list of valid genes into a dictionary under the appropriate key
123 | genes_by_factors['factor'+str(factor)] = valid_genes
124 |
125 |
126 | all_lists_have_at_least = True
127 |
128 | for lst in genes_by_factors.values():
129 | if len(lst) < min_genes_in_initial_module:
130 | all_lists_have_at_least = False
131 | break
132 |
133 | if all_lists_have_at_least:
134 | print(s+": valid factors (min. "+str(min_genes_in_initial_module)+" genes) when using n_comps="+str(n_comps))
135 | nmf_sample_dict[s] = genes_by_factors
136 | break
137 | else:
138 | dq_samples[s] = len(ctype_dat.obs[ctype_dat.obs['sample']==s])
139 |
140 | for k in list(dq_samples.keys()):
141 | print(k + ' not processed for too few cells (' + str(dq_samples[k])+')')
142 |
143 | ################# Second part: Observing module overlaps ########################
144 |
145 | # A dictionary to keep track of overlapping factors
146 | overlapping_factors = {}
147 |
148 | # Go through each sample factor combination
149 | for (sample1, factor1), (sample2, factor2) in product([(sample, factor) for sample in nmf_sample_dict for factor in nmf_sample_dict[sample]], repeat=2):
150 | # Ignore if it's the same sample and factor combination
151 | if sample1 == sample2 and factor1 == factor2:
152 | continue
153 | # Calculate the Jaccard index of the two lists
154 | jaccard_index = jaccard(nmf_sample_dict[sample1][factor1], nmf_sample_dict[sample2][factor2])
155 | # If the overlap is more than 5%, add it to the overlapping_factors dictionary
156 | if jaccard_index > required_jaccard_index:
157 | overlapping_factors.setdefault((sample1, factor1), set()).add((sample2, factor2))
158 |
159 |     # A set to keep track of factors with at least two overlaps
160 | valid_factors = set()
161 |
162 | # Go through each factor
163 | for key, value in overlapping_factors.items():
164 |         # If the factor overlaps with at least two other factors, add it to the valid_factors set
165 | if len(value) >= 2:
166 | valid_factors.add(key)
167 |
168 | # A dictionary to keep track of how many overlapping factors each gene has
169 | gene_overlaps = {}
170 |
171 | # Go through each valid factor
172 | for factor in valid_factors:
173 | # Get the sample and factor
174 | sample, factor_name = factor
175 | # Go through each gene in the factor
176 | for gene in nmf_sample_dict[sample][factor_name]:
177 | # Add one to the gene's overlaps count
178 | gene_overlaps[gene] = gene_overlaps.get(gene, 0) + 1
179 |
180 | # Create an empty 2D array to serve as the adjacency matrix
181 | adj_matrix = [[0 for gene2 in gene_overlaps.keys()] for gene1 in gene_overlaps.keys()]
182 |
183 | # Loop through each valid factor
184 | for factor in valid_factors:
185 | # Get the sample and factor
186 | sample, factor_name = factor
187 |
188 | # Loop through each gene in the factor
189 | for gene1 in nmf_sample_dict[sample][factor_name]:
190 | # Loop through each other gene in the same factor
191 | for gene2 in nmf_sample_dict[sample][factor_name]:
192 | # Ignore if it's the same gene
193 | if gene1 == gene2:
194 | continue
195 | # Increment the value for this pair of genes in the adjacency matrix
196 | adj_matrix[list(gene_overlaps.keys()).index(gene1)][list(gene_overlaps.keys()).index(gene2)] += 1
197 |
198 | adj_df = pd.DataFrame(adj_matrix,columns=list(gene_overlaps.keys()),index=list(gene_overlaps.keys()))
199 |
200 | # Save the adjacency matrix to a csv
201 | save_file = filename.replace('.h5ad','_nmf_derived_gene_adjacencies_mod_'+
202 | str(min_genes_in_initial_module)+'_jac'
203 | +str(required_jaccard_index)+'.csv')
204 | adj_df.to_csv(save_file)
205 |
206 |
207 |
--------------------------------------------------------------------------------
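
Note: the script is argparse-driven; the flag names below are as defined above, while the input file name is hypothetical. To make the overlap threshold concrete, the invocation comment is followed by a toy run of the `jaccard` criterion:

```python
# Example invocation (input file name is hypothetical):
#   python single-cell-nmf-analysis.py --filename epithelial_subset.h5ad \
#       --n_var_genes 2000 --min_genes_in_initial_module 5 --required_jaccard_index 0.05

def jaccard(list1, list2):
    # Same helper as in the script above
    set1, set2 = set(list1), set(list2)
    return len(set1 & set2) / len(set1 | set2)

module_a = ['MMP7', 'PIGR', 'LTF', 'SCGB1A1', 'SCGB3A1']
module_b = ['MMP7', 'PIGR', 'LTF', 'KRT4', 'PSCA', 'WFDC2']

idx = jaccard(module_a, module_b)  # 3 shared / 8 in union = 0.375
print(idx > 0.05)                  # True: passes the default required_jaccard_index
```
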
/single-cell/single-cell-preprocessing.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import pandas as pd\n",
11 | "import scanpy as sc\n",
12 | "import anndata as ad\n",
13 | "from pathlib import Path\n",
14 | "import glob\n",
15 | "import warnings\n",
16 | "warnings.filterwarnings('ignore')\n",
17 | "\n",
18 | "import os\n",
19 | "os.chdir('/lustre/scratch/kiviaho/prostate_spatial/')"
20 | ]
21 | },
22 | {
23 | "cell_type": "markdown",
24 | "metadata": {},
25 | "source": [
26 | "### Formatting Dong et al. 2020 data"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": null,
32 | "metadata": {},
33 | "outputs": [],
34 | "source": [
35 | "sc_files = glob.glob('sc-reference/dong_2020/*txt') \n",
36 | "dong_annot = pd.read_csv('./sc-reference/dong_2020/dong_2020_annot.csv',sep=';',index_col=0)\n",
37 | "dong_annot = dong_annot.rename(columns={'cells':'celltype_orig'})\n",
38 | "\n",
39 | "# Download the files into a list and concatenate together\n",
40 | "adata_list = []\n",
41 | "for file in sc_files:\n",
42 | " s_abbr = '_'.join(file.split('/')[2].split('_')[0:2])\n",
43 | " \n",
44 | " with open(file) as x:\n",
45 | " ncols = len(x.readline().split('\\t'))\n",
46 | "\n",
47 | " df = pd.read_csv(file, usecols=range(1,ncols),delimiter='\\t',index_col=0)\n",
48 | " adata = ad.AnnData(df).T\n",
49 | "\n",
50 | " #### ADDING METADATA ####\n",
51 | " adata.obs_names = s_abbr + '_' + adata.obs_names\n",
52 | " meta = adata.obs.copy()\n",
53 | " meta['sample'] = s_abbr\n",
54 | " meta['patient'] = s_abbr\n",
55 | " meta = meta.merge(dong_annot,how='left',left_index=True,right_index=True)\n",
56 | " meta['phenotype'] = 'CRPC'\n",
57 | " meta['dataset'] = 'dong_2020'\n",
58 | "\n",
59 | " adata.obs = meta.copy()\n",
60 | " ##########\n",
61 | " adata.obs_names_make_unique()\n",
62 | "\n",
63 | "    # The gene names were originally mapped from ENSEMBL IDs, which can produce duplicates, so make them unique.\n",
64 | " adata.var_names_make_unique()\n",
65 | " adata_list.append(adata)\n",
66 | " \n",
67 | "adata_concat = ad.concat(adata_list, join='outer', fill_value=0)\n",
68 | "\n",
69 | "adata_concat.obs\n",
70 | "adata_concat.write('sc-reference/dong_2020/adata_obj.h5ad')"
71 | ]
72 | },
73 | {
74 | "cell_type": "markdown",
75 | "metadata": {},
76 | "source": [
77 | "### Formatting Chen et al. 2021 data"
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "execution_count": null,
83 | "metadata": {},
84 | "outputs": [],
85 | "source": [
86 | "\n",
87 | "adata = sc.read_csv('sc-reference/chen_2021/GSM4203181_data.raw.matrix.txt',delimiter='\\t')\n",
88 | "adata\n",
89 | "adata = adata.T\n",
90 | "\n",
91 | "chen_obs = adata.obs\n",
92 | "#### ADDING METADATA ####\n",
93 | "\n",
94 | "chen_obs['sample'] = ['chen_'+s.split('-')[1] for s in chen_obs.index]\n",
95 | "chen_obs['patient'] = chen_obs['sample']\n",
96 | "chen_obs['celltype_orig'] = 'unknown'\n",
97 | "chen_obs['phenotype'] = 'PCa'\n",
98 | "chen_obs['dataset'] = 'chen_2021'\n",
99 | "\n",
100 | "##########\n",
101 | "\n",
102 | "\n",
103 | "if (chen_obs.index == adata.obs_names).all():\n",
104 | " adata.obs = chen_obs\n",
105 | "adata.obs_names = adata.obs['sample'] + '_' + [s.split('-')[0] for s in adata.obs_names] + '.1'\n",
106 | "adata.obs.index = adata.obs.index.set_names(['cell'])\n",
107 | "adata.obs\n",
108 | "\n",
109 | "adata.write('sc-reference/chen_2021/adata_obj.h5ad')"
110 | ]
111 | },
112 | {
113 | "attachments": {},
114 | "cell_type": "markdown",
115 | "metadata": {},
116 | "source": [
117 | "### Formatting Song et al. 2022 data"
118 | ]
119 | },
120 | {
121 | "cell_type": "code",
122 | "execution_count": null,
123 | "metadata": {},
124 | "outputs": [],
125 | "source": [
126 | "# This information is from supplementary file 1 of the article Song et al. 2022 Nature Comms\n",
127 | "# The cell type annotations are available without cell IDs, so merging isn't possible.\n",
128 | "# Number of analysed cells is 21743\n",
129 | "song_samples = ['AUG_PB1A', 'AUG_PB1B','MAY_PB1A','MAY_PB1B', 'MAY_PB2A','MAY_PB2B', # BIOPSIES\n",
130 | " 'PR5186','PR5196','PR5199','PR5269', # UNPAIRED RPs\n",
131 | "                'PR5249_N','PR5249_T', # PAIRED RPs (normal + tumor)\n",
132 | "                'PR5251_N','PR5251_T', # PAIRED RPs (normal + tumor)\n",
133 | "                'PR5254_N','PR5254_T', # PAIRED RPs (normal + tumor)\n",
134 | "                'PR5261_N','PR5261_T'] # PAIRED RPs (normal + tumor)\n",
135 | "\n",
136 | "song_patients = ['P1','P1','P2','P2','P3','P3',\n",
137 | " 'P4','P5','P6','P7',\n",
138 | " 'P8','P8',\n",
139 | " 'P9','P9',\n",
140 | " 'P10','P10',\n",
141 | " 'P11','P11']\n",
142 | "song_phenotype = list(np.repeat('PCa',10)) + ['normal','PCa','normal','PCa','normal','PCa','normal','PCa']\n",
143 | "\n",
144 | "\n",
145 | "# Replace some of the idents with those matching file names\n",
146 | "song_file_names = song_samples.copy()\n",
147 | "song_file_names[:] = ['AUG_PB_1A' if x=='AUG_PB1A' else x for x in song_file_names]\n",
148 | "song_file_names[:] = ['AUG_PB_1B' if x=='AUG_PB1B' else x for x in song_file_names]\n",
149 | "\n",
150 | "song_file_names[:] = ['PB1A' if x=='MAY_PB1A' else x for x in song_file_names]\n",
151 | "song_file_names[:] = ['PB1B' if x=='MAY_PB1B' else x for x in song_file_names]\n",
152 | "\n",
153 | "song_file_names[:] = ['PB2A' if x=='MAY_PB2A' else x for x in song_file_names]\n",
154 | "song_file_names[:] = ['PB2B' if x=='MAY_PB2B' else x for x in song_file_names]\n",
155 | "\n",
156 | "\n",
157 | "adata_samples_list = []\n",
158 | "for idx,file_abbr in enumerate(song_file_names):\n",
159 | "\n",
160 | " # Find all files generated from one sample\n",
161 | " file_name_list = glob.glob('sc-reference/song_2022/*'+file_abbr+'*')\n",
162 | " sample_abbr = song_samples[idx]\n",
163 | " patient_abbr = song_patients[idx]\n",
164 | " phenot_abbr = song_phenotype[idx]\n",
165 | "\n",
166 | " sample_adata = []\n",
167 | "\n",
168 | " # Read in each file and append them to a sample-specific list\n",
169 | " for f in file_name_list:\n",
170 | " # exp_abbr = f.split('/')[2].split('_')[0]\n",
171 | " adata = sc.read_csv(f,dtype=np.int16,delimiter='\\t').T\n",
172 | " adata.obs_names = sample_abbr +'_'+ adata.obs_names + '-1'\n",
173 | " sample_adata.append(adata)\n",
174 | "    # Concatenate data from the same sample across different sequencing runs\n",
175 | "    adata_concat_one_sample = ad.concat(sample_adata, join='outer', fill_value=0)\n",
176 | "\n",
177 | "    adata_concat_one_sample.obs['sample'] = sample_abbr\n",
178 | " adata_concat_one_sample.obs['patient'] = 'song_'+patient_abbr\n",
179 | " adata_concat_one_sample.obs['celltype_orig'] = 'unknown'\n",
180 | " adata_concat_one_sample.obs['phenotype'] = phenot_abbr\n",
181 | " adata_concat_one_sample.obs['dataset'] = 'song_2022'\n",
182 | " adata_samples_list.append(adata_concat_one_sample)\n",
183 | "\n",
184 | "adata_concat_all = ad.concat(adata_samples_list, join='outer', fill_value=0)\n",
185 | "adata_concat_all.obs\n",
186 | "\n",
187 | "adata_concat_all.write('sc-reference/song_2022/adata_obj.h5ad')\n"
188 | ]
189 | },
190 | {
191 | "cell_type": "markdown",
192 | "metadata": {},
193 | "source": [
194 | "### Formatting Cheng et al. 2022 data "
195 | ]
196 | },
197 | {
198 | "cell_type": "code",
199 | "execution_count": null,
200 | "metadata": {},
201 | "outputs": [],
202 | "source": [
203 | "data_dirs = glob.glob('sc-reference/cheng_2022/results/*')\n",
204 | "new_names = pd.read_csv('sc-reference/cheng_2022/sample_shorthands.txt',sep='\\t')\n",
205 | "\n",
206 | "adata_list = []\n",
207 | "for dir in data_dirs:\n",
208 | "    # Get the shorthand\n",
209 | " sample = dir.split('/')[-1]\n",
210 | " shorthand = new_names[new_names['old']==sample]['new'].item()\n",
211 | " patient = shorthand.split('_')[0]\n",
212 | " if 'CRPC' in patient:\n",
213 | " phenot_abbr = 'CRPC'\n",
214 | " else:\n",
215 | " phenot_abbr = 'PCa'\n",
216 | "\n",
217 | " adata = sc.read_10x_mtx(dir+'/outs/filtered_feature_bc_matrix')\n",
218 | "\n",
219 | " adata.obs['sample'] = shorthand\n",
220 | " adata.obs['patient'] = 'cheng_'+patient\n",
221 | " adata.obs['celltype_orig'] = 'unknown'\n",
222 | " adata.obs['phenotype'] = phenot_abbr\n",
223 | " adata.obs['dataset'] = 'cheng_2022'\n",
224 | "\n",
225 | " adata.obs_names = shorthand + '_' + adata.obs_names\n",
226 | " adata_list.append(adata)\n",
227 | "\n",
228 | "adata_concat = ad.concat(adata_list, join='outer', fill_value=0)\n",
229 | "adata_concat.obs\n",
230 | "adata_concat.write('sc-reference/cheng_2022/adata_obj.h5ad')"
231 | ]
232 | },
233 | {
234 | "attachments": {},
235 | "cell_type": "markdown",
236 | "metadata": {},
237 | "source": [
238 | "### Formatting Wong et al. 2022 data"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": null,
244 | "metadata": {},
245 | "outputs": [],
246 | "source": [
247 | "dat = sc.read_h5ad('sc-reference/wong_2022/wong_2022_data.h5ad')\n",
248 | "annot = dat.obs.copy()\n",
249 | "cell_annot = pd.read_csv('sc-reference/wong_2022/GSE185344_PH_scRNA.rename_cluster.csv')\n",
250 | "cell_annot.index = cell_annot['Unnamed: 0']\n",
251 | "merged_obs = pd.merge(annot,cell_annot,left_index=True,right_index=True,how='left')\n",
252 | "\n",
253 | "if (merged_obs.index == dat.obs.index).all():\n",
254 | " dat.obs = merged_obs\n",
255 | " del dat.raw\n",
256 | "\n",
257 | "# Format the phenotypes\n",
258 | "phenot = [s.split('_')[2] for s in dat.obs['orig.ident']]\n",
259 | "phenot = [w.replace('Benign', 'normal') for w in phenot]\n",
260 | "phenot = [w.replace('Tumor', 'PCa') for w in phenot]\n",
261 | "\n",
262 | "# Format the patient \n",
263 | "patient = ['_'.join(s.split('_')[:2]) for s in dat.obs['orig.ident']]\n",
264 | "\n",
265 | "new_obs = pd.DataFrame()\n",
266 | "new_obs.index = dat.obs.index.copy()\n",
267 | "new_obs['sample'] = 'wong2022_'+dat.obs['orig.ident'].copy()\n",
268 | "new_obs['patient'] = ['wong2022_'+ p for p in patient]\n",
269 | "new_obs['celltype_orig'] = dat.obs['cellactivity_clusters'].copy()\n",
270 | "new_obs['phenotype'] = phenot\n",
271 | "new_obs['dataset'] = 'wong_2022'\n",
272 | "\n",
273 | "dat.obs = new_obs\n",
274 | "\n",
275 | "# Lose the unnecessary column 'features'\n",
276 | "dat.var = dat.var.drop(columns='features')\n",
277 | "\n",
278 | "dat.write('sc-reference/wong_2022/adata_obj.h5ad')\n"
279 | ]
280 | },
281 | {
282 | "attachments": {},
283 | "cell_type": "markdown",
284 | "metadata": {},
285 | "source": [
286 | "### Formatting Chen et al. 2022 data"
287 | ]
288 | },
289 | {
290 | "cell_type": "code",
291 | "execution_count": null,
292 | "metadata": {},
293 | "outputs": [],
294 | "source": [
295 | "data_dirs = glob.glob('sc-reference/chen_2022/results/*')\n",
296 | "\n",
297 | "adata_list = []\n",
298 | "for dir in data_dirs:\n",
299 | "    # Get the sample name from the directory path\n",
300 | " print(dir)\n",
301 | " sample = dir.split('/')[-1]\n",
302 | " if 'PCa' in sample:\n",
303 | " phenot_abbr = 'PCa'\n",
304 | " else:\n",
305 | " phenot_abbr = 'normal'\n",
306 | "\n",
307 | " adata = sc.read_10x_mtx(dir+'/outs/filtered_feature_bc_matrix')\n",
308 | "\n",
309 | " adata.obs['sample'] = 'chen2022_'+sample\n",
310 | " adata.obs['patient'] = 'chen2022_'+sample\n",
311 | " adata.obs['celltype_orig'] = 'unknown'\n",
312 | " adata.obs['phenotype'] = phenot_abbr\n",
313 | " adata.obs['dataset'] = 'chen_2022'\n",
314 | "\n",
315 | " adata.obs_names = sample + '_' + adata.obs_names\n",
316 | " adata_list.append(adata)\n",
317 | "\n",
318 | "adata_concat = ad.concat(adata_list, join='outer', fill_value=0)\n",
319 | "adata_concat.obs\n",
320 | "adata_concat.write('sc-reference/chen_2022/adata_obj.h5ad')\n"
321 | ]
322 | },
323 | {
324 | "cell_type": "markdown",
325 | "metadata": {},
326 | "source": [
327 | "### Formatting Hirz et al. 2023 data "
328 | ]
329 | },
330 | {
331 | "cell_type": "code",
332 | "execution_count": null,
333 | "metadata": {},
334 | "outputs": [],
335 | "source": [
336 | "\n",
337 | "# Read the files into a list and concatenate them together\n",
338 | "# There are no counts for GSM5494349_SCG-PCA2-T-LG.count.csv\n",
339 | "# PCA24 samples were prepared differently and are therefore excluded\n",
340 | "\n",
341 | "sc_files = sorted(glob.glob('sc-reference/hirz_2023/*SCG*')) # SCG samples (tumor and adjacent normal)\n",
342 | "print(sc_files)\n",
343 | "print()\n",
344 | "hirz_annot = pd.read_csv('sc-reference/hirz_2023/GSE181294_scRNAseq.ano.csv',index_col=0)\n",
345 | "hirz_annot = hirz_annot.drop(columns=['sample'])\n",
346 | "hirz_annot = hirz_annot.rename(columns={'cells':'celltype_orig'})\n",
347 | "\n",
348 | "adata_list = []\n",
349 | "for f in sc_files:\n",
350 | " name_split = f.split('/')[-1].split('_')[-1].split('-')\n",
351 | " sample = ('_').join(name_split[1:3])\n",
352 | " patient = name_split[1]\n",
353 | " if name_split[2] == 'N':\n",
354 | " phenot_abbr = 'normal'\n",
355 | " else:\n",
356 | " phenot_abbr = 'PCa'\n",
357 | " \n",
358 | " adata = sc.read_csv(f,dtype=np.int16)\n",
359 | " adata = adata.T\n",
360 | "\n",
361 | " ####### Add metadata columns\n",
362 | " meta = adata.obs.copy()\n",
363 | " meta['sample'] = 'hirz_'+sample\n",
364 | " meta['patient'] = 'hirz_'+patient\n",
365 | " meta = meta.merge(hirz_annot,how='left',left_index=True,right_index=True,)\n",
366 | " meta['phenotype'] = phenot_abbr\n",
367 | " meta['dataset'] = 'hirz_2023'\n",
368 | "\n",
369 | " if (meta.index == adata.obs_names).all():\n",
370 | " adata.obs = meta.copy()\n",
371 | " print(sample)\n",
372 | " adata_list.append(adata)\n",
373 | "\n",
374 | "adata_concat = ad.concat(adata_list, join='outer', fill_value=0)\n",
375 | "\n",
376 | "adata_concat.obs\n",
377 | "adata_concat.write('sc-reference/hirz_2023/adata_obj.h5ad')"
378 | ]
379 | }
380 | ],
381 | "metadata": {
382 | "kernelspec": {
383 | "display_name": "Python 3",
384 | "language": "python",
385 | "name": "python3"
386 | },
387 | "language_info": {
388 | "codemirror_mode": {
389 | "name": "ipython",
390 | "version": 3
391 | },
392 | "file_extension": ".py",
393 | "mimetype": "text/x-python",
394 | "name": "python",
395 | "nbconvert_exporter": "python",
396 | "pygments_lexer": "ipython3",
397 | "version": "3.7.12"
398 | },
399 | "orig_nbformat": 4
400 | },
401 | "nbformat": 4,
402 | "nbformat_minor": 2
403 | }
404 |
--------------------------------------------------------------------------------
/spatial_analysis_environment.yml:
--------------------------------------------------------------------------------
1 | # packages in environment at /home/ak431480/.conda/envs/squidpy:
2 | #
3 | # Name Version Build Channel
4 | _libgcc_mutex 0.1 main
5 | _openmp_mutex 5.1 1_gnu
6 | _r-mutex 1.0.0 anacondar_1
7 | aiohttp 3.8.3 pypi_0 pypi
8 | aiosignal 1.3.1 pypi_0 pypi
9 | alabaster 0.7.12 pypi_0 pypi
10 | anndata 0.8.0 pypi_0 pypi
11 | anndata2ri 1.1 pypi_0 pypi
12 | annoy 1.17.1 pypi_0 pypi
13 | anyio 3.6.2 pypi_0 pypi
14 | appdirs 1.4.4 pypi_0 pypi
15 | argon2-cffi 21.3.0 pyhd8ed1ab_0 conda-forge
16 | argon2-cffi-bindings 21.2.0 py38h0a891b7_2 conda-forge
17 | asciitree 0.3.3 pypi_0 pypi
18 | astor 0.8.1 pypi_0 pypi
19 | asttokens 2.2.0 pypi_0 pypi
20 | async-timeout 4.0.2 pypi_0 pypi
21 | attrs 22.1.0 pypi_0 pypi
22 | autograd 1.6.2 pypi_0 pypi
23 | autograd-gamma 0.5.0 pypi_0 pypi
24 | babel 2.11.0 pypi_0 pypi
25 | backcall 0.2.0 pyh9f0ad1d_0 conda-forge
26 | backports 1.0 pyhd8ed1ab_3 conda-forge
27 | backports-zoneinfo 0.2.1 pypi_0 pypi
28 | backports.functools_lru_cache 1.6.4 pyhd8ed1ab_0 conda-forge
29 | beautifulsoup4 4.11.1 pyha770c72_0 conda-forge
30 | binutils_impl_linux-64 2.38 h2a08ee3_1
31 | binutils_linux-64 2.38.0 hc2dff05_0
32 | blas 1.1 openblas conda-forge
33 | bleach 5.0.1 pyhd8ed1ab_0 conda-forge
34 | blosc2 2.0.0 pypi_0 pypi
35 | build 0.9.0 pypi_0 pypi
36 | bwidget 1.9.14 ha770c72_1 conda-forge
37 | bzip2 1.0.8 h7f98852_4 conda-forge
38 | c-ares 1.18.1 h7f98852_0 conda-forge
39 | ca-certificates 2022.10.11 h06a4308_0
40 | cachey 0.2.1 pypi_0 pypi
41 | cairo 1.16.0 h18b612c_1001 conda-forge
42 | certifi 2022.9.24 py38h06a4308_0
43 | cffi 1.15.1 pypi_0 pypi
44 | charset-normalizer 2.1.1 pypi_0 pypi
45 | click 8.1.3 pypi_0 pypi
46 | cloudpickle 2.2.0 pypi_0 pypi
47 | cmake 3.25.0 pypi_0 pypi
48 | commonmark 0.9.1 pypi_0 pypi
49 | contourpy 1.0.6 pypi_0 pypi
50 | curl 7.86.0 h5eee18b_0
51 | cycler 0.11.0 pypi_0 pypi
52 | cython 0.29.33 pypi_0 pypi
53 | dask 2022.11.1 pypi_0 pypi
54 | dask-image 2022.9.0 pypi_0 pypi
55 | debugpy 1.6.4 pypi_0 pypi
56 | decorator 5.1.1 pyhd8ed1ab_0 conda-forge
57 | defusedxml 0.7.1 pyhd8ed1ab_0 conda-forge
58 | deprecated 1.2.13 pypi_0 pypi
59 | docrep 0.3.2 pypi_0 pypi
60 | docstring-parser 0.15 pypi_0 pypi
61 | docutils 0.19 pypi_0 pypi
62 | dunamai 1.15.0 pypi_0 pypi
63 | entrypoints 0.4 pyhd8ed1ab_0 conda-forge
64 | et-xmlfile 1.1.0 pypi_0 pypi
65 | executing 1.2.0 pyhd8ed1ab_0 conda-forge
66 | fasteners 0.18 pypi_0 pypi
67 | fbpca 1.0 pypi_0 pypi
68 | fcsparser 0.2.4 pypi_0 pypi
69 | flit-core 3.8.0 pyhd8ed1ab_0 conda-forge
70 | fontconfig 2.14.1 hef1e5e3_0
71 | fonttools 4.38.0 pypi_0 pypi
72 | formulaic 0.6.6 pypi_0 pypi
73 | freetype 2.10.4 h0708190_1 conda-forge
74 | freetype-py 2.3.0 pypi_0 pypi
75 | fribidi 1.0.10 h36c2ea0_0 conda-forge
76 | frozenlist 1.3.3 pypi_0 pypi
77 | fsspec 2022.11.0 pypi_0 pypi
78 | future 0.18.3 pypi_0 pypi
79 | gcc_impl_linux-64 11.2.0 h1234567_1
80 | gcc_linux-64 11.2.0 h5c386dc_0
81 | geosketch 1.2 pypi_0 pypi
82 | get-version 3.5.4 pypi_0 pypi
83 | gfortran_impl_linux-64 11.2.0 h1234567_1
84 | gfortran_linux-64 11.2.0 hc2dff05_0
85 | glib 2.69.1 he621ea3_2
86 | gprofiler-official 1.0.0 pypi_0 pypi
87 | graphite2 1.3.14 h295c915_1
88 | graphlib-backport 1.0.3 pypi_0 pypi
89 | gseapy 1.1.0 pypi_0 pypi
90 | gsva 1.0.6 pypi_0 pypi
91 | gxx_impl_linux-64 11.2.0 h1234567_1
92 | gxx_linux-64 11.2.0 hc2dff05_0
93 | h5py 3.7.0 pypi_0 pypi
94 | harfbuzz 4.3.0 hd55b92a_0
95 | heapdict 1.0.1 pypi_0 pypi
96 | hsluv 5.0.3 pypi_0 pypi
97 | icu 58.2 hf484d3e_1000 conda-forge
98 | idna 3.4 pypi_0 pypi
99 | igraph 0.10.2 pypi_0 pypi
100 | imageio 2.22.4 pypi_0 pypi
101 | imagesize 1.4.1 pypi_0 pypi
102 | importlib-metadata 5.1.0 pypi_0 pypi
103 | importlib-resources 5.10.0 pypi_0 pypi
104 | importlib_resources 5.10.2 pyhd8ed1ab_0 conda-forge
105 | inflect 6.0.2 pypi_0 pypi
106 | interface-meta 1.3.0 pypi_0 pypi
107 | intervaltree 2.1.0 pypi_0 pypi
108 | ipykernel 6.17.1 pypi_0 pypi
109 | ipython 8.7.0 pypi_0 pypi
110 | ipython-genutils 0.2.0 pypi_0 pypi
111 | ipython_genutils 0.2.0 py_1 conda-forge
112 | ipywidgets 8.0.2 pypi_0 pypi
113 | jedi 0.18.2 pyhd8ed1ab_0 conda-forge
114 | jinja2 3.1.2 pyhd8ed1ab_1 conda-forge
115 | joblib 1.2.0 pypi_0 pypi
116 | jpeg 9e h166bdaf_1 conda-forge
117 | jsonschema 4.17.3 pyhd8ed1ab_0 conda-forge
118 | jupyter 1.0.0 pypi_0 pypi
119 | jupyter-client 7.4.7 pypi_0 pypi
120 | jupyter-console 6.4.4 pypi_0 pypi
121 | jupyter-core 5.1.0 pypi_0 pypi
122 | jupyter-server 1.23.3 pypi_0 pypi
123 | jupyter_client 7.0.6 pyhd8ed1ab_0 conda-forge
124 | jupyter_core 5.1.3 py38h578d9bd_0 conda-forge
125 | jupyterlab-widgets 3.0.3 pypi_0 pypi
126 | jupyterlab_pygments 0.2.2 pyhd8ed1ab_0 conda-forge
127 | kernel-headers_linux-64 2.6.32 he073ed8_15 conda-forge
128 | keyutils 1.6.1 h166bdaf_0 conda-forge
129 | kiwisolver 1.4.4 pypi_0 pypi
130 | krb5 1.19.3 h3790be6_0 conda-forge
131 | ld_impl_linux-64 2.38 h1181459_1
132 | leidenalg 0.9.0 pypi_0 pypi
133 | lerc 3.0 h295c915_0
134 | libcurl 7.86.0 h91b91d3_0
135 | libdeflate 1.8 h7f8727e_5
136 | libedit 3.1.20191231 he28a2e2_2 conda-forge
137 | libev 4.33 h516909a_1 conda-forge
138 | libffi 3.4.2 h6a678d5_6
139 | libgcc-devel_linux-64 11.2.0 h1234567_1
140 | libgcc-ng 11.2.0 h1234567_1
141 | libgfortran-ng 11.2.0 h00389a5_1
142 | libgfortran5 11.2.0 h1234567_1
143 | libgomp 11.2.0 h1234567_1
144 | libnghttp2 1.46.0 hce63b2e_0
145 | libopenblas 0.3.20 pthreads_h78a6416_0 conda-forge
146 | libpng 1.6.37 hbc83047_0
147 | libsodium 1.0.18 h36c2ea0_1 conda-forge
148 | libssh2 1.10.0 ha56f1ee_2 conda-forge
149 | libstdcxx-devel_linux-64 11.2.0 h1234567_1
150 | libstdcxx-ng 11.2.0 h1234567_1
151 | libtiff 4.4.0 hecacb30_2
152 | libuuid 2.32.1 h7f98852_1000 conda-forge
153 | libwebp-base 1.2.4 h5eee18b_0
154 | libxcb 1.13 h7f98852_1004 conda-forge
155 | libxml2 2.9.14 h74e7548_0
156 | lifelines 0.27.8 pypi_0 pypi
157 | llvmlite 0.38.1 pypi_0 pypi
158 | locket 1.0.0 pypi_0 pypi
159 | louvain 0.8.0 pypi_0 pypi
160 | lz4-c 1.9.3 h9c3ff4c_1 conda-forge
161 | magicgui 0.6.1 pypi_0 pypi
162 | make 4.3 hd18ef5c_1 conda-forge
163 | markupsafe 2.1.1 py38h0a891b7_1 conda-forge
164 | matplotlib 3.6.2 pypi_0 pypi
165 | matplotlib-inline 0.1.6 pyhd8ed1ab_0 conda-forge
166 | matplotlib-scalebar 0.8.1 pypi_0 pypi
167 | matplotlib-venn 0.11.9 pypi_0 pypi
168 | mistune 2.0.4 pyhd8ed1ab_0 conda-forge
169 | msgpack 1.0.4 pypi_0 pypi
170 | multidict 6.0.2 pypi_0 pypi
171 | napari 0.4.15 pypi_0 pypi
172 | napari-console 0.0.6 pypi_0 pypi
173 | napari-plugin-engine 0.2.0 pypi_0 pypi
174 | napari-svg 0.1.6 pypi_0 pypi
175 | natsort 8.2.0 pypi_0 pypi
176 | nbclassic 0.4.8 pypi_0 pypi
177 | nbclient 0.7.2 pyhd8ed1ab_0 conda-forge
178 | nbconvert 7.2.5 pypi_0 pypi
179 | nbconvert-core 7.2.8 pyhd8ed1ab_0 conda-forge
180 | nbconvert-pandoc 7.2.8 pyhd8ed1ab_0 conda-forge
181 | nbformat 5.7.0 pypi_0 pypi
182 | ncls 0.0.66 pypi_0 pypi
183 | ncurses 6.3 h5eee18b_3
184 | nest-asyncio 1.5.6 pyhd8ed1ab_0 conda-forge
185 | networkx 2.8.8 pypi_0 pypi
186 | notebook 6.5.2 pypi_0 pypi
187 | notebook-shim 0.2.2 pypi_0 pypi
188 | npe2 0.6.1 pypi_0 pypi
189 | numba 0.55.2 pypi_0 pypi
190 | numcodecs 0.10.2 pypi_0 pypi
191 | numexpr 2.8.4 pypi_0 pypi
192 | numpy 1.22.4 pypi_0 pypi
193 | numpydoc 1.5.0 pypi_0 pypi
194 | omnipath 1.0.5 pypi_0 pypi
195 | openblas 0.3.20 pthreads_h320a7e8_0 conda-forge
196 | openpyxl 3.1.2 pypi_0 pypi
197 | openssl 1.1.1s h7f8727e_0
198 | packaging 21.3 pypi_0 pypi
199 | palantir 1.0.1 pypi_0 pypi
200 | pandas 1.5.2 pypi_0 pypi
201 | pandoc 2.12 h06a4308_1
202 | pandocfilters 1.5.0 pyhd8ed1ab_0 conda-forge
203 | pango 1.50.7 h05da053_0
204 | parso 0.8.3 pyhd8ed1ab_0 conda-forge
205 | partd 1.3.0 pypi_0 pypi
206 | patsy 0.5.3 pypi_0 pypi
207 | pcre 8.45 h9c3ff4c_0 conda-forge
208 | pcre2 10.37 h032f7d1_0 conda-forge
209 | pep517 0.13.0 pypi_0 pypi
210 | pexpect 4.8.0 pyh1a96a4e_2 conda-forge
211 | phenograph 1.5.7 pypi_0 pypi
212 | pickleshare 0.7.5 py_1003 conda-forge
213 | pillow 9.3.0 pypi_0 pypi
214 | pims 0.6.1 pypi_0 pypi
215 | pint 0.20.1 pypi_0 pypi
216 | pip 22.2.2 py38h06a4308_0
217 | pixman 0.38.0 h516909a_1003 conda-forge
218 | pkgutil-resolve-name 1.3.10 pyhd8ed1ab_0 conda-forge
219 | platformdirs 2.5.4 pypi_0 pypi
220 | prometheus_client 0.15.0 pyhd8ed1ab_0 conda-forge
221 | prompt-toolkit 3.0.33 pypi_0 pypi
222 | psutil 5.9.4 pypi_0 pypi
223 | psygnal 0.6.1 pypi_0 pypi
224 | pthread-stubs 0.4 h36c2ea0_1001 conda-forge
225 | ptyprocess 0.7.0 pyhd3deb0d_0 conda-forge
226 | pure_eval 0.2.2 pyhd8ed1ab_0 conda-forge
227 | py-cpuinfo 9.0.0 pypi_0 pypi
228 | pycparser 2.21 pyhd8ed1ab_0 conda-forge
229 | pydantic 1.10.2 pypi_0 pypi
230 | pydot 1.4.2 pypi_0 pypi
231 | pygments 2.13.0 pypi_0 pypi
232 | pynndescent 0.5.8 pypi_0 pypi
233 | pyopengl 3.1.6 pypi_0 pypi
234 | pyparsing 3.0.9 pypi_0 pypi
235 | pyqt5 5.15.7 pypi_0 pypi
236 | pyqt5-qt5 5.15.2 pypi_0 pypi
237 | pyqt5-sip 12.11.0 pypi_0 pypi
238 | pyranges 0.0.120 pypi_0 pypi
239 | pyrle 0.0.35 pypi_0 pypi
240 | pyrsistent 0.19.2 pypi_0 pypi
241 | python 3.8.15 h7a1cb2a_2
242 | python-dateutil 2.8.2 pyhd8ed1ab_0 conda-forge
243 | python-fastjsonschema 2.16.2 pyhd8ed1ab_0 conda-forge
244 | python_abi 3.8 2_cp38 conda-forge
245 | pytomlpp 1.0.11 pypi_0 pypi
246 | pytz 2022.6 pypi_0 pypi
247 | pytz-deprecation-shim 0.1.0.post0 pypi_0 pypi
248 | pywavelets 1.4.1 pypi_0 pypi
249 | pyyaml 6.0 pypi_0 pypi
250 | pyzmq 24.0.1 pypi_0 pypi
251 | qtconsole 5.4.0 pypi_0 pypi
252 | qtpy 2.3.0 pypi_0 pypi
253 | r-askpass 1.1 r42h76d94ec_0
254 | r-assertthat 0.2.1 r42hc72bb7e_3 conda-forge
255 | r-backports 1.4.1 r42h76d94ec_0
256 | r-base 4.2.0 h1ae530e_0
257 | r-base64enc 0.1_3 r42h76d94ec_4
258 | r-bh 1.78.0_0 r42hc72bb7e_1 conda-forge
259 | r-bit 4.0.4 r42h76d94ec_0
260 | r-bit64 4.0.5 r42h76d94ec_0
261 | r-blob 1.2.3 r42hc72bb7e_1 conda-forge
262 | r-boot 1.3_28.1 r42hc72bb7e_0 conda-forge
263 | r-bslib 0.3.1 r42h6115d3f_0
264 | r-cachem 1.0.6 r42h76d94ec_0
265 | r-callr 3.7.3 r42hc72bb7e_0 conda-forge
266 | r-cellranger 1.1.0 r42hc72bb7e_1005 conda-forge
267 | r-class 7.3_20 r42h76d94ec_0
268 | r-cli 3.3.0 r42h884c59f_0
269 | r-clipr 0.8.0 r42hc72bb7e_1 conda-forge
270 | r-cluster 2.1.3 r42h640688f_0
271 | r-codetools 0.2_18 r42hc72bb7e_1 conda-forge
272 | r-colorspace 2.0_3 r42h76d94ec_0
273 | r-commonmark 1.8.0 r42h76d94ec_0
274 | r-cpp11 0.4.3 r42hc72bb7e_0 conda-forge
275 | r-crayon 1.5.2 r42hc72bb7e_1 conda-forge
276 | r-crul 1.3 r42h785f33e_1 conda-forge
277 | r-curl 4.3.2 r42h76d94ec_0
278 | r-data.table 1.14.2 r42h76d94ec_0
279 | r-dbi 1.1.3 r42hc72bb7e_1 conda-forge
280 | r-dbplyr 2.2.1 r42hc72bb7e_1 conda-forge
281 | r-digest 0.6.29 r42h884c59f_0
282 | r-dplyr 1.0.9 r42h884c59f_0
283 | r-dqrng 0.3.0 r42h884c59f_0
284 | r-dtplyr 1.2.2 r42hc72bb7e_2 conda-forge
285 | r-e1071 1.7_9 r42h884c59f_0
286 | r-ellipsis 0.3.2 r42h76d94ec_0
287 | r-evaluate 0.19 r42hc72bb7e_0 conda-forge
288 | r-fansi 1.0.3 r42h76d94ec_0
289 | r-farver 2.1.0 r42h884c59f_0
290 | r-fastmap 1.1.0 r42h884c59f_0
291 | r-fontawesome 0.4.0 r42hc72bb7e_0 conda-forge
292 | r-forcats 0.5.2 r42hc72bb7e_1 conda-forge
293 | r-foreach 1.5.2 r42hc72bb7e_1 conda-forge
294 | r-foreign 0.8_82 r42h76d94ec_0
295 | r-formatr 1.13 r42hc72bb7e_0 conda-forge
296 | r-fs 1.5.2 r42h884c59f_0
297 | r-future 1.30.0 r42hc72bb7e_0 conda-forge
298 | r-future.apply 1.10.0 r42hc72bb7e_0 conda-forge
299 | r-gargle 1.2.1 r42hc72bb7e_1 conda-forge
300 | r-generics 0.1.3 r42hc72bb7e_1 conda-forge
301 | r-gistr 0.9.0 r42hc72bb7e_1 conda-forge
302 | r-globals 0.16.2 r42hc72bb7e_0 conda-forge
303 | r-glue 1.6.2 r42h76d94ec_0
304 | r-googledrive 2.0.0 r42hc72bb7e_1 conda-forge
305 | r-googlesheets4 1.0.1 r42h785f33e_1 conda-forge
306 | r-gower 1.0.0 r42h76d94ec_0
307 | r-gtable 0.3.1 r42hc72bb7e_1 conda-forge
308 | r-hardhat 1.2.0 r42hc72bb7e_1 conda-forge
309 | r-haven 2.5.0 r42h884c59f_0
310 | r-hexbin 1.28.2 r42h640688f_0
311 | r-highr 0.10 r42hc72bb7e_0 conda-forge
312 | r-hms 1.1.2 r42hc72bb7e_1 conda-forge
313 | r-htmltools 0.5.2 r42h76d94ec_0
314 | r-htmlwidgets 1.5.4 r42h6115d3f_0
315 | r-httpcode 0.3.0 r42ha770c72_2 conda-forge
316 | r-httpuv 1.6.5 r42h884c59f_0
317 | r-httr 1.4.4 r42hc72bb7e_1 conda-forge
318 | r-ids 1.0.1 r42hc72bb7e_2 conda-forge
319 | r-irdisplay 1.1 r42hd8ed1ab_1 conda-forge
320 | r-irkernel 1.3.1 r42h785f33e_0 conda-forge
321 | r-isoband 0.2.5 r42h884c59f_0
322 | r-iterators 1.0.14 r42hc72bb7e_1 conda-forge
323 | r-jquerylib 0.1.4 r42hc72bb7e_1 conda-forge
324 | r-jsonlite 1.8.0 r42h76d94ec_0
325 | r-kernsmooth 2.23_20 r42h640688f_0
326 | r-knitr 1.41 r42hc72bb7e_0 conda-forge
327 | r-labeling 0.4.2 r42hc72bb7e_2 conda-forge
328 | r-later 1.3.0 r42h884c59f_0
329 | r-lattice 0.20_45 r42h76d94ec_0
330 | r-lazyeval 0.2.2 r42h76d94ec_0
331 | r-lifecycle 1.0.1 r42h142f84f_0
332 | r-listenv 0.9.0 r42hc72bb7e_0 conda-forge
333 | r-lobstr 1.1.1 r42h884c59f_0
334 | r-lubridate 1.8.0 r42h884c59f_0
335 | r-magrittr 2.0.3 r42h76d94ec_0
336 | r-maps 3.4.0 r42h76d94ec_0
337 | r-mass 7.3_57 r42h76d94ec_0
338 | r-mime 0.12 r42h76d94ec_0
339 | r-modelmetrics 1.2.2.2 r42h884c59f_0
340 | r-munsell 0.5.0 r42hc72bb7e_1005 conda-forge
341 | r-nlme 3.1_157 r42h640688f_0
342 | r-nnet 7.3_17 r42h76d94ec_0
343 | r-numderiv 2016.8_1.1 r42hc72bb7e_4 conda-forge
344 | r-openssl 2.0.2 r42h76d94ec_0
345 | r-parallelly 1.34.0 r42hc72bb7e_0 conda-forge
346 | r-pbdzmq 0.3_7 r42h884c59f_0
347 | r-pillar 1.8.1 r42hc72bb7e_1 conda-forge
348 | r-pkgconfig 2.0.3 r42hc72bb7e_2 conda-forge
349 | r-plyr 1.8.7 r42h884c59f_0
350 | r-prettyunits 1.1.1 r42hc72bb7e_2 conda-forge
351 | r-proc 1.18.0 r42h884c59f_0
352 | r-processx 3.5.3 r42h76d94ec_0
353 | r-progress 1.2.2 r42hc72bb7e_3 conda-forge
354 | r-progressr 0.13.0 r42hc72bb7e_0 conda-forge
355 | r-promises 1.2.0.1 r42h884c59f_0
356 | r-proxy 0.4_26 r42h76d94ec_0
357 | r-pryr 0.1.5 r42h884c59f_0
358 | r-ps 1.7.0 r42h76d94ec_0
359 | r-purrr 0.3.4 r42h76d94ec_0
360 | r-quantmod 0.4.20 r42hc72bb7e_1 conda-forge
361 | r-r6 2.5.1 r42hc72bb7e_1 conda-forge
362 | r-randomforest 4.7_1.1 r42h640688f_0
363 | r-rappdirs 0.3.3 r42h76d94ec_0
364 | r-rbokeh 0.5.2 r42hc72bb7e_2 conda-forge
365 | r-rcolorbrewer 1.1_3 r42h785f33e_1 conda-forge
366 | r-rcpp 1.0.8.3 r42h884c59f_0
367 | r-readr 2.1.2 r42h884c59f_0
368 | r-readxl 1.4.0 r42h884c59f_0
369 | r-rematch 1.0.1 r42hc72bb7e_1005 conda-forge
370 | r-rematch2 2.1.2 r42hc72bb7e_2 conda-forge
371 | r-repr 1.1.5 r42h785f33e_0 conda-forge
372 | r-reprex 2.0.2 r42hc72bb7e_1 conda-forge
373 | r-reshape2 1.4.4 r42h884c59f_0
374 | r-rlang 1.0.2 r42h884c59f_0
375 | r-rmarkdown 2.19 r42hc72bb7e_0 conda-forge
376 | r-rpart 4.1.16 r42h76d94ec_0
377 | r-rstudioapi 0.14 r42hc72bb7e_1 conda-forge
378 | r-rvest 1.0.3 r42hc72bb7e_1 conda-forge
379 | r-sass 0.4.1 r42h884c59f_0
380 | r-scales 1.2.1 r42hc72bb7e_1 conda-forge
381 | r-selectr 0.4_2 r42hc72bb7e_2 conda-forge
382 | r-shape 1.4.6 r42ha770c72_1 conda-forge
383 | r-shiny 1.7.4 r42h785f33e_0 conda-forge
384 | r-sitmo 2.0.2 r42h884c59f_0
385 | r-sourcetools 0.1.7 r42h884c59f_0
386 | r-spatial 7.3_15 r42h76d94ec_0
387 | r-squarem 2021.1 r42hc72bb7e_1 conda-forge
388 | r-stringi 1.7.6 r42h884c59f_0
389 | r-stringr 1.4.0 r42h6115d3f_0
390 | r-sys 3.4 r42h76d94ec_0
391 | r-tibble 3.1.7 r42h76d94ec_0
392 | r-tidyr 1.2.0 r42h884c59f_0
393 | r-tidyselect 1.1.2 r42h142f84f_0
394 | r-timedate 4022.108 r42hc72bb7e_0 conda-forge
395 | r-tinytex 0.43 r42hc72bb7e_0 conda-forge
396 | r-triebeard 0.3.0 r42h884c59f_0
397 | r-ttr 0.24.3 r42h76d94ec_0
398 | r-tzdb 0.3.0 r42h884c59f_0
399 | r-urltools 1.7.3 r42h884c59f_0
400 | r-utf8 1.2.2 r42h76d94ec_0
401 | r-uuid 1.1_0 r42h76d94ec_0
402 | r-vctrs 0.4.1 r42h884c59f_0
403 | r-viridislite 0.4.1 r42hc72bb7e_1 conda-forge
404 | r-vroom 1.5.7 r42h884c59f_0
405 | r-withr 2.5.0 r42hc72bb7e_1 conda-forge
406 | r-xfun 0.31 r42h76d94ec_0
407 | r-xml2 1.3.3 r42h884c59f_0
408 | r-xtable 1.8_4 r42hc72bb7e_4 conda-forge
409 | r-xts 0.12.1 r42h76d94ec_0
410 | r-yaml 2.3.5 r42h76d94ec_0
411 | r-zoo 1.8_10 r42h76d94ec_0
412 | readline 8.2 h5eee18b_0
413 | requests 2.28.1 pypi_0 pypi
414 | rich 12.6.0 pypi_0 pypi
415 | rpy2 3.5.8 pypi_0 pypi
416 | scanorama 1.7 pypi_0 pypi
417 | scanpy 1.9.1 pypi_0 pypi
418 | scib 1.1.1 pypi_0 pypi
419 | scikit-image 0.19.3 pypi_0 pypi
420 | scikit-learn 1.1.3 pypi_0 pypi
421 | scikit-posthocs 0.8.0 pypi_0 pypi
422 | scipy 1.9.3 pypi_0 pypi
423 | scrublet 0.2.3 pypi_0 pypi
424 | seaborn 0.12.1 pypi_0 pypi
425 | seacells 0.3.2 pypi_0 pypi
426 | send2trash 1.8.0 pyhd8ed1ab_0 conda-forge
427 | session-info 1.0.0 pypi_0 pypi
428 | setuptools 65.5.0 py38h06a4308_0
429 | six 1.16.0 pyh6c4a22f_0 conda-forge
430 | slicerator 1.1.0 pypi_0 pypi
431 | sniffio 1.3.0 pypi_0 pypi
432 | snowballstemmer 2.2.0 pypi_0 pypi
433 | sorted-nearest 0.0.38 pypi_0 pypi
434 | sortedcontainers 2.4.0 pypi_0 pypi
435 | soupsieve 2.3.2.post1 pyhd8ed1ab_0 conda-forge
436 | sphinx 5.3.0 pypi_0 pypi
437 | sphinxcontrib-applehelp 1.0.2 pypi_0 pypi
438 | sphinxcontrib-devhelp 1.0.2 pypi_0 pypi
439 | sphinxcontrib-htmlhelp 2.0.0 pypi_0 pypi
440 | sphinxcontrib-jsmath 1.0.1 pypi_0 pypi
441 | sphinxcontrib-qthelp 1.0.3 pypi_0 pypi
442 | sphinxcontrib-serializinghtml 1.1.5 pypi_0 pypi
443 | sqlite 3.40.0 h5082296_0
444 | squidpy 1.2.3 pypi_0 pypi
445 | stack_data 0.6.2 pyhd8ed1ab_0 conda-forge
446 | statsmodels 0.13.5 pypi_0 pypi
447 | stdlib-list 0.8.0 pypi_0 pypi
448 | superqt 0.4.0 pypi_0 pypi
449 | sysroot_linux-64 2.12 he073ed8_15 conda-forge
450 | tables 3.8.0 pypi_0 pypi
451 | tabulate 0.9.0 pypi_0 pypi
452 | terminado 0.17.0 pypi_0 pypi
453 | texttable 1.6.7 pypi_0 pypi
454 | threadpoolctl 3.1.0 pypi_0 pypi
455 | tifffile 2022.10.10 pypi_0 pypi
456 | tinycss2 1.2.1 pyhd8ed1ab_0 conda-forge
457 | tk 8.6.12 h1ccaba5_0
458 | tktable 2.10 hb7b940f_3 conda-forge
459 | tomli 2.0.1 pypi_0 pypi
460 | toolz 0.12.0 pypi_0 pypi
461 | tornado 6.2 pypi_0 pypi
462 | tqdm 4.64.1 pypi_0 pypi
463 | traitlets 5.6.0 pypi_0 pypi
464 | typer 0.7.0 pypi_0 pypi
465 | typing-extensions 4.4.0 hd8ed1ab_0 conda-forge
466 | typing_extensions 4.4.0 pyha770c72_0 conda-forge
467 | tzdata 2022.7 pypi_0 pypi
468 | tzlocal 4.2 pypi_0 pypi
469 | umap-learn 0.5.3 pypi_0 pypi
470 | urllib3 1.26.13 pypi_0 pypi
471 | validators 0.20.0 pypi_0 pypi
472 | venn 0.1.3 pypi_0 pypi
473 | vispy 0.12.1 pypi_0 pypi
474 | wcwidth 0.2.5 pypi_0 pypi
475 | webencodings 0.5.1 pypi_0 pypi
476 | websocket-client 1.4.2 pypi_0 pypi
477 | wheel 0.37.1 pyhd3eb1b0_0
478 | widgetsnbextension 4.0.3 pypi_0 pypi
479 | wrapt 1.14.1 pypi_0 pypi
480 | xarray 2022.11.0 pypi_0 pypi
481 | xgboost 2.0.0 pypi_0 pypi
482 | xlsxwriter 3.1.7 pypi_0 pypi
483 | xorg-kbproto 1.0.7 h7f98852_1002 conda-forge
484 | xorg-libice 1.0.10 h7f98852_0 conda-forge
485 | xorg-libsm 1.2.3 hd9c2040_1000 conda-forge
486 | xorg-libx11 1.7.2 h7f98852_0 conda-forge
487 | xorg-libxau 1.0.9 h7f98852_0 conda-forge
488 | xorg-libxdmcp 1.1.3 h7f98852_0 conda-forge
489 | xorg-libxext 1.3.4 h7f98852_1 conda-forge
490 | xorg-libxrender 0.9.10 h7f98852_1003 conda-forge
491 | xorg-renderproto 0.11.1 h7f98852_1002 conda-forge
492 | xorg-xextproto 7.3.0 h7f98852_1002 conda-forge
493 | xorg-xproto 7.0.31 h7f98852_1007 conda-forge
494 | xz 5.2.6 h5eee18b_0
495 | yarl 1.8.1 pypi_0 pypi
496 | zarr 2.13.3 pypi_0 pypi
497 | zeromq 4.3.4 h9c3ff4c_1 conda-forge
498 | zipp 3.11.0 pyhd8ed1ab_0 conda-forge
499 | zlib 1.2.13 h5eee18b_0
500 | zstd 1.5.2 ha4553b6_0
501 |
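502 | # Note: the listing above has the layout of `conda list` output rather than an
503 | # importable environment spec. A minimal sketch for rebuilding a comparable
504 | # environment from the key versions shown above (an assumption for illustration,
505 | # not the authors' documented install procedure):
506 | #
507 | #   conda create -n squidpy python=3.8.15
508 | #   conda activate squidpy
509 | #   pip install scanpy==1.9.1 squidpy==1.2.3 anndata==0.8.0 scikit-learn==1.1.3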
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 | <one line to give the program's name and a brief idea of what it does.>
635 | Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/single_cell_analysis_environment.yml:
--------------------------------------------------------------------------------
1 | # packages in environment at /home/ak431480/.conda/envs/scib-pipeline-R4.0:
2 | #
3 | # Name Version Build Channel
4 | _libgcc_mutex 0.1 conda_forge conda-forge
5 | _openmp_mutex 4.5 2_kmp_llvm conda-forge
6 | _r-mutex 1.0.1 anacondar_1 conda-forge
7 | absl-py 1.4.0 pyhd8ed1ab_0 conda-forge
8 | adjusttext 0.7.3 pypi_0 pypi
9 | aioeasywebdav 2.4.0 py37h89c1867_1001 conda-forge
10 | aiohttp 3.8.3 py37h540881e_0 conda-forge
11 | aiosignal 1.3.1 pyhd8ed1ab_0 conda-forge
12 | amply 0.1.5 pyhd8ed1ab_0 conda-forge
13 | anndata 0.8.0 pyhd8ed1ab_1 conda-forge
14 | anndata2ri 1.1 pyhdfd78af_0 bioconda
15 | anyio 3.6.2 pyhd8ed1ab_0 conda-forge
16 | appdirs 1.4.4 pyh9f0ad1d_0 conda-forge
17 | argon2-cffi 21.3.0 pyhd8ed1ab_0 conda-forge
18 | argon2-cffi-bindings 21.2.0 py37h540881e_2 conda-forge
19 | async-timeout 4.0.2 pyhd8ed1ab_0 conda-forge
20 | asynctest 0.13.0 py_0 conda-forge
21 | atk-1.0 2.38.0 hd4edc92_1 conda-forge
22 | attmap 0.13.2 pyhd8ed1ab_0 conda-forge
23 | attrs 22.2.0 pyh71513ae_0 conda-forge
24 | backcall 0.2.0 pyh9f0ad1d_0 conda-forge
25 | backports 1.0 pyhd8ed1ab_3 conda-forge
26 | backports.functools_lru_cache 1.6.4 pyhd8ed1ab_0 conda-forge
27 | backports.zoneinfo 0.2.1 py37h540881e_5 conda-forge
28 | bbknn 1.3.9 py37h8f50634_1 bioconda
29 | bcrypt 3.2.2 py37h540881e_0 conda-forge
30 | beautifulsoup4 4.11.1 pyha770c72_0 conda-forge
31 | binutils_impl_linux-64 2.39 he00db2b_1 conda-forge
32 | bioconductor-beachmat 2.6.4 r40h399db7b_1 bioconda
33 | bioconductor-biobase 2.50.0 r40hd029910_1 bioconda
34 | bioconductor-biocgenerics 0.36.0 r40hdfd78af_1 bioconda
35 | bioconductor-biocneighbors 1.8.2 r40h399db7b_0 bioconda
36 | bioconductor-biocparallel 1.24.1 r40h399db7b_0 bioconda
37 | bioconductor-biocsingular 1.6.0 r40h399db7b_1 bioconda
38 | bioconductor-bluster 1.0.0 r40h399db7b_2 bioconda
39 | bioconductor-delayedarray 0.16.3 r40hd029910_0 bioconda
40 | bioconductor-delayedmatrixstats 1.12.3 r40hdfd78af_0 bioconda
41 | bioconductor-edger 3.32.1 r40h399db7b_0 bioconda
42 | bioconductor-genomeinfodb 1.26.4 r40hdfd78af_0 bioconda
43 | bioconductor-genomeinfodbdata 1.2.4 r40hdfd78af_2 bioconda
44 | bioconductor-genomicranges 1.42.0 r40hd029910_1 bioconda
45 | bioconductor-hdf5array 1.18.1 r40hd029910_0 bioconda
46 | bioconductor-iranges 2.24.1 r40hd029910_0 bioconda
47 | bioconductor-limma 3.46.0 r40hd029910_1 bioconda
48 | bioconductor-matrixgenerics 1.2.1 r40hdfd78af_0 bioconda
49 | bioconductor-rhdf5 2.34.0 r40h399db7b_1 bioconda
50 | bioconductor-rhdf5filters 1.2.0 r40h399db7b_1 bioconda
51 | bioconductor-rhdf5lib 1.12.1 r40hd029910_0 bioconda
52 | bioconductor-s4vectors 0.28.1 r40hd029910_0 bioconda
53 | bioconductor-scater 1.18.6 r40hdfd78af_0 bioconda
54 | bioconductor-scran 1.18.5 r40h399db7b_0 bioconda
55 | bioconductor-scuttle 1.0.4 r40h399db7b_0 bioconda
56 | bioconductor-singlecellexperiment 1.12.0 r40hdfd78af_1 bioconda
57 | bioconductor-sparsematrixstats 1.2.1 r40h399db7b_0 bioconda
58 | bioconductor-summarizedexperiment 1.20.0 r40hdfd78af_1 bioconda
59 | bioconductor-xvector 0.30.0 r40hd029910_1 bioconda
60 | bioconductor-zlibbioc 1.36.0 r40hd029910_1 bioconda
61 | bleach 5.0.1 pyhd8ed1ab_0 conda-forge
62 | blinker 1.5 pyhd8ed1ab_0 conda-forge
63 | boto3 1.26.52 pyhd8ed1ab_0 conda-forge
64 | botocore 1.29.52 pyhd8ed1ab_0 conda-forge
65 | brotli 1.0.9 h166bdaf_8 conda-forge
66 | brotli-bin 1.0.9 h166bdaf_8 conda-forge
67 | brotlipy 0.7.0 py37h540881e_1004 conda-forge
68 | bwidget 1.9.14 ha770c72_1 conda-forge
69 | bzip2 1.0.8 h7f98852_4 conda-forge
70 | c-ares 1.18.1 h7f98852_0 conda-forge
71 | ca-certificates 2022.12.7 ha878542_0 conda-forge
72 | cached-property 1.5.2 hd8ed1ab_1 conda-forge
73 | cached_property 1.5.2 pyha770c72_1 conda-forge
74 | cachetools 4.2.4 pypi_0 pypi
75 | cairo 1.16.0 ha61ee94_1014 conda-forge
76 | certifi 2022.12.7 pyhd8ed1ab_0 conda-forge
77 | cffi 1.15.1 py37h43b0acd_1 conda-forge
78 | charset-normalizer 2.1.1 pyhd8ed1ab_0 conda-forge
79 | chex 0.1.5 pyhd8ed1ab_0 conda-forge
80 | click 8.1.3 py37h89c1867_0 conda-forge
81 | coin-or-cbc 2.10.8 h3786ebc_0 conda-forge
82 | coin-or-cgl 0.60.6 h6f57e76_2 conda-forge
83 | coin-or-clp 1.17.7 hc56784d_2 conda-forge
84 | coin-or-osi 0.108.7 h2720bb7_2 conda-forge
85 | coin-or-utils 2.11.6 h202d8b1_2 conda-forge
86 | coincbc 2.10.8 0_metapackage conda-forge
87 | colorama 0.4.6 pyhd8ed1ab_0 conda-forge
88 | commonmark 0.9.1 py_0 conda-forge
89 | configargparse 1.5.3 pyhd8ed1ab_0 conda-forge
90 | connection_pool 0.0.3 pyhd3deb0d_0 conda-forge
91 | cryptography 38.0.2 py37h5994e8b_1 conda-forge
92 | curl 7.86.0 h2283fc2_1 conda-forge
93 | cycler 0.11.0 pyhd8ed1ab_0 conda-forge
94 | cython 0.29.33 pypi_0 pypi
95 | dataclasses 0.8 pyhc8e2a94_3 conda-forge
96 | datrie 0.8.2 py37h540881e_4 conda-forge
97 | debugpy 1.6.3 py37hd23a5d3_0 conda-forge
98 | decorator 5.1.1 pyhd8ed1ab_0 conda-forge
99 | defusedxml 0.7.1 pyhd8ed1ab_0 conda-forge
100 | deprecated 1.2.13 pypi_0 pypi
101 | dm-tree 0.1.7 py37hfb7772e_0 conda-forge
102 | docrep 0.3.2 pyh44b312d_0 conda-forge
103 | docutils 0.19 py37h89c1867_0 conda-forge
104 | dpath 2.0.6 py37h89c1867_1 conda-forge
105 | dropbox 11.36.0 pyhd8ed1ab_0 conda-forge
106 | dunamai 1.15.0 pyhd8ed1ab_0 conda-forge
107 | entrypoints 0.4 pyhd8ed1ab_0 conda-forge
108 | et_xmlfile 1.0.1 py_1001 conda-forge
109 | exceptiongroup 1.1.0 pyhd8ed1ab_0 conda-forge
110 | expat 2.5.0 h27087fc_0 conda-forge
111 | fbpca 1.0 py_0 conda-forge
112 | filechunkio 1.8 py_2 conda-forge
113 | filelock 3.9.0 pyhd8ed1ab_0 conda-forge
114 | flax 0.5.0 pyhd8ed1ab_0 conda-forge
115 | flit-core 3.8.0 pyhd8ed1ab_0 conda-forge
116 | font-ttf-dejavu-sans-mono 2.37 hab24e00_0 conda-forge
117 | font-ttf-inconsolata 3.000 h77eed37_0 conda-forge
118 | font-ttf-source-code-pro 2.038 h77eed37_0 conda-forge
119 | font-ttf-ubuntu 0.83 hab24e00_0 conda-forge
120 | fontconfig 2.14.1 hc2a2eb6_0 conda-forge
121 | fonts-conda-ecosystem 1 0 conda-forge
122 | fonts-conda-forge 1 0 conda-forge
123 | fonttools 4.38.0 py37h540881e_0 conda-forge
124 | freetype 2.12.1 hca18f0e_1 conda-forge
125 | fribidi 1.0.10 h36c2ea0_0 conda-forge
126 | frozenlist 1.3.1 py37h540881e_0 conda-forge
127 | fsspec 2022.11.0 pyhd8ed1ab_0 conda-forge
128 | ftputil 5.0.4 pyhd8ed1ab_0 conda-forge
129 | future 0.18.2 py37h89c1867_5 conda-forge
130 | future-fstrings 1.2.0 pypi_0 pypi
131 | gcc 12.2.0 h26027b1_11 conda-forge
132 | gcc_impl_linux-64 12.2.0 hcc96c02_19 conda-forge
133 | gdk-pixbuf 2.42.8 hff1cb4f_1 conda-forge
134 | geos 3.11.0 h27087fc_0 conda-forge
135 | geosketch 1.2 py_0 bioconda
136 | get_version 3.5.4 pyhd8ed1ab_0 conda-forge
137 | gettext 0.21.1 h27087fc_0 conda-forge
138 | gfortran_impl_linux-64 12.2.0 h55be85b_19 conda-forge
139 | giflib 5.2.1 h36c2ea0_2 conda-forge
140 | git 2.39.1 pl5321h693f4a3_0 conda-forge
141 | gitdb 4.0.10 pyhd8ed1ab_0 conda-forge
142 | gitpython 3.1.30 pyhd8ed1ab_0 conda-forge
143 | glpk 5.0 h445213a_0 conda-forge
144 | gmp 6.2.1 h58526e2_0 conda-forge
145 | google-api-core 2.11.0 pyhd8ed1ab_0 conda-forge
146 | google-api-python-client 2.73.0 pyhd8ed1ab_0 conda-forge
147 | google-auth 1.35.0 pypi_0 pypi
148 | google-auth-httplib2 0.1.0 pyhd8ed1ab_1 conda-forge
149 | google-auth-oauthlib 0.4.1 py_2 conda-forge
150 | google-cloud-core 2.3.2 pyhd8ed1ab_0 conda-forge
151 | google-cloud-storage 2.7.0 pyh1a96a4e_0 conda-forge
152 | google-crc32c 1.1.2 py37h5d4fa31_3 conda-forge
153 | google-resumable-media 2.4.0 pyhd8ed1ab_0 conda-forge
154 | googleapis-common-protos 1.57.1 pyhd8ed1ab_0 conda-forge
155 | graphite2 1.3.13 h58526e2_1001 conda-forge
156 | graphviz 7.0.5 h2e5815a_0 conda-forge
157 | grpc-cpp 1.47.1 h30feacc_7 conda-forge
158 | grpcio 1.46.3 py37h0327239_0 conda-forge
159 | gsl 2.7 he838d99_0 conda-forge
160 | gtk2 2.24.33 h90689f9_2 conda-forge
161 | gts 0.7.6 h64030ff_2 conda-forge
162 | gxx 12.2.0 h26027b1_11 conda-forge
163 | gxx_impl_linux-64 12.2.0 hcc96c02_19 conda-forge
164 | h5py 3.7.0 nompi_py37hf1ce037_101 conda-forge
165 | harfbuzz 6.0.0 h8e241bc_0 conda-forge
166 | hdf5 1.12.2 nompi_h4df4325_100 conda-forge
167 | httplib2 0.21.0 pyhd8ed1ab_0 conda-forge
168 | humanfriendly 10.0 py37h89c1867_2 conda-forge
169 | icu 70.1 h27087fc_0 conda-forge
170 | idna 3.4 pyhd8ed1ab_0 conda-forge
171 | igraph 0.10.3 pypi_0 pypi
172 | imageio 2.25.1 pypi_0 pypi
173 | importlib-metadata 1.7.0 pypi_0 pypi
174 | importlib_metadata 4.11.4 hd8ed1ab_0 conda-forge
175 | importlib_resources 5.10.2 pyhd8ed1ab_0 conda-forge
176 | iniconfig 2.0.0 pyhd8ed1ab_0 conda-forge
177 | intervaltree 2.1.0 py_1 conda-forge
178 | ipykernel 6.16.2 pyh210e3f2_0 conda-forge
179 | ipython 7.33.0 py37h89c1867_0 conda-forge
180 | ipython_genutils 0.2.0 py_1 conda-forge
181 | ipywidgets 8.0.4 pyhd8ed1ab_0 conda-forge
182 | jax 0.3.25 pyhd8ed1ab_0 conda-forge
183 | jaxlib 0.3.22 cpu_py37hb467de3_0 conda-forge
184 | jedi 0.18.2 pyhd8ed1ab_0 conda-forge
185 | jinja2 3.1.2 pyhd8ed1ab_1 conda-forge
186 | jmespath 1.0.1 pyhd8ed1ab_0 conda-forge
187 | joblib 1.2.0 pyhd8ed1ab_0 conda-forge
188 | jpeg 9e h166bdaf_2 conda-forge
189 | jsonschema 4.17.3 pyhd8ed1ab_0 conda-forge
190 | jupyter_client 7.4.9 pyhd8ed1ab_0 conda-forge
191 | jupyter_core 4.11.1 py37h89c1867_0 conda-forge
192 | jupyter_server 1.23.4 pyhd8ed1ab_0 conda-forge
193 | jupyterlab_pygments 0.2.2 pyhd8ed1ab_0 conda-forge
194 | jupyterlab_widgets 3.0.5 pyhd8ed1ab_0 conda-forge
195 | kernel-headers_linux-64 2.6.32 he073ed8_15 conda-forge
196 | keyutils 1.6.1 h166bdaf_0 conda-forge
197 | kiwisolver 1.4.4 py37h7cecad7_0 conda-forge
198 | krb5 1.19.3 h08a2579_0 conda-forge
199 | lcms2 2.14 h6ed2654_0 conda-forge
200 | ld_impl_linux-64 2.39 hcc3a1bd_1 conda-forge
201 | leidenalg 0.9.1 pypi_0 pypi
202 | lerc 4.0.0 h27087fc_0 conda-forge
203 | libabseil 20220623.0 cxx17_h05df665_6 conda-forge
204 | libblas 3.9.0 16_linux64_openblas conda-forge
205 | libbrotlicommon 1.0.9 h166bdaf_8 conda-forge
206 | libbrotlidec 1.0.9 h166bdaf_8 conda-forge
207 | libbrotlienc 1.0.9 h166bdaf_8 conda-forge
208 | libcblas 3.9.0 16_linux64_openblas conda-forge
209 | libcrc32c 1.1.2 h9c3ff4c_0 conda-forge
210 | libcurl 7.86.0 h2283fc2_1 conda-forge
211 | libdeflate 1.14 h166bdaf_0 conda-forge
212 | libedit 3.1.20191231 he28a2e2_2 conda-forge
213 | libev 4.33 h516909a_1 conda-forge
214 | libffi 3.4.2 h7f98852_5 conda-forge
215 | libgcc-devel_linux-64 12.2.0 h3b97bd3_19 conda-forge
216 | libgcc-ng 12.2.0 h65d4601_19 conda-forge
217 | libgd 2.3.3 h18fbbfe_3 conda-forge
218 | libgfortran-ng 12.2.0 h69a702a_19 conda-forge
219 | libgfortran5 12.2.0 h337968e_19 conda-forge
220 | libgit2 1.5.0 hdb3ecda_1 conda-forge
221 | libglib 2.74.1 h606061b_1 conda-forge
222 | libgomp 12.2.0 h65d4601_19 conda-forge
223 | libhwloc 2.8.0 h32351e8_1 conda-forge
224 | libiconv 1.17 h166bdaf_0 conda-forge
225 | liblapack 3.9.0 16_linux64_openblas conda-forge
226 | liblapacke 3.9.0 16_linux64_openblas conda-forge
227 | libllvm11 11.1.0 he0ac6c6_5 conda-forge
228 | libnghttp2 1.51.0 hff17c54_0 conda-forge
229 | libnsl 2.0.0 h7f98852_0 conda-forge
230 | libopenblas 0.3.21 pthreads_h78a6416_3 conda-forge
231 | libpng 1.6.39 h753d276_0 conda-forge
232 | libprotobuf 3.21.8 h6239696_0 conda-forge
233 | librsvg 2.54.4 h7abd40a_0 conda-forge
234 | libsanitizer 12.2.0 h46fd767_19 conda-forge
235 | libsodium 1.0.18 h36c2ea0_1 conda-forge
236 | libsqlite 3.40.0 h753d276_0 conda-forge
237 | libssh2 1.10.0 hf14f497_3 conda-forge
238 | libstdcxx-devel_linux-64 12.2.0 h3b97bd3_19 conda-forge
239 | libstdcxx-ng 12.2.0 h46fd767_19 conda-forge
240 | libtiff 4.4.0 h82bc61c_5 conda-forge
241 | libtool 2.4.7 h27087fc_0 conda-forge
242 | libuuid 2.32.1 h7f98852_1000 conda-forge
243 | libwebp 1.2.4 h522a892_0 conda-forge
244 | libwebp-base 1.2.4 h166bdaf_0 conda-forge
245 | libxcb 1.13 h7f98852_1004 conda-forge
246 | libxml2 2.10.3 h7463322_0 conda-forge
247 | libzlib 1.2.13 h166bdaf_4 conda-forge
248 | llvm-openmp 15.0.7 h0cdce71_0 conda-forge
249 | llvmlite 0.39.1 py37h0761922_0 conda-forge
250 | logmuse 0.2.6 pyh8c360ce_0 conda-forge
251 | make 4.3 hd18ef5c_1 conda-forge
252 | markdown 3.3.4 pypi_0 pypi
253 | markupsafe 2.1.1 py37h540881e_1 conda-forge
254 | matplotlib-base 3.5.3 py37hf395dca_2 conda-forge
255 | matplotlib-inline 0.1.6 pyhd8ed1ab_0 conda-forge
256 | matplotlib-venn 0.11.9 pypi_0 pypi
257 | mistune 2.0.4 pyhd8ed1ab_0 conda-forge
258 | mkl 2022.2.1 h84fe81f_16997 conda-forge
259 | msgpack-python 1.0.4 py37h7cecad7_0 conda-forge
260 | multidict 6.0.2 py37h540881e_1 conda-forge
261 | multipledispatch 0.6.0 py_0 conda-forge
262 | munkres 1.1.4 pyh9f0ad1d_0 conda-forge
263 | natsort 8.2.0 pyhd8ed1ab_0 conda-forge
264 | nbclassic 0.4.8 pyhd8ed1ab_0 conda-forge
265 | nbclient 0.7.0 pyhd8ed1ab_0 conda-forge
266 | nbconvert 7.2.8 pyhd8ed1ab_0 conda-forge
267 | nbconvert-core 7.2.8 pyhd8ed1ab_0 conda-forge
268 | nbconvert-pandoc 7.2.8 pyhd8ed1ab_0 conda-forge
269 | nbformat 5.7.3 pyhd8ed1ab_0 conda-forge
270 | ncurses 6.3 h27087fc_1 conda-forge
271 | nest-asyncio 1.5.6 pyhd8ed1ab_0 conda-forge
272 | networkx 2.7 pyhd8ed1ab_0 conda-forge
273 | nimfa 1.4.0 pypi_0 pypi
274 | ninja 1.11.0 h924138e_0 conda-forge
275 | notebook 6.5.2 pyha770c72_1 conda-forge
276 | notebook-shim 0.2.2 pyhd8ed1ab_0 conda-forge
277 | numba 0.56.3 py37hf081915_0 conda-forge
278 | numpy 1.21.6 py37h976b520_0 conda-forge
279 | numpyro 0.10.1 pyhd8ed1ab_0 conda-forge
280 | nvidia-cublas-cu11 11.10.3.66 pypi_0 pypi
281 | nvidia-cuda-nvrtc-cu11 11.7.99 pypi_0 pypi
282 | nvidia-cuda-runtime-cu11 11.7.99 pypi_0 pypi
283 | nvidia-cudnn-cu11 8.5.0.96 pypi_0 pypi
284 | oauth2client 4.1.3 py_0 conda-forge
285 | oauthlib 3.2.2 pyhd8ed1ab_0 conda-forge
286 | openblas 0.3.21 pthreads_h320a7e8_3 conda-forge
287 | openjpeg 2.5.0 h7d73246_1 conda-forge
288 | openpyxl 3.0.10 py37h540881e_1 conda-forge
289 | openssl 3.0.7 h0b41bf4_1 conda-forge
290 | opt_einsum 3.3.0 pyhd8ed1ab_1 conda-forge
291 | optax 0.1.4 pyhd8ed1ab_0 conda-forge
292 | packaging 23.0 pyhd8ed1ab_0 conda-forge
293 | pandas 1.3.5 py37he8f5f7f_0 conda-forge
294 | pandoc 2.19.2 h32600fe_1 conda-forge
295 | pandocfilters 1.5.0 pyhd8ed1ab_0 conda-forge
296 | pango 1.50.12 hd33c08f_1 conda-forge
297 | paramiko 2.12.0 pyhd8ed1ab_0 conda-forge
298 | parso 0.8.3 pyhd8ed1ab_0 conda-forge
299 | patsy 0.5.3 pypi_0 pypi
300 | pcre2 10.40 hc3806b6_0 conda-forge
301 | peppy 0.35.3 pyhd8ed1ab_0 conda-forge
302 | perl 5.32.1 2_h7f98852_perl5 conda-forge
303 | pexpect 4.8.0 pyh1a96a4e_2 conda-forge
304 | pickleshare 0.7.5 py_1003 conda-forge
305 | pillow 9.2.0 py37h850a105_2 conda-forge
306 | pip 22.3.1 pyhd8ed1ab_0 conda-forge
307 | pixman 0.40.0 h36c2ea0_0 conda-forge
308 | pkgutil-resolve-name 1.3.10 pyhd8ed1ab_0 conda-forge
309 | plac 1.3.5 pyhd8ed1ab_0 conda-forge
310 | pluggy 1.0.0 py37h89c1867_3 conda-forge
311 | ply 3.11 py_1 conda-forge
312 | prettytable 3.6.0 pyhd8ed1ab_0 conda-forge
313 | prometheus_client 0.15.0 pyhd8ed1ab_0 conda-forge
314 | prompt-toolkit 3.0.36 pyha770c72_0 conda-forge
315 | protobuf 4.21.8 py37hd23a5d3_0 conda-forge
316 | psutil 5.9.3 py37h540881e_0 conda-forge
317 | pthread-stubs 0.4 h36c2ea0_1001 conda-forge
318 | ptyprocess 0.7.0 pyhd3deb0d_0 conda-forge
319 | pulp 2.6.0 py37h89c1867_1 conda-forge
320 | pyasn1 0.4.8 py_0 conda-forge
321 | pyasn1-modules 0.2.7 py_0 conda-forge
322 | pycparser 2.21 pyhd8ed1ab_0 conda-forge
323 | pydeprecate 0.3.1 pyhd8ed1ab_0 conda-forge
324 | pydot 1.4.2 pypi_0 pypi
325 | pygments 2.14.0 pyhd8ed1ab_0 conda-forge
326 | pyjwt 2.6.0 pyhd8ed1ab_0 conda-forge
327 | pynacl 1.5.0 py37h540881e_1 conda-forge
328 | pynndescent 0.5.7 pypi_0 pypi
329 | pyopenssl 23.0.0 pyhd8ed1ab_0 conda-forge
330 | pyparsing 3.0.9 pyhd8ed1ab_0 conda-forge
331 | pyro-api 0.1.2 pyhd8ed1ab_0 conda-forge
332 | pyro-ppl 1.8.4 pyhd8ed1ab_0 conda-forge
333 | pyrsistent 0.18.1 py37h540881e_1 conda-forge
334 | pysftp 0.2.9 py_1 conda-forge
335 | pysocks 1.7.1 py37h89c1867_5 conda-forge
336 | pytest 7.2.0 py37h89c1867_0 conda-forge
337 | python 3.7.12 hf930737_100_cpython conda-forge
338 | python-annoy 1.17.0 py37hd23a5d3_4 conda-forge
339 | python-dateutil 2.8.2 pyhd8ed1ab_0 conda-forge
340 | python-fastjsonschema 2.16.2 pyhd8ed1ab_0 conda-forge
341 | python-irodsclient 1.1.6 pyhd8ed1ab_0 conda-forge
342 | python-tzdata 2022.7 pyhd8ed1ab_0 conda-forge
343 | python_abi 3.7 3_cp37m conda-forge
344 | pytorch-lightning 1.5.8 pyhd8ed1ab_0 conda-forge
345 | pytz 2022.7.1 pyhd8ed1ab_0 conda-forge
346 | pytz-deprecation-shim 0.1.0.post0 py37h89c1867_2 conda-forge
347 | pyu2f 0.1.5 pyhd8ed1ab_0 conda-forge
348 | pywavelets 1.3.0 pypi_0 pypi
349 | pyyaml 6.0 py37h540881e_4 conda-forge
350 | pyzmq 24.0.1 py37h0c0c2a8_0 conda-forge
351 | r-abind 1.4_5 r40hc72bb7e_1003 conda-forge
352 | r-askpass 1.1 r40hcfec24a_2 conda-forge
353 | r-assertthat 0.2.1 r40hc72bb7e_2 conda-forge
354 | r-backports 1.4.1 r40hcfec24a_0 conda-forge
355 | r-base 4.0.5 hb87df5d_8 conda-forge
356 | r-base64enc 0.1_3 r40hcfec24a_1004 conda-forge
357 | r-beeswarm 0.4.0 r40hcfec24a_1 conda-forge
358 | r-bh 1.78.0_0 r40hc72bb7e_0 conda-forge
359 | r-bit 4.0.4 r40hcfec24a_0 conda-forge
360 | r-bit64 4.0.5 r40hcfec24a_0 conda-forge
361 | r-bitops 1.0_7 r40h06615bd_0 conda-forge
362 | r-blob 1.2.3 r40hc72bb7e_0 conda-forge
363 | r-boot 1.3_28 r40hc72bb7e_0 conda-forge
364 | r-brew 1.0_7 r40hc72bb7e_0 conda-forge
365 | r-brio 1.1.3 r40hcfec24a_0 conda-forge
366 | r-broom 1.0.1 r40hc72bb7e_0 conda-forge
367 | r-bslib 0.4.0 r40hc72bb7e_0 conda-forge
368 | r-cachem 1.0.6 r40hcfec24a_0 conda-forge
369 | r-callr 3.7.2 r40hc72bb7e_0 conda-forge
370 | r-caret 6.0_93 r40h06615bd_0 conda-forge
371 | r-catools 1.18.2 r40h7525677_0 conda-forge
372 | r-cellranger 1.1.0 r40hc72bb7e_1004 conda-forge
373 | r-class 7.3_20 r40hcfec24a_0 conda-forge
374 | r-cli 3.4.1 r40h7525677_0 conda-forge
375 | r-clipr 0.8.0 r40hc72bb7e_0 conda-forge
376 | r-cluster 2.1.3 r40h8da6f51_0 conda-forge
377 | r-codetools 0.2_18 r40hc72bb7e_0 conda-forge
378 | r-colorspace 2.0_3 r40h06615bd_0 conda-forge
379 | r-commonmark 1.8.0 r40h06615bd_0 conda-forge
380 | r-cowplot 1.1.1 r40hc72bb7e_0 conda-forge
381 | r-cpp11 0.4.2 r40hc72bb7e_0 conda-forge
382 | r-crayon 1.5.1 r40hc72bb7e_0 conda-forge
383 | r-credentials 1.3.2 r40hc72bb7e_0 conda-forge
384 | r-crosstalk 1.2.0 r40hc72bb7e_0 conda-forge
385 | r-crul 1.3 r40h785f33e_0 conda-forge
386 | r-curl 4.3.2 r40hcfec24a_0 conda-forge
387 | r-data.table 1.14.2 r40hcfec24a_0 conda-forge
388 | r-dbi 1.1.3 r40hc72bb7e_0 conda-forge
389 | r-dbplyr 2.2.1 r40hc72bb7e_0 conda-forge
390 | r-deldir 1.0_6 r40h8da6f51_0 conda-forge
391 | r-desc 1.4.2 r40hc72bb7e_0 conda-forge
392 | r-devtools 2.4.4 r40hc72bb7e_0 conda-forge
393 | r-diffobj 0.3.5 r40hcfec24a_0 conda-forge
394 | r-digest 0.6.29 r40h03ef668_0 conda-forge
395 | r-downlit 0.4.2 r40hc72bb7e_0 conda-forge
396 | r-dplyr 1.0.10 r40h7525677_0 conda-forge
397 | r-dqrng 0.3.0 r40h7525677_0 conda-forge
398 | r-dtplyr 1.2.2 r40hc72bb7e_0 conda-forge
399 | r-e1071 1.7_11 r40h7525677_0 conda-forge
400 | r-ellipsis 0.3.2 r40hcfec24a_0 conda-forge
401 | r-essentials 4.0 r40hd8ed1ab_2002 conda-forge
402 | r-evaluate 0.16 r40hc72bb7e_0 conda-forge
403 | r-fansi 1.0.3 r40h06615bd_0 conda-forge
404 | r-farver 2.1.1 r40h7525677_0 conda-forge
405 | r-fastmap 1.1.0 r40h03ef668_0 conda-forge
406 | r-fitdistrplus 1.1_8 r40hc72bb7e_0 conda-forge
407 | r-fnn 1.1.3.1 r40h7525677_0 conda-forge
408 | r-fontawesome 0.3.0 r40hc72bb7e_0 conda-forge
409 | r-forcats 0.5.2 r40hc72bb7e_0 conda-forge
410 | r-foreach 1.5.2 r40hc72bb7e_0 conda-forge
411 | r-foreign 0.8_82 r40hcfec24a_0 conda-forge
412 | r-formatr 1.12 r40hc72bb7e_0 conda-forge
413 | r-fs 1.5.2 r40h7525677_1 conda-forge
414 | r-futile.logger 1.4.3 r40hc72bb7e_1003 conda-forge
415 | r-futile.options 1.0.1 r40hc72bb7e_1002 conda-forge
416 | r-future 1.28.0 r40hc72bb7e_0 conda-forge
417 | r-future.apply 1.9.1 r40hc72bb7e_0 conda-forge
418 | r-gargle 1.2.1 r40hc72bb7e_0 conda-forge
419 | r-generics 0.1.3 r40hc72bb7e_0 conda-forge
420 | r-gert 1.5.0 r40h163148b_2 conda-forge
421 | r-ggbeeswarm 0.6.0 r40ha770c72_1003 conda-forge
422 | r-ggplot2 3.3.6 r40hc72bb7e_0 conda-forge
423 | r-ggrepel 0.9.1 r40h03ef668_0 conda-forge
424 | r-ggridges 0.5.4 r40hc72bb7e_0 conda-forge
425 | r-gh 1.3.1 r40hc72bb7e_0 conda-forge
426 | r-gistr 0.9.0 r40hc72bb7e_0 conda-forge
427 | r-gitcreds 0.1.2 r40hc72bb7e_0 conda-forge
428 | r-glmnet 4.1_2 r40h8da6f51_0 conda-forge
429 | r-globals 0.16.1 r40hc72bb7e_0 conda-forge
430 | r-glue 1.6.2 r40h06615bd_0 conda-forge
431 | r-goftest 1.2_3 r40h06615bd_0 conda-forge
432 | r-googledrive 2.0.0 r40hc72bb7e_0 conda-forge
433 | r-googlesheets4 1.0.1 r40h785f33e_0 conda-forge
434 | r-gower 1.0.0 r40hcfec24a_0 conda-forge
435 | r-gplots 3.1.3 r40hc72bb7e_0 conda-forge
436 | r-gridextra 2.3 r40hc72bb7e_1003 conda-forge
437 | r-gtable 0.3.1 r40hc72bb7e_0 conda-forge
438 | r-gtools 3.9.3 r40h06615bd_0 conda-forge
439 | r-hardhat 1.2.0 r40hc72bb7e_0 conda-forge
440 | r-haven 2.5.0 r40h7525677_0 conda-forge
441 | r-here 1.0.1 r40hc72bb7e_0 conda-forge
442 | r-hexbin 1.28.2 r40h8da6f51_0 conda-forge
443 | r-highr 0.9 r40hc72bb7e_0 conda-forge
444 | r-hms 1.1.2 r40hc72bb7e_0 conda-forge
445 | r-htmltools 0.5.3 r40h7525677_0 conda-forge
446 | r-htmlwidgets 1.5.4 r40hc72bb7e_0 conda-forge
447 | r-httpcode 0.3.0 r40ha770c72_1 conda-forge
448 | r-httpuv 1.6.6 r40h7525677_0 conda-forge
449 | r-httr 1.4.4 r40hc72bb7e_0 conda-forge
450 | r-ica 1.0_3 r40hc72bb7e_0 conda-forge
451 | r-ids 1.0.1 r40hc72bb7e_1 conda-forge
452 | r-igraph 1.3.4 r40hb34fc8a_0 conda-forge
453 | r-ini 0.3.1 r40hc72bb7e_1003 conda-forge
454 | r-ipred 0.9_13 r40h06615bd_0 conda-forge
455 | r-irdisplay 1.1 r40hd8ed1ab_0 conda-forge
456 | r-irkernel 1.3 r40hc72bb7e_0 conda-forge
457 | r-irlba 2.3.5 r40h5f7b363_0 conda-forge
458 | r-isoband 0.2.5 r40h03ef668_0 conda-forge
459 | r-iterators 1.0.14 r40hc72bb7e_0 conda-forge
460 | r-jquerylib 0.1.4 r40hc72bb7e_0 conda-forge
461 | r-jsonlite 1.8.0 r40h06615bd_0 conda-forge
462 | r-kernsmooth 2.23_20 r40h742201e_0 conda-forge
463 | r-knitr 1.40 r40hc72bb7e_0 conda-forge
464 | r-labeling 0.4.2 r40hc72bb7e_1 conda-forge
465 | r-lambda.r 1.2.4 r40hc72bb7e_1 conda-forge
466 | r-later 1.2.0 r40h03ef668_0 conda-forge
467 | r-lattice 0.20_45 r40hcfec24a_0 conda-forge
468 | r-lava 1.6.10 r40hc72bb7e_0 conda-forge
469 | r-lazyeval 0.2.2 r40hcfec24a_2 conda-forge
470 | r-leiden 0.4.3 r40hc72bb7e_0 conda-forge
471 | r-lifecycle 1.0.2 r40hc72bb7e_0 conda-forge
472 | r-listenv 0.8.0 r40hc72bb7e_1 conda-forge
473 | r-lmtest 0.9_40 r40h8da6f51_0 conda-forge
474 | r-lobstr 1.1.2 r40h7525677_0 conda-forge
475 | r-locfit 1.5_9.4 r40hcfec24a_1 conda-forge
476 | r-lsei 1.3_0 r40hc3ea6d6_1 conda-forge
477 | r-lubridate 1.8.0 r40h03ef668_0 conda-forge
478 | r-magrittr 2.0.3 r40h06615bd_0 conda-forge
479 | r-maps 3.4.0 r40hcfec24a_0 conda-forge
480 | r-mass 7.3_58.1 r40h06615bd_0 conda-forge
481 | r-matrix 1.4_1 r40h0154571_0 conda-forge
482 | r-matrixstats 0.62.0 r40h06615bd_0 conda-forge
483 | r-memoise 2.0.1 r40hc72bb7e_0 conda-forge
484 | r-mgcv 1.8_40 r40h0154571_0 conda-forge
485 | r-mime 0.12 r40hcfec24a_0 conda-forge
486 | r-miniui 0.1.1.1 r40hc72bb7e_1002 conda-forge
487 | r-modelmetrics 1.2.2.2 r40h03ef668_1 conda-forge
488 | r-modelr 0.1.9 r40hc72bb7e_0 conda-forge
489 | r-munsell 0.5.0 r40hc72bb7e_1004 conda-forge
490 | r-nlme 3.1_159 r40h8da6f51_0 conda-forge
491 | r-nnet 7.3_17 r40hcfec24a_0 conda-forge
492 | r-npsurv 0.5_0 r40hc72bb7e_0 conda-forge
493 | r-numderiv 2016.8_1.1 r40hc72bb7e_3 conda-forge
494 | r-openssl 2.0.3 r40h1f3e0c5_0 conda-forge
495 | r-parallelly 1.32.1 r40hc72bb7e_0 conda-forge
496 | r-patchwork 1.1.2 r40hc72bb7e_0 conda-forge
497 | r-pbapply 1.5_0 r40hc72bb7e_0 conda-forge
498 | r-pbdzmq 0.3_7 r40h42bf92c_0 conda-forge
499 | r-pillar 1.8.1 r40hc72bb7e_0 conda-forge
500 | r-pkgbuild 1.3.1 r40hc72bb7e_0 conda-forge
501 | r-pkgconfig 2.0.3 r40hc72bb7e_1 conda-forge
502 | r-pkgdown 2.0.6 r40hc72bb7e_0 conda-forge
503 | r-pkgload 1.3.0 r40hc72bb7e_0 conda-forge
504 | r-plotly 4.10.0 r40hc72bb7e_0 conda-forge
505 | r-plyr 1.8.7 r40h7525677_0 conda-forge
506 | r-png 0.1_7 r40hcfec24a_1004 conda-forge
507 | r-polyclip 1.10_0 r40h7525677_2 conda-forge
508 | r-praise 1.0.0 r40hc72bb7e_1005 conda-forge
509 | r-prettyunits 1.1.1 r40hc72bb7e_1 conda-forge
510 | r-proc 1.18.0 r40h03ef668_0 conda-forge
511 | r-processx 3.7.0 r40h06615bd_0 conda-forge
512 | r-prodlim 2019.11.13 r40h03ef668_1 conda-forge
513 | r-profvis 0.3.7 r40hcfec24a_0 conda-forge
514 | r-progress 1.2.2 r40hc72bb7e_2 conda-forge
515 | r-progressr 0.11.0 r40hc72bb7e_0 conda-forge
516 | r-promises 1.2.0.1 r40h03ef668_0 conda-forge
517 | r-proxy 0.4_27 r40h06615bd_0 conda-forge
518 | r-pryr 0.1.5 r40h7525677_0 conda-forge
519 | r-ps 1.7.1 r40h06615bd_0 conda-forge
520 | r-purrr 0.3.4 r40hcfec24a_1 conda-forge
521 | r-quantmod 0.4.20 r40hc72bb7e_0 conda-forge
522 | r-r6 2.5.1 r40hc72bb7e_0 conda-forge
523 | r-ragg 0.3.1 r40h71971c5_0 conda-forge
524 | r-randomforest 4.6_14 r40h8da6f51_1004 conda-forge
525 | r-rann 2.6.1 r40h7525677_2 conda-forge
526 | r-rappdirs 0.3.3 r40hcfec24a_0 conda-forge
527 | r-rbokeh 0.5.2 r40hc72bb7e_1 conda-forge
528 | r-rcmdcheck 1.4.0 r40h785f33e_0 conda-forge
529 | r-rcolorbrewer 1.1_3 r40h785f33e_0 conda-forge
530 | r-rcpp 1.0.9 r40h7525677_1 conda-forge
531 | r-rcppannoy 0.0.19 r40h7525677_0 conda-forge
532 | r-rcpparmadillo 0.11.2.3.1 r40h9f5de39_0 conda-forge
533 | r-rcppeigen 0.3.3.9.2 r40h43535f1_0 conda-forge
534 | r-rcpphnsw 0.4.1 r40h7525677_0 conda-forge
535 | r-rcppparallel 5.1.5 r40h7525677_0 conda-forge
536 | r-rcppprogress 0.4.2 r40hc72bb7e_1 conda-forge
537 | r-rcpptoml 0.1.7 r40h03ef668_1 conda-forge
538 | r-rcurl 1.98_1.8 r40h06615bd_0 conda-forge
539 | r-readr 2.1.2 r40h03ef668_0 conda-forge
540 | r-readxl 1.4.1 r40hf23e330_0 conda-forge
541 | r-recipes 1.0.1 r40hc72bb7e_0 conda-forge
542 | r-recommended 4.0 r40hd8ed1ab_1004 conda-forge
543 | r-rematch 1.0.1 r40hc72bb7e_1004 conda-forge
544 | r-rematch2 2.1.2 r40hc72bb7e_1 conda-forge
545 | r-remotes 2.4.2 r40hc72bb7e_0 conda-forge
546 | r-repr 1.1.4 r40h785f33e_0 conda-forge
547 | r-reprex 2.0.2 r40hc72bb7e_0 conda-forge
548 | r-reshape2 1.4.4 r40h03ef668_1 conda-forge
549 | r-reticulate 1.27 r40h38f115c_0 conda-forge
550 | r-rgeos 0.5_9 r40hf730bdb_3 conda-forge
551 | r-rlang 1.0.6 r40h7525677_0 conda-forge
552 | r-rmarkdown 2.16 r40hc72bb7e_0 conda-forge
553 | r-rocr 1.0_11 r40hc72bb7e_1 conda-forge
554 | r-roxygen2 7.2.1 r40h7525677_0 conda-forge
555 | r-rpart 4.1.16 r40hcfec24a_0 conda-forge
556 | r-rprojroot 2.0.3 r40hc72bb7e_0 conda-forge
557 | r-rspectra 0.16_1 r40h9f5de39_0 conda-forge
558 | r-rstudioapi 0.14 r40hc72bb7e_0 conda-forge
559 | r-rsvd 1.0.5 r40hc72bb7e_0 conda-forge
560 | r-rtsne 0.16 r40h37cf8d7_0 conda-forge
561 | r-rversions 2.1.2 r40hc72bb7e_0 conda-forge
562 | r-rvest 1.0.3 r40hc72bb7e_0 conda-forge
563 | r-sass 0.4.2 r40h7525677_0 conda-forge
564 | r-scales 1.2.1 r40hc72bb7e_0 conda-forge
565 | r-scattermore 0.8 r40h06615bd_0 conda-forge
566 | r-sctransform 0.3.4 r40hef8c1a7_0 conda-forge
567 | r-selectr 0.4_2 r40hc72bb7e_1 conda-forge
568 | r-sessioninfo 1.2.2 r40hc72bb7e_0 conda-forge
569 | r-seurat 3.2.3 r40h03ef668_0 conda-forge
570 | r-seuratobject 4.1.1 r40h7525677_0 conda-forge
571 | r-shape 1.4.6 r40ha770c72_0 conda-forge
572 | r-shiny 1.7.2 r40h785f33e_0 conda-forge
573 | r-sitmo 2.0.2 r40h7525677_0 conda-forge
574 | r-snow 0.4_4 r40hc72bb7e_0 conda-forge
575 | r-sourcetools 0.1.7 r40h03ef668_1002 conda-forge
576 | r-sp 1.5_0 r40h06615bd_0 conda-forge
577 | r-spatial 7.3_15 r40hcfec24a_0 conda-forge
578 | r-spatstat 1.64_1 r40h0357c0b_0 conda-forge
579 | r-spatstat.data 2.2_0 r40hc72bb7e_0 conda-forge
580 | r-spatstat.utils 2.3_1 r40h06615bd_0 conda-forge
581 | r-squarem 2021.1 r40hc72bb7e_0 conda-forge
582 | r-statmod 1.4.37 r40hc3ea6d6_0 conda-forge
583 | r-stringi 1.7.8 r40h30a9eb7_0 conda-forge
584 | r-stringr 1.4.1 r40hc72bb7e_0 conda-forge
585 | r-survival 3.4_0 r40h06615bd_0 conda-forge
586 | r-sys 3.4 r40hcfec24a_0 conda-forge
587 | r-systemfonts 1.0.4 r40hef9c87a_0 conda-forge
588 | r-tensor 1.5 r40hc72bb7e_1003 conda-forge
589 | r-testthat 3.1.4 r40h7525677_0 conda-forge
590 | r-tibble 3.1.8 r40h06615bd_0 conda-forge
591 | r-tidyr 1.2.1 r40h7525677_0 conda-forge
592 | r-tidyselect 1.1.2 r40hc72bb7e_0 conda-forge
593 | r-tidyverse 1.3.2 r40hc72bb7e_0 conda-forge
594 | r-timedate 4021.104 r40hc72bb7e_0 conda-forge
595 | r-tinytex 0.42 r40hc72bb7e_0 conda-forge
596 | r-triebeard 0.3.0 r40h7525677_1004 conda-forge
597 | r-ttr 0.24.3 r40h06615bd_0 conda-forge
598 | r-tzdb 0.3.0 r40h7525677_0 conda-forge
599 | r-urlchecker 1.0.1 r40hc72bb7e_0 conda-forge
600 | r-urltools 1.7.3 r40h7525677_2 conda-forge
601 | r-usethis 2.1.6 r40hc72bb7e_0 conda-forge
602 | r-utf8 1.2.2 r40hcfec24a_0 conda-forge
603 | r-uuid 1.1_0 r40h06615bd_0 conda-forge
604 | r-uwot 0.1.14 r40h7525677_0 conda-forge
605 | r-vctrs 0.4.1 r40h7525677_0 conda-forge
606 | r-vipor 0.4.5 r40hc72bb7e_1003 conda-forge
607 | r-viridis 0.6.2 r40hc72bb7e_0 conda-forge
608 | r-viridislite 0.4.1 r40hc72bb7e_0 conda-forge
609 | r-vroom 1.5.7 r40h03ef668_0 conda-forge
610 | r-waldo 0.4.0 r40hc72bb7e_0 conda-forge
611 | r-whisker 0.4 r40hc72bb7e_1 conda-forge
612 | r-withr 2.5.0 r40hc72bb7e_0 conda-forge
613 | r-xfun 0.33 r40h7525677_0 conda-forge
614 | r-xml2 1.3.3 r40h7525677_1 conda-forge
615 | r-xopen 1.0.0 r40hc72bb7e_1003 conda-forge
616 | r-xtable 1.8_4 r40hc72bb7e_3 conda-forge
617 | r-xts 0.12.1 r40h06615bd_0 conda-forge
618 | r-yaml 2.3.5 r40h06615bd_0 conda-forge
619 | r-zip 2.2.1 r40h06615bd_0 conda-forge
620 | r-zoo 1.8_11 r40h06615bd_0 conda-forge
621 | re2 2022.06.01 h27087fc_1 conda-forge
622 | readline 8.1.2 h0f457ee_0 conda-forge
623 | requests 2.28.2 pyhd8ed1ab_0 conda-forge
624 | requests-oauthlib 1.3.1 pyhd8ed1ab_0 conda-forge
625 | reretry 0.11.8 pyhd8ed1ab_0 conda-forge
626 | rich 13.1.0 pyhd8ed1ab_0 conda-forge
627 | rpy2 3.5.1 py37r40hda87dfa_0 conda-forge
628 | rsa 4.9 pyhd8ed1ab_0 conda-forge
629 | s3transfer 0.6.0 pyhd8ed1ab_0 conda-forge
630 | scanorama 1.7 py_0 bioconda
631 | scanpy 1.9.1 pypi_0 pypi
632 | scgen 2.1.0 pypi_0 pypi
633 | scib 1.1.1 pypi_0 pypi
634 | scikit-image 0.19.3 pypi_0 pypi
635 | scikit-learn 1.0.2 py37hf9e9bfc_0 conda-forge
636 | scikit-misc 0.1.4 pypi_0 pypi
637 | scipy 1.7.3 py37hf2a6cf1_0 conda-forge
638 | scrublet 0.2.3 pypi_0 pypi
639 | scvi-tools 0.16.1 pyhd8ed1ab_0 conda-forge
640 | seaborn 0.12.2 pypi_0 pypi
641 | sed 4.8 he412f7d_0 conda-forge
642 | send2trash 1.8.0 pyhd8ed1ab_0 conda-forge
643 | session-info 1.0.0 pypi_0 pypi
644 | setuptools 59.8.0 py37h89c1867_1 conda-forge
645 | simplegeneric 0.8.1 py_1 conda-forge
646 | six 1.16.0 pyh6c4a22f_0 conda-forge
647 | slacker 0.14.0 py_0 conda-forge
648 | sleef 3.5.1 h9b69904_2 conda-forge
649 | smart_open 6.3.0 pyhd8ed1ab_1 conda-forge
650 | smmap 3.0.5 pyh44b312d_0 conda-forge
651 | snakemake 7.20.0 hdfd78af_0 bioconda
652 | snakemake-minimal 7.20.0 pyhdfd78af_0 bioconda
653 | sniffio 1.3.0 pyhd8ed1ab_0 conda-forge
654 | sortedcontainers 2.4.0 pyhd8ed1ab_0 conda-forge
655 | soupsieve 2.3.2.post1 pyhd8ed1ab_0 conda-forge
656 | sqlite 3.40.0 h4ff8645_0 conda-forge
657 | statsmodels 0.13.5 pypi_0 pypi
658 | stdlib-list 0.8.0 pypi_0 pypi
659 | stone 3.3.1 pyhd8ed1ab_0 conda-forge
660 | stopit 1.1.2 py_0 conda-forge
661 | sysroot_linux-64 2.12 he073ed8_15 conda-forge
662 | tabulate 0.9.0 pyhd8ed1ab_1 conda-forge
663 | tbb 2021.7.0 h924138e_1 conda-forge
664 | tensorboard 2.3.0 py_0 conda-forge
665 | tensorboard-plugin-wit 1.8.1 pyhd8ed1ab_0 conda-forge
666 | terminado 0.17.1 pyh41d4057_0 conda-forge
667 | texttable 1.6.7 pypi_0 pypi
668 | threadpoolctl 3.1.0 pyh8a188c0_0 conda-forge
669 | throttler 1.2.1 pyhd8ed1ab_0 conda-forge
670 | tifffile 2021.11.2 pypi_0 pypi
671 | tinycss2 1.2.1 pyhd8ed1ab_0 conda-forge
672 | tk 8.6.12 h27826a3_0 conda-forge
673 | tktable 2.10 hb7b940f_3 conda-forge
674 | tomli 2.0.1 pyhd8ed1ab_0 conda-forge
675 | toolz 0.12.0 pyhd8ed1ab_0 conda-forge
676 | toposort 1.7 pyhd8ed1ab_0 conda-forge
677 | torch 1.13.1 pypi_0 pypi
678 | torchmetrics 0.11.0 pyhd8ed1ab_0 conda-forge
679 | torchvision 0.14.1 pypi_0 pypi
680 | tornado 6.2 py37h540881e_0 conda-forge
681 | tqdm 4.64.1 pyhd8ed1ab_0 conda-forge
682 | traitlets 5.8.1 pyhd8ed1ab_0 conda-forge
683 | trvaep 0.1.0 pypi_0 pypi
684 | typing-extensions 4.4.0 hd8ed1ab_0 conda-forge
685 | typing_extensions 4.4.0 pyha770c72_0 conda-forge
686 | tzdata 2022g h191b570_0 conda-forge
687 | tzlocal 4.2 py37h89c1867_1 conda-forge
688 | ubiquerg 0.6.2 pyhd8ed1ab_0 conda-forge
689 | umap-learn 0.5.3 py37h89c1867_0 conda-forge
690 | unicodedata2 14.0.0 py37h540881e_1 conda-forge
691 | upsetplot 0.8.0 pypi_0 pypi
692 | uritemplate 4.1.1 pyhd8ed1ab_0 conda-forge
693 | urllib3 1.26.14 pyhd8ed1ab_0 conda-forge
694 | veracitools 0.1.3 py_0 conda-forge
695 | wcwidth 0.2.6 pyhd8ed1ab_0 conda-forge
696 | webencodings 0.5.1 py_1 conda-forge
697 | websocket-client 1.4.2 pyhd8ed1ab_0 conda-forge
698 | werkzeug 2.2.2 pyhd8ed1ab_0 conda-forge
699 | wheel 0.38.4 pyhd8ed1ab_0 conda-forge
700 | widgetsnbextension 4.0.5 pyhd8ed1ab_0 conda-forge
701 | wrapt 1.14.1 py37h540881e_0 conda-forge
702 | xlsxwriter 3.1.9 pypi_0 pypi
703 | xorg-kbproto 1.0.7 h7f98852_1002 conda-forge
704 | xorg-libice 1.0.10 h7f98852_0 conda-forge
705 | xorg-libsm 1.2.3 hd9c2040_1000 conda-forge
706 | xorg-libx11 1.7.2 h7f98852_0 conda-forge
707 | xorg-libxau 1.0.9 h7f98852_0 conda-forge
708 | xorg-libxdmcp 1.1.3 h7f98852_0 conda-forge
709 | xorg-libxext 1.3.4 h7f98852_1 conda-forge
710 | xorg-libxrender 0.9.10 h7f98852_1003 conda-forge
711 | xorg-libxt 1.2.1 h7f98852_2 conda-forge
712 | xorg-renderproto 0.11.1 h7f98852_1002 conda-forge
713 | xorg-xextproto 7.3.0 h7f98852_1002 conda-forge
714 | xorg-xproto 7.0.31 h7f98852_1007 conda-forge
715 | xz 5.2.6 h166bdaf_0 conda-forge
716 | yaml 0.2.5 h7f98852_2 conda-forge
717 | yarl 1.7.2 py37h540881e_2 conda-forge
718 | yte 1.5.1 py37h89c1867_0 conda-forge
719 | zeromq 4.3.4 h9c3ff4c_1 conda-forge
720 | zipp 3.11.0 pyhd8ed1ab_0 conda-forge
721 | zlib 1.2.13 h166bdaf_4 conda-forge
722 | zstd 1.5.2 h3eb15da_5 conda-forge
723 |
--------------------------------------------------------------------------------