├── .gitignore
├── LICENSE
├── README.md
├── configs
    └── inference_ribodiffusion.py
├── datasets
    ├── __init__.py
    └── utils.py
├── diffusion
    ├── __init__.py
    └── noise_schedule.py
├── example
    └── R1107.pdb
├── fig
    └── pipeline.png
├── main.py
├── models
    ├── GVP_diff.py
    ├── __init__.py
    ├── ema.py
    ├── esm_block.py
    ├── esm_utils.py
    ├── transformer_layer.py
    └── utils.py
├── requirements.txt
├── run_lib.py
├── sampling.py
├── split_data
    ├── seq_identity_0.8_split_0.tsv
    ├── seq_identity_0.8_split_1.tsv
    ├── seq_identity_0.8_split_2.tsv
    ├── seq_identity_0.8_split_3.tsv
    ├── struct_tmscore_0.6_split_0.tsv
    ├── struct_tmscore_0.6_split_1.tsv
    ├── struct_tmscore_0.6_split_2.tsv
    └── struct_tmscore_0.6_split_3.tsv
└── utils.py

/.gitignore:
--------------------------------------------------------------------------------
1 | 
2 | .idea/*
3 | *.xml
4 | ckpts/exp_inf.pth
5 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2024 ml4bio
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # RiboDiffusion
2 | 
3 | Tertiary Structure-based RNA Inverse Folding with Generative Diffusion Models
4 | 
5 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/ml4bio/RiboDiffusion/blob/main/LICENSE)
6 | [![ArXiv](http://img.shields.io/badge/q.bio-arXiv%3A2404.11199-B31B1B.svg)](https://arxiv.org/abs/2404.11199)
7 | 
8 | ![cover](fig/pipeline.png)
9 | 
10 | ## Installation
11 | 
12 | Please refer to `requirements.txt` for the required packages.
13 | 
14 | The model checkpoint can be downloaded from [here](https://drive.google.com/drive/folders/10BNyCNjxGDJ4rEze9yPGPDXa73iu1skx?usp=drive_link).
15 | Another checkpoint, trained on the full dataset (with extra 0.1 Gaussian noise added to the coordinates), can be downloaded from [here](https://drive.google.com/file/d/1-IfWkLa5asu4SeeZAQ09oWm4KlpBMPmq/view?usp=sharing).
16 | 
17 | Download the checkpoint files and put them in the `ckpts` folder.
18 | 
19 | ## Usage
20 | 
21 | An inference demo notebook is available to get started:
22 | Open In Colab.
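Before running the commands below, a typical setup might look like the following sketch (an assumption-level example, not part of the original README: the exact dependency list is pinned in `requirements.txt`, and checkpoint names such as `exp_inf.pth` come from the download links above):

```bash
# Sketch of a typical setup; package versions come from requirements.txt.
git clone https://github.com/ml4bio/RiboDiffusion.git
cd RiboDiffusion
pip install -r requirements.txt

# Put the downloaded checkpoint(s) under ckpts/, e.g. ckpts/exp_inf.pth
# (and optionally ckpts/exp_inf_large.pth for more sequence diversity).
mkdir -p ckpts
```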
23 | 
24 | Run the following command to generate one sequence for the example structure:
25 | ```bash
26 | CUDA_VISIBLE_DEVICES=0 python main.py --PDB_file example/R1107.pdb
27 | ```
28 | The generated sequence will be saved in `exp_inf/fasta/R1107_0.fasta`.
29 | 
30 | Multiple sequences can be generated by running:
31 | ```bash
32 | CUDA_VISIBLE_DEVICES=0 python main.py --PDB_file example/R1107.pdb --config.eval.n_samples 10
33 | ```
34 | 
35 | For more sequence diversity, you can use `exp_inf_large.pth` or adjust the conditional scaling weight via `--config.eval.cond_scale`.
36 | 
37 | An example of adjusting the conditional scaling weight:
38 | ```bash
39 | CUDA_VISIBLE_DEVICES=0 python main.py --PDB_file example/R1107.pdb --config.eval.n_samples 10 --config.eval.dynamic_threshold --config.eval.cond_scale 0.4
40 | ```
41 | 
42 | ## Citation
43 | 
44 | If you find this work useful, please cite:
45 | 
46 | ```
47 | @article{10.1093/bioinformatics/btae259,
48 |   author = {Huang, Han and Lin, Ziqian and He, Dongchen and Hong, Liang and Li, Yu},
49 |   title = {RiboDiffusion: tertiary structure-based RNA inverse folding with generative diffusion models},
50 |   journal = {Bioinformatics},
51 |   volume = {40},
52 |   number = {Supplement_1},
53 |   pages = {i347-i356},
54 |   year = {2024},
55 |   month = {06},
56 |   issn = {1367-4811}
57 | }
58 | ```
59 | 
60 | ## License
61 | This project is licensed under the [MIT License](LICENSE).
--------------------------------------------------------------------------------
/configs/inference_ribodiffusion.py:
--------------------------------------------------------------------------------
1 | """Config for training a diffusion model on RNA inverse design with a given data split."""
2 | 
3 | import ml_collections
4 | import torch
5 | 
6 | def get_config():
7 |     config = ml_collections.ConfigDict()
8 | 
9 |     # Misc config
10 |     config.exp_type = 'vpsde'
11 |     config.device = torch.device('cuda:0') if torch.cuda.is_available() else torch.device('cpu')
12 |     config.seed = 42
13 |     config.save = True
14 | 
15 |     # Data config
16 |     config.data = data = ml_collections.ConfigDict()
17 |     data.seq_centered = True
18 |     data.radius = 4.5
19 |     data.top_k = 10
20 |     data.num_rbf = 16
21 |     data.num_posenc = 16
22 |     data.num_conformers = 1
23 |     data.add_noise = -1.0
24 |     data.knn_num = 10
25 | 
26 |     # SDE
27 |     config.sde = sde = ml_collections.ConfigDict()
28 |     sde.schedule = 'cosine'  # 'linear', 'cosine'
29 |     sde.continuous_beta_0 = 0.1
30 |     sde.continuous_beta_1 = 20.
31 | 
32 |     # Sampling
33 |     config.sampling = sampling = ml_collections.ConfigDict()
34 |     sampling.method = 'ancestral'
35 |     ## set smaller for faster eval
36 |     sampling.steps = 200
37 | 
38 |     # Model config
39 |     config.model = model = ml_collections.ConfigDict()
40 |     model.geometric_data_parallel = False
41 |     model.ema_decay = 0.999
42 |     model.pred_data = True
43 |     model.self_cond = True
44 |     model.name = 'GVPTransCond'
45 |     model.node_in_dim = (8, 4)
46 |     model.node_h_dim = (512, 128)
47 |     model.edge_in_dim = (32, 1)
48 |     model.edge_h_dim = (128, 1)
49 |     model.num_layers = 4
50 |     model.drop_rate = 0.1
51 |     model.out_dim = 4
52 |     model.time_cond = True
53 |     model.dihedral_angle = True
54 |     model.num_trans_layer = 8
55 |     model.drop_struct = -1.
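    # Descriptive note (added for readability): the `trans` block below holds the
    # Transformer-encoder hyperparameters used by the GVPTransCond model (see
    # models/transformer_layer.py). Any field in this config can be overridden from
    # the command line, e.g. `--config.eval.cond_scale 0.4`, assuming main.py loads
    # this file through ml_collections config flags as the README examples suggest.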
56 | 57 | model.trans = trans = ml_collections.ConfigDict() 58 | trans.encoder_embed_dim = 512 59 | trans.encoder_attention_heads = 16 60 | trans.attention_dropout = 0.1 61 | trans.dropout = 0.1 62 | trans.encoder_ffn_embed_dim = 1024 63 | 64 | # optimization 65 | config.optim = optim = ml_collections.ConfigDict() 66 | optim.weight_decay = 0 67 | optim.optimizer = 'AdamW' 68 | optim.lr = 2e-4 69 | optim.beta1 = 0.9 70 | optim.eps = 1e-8 71 | optim.warmup = 20000 72 | optim.grad_clip = 20. 73 | optim.disable_grad_log = True 74 | 75 | # Evaluation config 76 | config.eval = eval = ml_collections.ConfigDict() 77 | eval.model_path = '' 78 | eval.test_perplexity = False 79 | eval.test_recovery = True 80 | eval.n_samples = 1 81 | eval.sampling_steps = 50 82 | eval.cond_scale = -1. 83 | eval.dynamic_threshold = False 84 | eval.dynamic_thresholding_percentile = 0.95 85 | 86 | return config -------------------------------------------------------------------------------- /datasets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ml4bio/RiboDiffusion/3ac7a557f470c25d95379acedf75a9a49f70ef6e/datasets/__init__.py -------------------------------------------------------------------------------- /datasets/utils.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | import torch 4 | import torch.nn.functional as F 5 | from tqdm import tqdm 6 | from Bio.PDB.PDBParser import PDBParser 7 | from Bio import SeqIO 8 | import torch 9 | import torch_geometric 10 | import torch_cluster 11 | from scipy.spatial.transform import Rotation 12 | import os 13 | import gc 14 | import pickle 15 | import pdb 16 | 17 | NUM_TO_LETTER = np.array(['A', 'G', 'C', 'U']) 18 | LETTER_TO_NUM = {'A': 0, 'G': 1, 'C': 2, 'U': 3} 19 | 20 | def get_posenc(edge_index, num_posenc=16): 21 | # From https://github.com/jingraham/neurips19-graph-protein-design 22 | num_posenc = num_posenc 23 | d = edge_index[0] - edge_index[1] 24 | 25 | frequency = torch.exp( 26 | torch.arange(0, num_posenc, 2, dtype=torch.float32, device=d.device) 27 | * -(np.log(10000.0) / num_posenc) 28 | ) 29 | 30 | angles = d.unsqueeze(-1) * frequency 31 | E = torch.cat((torch.cos(angles), torch.sin(angles)), -1) 32 | return E 33 | 34 | 35 | def get_orientations(X): 36 | # X : num_conf x num_res x 3 37 | forward = normalize(X[:, 1:] - X[:, :-1]) 38 | backward = normalize(X[:, :-1] - X[:, 1:]) 39 | forward = F.pad(forward, [0, 0, 0, 1]) 40 | backward = F.pad(backward, [0, 0, 1, 0]) 41 | return torch.cat([forward.unsqueeze(-2), backward.unsqueeze(-2)], -2) 42 | 43 | 44 | def get_orientations_single(X): 45 | # X : num_res x 3 46 | forward = normalize(X[1:] - X[:-1]) 47 | backward = normalize(X[:-1] - X[1:]) 48 | forward = F.pad(forward, [0, 0, 0, 1]) 49 | backward = F.pad(backward, [0, 0, 1, 0]) 50 | return torch.cat([forward.unsqueeze(-2), backward.unsqueeze(-2)], -2) 51 | 52 | def get_sidechains(X): 53 | # X : num_conf x num_res x 3 x 3 54 | p, origin, n = X[:, :, 0], X[:, :, 1], X[:, :, 2] 55 | n, p = normalize(n - origin), normalize(p - origin) 56 | return torch.cat([n.unsqueeze_(-2), p.unsqueeze_(-2)], -2) 57 | 58 | def get_sidechains_single(X): 59 | # X : num_res x 3 x 3 60 | p, origin, n = X[:, 0], X[:, 1], X[:, 2] 61 | n, p = normalize(n - origin), normalize(p - origin) 62 | return torch.cat([n.unsqueeze_(-2), p.unsqueeze_(-2)], -2) 63 | 64 | def normalize(tensor, dim=-1): 65 | ''' 66 | Normalizes a `torch.Tensor` along 
dimension `dim` without `nan`s. 67 | ''' 68 | return torch.nan_to_num( 69 | torch.div(tensor, torch.linalg.norm(tensor, dim=dim, keepdim=True))) 70 | 71 | 72 | def rbf(D, D_min=0., D_max=20., D_count=16): 73 | ''' 74 | From https://github.com/jingraham/neurips19-graph-protein-design 75 | 76 | Returns an RBF embedding of `torch.Tensor` `D` along a new axis=-1. 77 | That is, if `D` has shape [...dims], then the returned tensor will have 78 | shape [...dims, D_count]. 79 | 80 | TODO switch to DimeNet RBFs 81 | ''' 82 | D_mu = torch.linspace(D_min, D_max, D_count, device=D.device) 83 | D_mu = D_mu.view([1, -1]) 84 | D_sigma = (D_max - D_min) / D_count 85 | D_expand = torch.unsqueeze(D, -1) 86 | 87 | RBF = torch.exp(-((D_expand - D_mu) / D_sigma) ** 2) 88 | return RBF 89 | 90 | 91 | @torch.no_grad() 92 | def construct_data_single(coords, seq=None, mask=None, num_posenc=16, num_rbf=16, knn_num=10): 93 | coords = torch.as_tensor(coords, dtype=torch.float32) # num_res x 3 x 3 94 | # seq is np.array/string, convert to torch.tensor 95 | if isinstance(seq, np.ndarray): 96 | seq = torch.as_tensor(seq, dtype=torch.long) 97 | else: 98 | seq = torch.as_tensor( 99 | [LETTER_TO_NUM[residue] for residue in seq], 100 | dtype=torch.long 101 | ) 102 | 103 | # Compute features 104 | # node positions: num_res x 3 105 | coord_C = coords[:, 1].clone() 106 | # Construct merged edge index 107 | edge_index = torch_cluster.knn_graph(coord_C, k=knn_num) 108 | edge_index = torch_geometric.utils.coalesce(edge_index) 109 | 110 | # Node attributes: num_res x 2 x 3, each 111 | orientations = get_orientations_single(coord_C) 112 | sidechains = get_sidechains_single(coords) 113 | 114 | # Edge displacement vectors: num_edges x 3 115 | edge_vectors = coord_C[edge_index[0]] - coord_C[edge_index[1]] 116 | 117 | # Edge RBF features: num_edges x num_rbf 118 | edge_rbf = rbf(edge_vectors.norm(dim=-1), D_count=num_rbf) 119 | # Edge positional encodings: num_edges x num_posenc 120 | edge_posenc = get_posenc(edge_index, num_posenc) 121 | 122 | node_s = (seq.unsqueeze(-1) == torch.arange(4).unsqueeze(0)).float() 123 | node_v = torch.cat([orientations, sidechains], dim=-2) 124 | edge_s = torch.cat([edge_rbf, edge_posenc], dim=-1) 125 | edge_v = normalize(edge_vectors).unsqueeze(-2) 126 | 127 | node_s, node_v, edge_s, edge_v = map( 128 | torch.nan_to_num, 129 | (node_s, node_v, edge_s, edge_v) 130 | ) 131 | 132 | # add mask for invalid residues 133 | if mask is None: 134 | mask = coords.sum(dim=(2, 3)) == 0. 
135 | mask = torch.tensor(mask) 136 | 137 | return {'seq': seq, 138 | 'coords': coords, 139 | 'node_s': node_s, 140 | 'node_v': node_v, 141 | 'edge_s': edge_s, 142 | 'edge_v': edge_v, 143 | 'edge_index': edge_index, 144 | 'mask': mask} 145 | 146 | 147 | # read PDB files directly; modify from ESM 148 | def parse_pdb_direct(pdb_path, temp_save_path=None, chain=None): 149 | if temp_save_path is not None: 150 | try: 151 | if os.path.exists(temp_save_path): 152 | with open(temp_save_path, 'rb') as f: 153 | seq, xyz, mask = pickle.load(f) 154 | return seq, xyz, mask 155 | except: 156 | # pass 157 | print(f"Error in reading {temp_save_path}, re-generate it.") 158 | 159 | xyz, seq, doubles, min_resn, max_resn = {}, {}, {}, np.inf, -np.inf 160 | with open(pdb_path, "rb") as f: 161 | for line in f: 162 | line = line.decode("utf-8", "ignore").rstrip() 163 | 164 | if line[:6] == "HETATM" and line[17:17 + 3] == "MSE": 165 | line = line.replace("HETATM", "ATOM ") 166 | line = line.replace("MSE", "MET") 167 | 168 | if line[:4] == "ATOM": 169 | ch = line[21:22] 170 | if ch == chain or chain is None: 171 | atom = line[12:12 + 4].strip() 172 | resi = line[17:17 + 3] 173 | resi_extended = line[16:17 + 3].strip() 174 | resn = line[22:22 + 5].strip() 175 | x, y, z = [float(line[i:(i + 8)]) for i in [30, 38, 46]] 176 | 177 | if resn[-1].isalpha(): 178 | resa, resn = resn[-1], int(resn[:-1]) - 1 179 | else: 180 | resa, resn = "", int(resn) - 1 181 | if resn < min_resn: min_resn = resn 182 | if resn > max_resn: max_resn = resn 183 | if resn not in xyz: xyz[resn] = {} 184 | if resa not in xyz[resn]: xyz[resn][resa] = {} 185 | if resn not in seq: seq[resn] = {} 186 | if resa not in seq[resn]: 187 | seq[resn][resa] = resi 188 | elif seq[resn][resa] != resi_extended: 189 | # doubles mark locations in the pdb file where multi residue entries are 190 | # present. There's a known bug in TmAlign binary that doesn't read / skip 191 | # these entries, so we mark them to create a sequence that is aligned with 192 | # gap tokens in such locations. 
193 | doubles[resn] = True 194 | 195 | if atom not in xyz[resn][resa]: 196 | xyz[resn][resa][atom] = np.array([x, y, z]) 197 | 198 | # convert to numpy arrays, fill in missing values 199 | seq_, xyz_, mask = [], [], [] 200 | for resn in range(min_resn, max_resn + 1): 201 | ## residue name as seq 202 | if resn in seq: 203 | for k in sorted(seq[resn]): 204 | # seq_.append(aa_3_N.get(seq[resn][k], 20)) 205 | seq_.append(seq[resn][k].strip()) 206 | else: 207 | # seq_.append(20) 208 | continue 209 | ## xyz coordinates [L, 3, 3] 210 | coords_tmp = np.zeros((3, 3)) 211 | if resn in xyz: 212 | for k in sorted(xyz[resn]): 213 | res_name = seq[resn][k].strip() 214 | if "C4'" in xyz[resn][k]: coords_tmp[0] = xyz[resn][k]["C4'"] 215 | if "C1'" in xyz[resn][k]: coords_tmp[1] = xyz[resn][k]["C1'"] 216 | if res_name in ['A', 'G'] and "N9" in xyz[resn][k]: coords_tmp[2] = xyz[resn][k]["N9"] 217 | if res_name in ['C', 'U'] and "N1" in xyz[resn][k]: coords_tmp[2] = xyz[resn][k]["N1"] 218 | xyz_.append(coords_tmp) 219 | mask.append(np.all(coords_tmp != 0.)) 220 | 221 | seq_ = ''.join(seq_) 222 | assert len(seq_) == len(xyz_) 223 | xyz_ = np.array(xyz_, dtype=np.float32) 224 | mask = np.array(mask) 225 | 226 | if temp_save_path is not None: 227 | pickle.dump((seq_, xyz_, mask), open(temp_save_path, 'wb')) 228 | return seq_, xyz_, mask 229 | 230 | def PDBtoData(pdb_path, num_posenc, num_rbf, knn_num): 231 | seq, coords, mask = parse_pdb_direct(pdb_path) 232 | return construct_data_single( 233 | coords, 234 | seq, 235 | mask, 236 | num_posenc=num_posenc, 237 | num_rbf=num_rbf, 238 | knn_num=knn_num, 239 | ) 240 | 241 | def sample_to_fasta(sample, pdb_name, fasta_path): 242 | seq = ''.join(list(NUM_TO_LETTER[sample.cpu().numpy()])) 243 | with open(fasta_path, 'w') as f: 244 | f.write(f'>{pdb_name}\n') 245 | f.write(f'{seq}\n') 246 | -------------------------------------------------------------------------------- /diffusion/__init__.py: -------------------------------------------------------------------------------- 1 | from .noise_schedule import NoiseScheduleVP 2 | -------------------------------------------------------------------------------- /diffusion/noise_schedule.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | import math 4 | 5 | 6 | class NoiseScheduleVP: 7 | def __init__(self, 8 | schedule='discrete', 9 | betas=None, 10 | alphas_cumprod=None, 11 | continuous_beta_0=0.1, 12 | continuous_beta_1=20., 13 | dtype=torch.float32): 14 | """ 15 | Create a wrapper class for the forward SDE (VP type). From DPM-Solver. 16 | Notes: cosine schedule for continuous-time setting may face numerical issues. Please refer to the latest version 17 | of DPM-Solver (https://github.com/LuChengTHU/dpm-solver) for further modification. 18 | """ 19 | 20 | if schedule not in ['discrete', 'linear', 'cosine', 'discrete_poly']: 21 | raise ValueError( 22 | "Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format( 23 | schedule)) 24 | 25 | self.schedule = schedule 26 | if 'discrete' in schedule: 27 | if schedule == 'discrete_poly': 28 | alphas_cumprod = get_polynomial_schedule(1000, power=2) 29 | log_alphas = 0.5 * torch.log(alphas_cumprod) 30 | elif betas is not None: 31 | log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0) 32 | else: 33 | assert alphas_cumprod is not None 34 | log_alphas = 0.5 * torch.log(alphas_cumprod) 35 | self.total_N = len(log_alphas) 36 | self.T = 1. 
37 | self.t_array = torch.linspace(0., 1., self.total_N + 1)[1:].reshape((1, -1)).to(dtype=dtype) 38 | self.log_alpha_array = log_alphas.reshape((1, -1,)).to(dtype=dtype) 39 | else: 40 | self.total_N = 1000 41 | self.beta_0 = continuous_beta_0 42 | self.beta_1 = continuous_beta_1 43 | self.cosine_s = 0.008 44 | self.cosine_beta_max = 999. 45 | self.cosine_t_max = math.atan(self.cosine_beta_max * (1. + self.cosine_s) / math.pi) * 2. * ( 46 | 1. + self.cosine_s) / math.pi - self.cosine_s 47 | self.cosine_log_alpha_0 = math.log(math.cos(self.cosine_s / (1. + self.cosine_s) * math.pi / 2.)) 48 | if schedule == 'cosine': 49 | # For the cosine schedule, T = 1 will have numerical issues. So we manually set the ending time T. 50 | # Note that T = 0.9946 may be not the optimal setting. However, we find it works well. 51 | self.T = 0.9946 52 | else: 53 | self.T = 1. 54 | 55 | 56 | def numerical_clip_alpha(self, log_alphas, clipped_lambda=-5.1): 57 | """ 58 | For some beta schedules such as cosine schedule, the log-SNR has numerical isssues. 59 | We clip the log-SNR near t=T within -5.1 to ensure the stability. 60 | Such a trick is very useful for diffusion models with the cosine schedule, such as i-DDPM, guided-diffusion and GLIDE. 61 | """ 62 | log_sigmas = 0.5 * torch.log(1. - torch.exp(2. * log_alphas)) 63 | lambs = log_alphas - log_sigmas 64 | idx = torch.searchsorted(torch.flip(lambs, [0]), clipped_lambda) 65 | if idx > 0: 66 | log_alphas = log_alphas[:-idx] 67 | return log_alphas 68 | 69 | 70 | def marginal_log_mean_coeff(self, t): 71 | """Compute log(alpha_t) of a given continuous-time label t in [0,T].""" 72 | if 'discrete' in self.schedule: 73 | return interpolate_fn(t.reshape((-1, 1)), self.t_array.to(t.device), self.log_alpha_array.to(t.device)).reshape((-1)) 74 | elif self.schedule == 'linear': 75 | return -0.25 * t ** 2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0 76 | elif self.schedule == 'cosine': 77 | log_alpha_fn = lambda s: torch.log(torch.cos((s + self.cosine_s) / (1. + self.cosine_s) * math.pi / 2.)) 78 | log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 79 | return log_alpha_t 80 | 81 | def marginal_alpha(self, t): 82 | """Compute alpha_t of a given continuous-time label t in [0, T].""" 83 | return torch.exp(self.marginal_log_mean_coeff(t)) 84 | 85 | def marginal_std(self, t): 86 | """Compute sigma_t of a given continuous-time label t in [0, T].""" 87 | return torch.sqrt(1. - torch.exp(2. * self.marginal_log_mean_coeff(t))) 88 | 89 | def marginal_prob(self, t): 90 | log_mean_coeff = self.marginal_log_mean_coeff(t) 91 | return torch.exp(log_mean_coeff), torch.sqrt(1. - torch.exp(2. * log_mean_coeff)) 92 | 93 | def marginal_lambda(self, t): 94 | """ 95 | Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. 96 | """ 97 | log_mean_coeff = self.marginal_log_mean_coeff(t) 98 | log_std = 0.5 * torch.log(1. - torch.exp(2. * log_mean_coeff)) 99 | return log_mean_coeff - log_std 100 | 101 | def inverse_lambda(self, lamb): 102 | """ 103 | Compute the continuous-time label t in [0, T] of a given half-logSNR lambda_t. 104 | """ 105 | if self.schedule == 'linear': 106 | tmp = 2. * (self.beta_1 - self.beta_0) * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) 107 | Delta = self.beta_0**2 + tmp 108 | return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0) 109 | elif 'discrete' in self.schedule: 110 | log_alpha = -0.5 * torch.logaddexp(torch.zeros((1,)).to(lamb.device), -2. 
* lamb) 111 | t = interpolate_fn(log_alpha.reshape((-1, 1)), torch.flip(self.log_alpha_array.to(lamb.device), [1]), torch.flip(self.t_array.to(lamb.device), [1])) 112 | return t.reshape((-1,)) 113 | else: 114 | log_alpha = -0.5 * torch.logaddexp(-2. * lamb, torch.zeros((1,)).to(lamb)) 115 | t_fn = lambda log_alpha_t: torch.arccos(torch.exp(log_alpha_t + self.cosine_log_alpha_0)) * 2. * (1. + self.cosine_s) / math.pi - self.cosine_s 116 | t = t_fn(log_alpha) 117 | return t 118 | 119 | def get_noiseLevel(self, t): 120 | alpha_t = self.marginal_alpha(t) 121 | sigma_t = self.marginal_std(t) 122 | return torch.log(alpha_t ** 2 / sigma_t ** 2) 123 | 124 | def add_noise(self, data, t): 125 | # TODO: adding noise to data 126 | pass 127 | 128 | 129 | ############################################################# 130 | # other utility functions 131 | ############################################################# 132 | 133 | def interpolate_fn(x, xp, yp): 134 | """ 135 | A piecewise linear function y = f(x), using xp and yp as keypoints. 136 | We implement f(x) in a differentiable way (i.e. applicable for autograd). 137 | The function f(x) is well-defined for all x-axis. (For x beyond the bounds of xp, we use the outmost points of xp to define the linear function.) 138 | 139 | Args: 140 | x: PyTorch tensor with shape [N, C], where N is the batch size, C is the number of channels (we use C = 1 for DPM-Solver). 141 | xp: PyTorch tensor with shape [C, K], where K is the number of keypoints. 142 | yp: PyTorch tensor with shape [C, K]. 143 | Returns: 144 | The function values f(x), with shape [N, C]. 145 | """ 146 | N, K = x.shape[0], xp.shape[1] 147 | all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2) 148 | sorted_all_x, x_indices = torch.sort(all_x, dim=2) 149 | x_idx = torch.argmin(x_indices, dim=2) 150 | cand_start_idx = x_idx - 1 151 | start_idx = torch.where( 152 | torch.eq(x_idx, 0), 153 | torch.tensor(1, device=x.device), 154 | torch.where( 155 | torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, 156 | ), 157 | ) 158 | end_idx = torch.where(torch.eq(start_idx, cand_start_idx), start_idx + 2, start_idx + 1) 159 | start_x = torch.gather(sorted_all_x, dim=2, index=start_idx.unsqueeze(2)).squeeze(2) 160 | end_x = torch.gather(sorted_all_x, dim=2, index=end_idx.unsqueeze(2)).squeeze(2) 161 | start_idx2 = torch.where( 162 | torch.eq(x_idx, 0), 163 | torch.tensor(0, device=x.device), 164 | torch.where( 165 | torch.eq(x_idx, K), torch.tensor(K - 2, device=x.device), cand_start_idx, 166 | ), 167 | ) 168 | y_positions_expanded = yp.unsqueeze(0).expand(N, -1, -1) 169 | start_y = torch.gather(y_positions_expanded, dim=2, index=start_idx2.unsqueeze(2)).squeeze(2) 170 | end_y = torch.gather(y_positions_expanded, dim=2, index=(start_idx2 + 1).unsqueeze(2)).squeeze(2) 171 | cand = start_y + (x - start_x) * (end_y - start_y) / (end_x - start_x) 172 | return cand 173 | 174 | 175 | def expand_dims(v, dims): 176 | """ 177 | Expand the tensor `v` to the dim `dims`. 178 | 179 | Args: 180 | `v`: a PyTorch tensor with shape [N]. 181 | `dim`: a `int`. 182 | Returns: 183 | a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. 184 | """ 185 | return v[(...,) + (None,)*(dims - 1)] 186 | 187 | 188 | def get_polynomial_schedule(time_steps, s=1e-4, power=2): 189 | """ 190 | A noise schedule based on a simple polynomial equation: 1 - x^power. 
(from E3 Diffusion) 191 | """ 192 | steps = time_steps + 1 193 | x = torch.linspace(0, steps, steps) 194 | alphas2 = (1 - torch.pow(x / steps, power))**2 195 | 196 | # clip alpha_t / alpha_t-1. This may help improve stability during sampling. 197 | alphas2 = torch.cat([torch.ones(1), alphas2], dim=0) 198 | alphas_step = (alphas2[1:] / alphas2[:-1]) 199 | alphas_step = torch.clamp(alphas_step, min=0.001, max=1.) 200 | alphas2 = torch.cumprod(alphas_step, dim=0) 201 | 202 | precision = 1 - 2 * s 203 | alphas2 = precision * alphas2 + s 204 | 205 | return alphas2[1:] 206 | 207 | 208 | if __name__ == "__main__": 209 | poly_sch = NoiseScheduleVP('discrete_poly') 210 | cos_sch = NoiseScheduleVP('cosine') 211 | lin_sch = NoiseScheduleVP('linear') 212 | t1 = torch.tensor(0.1) 213 | t2 = torch.tensor(0.2) 214 | 215 | t3 = torch.tensor(0.5) 216 | t4 = torch.tensor(0.9) 217 | time_steps = [torch.tensor(0.1), torch.tensor(0.2), torch.tensor(0.3), torch.tensor(0.4), 218 | torch.tensor(0.6), torch.tensor(0.7), torch.tensor(0.8), torch.tensor(0.9)] 219 | 220 | for sch in [poly_sch, cos_sch, lin_sch]: 221 | print(sch.schedule) 222 | # for tt in time_steps: 223 | # print(tt) 224 | # print(sch.marginal_alpha(tt), sch.marginal_std(tt)) 225 | print(sch.marginal_prob(torch.tensor(0.))) 226 | print('-'*80) 227 | 228 | -------------------------------------------------------------------------------- /example/R1107.pdb: -------------------------------------------------------------------------------- 1 | ATOM 1 O5' G 0 1 -9.332 10.937 7.364 1.00105.84 O 2 | ATOM 2 C5' G 0 1 -7.952 10.664 7.173 1.00111.19 C 3 | ATOM 3 C4' G 0 1 -7.547 9.375 7.839 1.00108.78 C 4 | ATOM 4 O4' G 0 1 -6.174 9.058 7.490 1.00100.15 O 5 | ATOM 5 C3' G 0 1 -7.561 9.392 9.361 1.00125.07 C 6 | ATOM 6 O3' G 0 1 -8.846 9.120 9.894 1.00126.19 O 7 | ATOM 7 C2' G 0 1 -6.523 8.339 9.724 1.00114.01 C 8 | ATOM 8 O2' G 0 1 -7.074 7.035 9.619 1.00112.81 O 9 | ATOM 9 C1' G 0 1 -5.499 8.520 8.605 1.00116.24 C 10 | ATOM 10 N9 G 0 1 -4.412 9.454 8.959 1.00109.32 N 11 | ATOM 11 C8 G 0 1 -3.919 10.403 8.097 1.00112.06 C 12 | ATOM 12 N7 G 0 1 -2.956 11.116 8.603 1.00114.92 N 13 | ATOM 13 C5 G 0 1 -2.799 10.603 9.879 1.00112.22 C 14 | ATOM 14 C6 G 0 1 -1.890 10.991 10.889 1.00113.78 C 15 | ATOM 15 O6 G 0 1 -1.040 11.885 10.831 1.00120.63 O 16 | ATOM 16 N1 G 0 1 -2.041 10.227 12.041 1.00115.36 N 17 | ATOM 17 C2 G 0 1 -2.956 9.215 12.195 1.00121.09 C 18 | ATOM 18 N2 G 0 1 -2.952 8.589 13.382 1.00125.78 N 19 | ATOM 19 N3 G 0 1 -3.810 8.840 11.255 1.00108.72 N 20 | ATOM 20 C4 G 0 1 -3.680 9.568 10.124 1.00107.70 C 21 | ATOM 21 P G 0 2 -9.727 10.294 10.549 1.00135.18 P 22 | ATOM 22 OP1 G 0 2 -11.116 9.786 10.689 1.00122.02 O 23 | ATOM 23 OP2 G 0 2 -9.480 11.550 9.797 1.00120.34 O 24 | ATOM 24 O5' G 0 2 -9.110 10.476 12.006 1.00114.90 O 25 | ATOM 25 C5' G 0 2 -9.058 9.390 12.918 1.00116.05 C 26 | ATOM 26 C4' G 0 2 -8.022 9.627 13.986 1.00115.38 C 27 | ATOM 27 O4' G 0 2 -6.690 9.503 13.417 1.00116.21 O 28 | ATOM 28 C3' G 0 2 -8.020 11.014 14.606 1.00116.37 C 29 | ATOM 29 O3' G 0 2 -9.030 11.211 15.577 1.00125.16 O 30 | ATOM 30 C2' G 0 2 -6.602 11.123 15.141 1.00122.62 C 31 | ATOM 31 O2' G 0 2 -6.445 10.342 16.316 1.00117.62 O 32 | ATOM 32 C1' G 0 2 -5.826 10.453 14.010 1.00124.11 C 33 | ATOM 33 N9 G 0 2 -5.446 11.438 12.979 1.00122.98 N 34 | ATOM 34 C8 G 0 2 -5.841 11.480 11.663 1.00128.52 C 35 | ATOM 35 N7 G 0 2 -5.338 12.492 11.007 1.00128.69 N 36 | ATOM 36 C5 G 0 2 -4.569 13.163 11.948 1.00120.04 C 37 | ATOM 37 C6 G 0 2 -3.780 14.339 11.833 1.00120.85 C 38 | ATOM 
38 O6 G 0 2 -3.594 15.053 10.839 1.00117.91 O 39 | ATOM 39 N1 G 0 2 -3.170 14.660 13.043 1.00121.02 N 40 | ATOM 40 C2 G 0 2 -3.303 13.950 14.212 1.00120.27 C 41 | ATOM 41 N2 G 0 2 -2.640 14.411 15.280 1.00113.99 N 42 | ATOM 42 N3 G 0 2 -4.029 12.856 14.329 1.00112.84 N 43 | ATOM 43 C4 G 0 2 -4.630 12.524 13.169 1.00115.99 C 44 | ATOM 44 P G 0 3 -9.953 12.524 15.496 1.00130.12 P 45 | ATOM 45 OP1 G 0 3 -11.349 12.151 15.839 1.00133.25 O 46 | ATOM 46 OP2 G 0 3 -9.672 13.190 14.199 1.00118.56 O 47 | ATOM 47 O5' G 0 3 -9.378 13.467 16.642 1.00124.12 O 48 | ATOM 48 C5' G 0 3 -8.900 12.922 17.861 1.00121.83 C 49 | ATOM 49 C4' G 0 3 -7.791 13.763 18.436 1.00124.42 C 50 | ATOM 50 O4' G 0 3 -6.567 13.552 17.679 1.00124.94 O 51 | ATOM 51 C3' G 0 3 -7.996 15.267 18.368 1.00121.14 C 52 | ATOM 52 O3' G 0 3 -8.870 15.778 19.357 1.00126.02 O 53 | ATOM 53 C2' G 0 3 -6.572 15.786 18.461 1.00123.35 C 54 | ATOM 54 O2' G 0 3 -6.080 15.662 19.788 1.00132.28 O 55 | ATOM 55 C1' G 0 3 -5.849 14.768 17.585 1.00123.16 C 56 | ATOM 56 N9 G 0 3 -5.851 15.201 16.176 1.00130.28 N 57 | ATOM 57 C8 G 0 3 -6.566 14.676 15.125 1.00139.29 C 58 | ATOM 58 N7 G 0 3 -6.355 15.303 13.998 1.00134.04 N 59 | ATOM 59 C5 G 0 3 -5.457 16.311 14.326 1.00127.06 C 60 | ATOM 60 C6 G 0 3 -4.853 17.315 13.523 1.00116.78 C 61 | ATOM 61 O6 G 0 3 -4.997 17.530 12.312 1.00101.50 O 62 | ATOM 62 N1 G 0 3 -4.001 18.119 14.276 1.00116.43 N 63 | ATOM 63 C2 G 0 3 -3.761 17.978 15.623 1.00114.98 C 64 | ATOM 64 N2 G 0 3 -2.911 18.848 16.181 1.00105.93 N 65 | ATOM 65 N3 G 0 3 -4.314 17.049 16.378 1.00108.43 N 66 | ATOM 66 C4 G 0 3 -5.142 16.259 15.668 1.00118.61 C 67 | ATOM 67 P G 0 4 -9.929 16.921 18.961 1.00138.93 P 68 | ATOM 68 OP1 G 0 4 -11.051 16.879 19.932 1.00144.00 O 69 | ATOM 69 OP2 G 0 4 -10.218 16.783 17.510 1.00120.81 O 70 | ATOM 70 O5' G 0 4 -9.127 18.283 19.167 1.00128.99 O 71 | ATOM 71 C5' G 0 4 -8.595 18.629 20.438 1.00122.56 C 72 | ATOM 72 C4' G 0 4 -7.388 19.527 20.316 1.00131.31 C 73 | ATOM 73 O4' G 0 4 -6.439 18.954 19.377 1.00131.70 O 74 | ATOM 74 C3' G 0 4 -7.637 20.925 19.771 1.00130.03 C 75 | ATOM 75 O3' G 0 4 -8.149 21.825 20.738 1.00137.57 O 76 | ATOM 76 C2' G 0 4 -6.267 21.316 19.239 1.00118.75 C 77 | ATOM 77 O2' G 0 4 -5.401 21.684 20.302 1.00113.06 O 78 | ATOM 78 C1' G 0 4 -5.784 19.985 18.666 1.00117.66 C 79 | ATOM 79 N9 G 0 4 -6.125 19.859 17.238 1.00117.48 N 80 | ATOM 80 C8 G 0 4 -7.006 18.974 16.667 1.00132.20 C 81 | ATOM 81 N7 G 0 4 -7.099 19.111 15.373 1.00138.26 N 82 | ATOM 82 C5 G 0 4 -6.232 20.153 15.071 1.00119.71 C 83 | ATOM 83 C6 G 0 4 -5.910 20.751 13.826 1.00101.79 C 84 | ATOM 84 O6 G 0 4 -6.343 20.473 12.701 1.00105.16 O 85 | ATOM 85 N1 G 0 4 -4.980 21.774 13.978 1.00 94.89 N 86 | ATOM 86 C2 G 0 4 -4.431 22.172 15.173 1.00106.82 C 87 | ATOM 87 N2 G 0 4 -3.551 23.181 15.116 1.00103.56 N 88 | ATOM 88 N3 G 0 4 -4.722 21.622 16.339 1.00108.23 N 89 | ATOM 89 C4 G 0 4 -5.623 20.626 16.214 1.00110.52 C 90 | ATOM 90 P G 0 5 -9.229 22.936 20.309 1.00137.96 P 91 | ATOM 91 OP1 G 0 5 -9.763 23.558 21.546 1.00121.03 O 92 | ATOM 92 OP2 G 0 5 -10.161 22.311 19.336 1.00139.45 O 93 | ATOM 93 O5' G 0 5 -8.371 24.037 19.540 1.00107.57 O 94 | ATOM 94 C5' G 0 5 -7.434 24.840 20.240 1.00123.43 C 95 | ATOM 95 C4' G 0 5 -6.619 25.699 19.306 1.00133.54 C 96 | ATOM 96 O4' G 0 5 -5.882 24.865 18.373 1.00137.61 O 97 | ATOM 97 C3' G 0 5 -7.391 26.649 18.405 1.00124.51 C 98 | ATOM 98 O3' G 0 5 -7.836 27.819 19.069 1.00141.11 O 99 | ATOM 99 C2' G 0 5 -6.390 26.915 17.290 1.00116.76 C 100 | ATOM 100 O2' G 0 5 -5.386 27.822 17.719 1.00124.59 O 
101 | ATOM 101 C1' G 0 5 -5.755 25.533 17.133 1.00123.26 C 102 | ATOM 102 N9 G 0 5 -6.449 24.748 16.097 1.00121.17 N 103 | ATOM 103 C8 G 0 5 -7.339 23.716 16.271 1.00127.63 C 104 | ATOM 104 N7 G 0 5 -7.791 23.239 15.142 1.00130.16 N 105 | ATOM 105 C5 G 0 5 -7.167 24.008 14.168 1.00116.93 C 106 | ATOM 106 C6 G 0 5 -7.260 23.961 12.753 1.00111.79 C 107 | ATOM 107 O6 G 0 5 -7.937 23.203 12.045 1.00108.72 O 108 | ATOM 108 N1 G 0 5 -6.451 24.928 12.164 1.00100.87 N 109 | ATOM 109 C2 G 0 5 -5.654 25.824 12.835 1.00101.41 C 110 | ATOM 110 N2 G 0 5 -4.948 26.678 12.078 1.00 93.69 N 111 | ATOM 111 N3 G 0 5 -5.560 25.877 14.151 1.00 99.96 N 112 | ATOM 112 C4 G 0 5 -6.337 24.947 14.744 1.00112.38 C 113 | ATOM 113 P C 0 6 -8.633 28.954 18.256 1.00136.33 P 114 | ATOM 114 OP1 C 0 6 -9.541 29.657 19.198 1.00152.43 O 115 | ATOM 115 OP2 C 0 6 -9.196 28.343 17.024 1.00115.11 O 116 | ATOM 116 O5' C 0 6 -7.491 29.970 17.812 1.00120.19 O 117 | ATOM 117 C5' C 0 6 -7.794 31.104 17.016 1.00129.42 C 118 | ATOM 118 C4' C 0 6 -6.787 31.280 15.909 1.00126.66 C 119 | ATOM 119 O4' C 0 6 -6.442 29.988 15.353 1.00112.35 O 120 | ATOM 120 C3' C 0 6 -7.259 32.104 14.719 1.00139.27 C 121 | ATOM 121 O3' C 0 6 -7.052 33.489 14.931 1.00163.01 O 122 | ATOM 122 C2' C 0 6 -6.435 31.555 13.558 1.00139.54 C 123 | ATOM 123 O2' C 0 6 -5.168 32.192 13.493 1.00130.34 O 124 | ATOM 124 C1' C 0 6 -6.227 30.093 13.965 1.00125.42 C 125 | ATOM 125 N1 C 0 6 -7.141 29.146 13.285 1.00114.22 N 126 | ATOM 126 C2 C 0 6 -7.103 28.951 11.897 1.00104.59 C 127 | ATOM 127 O2 C 0 6 -6.312 29.601 11.194 1.00 99.73 O 128 | ATOM 128 N3 C 0 6 -7.955 28.054 11.344 1.00116.84 N 129 | ATOM 129 C4 C 0 6 -8.800 27.359 12.110 1.00116.87 C 130 | ATOM 130 N4 C 0 6 -9.616 26.484 11.517 1.00114.50 N 131 | ATOM 131 C5 C 0 6 -8.847 27.527 13.522 1.00112.65 C 132 | ATOM 132 C6 C 0 6 -8.004 28.417 14.056 1.00114.56 C 133 | ATOM 133 P C 0 7 -8.246 34.550 14.763 1.00184.60 P 134 | ATOM 134 OP1 C 0 7 -8.695 34.928 16.127 1.00178.75 O 135 | ATOM 135 OP2 C 0 7 -9.242 34.031 13.796 1.00175.51 O 136 | ATOM 136 O5' C 0 7 -7.528 35.810 14.101 1.00163.15 O 137 | ATOM 137 C5' C 0 7 -6.491 35.634 13.138 1.00175.79 C 138 | ATOM 138 C4' C 0 7 -6.968 35.950 11.742 1.00188.27 C 139 | ATOM 139 O4' C 0 7 -8.415 35.819 11.721 1.00205.52 O 140 | ATOM 140 C3' C 0 7 -6.653 37.364 11.253 1.00211.49 C 141 | ATOM 141 O3' C 0 7 -6.216 37.382 9.874 1.00241.38 O 142 | ATOM 142 C2' C 0 7 -7.962 38.124 11.487 1.00233.17 C 143 | ATOM 143 O2' C 0 7 -8.176 39.214 10.614 1.00225.92 O 144 | ATOM 144 C1' C 0 7 -9.016 37.030 11.306 1.00233.89 C 145 | ATOM 145 N1 C 0 7 -10.236 37.232 12.103 1.00247.50 N 146 | ATOM 146 C2 C 0 7 -11.425 36.677 11.628 1.00224.63 C 147 | ATOM 147 O2 C 0 7 -11.397 36.034 10.567 1.00181.33 O 148 | ATOM 148 N3 C 0 7 -12.563 36.847 12.341 1.00234.36 N 149 | ATOM 149 C4 C 0 7 -12.547 37.545 13.479 1.00231.51 C 150 | ATOM 150 N4 C 0 7 -13.698 37.685 14.143 1.00207.45 N 151 | ATOM 151 C5 C 0 7 -11.349 38.129 13.985 1.00222.45 C 152 | ATOM 152 C6 C 0 7 -10.229 37.950 13.269 1.00238.84 C 153 | ATOM 153 P A 0 8 -7.147 37.057 8.590 1.00255.98 P 154 | ATOM 154 OP1 A 0 8 -6.213 36.997 7.439 1.00232.30 O 155 | ATOM 155 OP2 A 0 8 -8.301 37.983 8.476 1.00223.22 O 156 | ATOM 156 O5' A 0 8 -7.661 35.565 8.807 1.00200.01 O 157 | ATOM 157 C5' A 0 8 -6.987 34.479 8.191 1.00165.71 C 158 | ATOM 158 C4' A 0 8 -7.938 33.388 7.765 1.00142.38 C 159 | ATOM 159 O4' A 0 8 -7.801 32.248 8.646 1.00142.98 O 160 | ATOM 160 C3' A 0 8 -9.425 33.711 7.808 1.00143.99 C 161 | ATOM 161 O3' A 0 8 -9.863 34.441 6.672 
1.00148.44 O 162 | ATOM 162 C2' A 0 8 -10.067 32.330 7.915 1.00152.38 C 163 | ATOM 163 O2' A 0 8 -10.180 31.728 6.636 1.00168.48 O 164 | ATOM 164 C1' A 0 8 -9.026 31.554 8.725 1.00133.63 C 165 | ATOM 165 N9 A 0 8 -9.411 31.400 10.137 1.00127.37 N 166 | ATOM 166 C8 A 0 8 -9.019 32.157 11.209 1.00130.66 C 167 | ATOM 167 N7 A 0 8 -9.542 31.755 12.339 1.00131.53 N 168 | ATOM 168 C5 A 0 8 -10.326 30.670 11.990 1.00121.80 C 169 | ATOM 169 C6 A 0 8 -11.137 29.814 12.750 1.00124.36 C 170 | ATOM 170 N6 A 0 8 -11.291 29.930 14.071 1.00124.80 N 171 | ATOM 171 N1 A 0 8 -11.787 28.826 12.097 1.00131.19 N 172 | ATOM 172 C2 A 0 8 -11.624 28.720 10.770 1.00132.25 C 173 | ATOM 173 N3 A 0 8 -10.887 29.465 9.948 1.00114.97 N 174 | ATOM 174 C4 A 0 8 -10.256 30.436 10.632 1.00117.84 C 175 | ATOM 175 P C 0 9 -11.416 34.829 6.489 1.00138.63 P 176 | ATOM 176 OP1 C 0 9 -11.511 35.857 5.423 1.00148.34 O 177 | ATOM 177 OP2 C 0 9 -11.982 35.107 7.833 1.00162.19 O 178 | ATOM 178 O5' C 0 9 -12.098 33.505 5.919 1.00117.73 O 179 | ATOM 179 C5' C 0 9 -13.511 33.401 5.813 1.00126.48 C 180 | ATOM 180 C4' C 0 9 -14.011 32.066 6.309 1.00116.02 C 181 | ATOM 181 O4' C 0 9 -13.149 31.584 7.371 1.00138.59 O 182 | ATOM 182 C3' C 0 9 -15.401 32.072 6.927 1.00112.43 C 183 | ATOM 183 O3' C 0 9 -16.432 32.010 5.959 1.00106.40 O 184 | ATOM 184 C2' C 0 9 -15.362 30.869 7.862 1.00104.03 C 185 | ATOM 185 O2' C 0 9 -15.561 29.663 7.139 1.00125.90 O 186 | ATOM 186 C1' C 0 9 -13.912 30.908 8.347 1.00127.13 C 187 | ATOM 187 N1 C 0 9 -13.758 31.627 9.633 1.00120.34 N 188 | ATOM 188 C2 C 0 9 -14.084 30.988 10.832 1.00127.41 C 189 | ATOM 189 O2 C 0 9 -14.516 29.825 10.801 1.00132.33 O 190 | ATOM 190 N3 C 0 9 -13.926 31.657 11.999 1.00132.82 N 191 | ATOM 191 C4 C 0 9 -13.460 32.908 12.000 1.00142.52 C 192 | ATOM 192 N4 C 0 9 -13.320 33.531 13.172 1.00143.86 N 193 | ATOM 193 C5 C 0 9 -13.115 33.578 10.793 1.00131.41 C 194 | ATOM 194 C6 C 0 9 -13.274 32.905 9.648 1.00124.20 C 195 | ATOM 195 P A 0 10 -17.096 33.367 5.416 1.00126.33 P 196 | ATOM 196 OP1 A 0 10 -16.545 33.626 4.060 1.00109.52 O 197 | ATOM 197 OP2 A 0 10 -16.964 34.398 6.477 1.00112.87 O 198 | ATOM 198 O5' A 0 10 -18.642 33.013 5.293 1.00145.90 O 199 | ATOM 199 C5' A 0 10 -19.178 31.888 5.971 1.00129.56 C 200 | ATOM 200 C4' A 0 10 -20.134 32.294 7.063 1.00136.53 C 201 | ATOM 201 O4' A 0 10 -20.962 33.392 6.607 1.00123.38 O 202 | ATOM 202 C3' A 0 10 -21.123 31.219 7.485 1.00133.48 C 203 | ATOM 203 O3' A 0 10 -20.567 30.325 8.433 1.00138.97 O 204 | ATOM 204 C2' A 0 10 -22.299 32.027 8.020 1.00113.30 C 205 | ATOM 205 O2' A 0 10 -22.046 32.461 9.348 1.00114.96 O 206 | ATOM 206 C1' A 0 10 -22.273 33.251 7.105 1.00115.55 C 207 | ATOM 207 N9 A 0 10 -23.199 33.150 5.959 1.00115.78 N 208 | ATOM 208 C8 A 0 10 -22.882 33.085 4.624 1.00114.64 C 209 | ATOM 209 N7 A 0 10 -23.929 33.031 3.833 1.00115.51 N 210 | ATOM 210 C5 A 0 10 -25.012 33.078 4.702 1.00117.33 C 211 | ATOM 211 C6 A 0 10 -26.410 33.058 4.503 1.00119.13 C 212 | ATOM 212 N6 A 0 10 -27.011 32.984 3.312 1.00119.43 N 213 | ATOM 213 N1 A 0 10 -27.199 33.119 5.599 1.00120.87 N 214 | ATOM 214 C2 A 0 10 -26.622 33.194 6.803 1.00120.83 C 215 | ATOM 215 N3 A 0 10 -25.330 33.220 7.116 1.00119.28 N 216 | ATOM 216 C4 A 0 10 -24.571 33.158 6.012 1.00117.53 C 217 | ATOM 217 P G 0 11 -20.419 28.760 8.096 1.00168.73 P 218 | ATOM 218 OP1 G 0 11 -19.342 28.213 8.960 1.00122.36 O 219 | ATOM 219 OP2 G 0 11 -20.319 28.586 6.624 1.00155.28 O 220 | ATOM 220 O5' G 0 11 -21.811 28.144 8.561 1.00156.39 O 221 | ATOM 221 C5' G 0 11 -22.393 28.517 9.803 1.00130.01 
C 222 | ATOM 222 C4' G 0 11 -23.878 28.250 9.820 1.00115.21 C 223 | ATOM 223 O4' G 0 11 -24.591 29.343 9.188 1.00111.07 O 224 | ATOM 224 C3' G 0 11 -24.339 27.021 9.056 1.00105.92 C 225 | ATOM 225 O3' G 0 11 -24.153 25.818 9.779 1.00103.87 O 226 | ATOM 226 C2' G 0 11 -25.800 27.338 8.758 1.00108.39 C 227 | ATOM 227 O2' G 0 11 -26.622 27.055 9.880 1.00120.30 O 228 | ATOM 228 C1' G 0 11 -25.752 28.854 8.548 1.00111.22 C 229 | ATOM 229 N9 G 0 11 -25.693 29.195 7.115 1.00110.97 N 230 | ATOM 230 C8 G 0 11 -24.575 29.478 6.368 1.00109.62 C 231 | ATOM 231 N7 G 0 11 -24.853 29.724 5.117 1.00109.95 N 232 | ATOM 232 C5 G 0 11 -26.232 29.586 5.031 1.00111.56 C 233 | ATOM 233 C6 G 0 11 -27.107 29.733 3.921 1.00112.77 C 234 | ATOM 234 O6 G 0 11 -26.831 30.023 2.751 1.00112.65 O 235 | ATOM 235 N1 G 0 11 -28.431 29.506 4.279 1.00114.48 N 236 | ATOM 236 C2 G 0 11 -28.863 29.178 5.539 1.00114.98 C 237 | ATOM 237 N2 G 0 11 -30.185 29.000 5.679 1.00120.64 N 238 | ATOM 238 N3 G 0 11 -28.060 29.040 6.581 1.00113.91 N 239 | ATOM 239 C4 G 0 11 -26.767 29.256 6.257 1.00112.21 C 240 | ATOM 240 P C 0 12 -23.841 24.451 8.997 1.00111.89 P 241 | ATOM 241 OP1 C 0 12 -23.480 23.418 10.002 1.00104.08 O 242 | ATOM 242 OP2 C 0 12 -22.902 24.753 7.888 1.00102.19 O 243 | ATOM 243 O5' C 0 12 -25.246 24.047 8.366 1.00116.87 O 244 | ATOM 244 C5' C 0 12 -26.338 23.702 9.203 1.00103.72 C 245 | ATOM 245 C4' C 0 12 -27.653 23.777 8.469 1.00104.52 C 246 | ATOM 246 O4' C 0 12 -27.844 25.109 7.922 1.00106.82 O 247 | ATOM 247 C3' C 0 12 -27.805 22.875 7.258 1.00102.59 C 248 | ATOM 248 O3' C 0 12 -28.067 21.522 7.579 1.00113.52 O 249 | ATOM 249 C2' C 0 12 -28.927 23.558 6.492 1.00117.52 C 250 | ATOM 250 O2' C 0 12 -30.177 23.314 7.123 1.00121.90 O 251 | ATOM 251 C1' C 0 12 -28.567 25.027 6.709 1.00116.61 C 252 | ATOM 252 N1 C 0 12 -27.727 25.558 5.608 1.00111.35 N 253 | ATOM 253 C2 C 0 12 -28.370 26.047 4.468 1.00110.77 C 254 | ATOM 254 O2 C 0 12 -29.608 26.022 4.427 1.00109.85 O 255 | ATOM 255 N3 C 0 12 -27.635 26.536 3.442 1.00107.26 N 256 | ATOM 256 C4 C 0 12 -26.305 26.546 3.524 1.00105.25 C 257 | ATOM 257 N4 C 0 12 -25.618 27.038 2.490 1.00104.94 N 258 | ATOM 258 C5 C 0 12 -25.621 26.053 4.672 1.00103.66 C 259 | ATOM 259 C6 C 0 12 -26.362 25.574 5.678 1.00104.25 C 260 | ATOM 260 P A 0 13 -27.520 20.364 6.608 1.00125.78 P 261 | ATOM 261 OP1 A 0 13 -27.738 19.057 7.278 1.00133.97 O 262 | ATOM 262 OP2 A 0 13 -26.152 20.743 6.171 1.00100.41 O 263 | ATOM 263 O5' A 0 13 -28.479 20.441 5.338 1.00107.99 O 264 | ATOM 264 C5' A 0 13 -29.859 20.131 5.454 1.00107.62 C 265 | ATOM 265 C4' A 0 13 -30.630 20.506 4.214 1.00116.57 C 266 | ATOM 266 O4' A 0 13 -30.566 21.942 3.991 1.00115.89 O 267 | ATOM 267 C3' A 0 13 -30.138 19.927 2.898 1.00114.34 C 268 | ATOM 268 O3' A 0 13 -30.459 18.561 2.705 1.00131.51 O 269 | ATOM 269 C2' A 0 13 -30.781 20.865 1.889 1.00130.16 C 270 | ATOM 270 O2' A 0 13 -32.170 20.595 1.765 1.00136.78 O 271 | ATOM 271 C1' A 0 13 -30.613 22.207 2.600 1.00125.76 C 272 | ATOM 272 N9 A 0 13 -29.349 22.841 2.188 1.00121.60 N 273 | ATOM 273 C8 A 0 13 -28.168 22.931 2.880 1.00110.90 C 274 | ATOM 274 N7 A 0 13 -27.215 23.535 2.211 1.00103.97 N 275 | ATOM 275 C5 A 0 13 -27.807 23.849 0.996 1.00114.89 C 276 | ATOM 276 C6 A 0 13 -27.323 24.498 -0.153 1.00115.18 C 277 | ATOM 277 N6 A 0 13 -26.078 24.966 -0.265 1.00111.82 N 278 | ATOM 278 N1 A 0 13 -28.172 24.652 -1.193 1.00126.59 N 279 | ATOM 279 C2 A 0 13 -29.421 24.183 -1.080 1.00126.71 C 280 | ATOM 280 N3 A 0 13 -29.992 23.556 -0.053 1.00126.89 N 281 | ATOM 281 C4 A 0 13 -29.120 
23.420 0.964 1.00126.52 C 282 | ATOM 282 P G 0 14 -29.415 17.602 1.943 1.00134.16 P 283 | ATOM 283 OP1 G 0 14 -29.725 16.191 2.288 1.00114.66 O 284 | ATOM 284 OP2 G 0 14 -28.052 18.125 2.217 1.00133.87 O 285 | ATOM 285 O5' G 0 14 -29.728 17.834 0.395 1.00135.51 O 286 | ATOM 286 C5' G 0 14 -31.008 17.537 -0.147 1.00121.28 C 287 | ATOM 287 C4' G 0 14 -31.295 18.341 -1.395 1.00129.64 C 288 | ATOM 288 O4' G 0 14 -31.001 19.744 -1.155 1.00131.45 O 289 | ATOM 289 C3' G 0 14 -30.472 17.999 -2.630 1.00116.19 C 290 | ATOM 290 O3' G 0 14 -30.952 16.862 -3.329 1.00124.55 O 291 | ATOM 291 C2' G 0 14 -30.543 19.288 -3.438 1.00126.09 C 292 | ATOM 292 O2' G 0 14 -31.802 19.408 -4.086 1.00104.36 O 293 | ATOM 293 C1' G 0 14 -30.474 20.334 -2.326 1.00129.11 C 294 | ATOM 294 N9 G 0 14 -29.083 20.745 -2.053 1.00130.02 N 295 | ATOM 295 C8 G 0 14 -28.361 20.512 -0.907 1.00124.80 C 296 | ATOM 296 N7 G 0 14 -27.149 20.994 -0.956 1.00115.84 N 297 | ATOM 297 C5 G 0 14 -27.063 21.581 -2.210 1.00118.48 C 298 | ATOM 298 C6 G 0 14 -25.987 22.267 -2.832 1.00122.49 C 299 | ATOM 299 O6 G 0 14 -24.857 22.498 -2.386 1.00127.91 O 300 | ATOM 300 N1 G 0 14 -26.326 22.701 -4.109 1.00125.72 N 301 | ATOM 301 C2 G 0 14 -27.544 22.504 -4.710 1.00128.41 C 302 | ATOM 302 N2 G 0 14 -27.679 23.000 -5.948 1.00126.61 N 303 | ATOM 303 N3 G 0 14 -28.555 21.868 -4.141 1.00128.47 N 304 | ATOM 304 C4 G 0 14 -28.248 21.437 -2.900 1.00126.21 C 305 | ATOM 305 P A 0 15 -29.920 15.858 -4.046 1.00126.85 P 306 | ATOM 306 OP1 A 0 15 -30.689 14.725 -4.622 1.00129.44 O 307 | ATOM 307 OP2 A 0 15 -28.806 15.587 -3.104 1.00125.26 O 308 | ATOM 308 O5' A 0 15 -29.330 16.704 -5.259 1.00115.89 O 309 | ATOM 309 C5' A 0 15 -30.144 17.049 -6.369 1.00122.03 C 310 | ATOM 310 C4' A 0 15 -29.395 17.912 -7.352 1.00127.12 C 311 | ATOM 311 O4' A 0 15 -29.034 19.172 -6.724 1.00132.12 O 312 | ATOM 312 C3' A 0 15 -28.066 17.360 -7.845 1.00116.96 C 313 | ATOM 313 O3' A 0 15 -28.194 16.385 -8.865 1.00109.86 O 314 | ATOM 314 C2' A 0 15 -27.336 18.620 -8.281 1.00130.82 C 315 | ATOM 315 O2' A 0 15 -27.836 19.089 -9.525 1.00136.64 O 316 | ATOM 316 C1' A 0 15 -27.767 19.594 -7.189 1.00132.23 C 317 | ATOM 317 N9 A 0 15 -26.822 19.556 -6.059 1.00129.02 N 318 | ATOM 318 C8 A 0 15 -26.996 18.935 -4.848 1.00128.40 C 319 | ATOM 319 N7 A 0 15 -25.972 19.057 -4.042 1.00131.67 N 320 | ATOM 320 C5 A 0 15 -25.059 19.799 -4.778 1.00130.97 C 321 | ATOM 321 C6 A 0 15 -23.769 20.265 -4.478 1.00132.33 C 322 | ATOM 322 N6 A 0 15 -23.162 20.037 -3.312 1.00137.21 N 323 | ATOM 323 N1 A 0 15 -23.121 20.978 -5.425 1.00133.73 N 324 | ATOM 324 C2 A 0 15 -23.739 21.205 -6.591 1.00125.65 C 325 | ATOM 325 N3 A 0 15 -24.949 20.817 -6.991 1.00131.55 N 326 | ATOM 326 C4 A 0 15 -25.565 20.111 -6.025 1.00132.22 C 327 | ATOM 327 P A 0 16 -27.230 15.098 -8.863 1.00131.50 P 328 | ATOM 328 OP1 A 0 16 -27.865 14.039 -9.686 1.00135.92 O 329 | ATOM 329 OP2 A 0 16 -26.866 14.811 -7.451 1.00124.14 O 330 | ATOM 330 O5' A 0 16 -25.914 15.593 -9.615 1.00129.29 O 331 | ATOM 331 C5' A 0 16 -25.967 16.032 -10.966 1.00125.09 C 332 | ATOM 332 C4' A 0 16 -24.914 17.074 -11.252 1.00126.32 C 333 | ATOM 333 O4' A 0 16 -24.958 18.103 -10.228 1.00133.14 O 334 | ATOM 334 C3' A 0 16 -23.470 16.592 -11.227 1.00122.60 C 335 | ATOM 335 O3' A 0 16 -23.067 15.967 -12.433 1.00112.52 O 336 | ATOM 336 C2' A 0 16 -22.703 17.868 -10.914 1.00130.70 C 337 | ATOM 337 O2' A 0 16 -22.602 18.689 -12.067 1.00138.10 O 338 | ATOM 338 C1' A 0 16 -23.651 18.543 -9.927 1.00135.64 C 339 | ATOM 339 N9 A 0 16 -23.343 18.160 -8.536 1.00135.94 N 340 | ATOM 340 
C8 A 0 16 -24.058 17.321 -7.717 1.00135.52 C 341 | ATOM 341 N7 A 0 16 -23.529 17.168 -6.527 1.00130.15 N 342 | ATOM 342 C5 A 0 16 -22.388 17.957 -6.568 1.00126.35 C 343 | ATOM 343 C6 A 0 16 -21.389 18.224 -5.617 1.00122.77 C 344 | ATOM 344 N6 A 0 16 -21.380 17.702 -4.389 1.00127.49 N 345 | ATOM 345 N1 A 0 16 -20.385 19.055 -5.974 1.00120.27 N 346 | ATOM 346 C2 A 0 16 -20.394 19.578 -7.206 1.00126.44 C 347 | ATOM 347 N3 A 0 16 -21.277 19.403 -8.187 1.00134.60 N 348 | ATOM 348 C4 A 0 16 -22.259 18.571 -7.800 1.00130.14 C 349 | ATOM 349 P G 0 17 -22.141 14.654 -12.391 1.00124.90 P 350 | ATOM 350 OP1 G 0 17 -22.069 14.103 -13.767 1.00134.36 O 351 | ATOM 351 OP2 G 0 17 -22.629 13.791 -11.285 1.00134.90 O 352 | ATOM 352 O5' G 0 17 -20.696 15.204 -12.000 1.00118.98 O 353 | ATOM 353 C5' G 0 17 -19.953 16.005 -12.908 1.00117.26 C 354 | ATOM 354 C4' G 0 17 -18.802 16.710 -12.231 1.00126.72 C 355 | ATOM 355 O4' G 0 17 -19.295 17.518 -11.128 1.00142.21 O 356 | ATOM 356 C3' G 0 17 -17.747 15.823 -11.585 1.00115.95 C 357 | ATOM 357 O3' G 0 17 -16.813 15.284 -12.502 1.00110.73 O 358 | ATOM 358 C2' G 0 17 -17.119 16.759 -10.565 1.00116.15 C 359 | ATOM 359 O2' G 0 17 -16.239 17.674 -11.201 1.00113.44 O 360 | ATOM 360 C1' G 0 17 -18.346 17.526 -10.079 1.00136.54 C 361 | ATOM 361 N9 G 0 17 -18.943 16.877 -8.897 1.00139.32 N 362 | ATOM 362 C8 G 0 17 -20.145 16.218 -8.820 1.00145.24 C 363 | ATOM 363 N7 G 0 17 -20.390 15.741 -7.629 1.00139.25 N 364 | ATOM 364 C5 G 0 17 -19.280 16.102 -6.878 1.00125.82 C 365 | ATOM 365 C6 G 0 17 -18.975 15.865 -5.513 1.00121.50 C 366 | ATOM 366 O6 G 0 17 -19.647 15.265 -4.665 1.00120.59 O 367 | ATOM 367 N1 G 0 17 -17.743 16.408 -5.162 1.00119.63 N 368 | ATOM 368 C2 G 0 17 -16.911 17.093 -6.012 1.00124.24 C 369 | ATOM 369 N2 G 0 17 -15.762 17.541 -5.484 1.00127.00 N 370 | ATOM 370 N3 G 0 17 -17.184 17.321 -7.286 1.00127.39 N 371 | ATOM 371 C4 G 0 17 -18.377 16.802 -7.647 1.00125.98 C 372 | ATOM 372 P C 0 18 -15.937 14.002 -12.087 1.00118.40 P 373 | ATOM 373 OP1 C 0 18 -15.178 13.538 -13.274 1.00135.03 O 374 | ATOM 374 OP2 C 0 18 -16.830 13.057 -11.369 1.00 96.87 O 375 | ATOM 375 O5' C 0 18 -14.884 14.568 -11.034 1.00119.48 O 376 | ATOM 376 C5' C 0 18 -14.022 13.686 -10.333 1.00114.96 C 377 | ATOM 377 C4' C 0 18 -13.537 14.288 -9.039 1.00119.28 C 378 | ATOM 378 O4' C 0 18 -14.640 14.924 -8.343 1.00113.21 O 379 | ATOM 379 C3' C 0 18 -12.978 13.298 -8.030 1.00112.00 C 380 | ATOM 380 O3' C 0 18 -11.627 12.961 -8.293 1.00 87.61 O 381 | ATOM 381 C2' C 0 18 -13.180 14.013 -6.701 1.00111.26 C 382 | ATOM 382 O2' C 0 18 -12.158 14.975 -6.488 1.00110.37 O 383 | ATOM 383 C1' C 0 18 -14.495 14.756 -6.948 1.00109.84 C 384 | ATOM 384 N1 C 0 18 -15.682 14.028 -6.439 1.00110.82 N 385 | ATOM 385 C2 C 0 18 -15.874 13.820 -5.065 1.00112.56 C 386 | ATOM 386 O2 C 0 18 -15.033 14.227 -4.250 1.00115.94 O 387 | ATOM 387 N3 C 0 18 -16.980 13.162 -4.643 1.00114.43 N 388 | ATOM 388 C4 C 0 18 -17.879 12.728 -5.527 1.00121.70 C 389 | ATOM 389 N4 C 0 18 -18.955 12.086 -5.066 1.00127.49 N 390 | ATOM 390 C5 C 0 18 -17.717 12.934 -6.925 1.00129.51 C 391 | ATOM 391 C6 C 0 18 -16.620 13.584 -7.329 1.00121.59 C 392 | ATOM 392 P G 0 19 -11.144 11.432 -8.204 1.00110.98 P 393 | ATOM 393 OP1 G 0 19 -9.820 11.325 -8.869 1.00106.75 O 394 | ATOM 394 OP2 G 0 19 -12.268 10.581 -8.676 1.00114.23 O 395 | ATOM 395 O5' G 0 19 -10.926 11.192 -6.644 1.00 90.64 O 396 | ATOM 396 C5' G 0 19 -10.156 12.101 -5.871 1.00 76.07 C 397 | ATOM 397 C4' G 0 19 -10.383 11.897 -4.394 1.00 80.13 C 398 | ATOM 398 O4' G 0 19 -11.714 12.343 -4.032 
1.00 84.38 O 399 | ATOM 399 C3' G 0 19 -10.330 10.457 -3.915 1.00 79.62 C 400 | ATOM 400 O3' G 0 19 -9.004 10.004 -3.710 1.00 73.53 O 401 | ATOM 401 C2' G 0 19 -11.170 10.487 -2.642 1.00 79.16 C 402 | ATOM 402 O2' G 0 19 -10.404 10.951 -1.541 1.00 71.24 O 403 | ATOM 403 C1' G 0 19 -12.226 11.540 -2.988 1.00 83.80 C 404 | ATOM 404 N9 G 0 19 -13.498 10.932 -3.429 1.00 82.51 N 405 | ATOM 405 C8 G 0 19 -13.974 10.853 -4.716 1.00105.13 C 406 | ATOM 406 N7 G 0 19 -15.134 10.257 -4.799 1.00121.31 N 407 | ATOM 407 C5 G 0 19 -15.450 9.921 -3.489 1.00110.13 C 408 | ATOM 408 C6 G 0 19 -16.591 9.257 -2.954 1.00107.02 C 409 | ATOM 409 O6 G 0 19 -17.585 8.808 -3.540 1.00100.88 O 410 | ATOM 410 N1 G 0 19 -16.497 9.121 -1.575 1.00 96.96 N 411 | ATOM 411 C2 G 0 19 -15.452 9.566 -0.806 1.00 82.90 C 412 | ATOM 412 N2 G 0 19 -15.563 9.336 0.510 1.00 84.01 N 413 | ATOM 413 N3 G 0 19 -14.386 10.187 -1.289 1.00 68.65 N 414 | ATOM 414 C4 G 0 19 -14.447 10.329 -2.630 1.00 83.97 C 415 | ATOM 415 P U 0 20 -8.566 8.543 -4.208 1.00 98.57 P 416 | ATOM 416 OP1 U 0 20 -7.085 8.462 -4.124 1.00 87.87 O 417 | ATOM 417 OP2 U 0 20 -9.242 8.270 -5.502 1.00 99.38 O 418 | ATOM 418 O5' U 0 20 -9.170 7.569 -3.102 1.00100.81 O 419 | ATOM 419 C5' U 0 20 -8.779 7.693 -1.744 1.00 81.29 C 420 | ATOM 420 C4' U 0 20 -9.810 7.118 -0.807 1.00 80.27 C 421 | ATOM 421 O4' U 0 20 -11.086 7.776 -1.000 1.00 80.34 O 422 | ATOM 422 C3' U 0 20 -10.132 5.645 -0.977 1.00 89.56 C 423 | ATOM 423 O3' U 0 20 -9.160 4.805 -0.380 1.00 93.93 O 424 | ATOM 424 C2' U 0 20 -11.507 5.529 -0.331 1.00 83.89 C 425 | ATOM 425 O2' U 0 20 -11.382 5.450 1.082 1.00 78.19 O 426 | ATOM 426 C1' U 0 20 -12.133 6.884 -0.685 1.00 70.43 C 427 | ATOM 427 N1 U 0 20 -13.084 6.818 -1.822 1.00 64.65 N 428 | ATOM 428 C2 U 0 20 -14.390 6.477 -1.524 1.00 77.98 C 429 | ATOM 429 O2 U 0 20 -14.753 6.222 -0.390 1.00 84.94 O 430 | ATOM 430 N3 U 0 20 -15.261 6.437 -2.585 1.00 78.36 N 431 | ATOM 431 C4 U 0 20 -14.962 6.702 -3.903 1.00 90.88 C 432 | ATOM 432 O4 U 0 20 -15.856 6.627 -4.750 1.00 79.30 O 433 | ATOM 433 C5 U 0 20 -13.591 7.059 -4.132 1.00 86.43 C 434 | ATOM 434 C6 U 0 20 -12.718 7.109 -3.114 1.00 66.17 C 435 | ATOM 435 P U 0 21 -8.374 3.720 -1.265 1.00105.08 P 436 | ATOM 436 OP1 U 0 21 -7.419 3.002 -0.382 1.00 91.35 O 437 | ATOM 437 OP2 U 0 21 -7.874 4.413 -2.480 1.00 81.02 O 438 | ATOM 438 O5' U 0 21 -9.507 2.694 -1.713 1.00 90.62 O 439 | ATOM 439 C5' U 0 21 -10.311 2.028 -0.751 1.00 84.15 C 440 | ATOM 440 C4' U 0 21 -11.564 1.466 -1.372 1.00 90.04 C 441 | ATOM 441 O4' U 0 21 -12.379 2.539 -1.906 1.00 93.73 O 442 | ATOM 442 C3' U 0 21 -11.366 0.530 -2.553 1.00 94.88 C 443 | ATOM 443 O3' U 0 21 -11.038 -0.788 -2.149 1.00 95.39 O 444 | ATOM 444 C2' U 0 21 -12.704 0.620 -3.281 1.00 90.02 C 445 | ATOM 445 O2' U 0 21 -13.661 -0.234 -2.674 1.00102.48 O 446 | ATOM 446 C1' U 0 21 -13.113 2.074 -3.019 1.00 88.11 C 447 | ATOM 447 N1 U 0 21 -12.862 2.959 -4.181 1.00 90.92 N 448 | ATOM 448 C2 U 0 21 -13.842 3.007 -5.155 1.00 87.12 C 449 | ATOM 449 O2 U 0 21 -14.871 2.359 -5.083 1.00 85.12 O 450 | ATOM 450 N3 U 0 21 -13.580 3.836 -6.217 1.00 78.84 N 451 | ATOM 451 C4 U 0 21 -12.454 4.610 -6.401 1.00101.66 C 452 | ATOM 452 O4 U 0 21 -12.360 5.308 -7.413 1.00 88.06 O 453 | ATOM 453 C5 U 0 21 -11.485 4.508 -5.349 1.00108.24 C 454 | ATOM 454 C6 U 0 21 -11.715 3.709 -4.298 1.00 91.84 C 455 | ATOM 455 P C 0 22 -9.874 -1.611 -2.893 1.00103.10 P 456 | ATOM 456 OP1 C 0 22 -10.134 -3.055 -2.662 1.00 92.94 O 457 | ATOM 457 OP2 C 0 22 -8.565 -1.037 -2.493 1.00106.54 O 458 | ATOM 458 O5' C 0 22 -10.107 
-1.307 -4.440 1.00 88.92 O 459 | ATOM 459 C5' C 0 22 -9.180 -1.756 -5.419 1.00 96.15 C 460 | ATOM 460 C4' C 0 22 -9.882 -2.260 -6.655 1.00107.47 C 461 | ATOM 461 O4' C 0 22 -10.363 -3.608 -6.437 1.00115.52 O 462 | ATOM 462 C3' C 0 22 -11.116 -1.476 -7.073 1.00102.47 C 463 | ATOM 463 O3' C 0 22 -10.785 -0.340 -7.853 1.00107.97 O 464 | ATOM 464 C2' C 0 22 -11.945 -2.503 -7.836 1.00 95.61 C 465 | ATOM 465 O2' C 0 22 -11.525 -2.581 -9.191 1.00102.83 O 466 | ATOM 466 C1' C 0 22 -11.573 -3.812 -7.131 1.00108.77 C 467 | ATOM 467 N1 C 0 22 -12.594 -4.300 -6.170 1.00101.92 N 468 | ATOM 468 C2 C 0 22 -13.908 -4.607 -6.565 1.00105.28 C 469 | ATOM 469 O2 C 0 22 -14.272 -4.436 -7.739 1.00100.67 O 470 | ATOM 470 N3 C 0 22 -14.780 -5.074 -5.638 1.00105.35 N 471 | ATOM 471 C4 C 0 22 -14.394 -5.260 -4.374 1.00 92.86 C 472 | ATOM 472 N4 C 0 22 -15.293 -5.724 -3.501 1.00 67.28 N 473 | ATOM 473 C5 C 0 22 -13.065 -4.978 -3.951 1.00 94.35 C 474 | ATOM 474 C6 C 0 22 -12.213 -4.515 -4.874 1.00102.83 C 475 | ATOM 475 P A 0 23 -11.593 1.033 -7.667 1.00115.79 P 476 | ATOM 476 OP1 A 0 23 -11.113 2.015 -8.675 1.00 98.22 O 477 | ATOM 477 OP2 A 0 23 -11.527 1.367 -6.224 1.00105.99 O 478 | ATOM 478 O5' A 0 23 -13.095 0.649 -8.032 1.00 97.10 O 479 | ATOM 479 C5' A 0 23 -13.476 0.377 -9.373 1.00108.44 C 480 | ATOM 480 C4' A 0 23 -14.946 0.060 -9.475 1.00112.02 C 481 | ATOM 481 O4' A 0 23 -15.198 -1.275 -8.966 1.00113.64 O 482 | ATOM 482 C3' A 0 23 -15.866 0.952 -8.659 1.00107.65 C 483 | ATOM 483 O3' A 0 23 -16.157 2.178 -9.307 1.00117.12 O 484 | ATOM 484 C2' A 0 23 -17.085 0.066 -8.428 1.00 95.64 C 485 | ATOM 485 O2' A 0 23 -17.939 0.060 -9.562 1.00 93.96 O 486 | ATOM 486 C1' A 0 23 -16.438 -1.312 -8.293 1.00 97.24 C 487 | ATOM 487 N9 A 0 23 -16.184 -1.656 -6.884 1.00 95.42 N 488 | ATOM 488 C8 A 0 23 -15.041 -1.410 -6.166 1.00101.18 C 489 | ATOM 489 N7 A 0 23 -15.100 -1.823 -4.926 1.00 99.92 N 490 | ATOM 490 C5 A 0 23 -16.368 -2.376 -4.821 1.00 98.60 C 491 | ATOM 491 C6 A 0 23 -17.040 -2.987 -3.750 1.00 92.98 C 492 | ATOM 492 N6 A 0 23 -16.501 -3.145 -2.539 1.00 70.00 N 493 | ATOM 493 N1 A 0 23 -18.297 -3.432 -3.971 1.00 95.90 N 494 | ATOM 494 C2 A 0 23 -18.830 -3.268 -5.190 1.00105.71 C 495 | ATOM 495 N3 A 0 23 -18.298 -2.709 -6.276 1.00108.91 N 496 | ATOM 496 C4 A 0 23 -17.051 -2.278 -6.020 1.00100.70 C 497 | ATOM 497 P C 0 24 -16.742 3.413 -8.467 1.00150.78 P 498 | ATOM 498 OP1 C 0 24 -15.949 4.628 -8.777 1.00143.06 O 499 | ATOM 499 OP2 C 0 24 -16.883 2.980 -7.053 1.00138.36 O 500 | ATOM 500 O5' C 0 24 -18.196 3.619 -9.073 1.00 98.18 O 501 | ATOM 501 C5' C 0 24 -19.256 4.078 -8.256 1.00 97.57 C 502 | ATOM 502 C4' C 0 24 -20.431 3.140 -8.312 1.00 95.61 C 503 | ATOM 503 O4' C 0 24 -19.989 1.769 -8.118 1.00 95.89 O 504 | ATOM 504 C3' C 0 24 -21.475 3.341 -7.230 1.00 96.82 C 505 | ATOM 505 O3' C 0 24 -22.343 4.426 -7.497 1.00108.42 O 506 | ATOM 506 C2' C 0 24 -22.158 1.983 -7.178 1.00 95.91 C 507 | ATOM 507 O2' C 0 24 -23.050 1.829 -8.272 1.00101.78 O 508 | ATOM 508 C1' C 0 24 -20.966 1.049 -7.393 1.00105.51 C 509 | ATOM 509 N1 C 0 24 -20.368 0.613 -6.110 1.00 96.59 N 510 | ATOM 510 C2 C 0 24 -21.041 -0.315 -5.305 1.00102.34 C 511 | ATOM 511 O2 C 0 24 -22.136 -0.766 -5.673 1.00110.38 O 512 | ATOM 512 N3 C 0 24 -20.483 -0.701 -4.134 1.00 96.42 N 513 | ATOM 513 C4 C 0 24 -19.305 -0.202 -3.759 1.00 91.59 C 514 | ATOM 514 N4 C 0 24 -18.790 -0.612 -2.598 1.00 90.15 N 515 | ATOM 515 C5 C 0 24 -18.598 0.740 -4.558 1.00 90.28 C 516 | ATOM 516 C6 C 0 24 -19.161 1.114 -5.712 1.00 95.20 C 517 | ATOM 517 P G 0 25 -22.297 5.722 -6.551 
1.00106.87 P 518 | ATOM 518 OP1 G 0 25 -22.958 6.844 -7.261 1.00102.60 O 519 | ATOM 519 OP2 G 0 25 -20.913 5.875 -6.036 1.00101.82 O 520 | ATOM 520 O5' G 0 25 -23.207 5.308 -5.315 1.00104.49 O 521 | ATOM 521 C5' G 0 25 -24.508 4.786 -5.524 1.00100.67 C 522 | ATOM 522 C4' G 0 25 -25.071 4.190 -4.262 1.00102.99 C 523 | ATOM 523 O4' G 0 25 -24.519 2.867 -4.038 1.00103.59 O 524 | ATOM 524 C3' G 0 25 -24.748 4.925 -2.977 1.00107.92 C 525 | ATOM 525 O3' G 0 25 -25.511 6.099 -2.784 1.00123.31 O 526 | ATOM 526 C2' G 0 25 -24.984 3.850 -1.928 1.00 93.51 C 527 | ATOM 527 O2' G 0 25 -26.370 3.660 -1.691 1.00 97.81 O 528 | ATOM 528 C1' G 0 25 -24.439 2.620 -2.646 1.00 94.17 C 529 | ATOM 529 N9 G 0 25 -23.034 2.412 -2.278 1.00 92.36 N 530 | ATOM 530 C8 G 0 25 -21.930 2.598 -3.068 1.00 88.34 C 531 | ATOM 531 N7 G 0 25 -20.822 2.349 -2.430 1.00 89.81 N 532 | ATOM 532 C5 G 0 25 -21.236 1.998 -1.155 1.00 87.94 C 533 | ATOM 533 C6 G 0 25 -20.466 1.629 -0.036 1.00 82.47 C 534 | ATOM 534 O6 G 0 25 -19.236 1.544 0.021 1.00 87.47 O 535 | ATOM 535 N1 G 0 25 -21.262 1.347 1.073 1.00 77.68 N 536 | ATOM 536 C2 G 0 25 -22.634 1.417 1.097 1.00 90.32 C 537 | ATOM 537 N2 G 0 25 -23.226 1.109 2.263 1.00100.59 N 538 | ATOM 538 N3 G 0 25 -23.366 1.764 0.049 1.00 92.76 N 539 | ATOM 539 C4 G 0 25 -22.601 2.037 -1.032 1.00 91.55 C 540 | ATOM 540 P U 0 26 -24.813 7.383 -2.121 1.00100.39 P 541 | ATOM 541 OP1 U 0 26 -25.679 8.567 -2.346 1.00133.38 O 542 | ATOM 542 OP2 U 0 26 -23.405 7.402 -2.596 1.00102.62 O 543 | ATOM 543 O5' U 0 26 -24.804 7.044 -0.565 1.00 99.63 O 544 | ATOM 544 C5' U 0 26 -25.967 6.538 0.074 1.00113.19 C 545 | ATOM 545 C4' U 0 26 -25.638 5.942 1.417 1.00108.56 C 546 | ATOM 546 O4' U 0 26 -24.825 4.750 1.248 1.00 96.59 O 547 | ATOM 547 C3' U 0 26 -24.804 6.820 2.331 1.00 99.66 C 548 | ATOM 548 O3' U 0 26 -25.549 7.838 2.974 1.00116.62 O 549 | ATOM 549 C2' U 0 26 -24.172 5.805 3.272 1.00 89.14 C 550 | ATOM 550 O2' U 0 26 -25.109 5.361 4.242 1.00109.14 O 551 | ATOM 551 C1' U 0 26 -23.891 4.653 2.306 1.00 87.03 C 552 | ATOM 552 N1 U 0 26 -22.525 4.744 1.741 1.00101.64 N 553 | ATOM 553 C2 U 0 26 -21.471 4.318 2.529 1.00108.55 C 554 | ATOM 554 O2 U 0 26 -21.620 3.864 3.652 1.00119.00 O 555 | ATOM 555 N3 U 0 26 -20.229 4.435 1.954 1.00 89.78 N 556 | ATOM 556 C4 U 0 26 -19.940 4.931 0.699 1.00 93.84 C 557 | ATOM 557 O4 U 0 26 -18.768 4.971 0.322 1.00108.92 O 558 | ATOM 558 C5 U 0 26 -21.082 5.355 -0.053 1.00 90.94 C 559 | ATOM 559 C6 U 0 26 -22.302 5.250 0.481 1.00 97.67 C 560 | ATOM 560 P C 0 27 -25.233 9.377 2.635 1.00116.19 P 561 | ATOM 561 OP1 C 0 27 -26.241 10.237 3.305 1.00109.06 O 562 | ATOM 562 OP2 C 0 27 -25.046 9.483 1.166 1.00113.47 O 563 | ATOM 563 O5' C 0 27 -23.812 9.630 3.308 1.00 87.43 O 564 | ATOM 564 C5' C 0 27 -23.553 9.210 4.638 1.00 96.68 C 565 | ATOM 565 C4' C 0 27 -22.089 8.925 4.858 1.00 85.07 C 566 | ATOM 566 O4' C 0 27 -21.614 7.952 3.890 1.00 79.54 O 567 | ATOM 567 C3' C 0 27 -21.145 10.100 4.675 1.00 84.62 C 568 | ATOM 568 O3' C 0 27 -21.131 10.979 5.781 1.00 78.48 O 569 | ATOM 569 C2' C 0 27 -19.815 9.409 4.420 1.00 81.61 C 570 | ATOM 570 O2' C 0 27 -19.269 8.913 5.633 1.00 81.69 O 571 | ATOM 571 C1' C 0 27 -20.261 8.218 3.570 1.00 86.32 C 572 | ATOM 572 N1 C 0 27 -20.160 8.506 2.120 1.00 95.59 N 573 | ATOM 573 C2 C 0 27 -18.896 8.544 1.515 1.00 79.92 C 574 | ATOM 574 O2 C 0 27 -17.885 8.333 2.202 1.00 72.25 O 575 | ATOM 575 N3 C 0 27 -18.802 8.811 0.192 1.00 77.10 N 576 | ATOM 576 C4 C 0 27 -19.905 9.032 -0.526 1.00 87.34 C 577 | ATOM 577 N4 C 0 27 -19.762 9.290 -1.829 1.00 80.91 N 578 
| ATOM 578 C5 C 0 27 -21.204 9.000 0.060 1.00100.45 C 579 | ATOM 579 C6 C 0 27 -21.282 8.736 1.371 1.00101.95 C 580 | ATOM 580 P G 0 28 -21.359 12.549 5.545 1.00106.72 P 581 | ATOM 581 OP1 G 0 28 -21.940 13.127 6.784 1.00115.34 O 582 | ATOM 582 OP2 G 0 28 -22.087 12.706 4.259 1.00 84.21 O 583 | ATOM 583 O5' G 0 28 -19.885 13.120 5.342 1.00 82.67 O 584 | ATOM 584 C5' G 0 28 -18.816 12.701 6.177 1.00 78.34 C 585 | ATOM 585 C4' G 0 28 -17.500 12.758 5.446 1.00 77.18 C 586 | ATOM 586 O4' G 0 28 -17.464 11.720 4.432 1.00 84.75 O 587 | ATOM 587 C3' G 0 28 -17.225 14.042 4.675 1.00 74.94 C 588 | ATOM 588 O3' G 0 28 -16.713 15.082 5.490 1.00 76.54 O 589 | ATOM 589 C2' G 0 28 -16.253 13.580 3.601 1.00 74.86 C 590 | ATOM 590 O2' G 0 28 -14.953 13.424 4.150 1.00 71.85 O 591 | ATOM 591 C1' G 0 28 -16.805 12.194 3.278 1.00 77.81 C 592 | ATOM 592 N9 G 0 28 -17.782 12.223 2.172 1.00 83.74 N 593 | ATOM 593 C8 G 0 28 -19.135 12.014 2.284 1.00 91.74 C 594 | ATOM 594 N7 G 0 28 -19.766 12.085 1.144 1.00107.35 N 595 | ATOM 595 C5 G 0 28 -18.772 12.356 0.213 1.00 93.76 C 596 | ATOM 596 C6 G 0 28 -18.851 12.543 -1.195 1.00 88.00 C 597 | ATOM 597 O6 G 0 28 -19.843 12.508 -1.937 1.00 80.96 O 598 | ATOM 598 N1 G 0 28 -17.597 12.796 -1.740 1.00 82.86 N 599 | ATOM 599 C2 G 0 28 -16.426 12.861 -1.028 1.00 81.73 C 600 | ATOM 600 N2 G 0 28 -15.319 13.120 -1.736 1.00 81.83 N 601 | ATOM 601 N3 G 0 28 -16.340 12.690 0.279 1.00 80.50 N 602 | ATOM 602 C4 G 0 28 -17.542 12.441 0.834 1.00 81.54 C 603 | ATOM 603 P C 0 29 -16.923 16.618 5.068 1.00 86.97 P 604 | ATOM 604 OP1 C 0 29 -16.309 17.470 6.117 1.00103.24 O 605 | ATOM 605 OP2 C 0 29 -18.351 16.810 4.712 1.00 88.78 O 606 | ATOM 606 O5' C 0 29 -16.046 16.781 3.748 1.00 77.05 O 607 | ATOM 607 C5' C 0 29 -14.628 16.730 3.807 1.00 75.71 C 608 | ATOM 608 C4' C 0 29 -14.012 17.245 2.532 1.00 88.77 C 609 | ATOM 609 O4' C 0 29 -14.222 16.295 1.459 1.00 84.14 O 610 | ATOM 610 C3' C 0 29 -14.587 18.545 2.000 1.00 90.62 C 611 | ATOM 611 O3' C 0 29 -14.021 19.663 2.656 1.00 80.51 O 612 | ATOM 612 C2' C 0 29 -14.266 18.476 0.510 1.00 98.62 C 613 | ATOM 613 O2' C 0 29 -12.926 18.877 0.266 1.00102.71 O 614 | ATOM 614 C1' C 0 29 -14.387 16.975 0.235 1.00 97.08 C 615 | ATOM 615 N1 C 0 29 -15.700 16.599 -0.334 1.00 97.96 N 616 | ATOM 616 C2 C 0 29 -15.912 16.730 -1.709 1.00105.76 C 617 | ATOM 617 O2 C 0 29 -14.997 17.172 -2.417 1.00118.40 O 618 | ATOM 618 N3 C 0 29 -17.108 16.379 -2.233 1.00100.22 N 619 | ATOM 619 C4 C 0 29 -18.067 15.909 -1.437 1.00 99.16 C 620 | ATOM 620 N4 C 0 29 -19.231 15.575 -1.998 1.00108.42 N 621 | ATOM 621 C5 C 0 29 -17.878 15.758 -0.033 1.00 93.36 C 622 | ATOM 622 C6 C 0 29 -16.689 16.109 0.471 1.00 91.03 C 623 | ATOM 623 P A 0 30 -14.699 21.115 2.579 1.00100.60 P 624 | ATOM 624 OP1 A 0 30 -15.624 21.276 3.731 1.00102.14 O 625 | ATOM 625 OP2 A 0 30 -15.187 21.362 1.199 1.00102.46 O 626 | ATOM 626 O5' A 0 30 -13.457 22.076 2.803 1.00 83.11 O 627 | ATOM 627 C5' A 0 30 -12.304 21.939 1.990 1.00 90.06 C 628 | ATOM 628 C4' A 0 30 -12.114 23.161 1.137 1.00 90.41 C 629 | ATOM 629 O4' A 0 30 -13.161 24.117 1.457 1.00103.56 O 630 | ATOM 630 C3' A 0 30 -10.817 23.918 1.381 1.00 84.41 C 631 | ATOM 631 O3' A 0 30 -9.733 23.411 0.625 1.00 82.90 O 632 | ATOM 632 C2' A 0 30 -11.194 25.348 1.038 1.00100.80 C 633 | ATOM 633 O2' A 0 30 -11.254 25.518 -0.371 1.00124.38 O 634 | ATOM 634 C1' A 0 30 -12.614 25.407 1.589 1.00105.94 C 635 | ATOM 635 N9 A 0 30 -12.625 25.732 3.027 1.00103.99 N 636 | ATOM 636 C8 A 0 30 -13.148 24.955 4.033 1.00105.44 C 637 | ATOM 637 N7 A 0 30 -13.024 
25.473 5.230 1.00 96.41 N 638 | ATOM 638 C5 A 0 30 -12.373 26.676 4.996 1.00106.04 C 639 | ATOM 639 C6 A 0 30 -11.948 27.697 5.860 1.00117.39 C 640 | ATOM 640 N6 A 0 30 -12.125 27.664 7.183 1.00123.79 N 641 | ATOM 641 N1 A 0 30 -11.327 28.765 5.315 1.00108.85 N 642 | ATOM 642 C2 A 0 30 -11.150 28.796 3.989 1.00 95.45 C 643 | ATOM 643 N3 A 0 30 -11.505 27.899 3.072 1.00 92.96 N 644 | ATOM 644 C4 A 0 30 -12.120 26.850 3.646 1.00100.53 C 645 | ATOM 645 P G 0 31 -8.404 22.905 1.371 1.00 92.54 P 646 | ATOM 646 OP1 G 0 31 -7.552 22.201 0.377 1.00 99.43 O 647 | ATOM 647 OP2 G 0 31 -8.822 22.198 2.607 1.00 95.45 O 648 | ATOM 648 O5' G 0 31 -7.683 24.252 1.822 1.00101.15 O 649 | ATOM 649 C5' G 0 31 -7.495 25.327 0.911 1.00102.14 C 650 | ATOM 650 C4' G 0 31 -7.105 26.596 1.628 1.00 94.37 C 651 | ATOM 651 O4' G 0 31 -8.218 27.071 2.428 1.00103.36 O 652 | ATOM 652 C3' G 0 31 -5.958 26.471 2.619 1.00 99.57 C 653 | ATOM 653 O3' G 0 31 -4.689 26.527 1.989 1.00101.85 O 654 | ATOM 654 C2' G 0 31 -6.211 27.627 3.580 1.00110.43 C 655 | ATOM 655 O2' G 0 31 -5.731 28.847 3.034 1.00105.20 O 656 | ATOM 656 C1' G 0 31 -7.741 27.681 3.608 1.00105.69 C 657 | ATOM 657 N9 G 0 31 -8.326 26.984 4.773 1.00108.95 N 658 | ATOM 658 C8 G 0 31 -9.177 25.906 4.736 1.00111.76 C 659 | ATOM 659 N7 G 0 31 -9.556 25.499 5.917 1.00 96.18 N 660 | ATOM 660 C5 G 0 31 -8.922 26.367 6.793 1.00 94.87 C 661 | ATOM 661 C6 G 0 31 -8.955 26.417 8.210 1.00 97.99 C 662 | ATOM 662 O6 G 0 31 -9.570 25.680 8.993 1.00104.43 O 663 | ATOM 663 N1 G 0 31 -8.168 27.456 8.698 1.00 96.96 N 664 | ATOM 664 C2 G 0 31 -7.444 28.331 7.925 1.00 95.66 C 665 | ATOM 665 N2 G 0 31 -6.745 29.265 8.586 1.00103.15 N 666 | ATOM 666 N3 G 0 31 -7.406 28.295 6.604 1.00 93.64 N 667 | ATOM 667 C4 G 0 31 -8.164 27.295 6.105 1.00100.04 C 668 | ATOM 668 P C 0 32 -3.728 25.239 1.971 1.00109.05 P 669 | ATOM 669 OP1 C 0 32 -2.492 25.603 1.231 1.00 98.95 O 670 | ATOM 670 OP2 C 0 32 -4.520 24.056 1.549 1.00122.82 O 671 | ATOM 671 O5' C 0 32 -3.322 25.047 3.497 1.00 87.73 O 672 | ATOM 672 C5' C 0 32 -2.019 25.382 3.944 1.00 98.34 C 673 | ATOM 673 C4' C 0 32 -2.003 26.695 4.685 1.00112.24 C 674 | ATOM 674 O4' C 0 32 -3.361 27.115 4.993 1.00101.28 O 675 | ATOM 675 C3' C 0 32 -1.306 26.669 6.035 1.00101.77 C 676 | ATOM 676 O3' C 0 32 0.100 26.783 5.929 1.00 90.74 O 677 | ATOM 677 C2' C 0 32 -1.964 27.824 6.771 1.00 92.33 C 678 | ATOM 678 O2' C 0 32 -1.443 29.065 6.316 1.00 95.03 O 679 | ATOM 679 C1' C 0 32 -3.404 27.703 6.278 1.00 91.70 C 680 | ATOM 680 N1 C 0 32 -4.237 26.831 7.142 1.00 97.79 N 681 | ATOM 681 C2 C 0 32 -4.417 27.054 8.520 1.00 94.43 C 682 | ATOM 682 O2 C 0 32 -3.857 28.001 9.092 1.00102.04 O 683 | ATOM 683 N3 C 0 32 -5.206 26.206 9.223 1.00100.78 N 684 | ATOM 684 C4 C 0 32 -5.812 25.185 8.615 1.00 96.51 C 685 | ATOM 685 N4 C 0 32 -6.583 24.374 9.341 1.00 96.47 N 686 | ATOM 686 C5 C 0 32 -5.658 24.941 7.224 1.00 95.31 C 687 | ATOM 687 C6 C 0 32 -4.875 25.782 6.541 1.00 99.34 C 688 | ATOM 688 P C 0 33 1.041 25.627 6.523 1.00 97.77 P 689 | ATOM 689 OP1 C 0 33 2.447 25.991 6.214 1.00120.02 O 690 | ATOM 690 OP2 C 0 33 0.507 24.313 6.077 1.00101.43 O 691 | ATOM 691 O5' C 0 33 0.825 25.733 8.098 1.00 97.63 O 692 | ATOM 692 C5' C 0 33 1.142 26.929 8.791 1.00 92.45 C 693 | ATOM 693 C4' C 0 33 0.948 26.779 10.279 1.00 92.93 C 694 | ATOM 694 O4' C 0 33 -0.463 26.834 10.611 1.00106.69 O 695 | ATOM 695 C3' C 0 33 1.414 25.469 10.891 1.00106.19 C 696 | ATOM 696 O3' C 0 33 2.815 25.416 11.091 1.00102.81 O 697 | ATOM 697 C2' C 0 33 0.612 25.411 12.185 1.00106.62 C 698 | ATOM 698 O2' 
C 0 33 1.194 26.250 13.171 1.00 94.49 O 699 | ATOM 699 C1' C 0 33 -0.717 26.032 11.747 1.00107.45 C 700 | ATOM 700 N1 C 0 33 -1.730 25.008 11.400 1.00 96.74 N 701 | ATOM 701 C2 C 0 33 -2.394 24.343 12.437 1.00107.84 C 702 | ATOM 702 O2 C 0 33 -2.115 24.624 13.613 1.00112.62 O 703 | ATOM 703 N3 C 0 33 -3.324 23.407 12.139 1.00110.21 N 704 | ATOM 704 C4 C 0 33 -3.604 23.127 10.866 1.00 99.20 C 705 | ATOM 705 N4 C 0 33 -4.531 22.196 10.622 1.00 95.15 N 706 | ATOM 706 C5 C 0 33 -2.946 23.789 9.789 1.00 93.71 C 707 | ATOM 707 C6 C 0 33 -2.027 24.714 10.098 1.00 88.20 C 708 | ATOM 708 P C 0 34 3.573 23.999 11.115 1.00112.30 P 709 | ATOM 709 OP1 C 0 34 5.031 24.270 11.185 1.00 98.22 O 710 | ATOM 710 OP2 C 0 34 3.029 23.159 10.018 1.00108.61 O 711 | ATOM 711 O5' C 0 34 3.129 23.347 12.500 1.00107.32 O 712 | ATOM 712 C5' C 0 34 3.513 23.935 13.734 1.00 92.42 C 713 | ATOM 713 C4' C 0 34 2.890 23.226 14.910 1.00 90.33 C 714 | ATOM 714 O4' C 0 34 1.449 23.393 14.891 1.00 96.09 O 715 | ATOM 715 C3' C 0 34 3.075 21.721 14.963 1.00109.69 C 716 | ATOM 716 O3' C 0 34 4.358 21.328 15.415 1.00110.56 O 717 | ATOM 717 C2' C 0 34 1.937 21.285 15.877 1.00105.87 C 718 | ATOM 718 O2' C 0 34 2.260 21.532 17.238 1.00 90.66 O 719 | ATOM 719 C1' C 0 34 0.829 22.255 15.457 1.00 91.94 C 720 | ATOM 720 N1 C 0 34 -0.085 21.649 14.462 1.00 90.12 N 721 | ATOM 721 C2 C 0 34 -1.191 20.945 14.942 1.00104.94 C 722 | ATOM 722 O2 C 0 34 -1.368 20.869 16.168 1.00104.74 O 723 | ATOM 723 N3 C 0 34 -2.046 20.368 14.067 1.00110.13 N 724 | ATOM 724 C4 C 0 34 -1.827 20.468 12.756 1.00104.14 C 725 | ATOM 725 N4 C 0 34 -2.698 19.882 11.929 1.00 93.85 N 726 | ATOM 726 C5 C 0 34 -0.703 21.175 12.236 1.00 97.16 C 727 | ATOM 727 C6 C 0 34 0.135 21.743 13.115 1.00 88.18 C 728 | ATOM 728 P C 0 35 5.036 19.995 14.828 1.00115.84 P 729 | ATOM 729 OP1 C 0 35 6.483 20.030 15.162 1.00102.05 O 730 | ATOM 730 OP2 C 0 35 4.617 19.856 13.409 1.00104.46 O 731 | ATOM 731 O5' C 0 35 4.357 18.824 15.668 1.00105.10 O 732 | ATOM 732 C5' C 0 35 4.414 18.822 17.086 1.00 96.12 C 733 | ATOM 733 C4' C 0 35 3.424 17.854 17.684 1.00 99.69 C 734 | ATOM 734 O4' C 0 35 2.070 18.302 17.418 1.00103.06 O 735 | ATOM 735 C3' C 0 35 3.456 16.438 17.131 1.00120.05 C 736 | ATOM 736 O3' C 0 35 4.501 15.651 17.675 1.00127.02 O 737 | ATOM 737 C2' C 0 35 2.060 15.924 17.460 1.00112.74 C 738 | ATOM 738 O2' C 0 35 1.970 15.525 18.820 1.00 95.61 O 739 | ATOM 739 C1' C 0 35 1.217 17.186 17.263 1.00111.24 C 740 | ATOM 740 N1 C 0 35 0.603 17.231 15.916 1.00101.19 N 741 | ATOM 741 C2 C 0 35 -0.602 16.551 15.732 1.00104.84 C 742 | ATOM 742 O2 C 0 35 -1.107 15.952 16.694 1.00103.88 O 743 | ATOM 743 N3 C 0 35 -1.190 16.561 14.514 1.00114.05 N 744 | ATOM 744 C4 C 0 35 -0.620 17.215 13.501 1.00114.51 C 745 | ATOM 745 N4 C 0 35 -1.243 17.194 12.319 1.00109.82 N 746 | ATOM 746 C5 C 0 35 0.612 17.916 13.659 1.00106.27 C 747 | ATOM 747 C6 C 0 35 1.184 17.896 14.871 1.00 99.09 C 748 | ATOM 748 P U 0 36 5.395 14.718 16.716 1.00121.06 P 749 | ATOM 749 OP1 U 0 36 6.524 14.184 17.520 1.00110.42 O 750 | ATOM 750 OP2 U 0 36 5.679 15.469 15.467 1.00116.08 O 751 | ATOM 751 O5' U 0 36 4.432 13.501 16.362 1.00114.57 O 752 | ATOM 752 C5' U 0 36 3.864 12.712 17.395 1.00111.16 C 753 | ATOM 753 C4' U 0 36 2.616 12.000 16.939 1.00108.58 C 754 | ATOM 754 O4' U 0 36 1.635 12.955 16.466 1.00116.72 O 755 | ATOM 755 C3' U 0 36 2.774 11.040 15.774 1.00102.00 C 756 | ATOM 756 O3' U 0 36 3.326 9.795 16.164 1.00106.46 O 757 | ATOM 757 C2' U 0 36 1.351 10.938 15.236 1.00118.66 C 758 | ATOM 758 O2' U 0 36 0.572 10.057 
16.029 1.00119.81 O 759 | ATOM 759 C1' U 0 36 0.834 12.360 15.467 1.00120.94 C 760 | ATOM 760 N1 U 0 36 0.873 13.186 14.240 1.00112.44 N 761 | ATOM 761 C2 U 0 36 -0.181 13.007 13.372 1.00114.64 C 762 | ATOM 762 O2 U 0 36 -1.081 12.214 13.593 1.00113.79 O 763 | ATOM 763 N3 U 0 36 -0.141 13.777 12.238 1.00117.13 N 764 | ATOM 764 C4 U 0 36 0.830 14.696 11.897 1.00119.50 C 765 | ATOM 765 O4 U 0 36 0.727 15.317 10.838 1.00119.64 O 766 | ATOM 766 C5 U 0 36 1.890 14.825 12.852 1.00115.53 C 767 | ATOM 767 C6 U 0 36 1.877 14.085 13.967 1.00109.51 C 768 | ATOM 768 P G 0 37 3.963 8.815 15.064 1.00123.13 P 769 | ATOM 769 OP1 G 0 37 4.955 7.949 15.753 1.00105.56 O 770 | ATOM 770 OP2 G 0 37 4.384 9.642 13.905 1.00131.45 O 771 | ATOM 771 O5' G 0 37 2.737 7.902 14.617 1.00120.95 O 772 | ATOM 772 C5' G 0 37 1.865 7.356 15.593 1.00104.39 C 773 | ATOM 773 C4' G 0 37 0.999 6.252 15.043 1.00112.65 C 774 | ATOM 774 O4' G 0 37 -0.143 6.784 14.330 1.00105.45 O 775 | ATOM 775 C3' G 0 37 1.636 5.309 14.040 1.00114.84 C 776 | ATOM 776 O3' G 0 37 2.504 4.370 14.649 1.00119.03 O 777 | ATOM 777 C2' G 0 37 0.419 4.669 13.373 1.00102.36 C 778 | ATOM 778 O2' G 0 37 -0.074 3.595 14.158 1.00111.66 O 779 | ATOM 779 C1' G 0 37 -0.609 5.810 13.418 1.00104.68 C 780 | ATOM 780 N9 G 0 37 -0.893 6.403 12.093 1.00115.12 N 781 | ATOM 781 C8 G 0 37 -1.954 6.009 11.312 1.00111.50 C 782 | ATOM 782 N7 G 0 37 -2.039 6.637 10.177 1.00100.06 N 783 | ATOM 783 C5 G 0 37 -0.960 7.504 10.201 1.00109.37 C 784 | ATOM 784 C6 G 0 37 -0.548 8.440 9.223 1.00110.18 C 785 | ATOM 785 O6 G 0 37 -1.073 8.685 8.129 1.00109.81 O 786 | ATOM 786 N1 G 0 37 0.590 9.126 9.628 1.00118.47 N 787 | ATOM 787 C2 G 0 37 1.248 8.932 10.818 1.00118.50 C 788 | ATOM 788 N2 G 0 37 2.333 9.695 11.017 1.00118.16 N 789 | ATOM 789 N3 G 0 37 0.874 8.057 11.742 1.00117.98 N 790 | ATOM 790 C4 G 0 37 -0.232 7.376 11.371 1.00114.58 C 791 | ATOM 791 P U 0 38 3.671 3.678 13.789 1.00118.78 P 792 | ATOM 792 OP1 U 0 38 3.325 2.248 13.603 1.00107.88 O 793 | ATOM 793 OP2 U 0 38 4.980 4.057 14.381 1.00142.28 O 794 | ATOM 794 O5' U 0 38 3.544 4.370 12.363 1.00110.06 O 795 | ATOM 795 C5' U 0 38 3.742 3.620 11.178 1.00 95.53 C 796 | ATOM 796 C4' U 0 38 2.440 3.343 10.470 1.00 91.04 C 797 | ATOM 797 O4' U 0 38 1.659 4.563 10.371 1.00 98.69 O 798 | ATOM 798 C3' U 0 38 2.572 2.866 9.034 1.00101.05 C 799 | ATOM 799 O3' U 0 38 2.840 1.477 8.942 1.00 85.40 O 800 | ATOM 800 C2' U 0 38 1.242 3.277 8.416 1.00106.15 C 801 | ATOM 801 O2' U 0 38 0.221 2.347 8.747 1.00112.14 O 802 | ATOM 802 C1' U 0 38 0.956 4.590 9.147 1.00100.23 C 803 | ATOM 803 N1 U 0 38 1.402 5.775 8.378 1.00 99.74 N 804 | ATOM 804 C2 U 0 38 0.720 6.130 7.226 1.00101.02 C 805 | ATOM 805 O2 U 0 38 -0.237 5.515 6.788 1.00108.07 O 806 | ATOM 806 N3 U 0 38 1.205 7.244 6.589 1.00104.34 N 807 | ATOM 807 C4 U 0 38 2.272 8.025 6.979 1.00128.61 C 808 | ATOM 808 O4 U 0 38 2.588 8.999 6.299 1.00135.15 O 809 | ATOM 809 C5 U 0 38 2.919 7.597 8.180 1.00116.82 C 810 | ATOM 810 C6 U 0 38 2.470 6.515 8.822 1.00110.26 C 811 | ATOM 811 P C 0 39 4.297 0.952 8.510 1.00105.99 P 812 | ATOM 812 OP1 C 0 39 4.199 -0.521 8.361 1.00107.41 O 813 | ATOM 813 OP2 C 0 39 5.325 1.517 9.420 1.00 63.85 O 814 | ATOM 814 O5' C 0 39 4.513 1.575 7.061 1.00 81.26 O 815 | ATOM 815 C5' C 0 39 3.618 1.270 6.003 1.00 78.16 C 816 | ATOM 816 C4' C 0 39 3.794 2.214 4.842 1.00 86.14 C 817 | ATOM 817 O4' C 0 39 3.281 3.530 5.187 1.00 89.47 O 818 | ATOM 818 C3' C 0 39 5.225 2.495 4.423 1.00 78.11 C 819 | ATOM 819 O3' C 0 39 5.835 1.460 3.677 1.00 68.26 O 820 | ATOM 820 C2' C 0 39 
5.083 3.808 3.673 1.00 90.45 C 821 | ATOM 821 O2' C 0 39 4.494 3.597 2.398 1.00 79.22 O 822 | ATOM 822 C1' C 0 39 4.068 4.526 4.559 1.00 85.35 C 823 | ATOM 823 N1 C 0 39 4.757 5.322 5.600 1.00 85.74 N 824 | ATOM 824 C2 C 0 39 5.280 6.566 5.237 1.00 95.78 C 825 | ATOM 825 O2 C 0 39 5.129 6.965 4.072 1.00 96.65 O 826 | ATOM 826 N3 C 0 39 5.929 7.307 6.164 1.00 92.39 N 827 | ATOM 827 C4 C 0 39 6.072 6.846 7.405 1.00 92.15 C 828 | ATOM 828 N4 C 0 39 6.718 7.615 8.284 1.00 96.96 N 829 | ATOM 829 C5 C 0 39 5.560 5.578 7.803 1.00 93.53 C 830 | ATOM 830 C6 C 0 39 4.917 4.858 6.876 1.00 88.25 C 831 | ATOM 831 P A 0 40 7.395 1.151 3.910 1.00 96.75 P 832 | ATOM 832 OP1 A 0 40 7.758 -0.067 3.146 1.00103.38 O 833 | ATOM 833 OP2 A 0 40 7.650 1.208 5.372 1.00 72.27 O 834 | ATOM 834 O5' A 0 40 8.135 2.393 3.241 1.00 78.55 O 835 | ATOM 835 C5' A 0 40 7.974 2.677 1.859 1.00 65.62 C 836 | ATOM 836 C4' A 0 40 8.549 4.022 1.495 1.00 69.09 C 837 | ATOM 837 O4' A 0 40 7.898 5.068 2.264 1.00 75.30 O 838 | ATOM 838 C3' A 0 40 10.025 4.228 1.789 1.00 60.48 C 839 | ATOM 839 O3' A 0 40 10.878 3.628 0.832 1.00 58.12 O 840 | ATOM 840 C2' A 0 40 10.135 5.744 1.836 1.00 64.50 C 841 | ATOM 841 O2' A 0 40 10.132 6.278 0.522 1.00 72.71 O 842 | ATOM 842 C1' A 0 40 8.814 6.117 2.510 1.00 67.74 C 843 | ATOM 843 N9 A 0 40 8.983 6.275 3.964 1.00 66.86 N 844 | ATOM 844 C8 A 0 40 8.592 5.415 4.957 1.00 76.34 C 845 | ATOM 845 N7 A 0 40 8.896 5.832 6.162 1.00 74.37 N 846 | ATOM 846 C5 A 0 40 9.534 7.044 5.941 1.00 73.26 C 847 | ATOM 847 C6 A 0 40 10.097 7.982 6.818 1.00 72.48 C 848 | ATOM 848 N6 A 0 40 10.105 7.834 8.144 1.00 76.33 N 849 | ATOM 849 N1 A 0 40 10.656 9.087 6.280 1.00 73.96 N 850 | ATOM 850 C2 A 0 40 10.647 9.230 4.948 1.00 73.19 C 851 | ATOM 851 N3 A 0 40 10.147 8.419 4.019 1.00 65.85 N 852 | ATOM 852 C4 A 0 40 9.599 7.331 4.591 1.00 72.34 C 853 | ATOM 853 P G 0 41 12.344 3.139 1.268 1.00 72.84 P 854 | ATOM 854 OP1 G 0 41 12.920 2.366 0.140 1.00 71.08 O 855 | ATOM 855 OP2 G 0 41 12.238 2.512 2.608 1.00 62.91 O 856 | ATOM 856 O5' G 0 41 13.174 4.492 1.410 1.00 60.46 O 857 | ATOM 857 C5' G 0 41 13.455 5.285 0.267 1.00 61.58 C 858 | ATOM 858 C4' G 0 41 14.104 6.597 0.633 1.00 63.43 C 859 | ATOM 859 O4' G 0 41 13.233 7.362 1.504 1.00 63.46 O 860 | ATOM 860 C3' G 0 41 15.410 6.513 1.406 1.00 64.48 C 861 | ATOM 861 O3' G 0 41 16.521 6.235 0.577 1.00 65.35 O 862 | ATOM 862 C2' G 0 41 15.482 7.877 2.079 1.00 66.09 C 863 | ATOM 863 O2' G 0 41 15.917 8.869 1.159 1.00 67.75 O 864 | ATOM 864 C1' G 0 41 14.008 8.127 2.405 1.00 65.38 C 865 | ATOM 865 N9 G 0 41 13.674 7.715 3.780 1.00 71.84 N 866 | ATOM 866 C8 G 0 41 12.830 6.705 4.171 1.00 74.41 C 867 | ATOM 867 N7 G 0 41 12.739 6.586 5.468 1.00 72.24 N 868 | ATOM 868 C5 G 0 41 13.577 7.577 5.960 1.00 64.61 C 869 | ATOM 869 C6 G 0 41 13.892 7.942 7.294 1.00 72.58 C 870 | ATOM 870 O6 G 0 41 13.479 7.438 8.342 1.00 91.84 O 871 | ATOM 871 N1 G 0 41 14.787 9.005 7.343 1.00 81.06 N 872 | ATOM 872 C2 G 0 41 15.314 9.642 6.248 1.00 71.83 C 873 | ATOM 873 N2 G 0 41 16.162 10.649 6.499 1.00 75.86 N 874 | ATOM 874 N3 G 0 41 15.029 9.317 4.999 1.00 73.24 N 875 | ATOM 875 C4 G 0 41 14.160 8.283 4.930 1.00 73.70 C 876 | ATOM 876 P C 0 42 17.360 4.881 0.779 1.00 64.80 P 877 | ATOM 877 OP1 C 0 42 17.121 4.020 -0.407 1.00 84.05 O 878 | ATOM 878 OP2 C 0 42 17.080 4.361 2.143 1.00 69.37 O 879 | ATOM 879 O5' C 0 42 18.876 5.364 0.732 1.00 69.35 O 880 | ATOM 880 C5' C 0 42 19.695 5.302 1.888 1.00 76.34 C 881 | ATOM 881 C4' C 0 42 20.009 6.676 2.422 1.00 83.05 C 882 | ATOM 882 O4' C 0 42 18.784 7.330 
2.853 1.00 76.33 O 883 | ATOM 883 C3' C 0 42 20.898 6.698 3.653 1.00 78.33 C 884 | ATOM 884 O3' C 0 42 22.272 6.601 3.333 1.00 79.32 O 885 | ATOM 885 C2' C 0 42 20.514 8.006 4.326 1.00 72.29 C 886 | ATOM 886 O2' C 0 42 21.129 9.107 3.674 1.00 74.45 O 887 | ATOM 887 C1' C 0 42 19.014 8.055 4.043 1.00 71.37 C 888 | ATOM 888 N1 C 0 42 18.207 7.427 5.119 1.00 74.14 N 889 | ATOM 889 C2 C 0 42 18.233 7.888 6.445 1.00 69.89 C 890 | ATOM 890 O2 C 0 42 18.957 8.841 6.771 1.00 76.48 O 891 | ATOM 891 N3 C 0 42 17.461 7.266 7.368 1.00 72.62 N 892 | ATOM 892 C4 C 0 42 16.680 6.242 7.022 1.00 76.16 C 893 | ATOM 893 N4 C 0 42 15.938 5.666 7.971 1.00 85.43 N 894 | ATOM 894 C5 C 0 42 16.624 5.759 5.687 1.00 74.11 C 895 | ATOM 895 C6 C 0 42 17.393 6.379 4.785 1.00 76.97 C 896 | ATOM 896 P C 0 43 23.126 5.351 3.866 1.00 82.59 P 897 | ATOM 897 OP1 C 0 43 24.473 5.403 3.241 1.00 81.76 O 898 | ATOM 898 OP2 C 0 43 22.290 4.132 3.708 1.00 84.95 O 899 | ATOM 899 O5' C 0 43 23.283 5.647 5.422 1.00 79.57 O 900 | ATOM 900 C5' C 0 43 23.636 6.943 5.878 1.00 75.76 C 901 | ATOM 901 C4' C 0 43 23.409 7.087 7.360 1.00 76.16 C 902 | ATOM 902 O4' C 0 43 22.005 7.331 7.637 1.00 83.80 O 903 | ATOM 903 C3' C 0 43 23.730 5.868 8.203 1.00 79.61 C 904 | ATOM 904 O3' C 0 43 25.115 5.674 8.421 1.00 89.34 O 905 | ATOM 905 C2' C 0 43 22.930 6.134 9.470 1.00 75.32 C 906 | ATOM 906 O2' C 0 43 23.583 7.093 10.289 1.00 78.04 O 907 | ATOM 907 C1' C 0 43 21.665 6.771 8.892 1.00 83.15 C 908 | ATOM 908 N1 C 0 43 20.587 5.771 8.709 1.00 82.76 N 909 | ATOM 909 C2 C 0 43 19.809 5.429 9.821 1.00 82.04 C 910 | ATOM 910 O2 C 0 43 20.040 5.979 10.909 1.00 81.75 O 911 | ATOM 911 N3 C 0 43 18.823 4.513 9.685 1.00 78.68 N 912 | ATOM 912 C4 C 0 43 18.603 3.942 8.500 1.00 80.48 C 913 | ATOM 913 N4 C 0 43 17.619 3.045 8.415 1.00 78.21 N 914 | ATOM 914 C5 C 0 43 19.382 4.267 7.353 1.00 85.88 C 915 | ATOM 915 C6 C 0 43 20.354 5.176 7.501 1.00 82.87 C 916 | ATOM 916 P A 0 44 25.714 4.183 8.411 1.00 81.96 P 917 | ATOM 917 OP1 A 0 44 27.193 4.276 8.310 1.00 93.36 O 918 | ATOM 918 OP2 A 0 44 24.961 3.394 7.401 1.00 81.84 O 919 | ATOM 919 O5' A 0 44 25.346 3.614 9.854 1.00 91.02 O 920 | ATOM 920 C5' A 0 44 25.698 4.331 11.028 1.00 78.42 C 921 | ATOM 921 C4' A 0 44 24.837 3.937 12.202 1.00 77.46 C 922 | ATOM 922 O4' A 0 44 23.450 4.280 11.939 1.00 78.82 O 923 | ATOM 923 C3' A 0 44 24.785 2.454 12.527 1.00 85.30 C 924 | ATOM 924 O3' A 0 44 25.921 1.992 13.235 1.00 87.29 O 925 | ATOM 925 C2' A 0 44 23.480 2.339 13.301 1.00 82.84 C 926 | ATOM 926 O2' A 0 44 23.639 2.828 14.624 1.00 94.09 O 927 | ATOM 927 C1' A 0 44 22.598 3.317 12.527 1.00 73.70 C 928 | ATOM 928 N9 A 0 44 21.854 2.629 11.455 1.00 78.19 N 929 | ATOM 929 C8 A 0 44 22.107 2.628 10.106 1.00 82.48 C 930 | ATOM 930 N7 A 0 44 21.265 1.901 9.410 1.00 76.47 N 931 | ATOM 931 C5 A 0 44 20.404 1.385 10.368 1.00 77.50 C 932 | ATOM 932 C6 A 0 44 19.288 0.536 10.280 1.00 80.48 C 933 | ATOM 933 N6 A 0 44 18.829 0.034 9.131 1.00 91.08 N 934 | ATOM 934 N1 A 0 44 18.653 0.216 11.429 1.00 77.00 N 935 | ATOM 935 C2 A 0 44 19.115 0.720 12.581 1.00 71.89 C 936 | ATOM 936 N3 A 0 44 20.150 1.526 12.792 1.00 69.67 N 937 | ATOM 937 C4 A 0 44 20.757 1.824 11.631 1.00 77.79 C 938 | ATOM 938 P U 0 45 26.616 0.608 12.807 1.00 88.28 P 939 | ATOM 939 OP1 U 0 45 27.967 0.554 13.421 1.00 79.98 O 940 | ATOM 940 OP2 U 0 45 26.475 0.460 11.335 1.00 77.74 O 941 | ATOM 941 O5' U 0 45 25.706 -0.502 13.496 1.00 78.57 O 942 | ATOM 942 C5' U 0 45 25.377 -0.415 14.874 1.00 76.15 C 943 | ATOM 943 C4' U 0 45 24.132 -1.200 15.198 1.00 73.91 C 
944 | ATOM 944 O4' U 0 45 22.988 -0.631 14.505 1.00 75.94 O 945 | ATOM 945 C3' U 0 45 24.125 -2.652 14.757 1.00 72.33 C 946 | ATOM 946 O3' U 0 45 24.874 -3.505 15.600 1.00 82.31 O 947 | ATOM 947 C2' U 0 45 22.639 -2.966 14.714 1.00 82.16 C 948 | ATOM 948 O2' U 0 45 22.124 -3.122 16.028 1.00 71.94 O 949 | ATOM 949 C1' U 0 45 22.091 -1.661 14.137 1.00 73.48 C 950 | ATOM 950 N1 U 0 45 22.016 -1.718 12.659 1.00 68.48 N 951 | ATOM 951 C2 U 0 45 20.928 -2.357 12.101 1.00 66.67 C 952 | ATOM 952 O2 U 0 45 20.041 -2.852 12.774 1.00 66.74 O 953 | ATOM 953 N3 U 0 45 20.915 -2.391 10.729 1.00 68.20 N 954 | ATOM 954 C4 U 0 45 21.863 -1.865 9.875 1.00 74.74 C 955 | ATOM 955 O4 U 0 45 21.716 -1.972 8.658 1.00 74.96 O 956 | ATOM 956 C5 U 0 45 22.960 -1.225 10.527 1.00 84.97 C 957 | ATOM 957 C6 U 0 45 22.997 -1.177 11.861 1.00 79.46 C 958 | ATOM 958 P U 0 46 25.896 -4.561 14.951 1.00 81.77 P 959 | ATOM 959 OP1 U 0 46 26.764 -5.087 16.036 1.00108.03 O 960 | ATOM 960 OP2 U 0 46 26.510 -3.939 13.750 1.00 74.10 O 961 | ATOM 961 O5' U 0 46 24.954 -5.734 14.431 1.00 71.21 O 962 | ATOM 962 C5' U 0 46 23.957 -6.294 15.276 1.00 70.27 C 963 | ATOM 963 C4' U 0 46 23.976 -7.802 15.240 1.00 69.55 C 964 | ATOM 964 O4' U 0 46 23.389 -8.269 13.993 1.00 69.72 O 965 | ATOM 965 C3' U 0 46 25.359 -8.446 15.335 1.00 78.58 C 966 | ATOM 966 O3' U 0 46 25.274 -9.634 16.120 1.00 78.27 O 967 | ATOM 967 C2' U 0 46 25.665 -8.812 13.883 1.00 75.35 C 968 | ATOM 968 O2' U 0 46 26.571 -9.883 13.723 1.00 93.44 O 969 | ATOM 969 C1' U 0 46 24.278 -9.164 13.356 1.00 75.35 C 970 | ATOM 970 N1 U 0 46 24.127 -9.013 11.901 1.00 76.01 N 971 | ATOM 971 C2 U 0 46 23.988 -10.160 11.144 1.00 76.53 C 972 | ATOM 972 O2 U 0 46 23.992 -11.279 11.628 1.00 71.19 O 973 | ATOM 973 N3 U 0 46 23.849 -9.949 9.794 1.00 71.23 N 974 | ATOM 974 C4 U 0 46 23.832 -8.733 9.144 1.00 69.72 C 975 | ATOM 975 O4 U 0 46 23.701 -8.703 7.922 1.00 78.43 O 976 | ATOM 976 C5 U 0 46 23.978 -7.596 9.996 1.00 65.91 C 977 | ATOM 977 C6 U 0 46 24.115 -7.771 11.313 1.00 66.76 C 978 | ATOM 978 P G 0 47 25.663 -9.602 17.679 1.00 73.66 P 979 | ATOM 979 OP1 G 0 47 26.970 -8.910 17.810 1.00 77.92 O 980 | ATOM 980 OP2 G 0 47 25.495 -10.975 18.218 1.00 74.59 O 981 | ATOM 981 O5' G 0 47 24.549 -8.675 18.342 1.00 75.43 O 982 | ATOM 982 C5' G 0 47 23.646 -9.194 19.309 1.00 75.21 C 983 | ATOM 983 C4' G 0 47 22.293 -8.531 19.210 1.00 82.93 C 984 | ATOM 984 O4' G 0 47 22.125 -7.973 17.882 1.00 81.62 O 985 | ATOM 985 C3' G 0 47 21.087 -9.444 19.398 1.00 72.34 C 986 | ATOM 986 O3' G 0 47 20.748 -9.629 20.764 1.00 87.09 O 987 | ATOM 987 C2' G 0 47 19.992 -8.735 18.607 1.00 67.81 C 988 | ATOM 988 O2' G 0 47 19.414 -7.692 19.377 1.00 71.29 O 989 | ATOM 989 C1' G 0 47 20.785 -8.101 17.464 1.00 67.64 C 990 | ATOM 990 N9 G 0 47 20.758 -8.903 16.226 1.00 69.17 N 991 | ATOM 991 C8 G 0 47 20.946 -10.258 16.091 1.00 69.25 C 992 | ATOM 992 N7 G 0 47 20.875 -10.661 14.852 1.00 70.92 N 993 | ATOM 993 C5 G 0 47 20.634 -9.504 14.125 1.00 68.37 C 994 | ATOM 994 C6 G 0 47 20.466 -9.317 12.729 1.00 73.35 C 995 | ATOM 995 O6 G 0 47 20.497 -10.165 11.828 1.00 69.81 O 996 | ATOM 996 N1 G 0 47 20.241 -7.980 12.418 1.00 86.04 N 997 | ATOM 997 C2 G 0 47 20.187 -6.955 13.332 1.00 79.98 C 998 | ATOM 998 N2 G 0 47 19.960 -5.730 12.832 1.00 83.29 N 999 | ATOM 999 N3 G 0 47 20.344 -7.118 14.636 1.00 71.04 N 1000 | ATOM 1000 C4 G 0 47 20.562 -8.410 14.959 1.00 67.55 C 1001 | ATOM 1001 P C 0 48 20.928 -11.058 21.479 1.00 80.08 P 1002 | ATOM 1002 OP1 C 0 48 20.806 -10.852 22.945 1.00 73.82 O 1003 | ATOM 1003 OP2 C 0 48 22.141 
-11.702 20.921 1.00 86.26 O 1004 | ATOM 1004 O5' C 0 48 19.667 -11.904 21.000 1.00 69.63 O 1005 | ATOM 1005 C5' C 0 48 18.379 -11.308 20.921 1.00 68.35 C 1006 | ATOM 1006 C4' C 0 48 17.327 -12.142 21.610 1.00 68.18 C 1007 | ATOM 1007 O4' C 0 48 17.316 -13.477 21.038 1.00 67.30 O 1008 | ATOM 1008 C3' C 0 48 17.505 -12.325 23.117 1.00 70.46 C 1009 | ATOM 1009 O3' C 0 48 16.218 -12.356 23.726 1.00 72.14 O 1010 | ATOM 1010 C2' C 0 48 18.131 -13.710 23.218 1.00 72.52 C 1011 | ATOM 1011 O2' C 0 48 17.910 -14.362 24.451 1.00 72.71 O 1012 | ATOM 1012 C1' C 0 48 17.436 -14.432 22.072 1.00 68.79 C 1013 | ATOM 1013 N1 C 0 48 18.173 -15.585 21.551 1.00 68.78 N 1014 | ATOM 1014 C2 C 0 48 17.594 -16.837 21.730 1.00 74.64 C 1015 | ATOM 1015 O2 C 0 48 16.503 -16.904 22.318 1.00 76.57 O 1016 | ATOM 1016 N3 C 0 48 18.241 -17.927 21.264 1.00 75.47 N 1017 | ATOM 1017 C4 C 0 48 19.411 -17.790 20.642 1.00 71.49 C 1018 | ATOM 1018 N4 C 0 48 20.007 -18.898 20.200 1.00 73.17 N 1019 | ATOM 1019 C5 C 0 48 20.022 -16.516 20.446 1.00 77.63 C 1020 | ATOM 1020 C6 C 0 48 19.372 -15.445 20.914 1.00 72.27 C 1021 | ATOM 1021 P A 0 49 15.986 -11.669 25.156 1.00 87.20 P 1022 | ATOM 1022 OP1 A 0 49 16.786 -10.417 25.197 1.00 73.47 O 1023 | ATOM 1023 OP2 A 0 49 16.226 -12.718 26.179 1.00 89.57 O 1024 | ATOM 1024 O5' A 0 49 14.433 -11.297 25.155 1.00 97.29 O 1025 | ATOM 1025 C5' A 0 49 13.967 -10.084 24.575 1.00 85.48 C 1026 | ATOM 1026 C4' A 0 49 12.964 -10.332 23.473 1.00 81.08 C 1027 | ATOM 1027 O4' A 0 49 13.417 -11.432 22.643 1.00 79.48 O 1028 | ATOM 1028 C3' A 0 49 11.560 -10.743 23.904 1.00 83.48 C 1029 | ATOM 1029 O3' A 0 49 10.750 -9.636 24.275 1.00 76.00 O 1030 | ATOM 1030 C2' A 0 49 11.039 -11.493 22.683 1.00 81.67 C 1031 | ATOM 1031 O2' A 0 49 10.586 -10.585 21.689 1.00 74.80 O 1032 | ATOM 1032 C1' A 0 49 12.312 -12.170 22.169 1.00 78.00 C 1033 | ATOM 1033 N9 A 0 49 12.441 -13.568 22.627 1.00 86.62 N 1034 | ATOM 1034 C8 A 0 49 13.397 -14.104 23.456 1.00 75.62 C 1035 | ATOM 1035 N7 A 0 49 13.249 -15.389 23.673 1.00 77.06 N 1036 | ATOM 1036 C5 A 0 49 12.122 -15.725 22.934 1.00 76.97 C 1037 | ATOM 1037 C6 A 0 49 11.437 -16.939 22.739 1.00 78.48 C 1038 | ATOM 1038 N6 A 0 49 11.799 -18.095 23.297 1.00 83.93 N 1039 | ATOM 1039 N1 A 0 49 10.349 -16.926 21.940 1.00 80.34 N 1040 | ATOM 1040 C2 A 0 49 9.978 -15.770 21.377 1.00 81.20 C 1041 | ATOM 1041 N3 A 0 49 10.539 -14.568 21.483 1.00 84.04 N 1042 | ATOM 1042 C4 A 0 49 11.618 -14.613 22.283 1.00 84.82 C 1043 | ATOM 1043 P C 0 50 9.539 -9.815 25.320 1.00 93.57 P 1044 | ATOM 1044 OP1 C 0 50 8.848 -8.509 25.466 1.00106.26 O 1045 | ATOM 1045 OP2 C 0 50 10.047 -10.517 26.525 1.00105.49 O 1046 | ATOM 1046 O5' C 0 50 8.527 -10.790 24.576 1.00 70.36 O 1047 | ATOM 1047 C5' C 0 50 7.675 -10.310 23.548 1.00 80.91 C 1048 | ATOM 1048 C4' C 0 50 6.666 -11.358 23.161 1.00 87.79 C 1049 | ATOM 1049 O4' C 0 50 7.358 -12.517 22.628 1.00 82.82 O 1050 | ATOM 1050 C3' C 0 50 5.836 -11.917 24.305 1.00 89.88 C 1051 | ATOM 1051 O3' C 0 50 4.741 -11.088 24.651 1.00 97.48 O 1052 | ATOM 1052 C2' C 0 50 5.430 -13.290 23.783 1.00 96.47 C 1053 | ATOM 1053 O2' C 0 50 4.350 -13.177 22.869 1.00100.80 O 1054 | ATOM 1054 C1' C 0 50 6.683 -13.699 23.004 1.00 90.41 C 1055 | ATOM 1055 N1 C 0 50 7.600 -14.531 23.819 1.00 96.93 N 1056 | ATOM 1056 C2 C 0 50 7.463 -15.923 23.792 1.00 91.58 C 1057 | ATOM 1057 O2 C 0 50 6.581 -16.434 23.084 1.00 93.18 O 1058 | ATOM 1058 N3 C 0 50 8.297 -16.683 24.541 1.00 85.63 N 1059 | ATOM 1059 C4 C 0 50 9.238 -16.109 25.292 1.00 92.57 C 1060 | ATOM 1060 N4 C 0 50 10.037 -16.900 
26.012 1.00 81.97 N 1061 | ATOM 1061 C5 C 0 50 9.401 -14.695 25.340 1.00 88.49 C 1062 | ATOM 1062 C6 C 0 50 8.570 -13.957 24.595 1.00 88.25 C 1063 | ATOM 1063 P U 0 51 4.548 -10.593 26.168 1.00120.51 P 1064 | ATOM 1064 OP1 U 0 51 5.874 -10.179 26.692 1.00109.03 O 1065 | ATOM 1065 OP2 U 0 51 3.766 -11.625 26.894 1.00 96.38 O 1066 | ATOM 1066 O5' U 0 51 3.659 -9.278 26.031 1.00141.84 O 1067 | ATOM 1067 C5' U 0 51 2.763 -9.107 24.938 1.00143.38 C 1068 | ATOM 1068 C4' U 0 51 2.760 -7.681 24.442 1.00161.61 C 1069 | ATOM 1069 O4' U 0 51 2.800 -6.789 25.590 1.00181.83 O 1070 | ATOM 1070 C3' U 0 51 3.947 -7.295 23.561 1.00147.26 C 1071 | ATOM 1071 O3' U 0 51 3.531 -6.357 22.569 1.00139.98 O 1072 | ATOM 1072 C2' U 0 51 4.894 -6.614 24.543 1.00152.34 C 1073 | ATOM 1073 O2' U 0 51 5.789 -5.693 23.956 1.00128.82 O 1074 | ATOM 1074 C1' U 0 51 3.918 -5.926 25.492 1.00174.88 C 1075 | ATOM 1075 N1 U 0 51 4.464 -5.723 26.843 1.00185.33 N 1076 | ATOM 1076 C2 U 0 51 3.861 -4.792 27.668 1.00189.15 C 1077 | ATOM 1077 O2 U 0 51 2.880 -4.144 27.341 1.00176.99 O 1078 | ATOM 1078 N3 U 0 51 4.444 -4.662 28.906 1.00190.65 N 1079 | ATOM 1079 C4 U 0 51 5.548 -5.341 29.386 1.00180.73 C 1080 | ATOM 1080 O4 U 0 51 5.956 -5.109 30.526 1.00165.73 O 1081 | ATOM 1081 C5 U 0 51 6.120 -6.276 28.468 1.00153.79 C 1082 | ATOM 1082 C6 U 0 51 5.572 -6.426 27.259 1.00154.95 C 1083 | ATOM 1083 P C 0 52 4.359 -6.222 21.199 1.00165.99 P 1084 | ATOM 1084 OP1 C 0 52 3.512 -6.722 20.088 1.00173.47 O 1085 | ATOM 1085 OP2 C 0 52 5.707 -6.803 21.418 1.00131.24 O 1086 | ATOM 1086 O5' C 0 52 4.541 -4.652 21.009 1.00142.48 O 1087 | ATOM 1087 C5' C 0 52 3.761 -3.942 20.060 1.00152.34 C 1088 | ATOM 1088 C4' C 0 52 4.002 -2.459 20.155 1.00150.86 C 1089 | ATOM 1089 O4' C 0 52 3.495 -1.962 21.420 1.00160.38 O 1090 | ATOM 1090 C3' C 0 52 5.455 -2.020 20.153 1.00140.76 C 1091 | ATOM 1091 O3' C 0 52 6.046 -2.007 18.867 1.00138.04 O 1092 | ATOM 1092 C2' C 0 52 5.385 -0.652 20.822 1.00137.72 C 1093 | ATOM 1093 O2' C 0 52 4.937 0.337 19.907 1.00126.64 O 1094 | ATOM 1094 C1' C 0 52 4.289 -0.881 21.864 1.00156.52 C 1095 | ATOM 1095 N1 C 0 52 4.852 -1.199 23.197 1.00160.72 N 1096 | ATOM 1096 C2 C 0 52 4.564 -0.320 24.240 1.00159.16 C 1097 | ATOM 1097 O2 C 0 52 3.836 0.657 24.008 1.00145.91 O 1098 | ATOM 1098 N3 C 0 52 5.067 -0.565 25.472 1.00165.63 N 1099 | ATOM 1099 C4 C 0 52 5.843 -1.631 25.676 1.00168.55 C 1100 | ATOM 1100 N4 C 0 52 6.318 -1.832 26.907 1.00167.66 N 1101 | ATOM 1101 C5 C 0 52 6.166 -2.540 24.630 1.00158.45 C 1102 | ATOM 1102 C6 C 0 52 5.657 -2.285 23.421 1.00148.45 C 1103 | ATOM 1103 P C 0 53 7.573 -2.479 18.690 1.00136.23 P 1104 | ATOM 1104 OP1 C 0 53 7.611 -3.480 17.593 1.00 95.18 O 1105 | ATOM 1105 OP2 C 0 53 8.099 -2.843 20.032 1.00122.22 O 1106 | ATOM 1106 O5' C 0 53 8.328 -1.160 18.208 1.00122.61 O 1107 | ATOM 1107 C5' C 0 53 7.776 0.128 18.465 1.00117.96 C 1108 | ATOM 1108 C4' C 0 53 8.818 1.118 18.922 1.00112.22 C 1109 | ATOM 1109 O4' C 0 53 8.251 2.002 19.937 1.00112.05 O 1110 | ATOM 1110 C3' C 0 53 10.075 0.517 19.555 1.00108.52 C 1111 | ATOM 1111 O3' C 0 53 11.154 1.410 19.287 1.00102.59 O 1112 | ATOM 1112 C2' C 0 53 9.751 0.606 21.039 1.00116.17 C 1113 | ATOM 1113 O2' C 0 53 10.867 0.531 21.899 1.00110.81 O 1114 | ATOM 1114 C1' C 0 53 9.086 1.973 21.079 1.00126.04 C 1115 | ATOM 1115 N1 C 0 53 8.324 2.292 22.296 1.00140.91 N 1116 | ATOM 1116 C2 C 0 53 9.018 3.033 23.256 1.00138.58 C 1117 | ATOM 1117 O2 C 0 53 10.198 3.338 23.023 1.00124.28 O 1118 | ATOM 1118 N3 C 0 53 8.409 3.389 24.408 1.00133.43 N 1119 | ATOM 1119 C4 C 0 53 
7.143 3.037 24.615 1.00136.19 C 1120 | ATOM 1120 N4 C 0 53 6.575 3.409 25.765 1.00110.83 N 1121 | ATOM 1121 C5 C 0 53 6.406 2.290 23.648 1.00147.92 C 1122 | ATOM 1122 C6 C 0 53 7.022 1.940 22.509 1.00137.05 C 1123 | ATOM 1123 P G 0 54 12.578 0.879 18.772 1.00 95.36 P 1124 | ATOM 1124 OP1 G 0 54 12.588 0.974 17.290 1.00 92.14 O 1125 | ATOM 1125 OP2 G 0 54 12.881 -0.411 19.439 1.00102.13 O 1126 | ATOM 1126 O5' G 0 54 13.594 1.968 19.334 1.00 91.99 O 1127 | ATOM 1127 C5' G 0 54 13.370 3.353 19.114 1.00 91.96 C 1128 | ATOM 1128 C4' G 0 54 14.673 4.099 19.004 1.00 92.25 C 1129 | ATOM 1129 O4' G 0 54 15.667 3.223 18.406 1.00 92.13 O 1130 | ATOM 1130 C3' G 0 54 14.669 5.325 18.101 1.00 84.32 C 1131 | ATOM 1131 O3' G 0 54 14.168 6.495 18.725 1.00 87.69 O 1132 | ATOM 1132 C2' G 0 54 16.126 5.426 17.682 1.00 89.16 C 1133 | ATOM 1133 O2' G 0 54 16.915 5.952 18.740 1.00 83.53 O 1134 | ATOM 1134 C1' G 0 54 16.475 3.952 17.510 1.00 84.30 C 1135 | ATOM 1135 N9 G 0 54 16.163 3.485 16.147 1.00 87.37 N 1136 | ATOM 1136 C8 G 0 54 15.089 2.715 15.767 1.00 91.98 C 1137 | ATOM 1137 N7 G 0 54 15.066 2.462 14.488 1.00 97.24 N 1138 | ATOM 1138 C5 G 0 54 16.191 3.107 13.993 1.00 97.12 C 1139 | ATOM 1139 C6 G 0 54 16.694 3.191 12.669 1.00 83.49 C 1140 | ATOM 1140 O6 G 0 54 16.230 2.696 11.637 1.00 86.54 O 1141 | ATOM 1141 N1 G 0 54 17.861 3.945 12.611 1.00 80.33 N 1142 | ATOM 1142 C2 G 0 54 18.466 4.542 13.689 1.00 89.42 C 1143 | ATOM 1143 N2 G 0 54 19.587 5.230 13.435 1.00 87.91 N 1144 | ATOM 1144 N3 G 0 54 18.008 4.472 14.927 1.00 85.24 N 1145 | ATOM 1145 C4 G 0 54 16.877 3.744 15.004 1.00 89.44 C 1146 | ATOM 1146 P G 0 55 13.592 7.694 17.822 1.00 92.54 P 1147 | ATOM 1147 OP1 G 0 55 12.900 8.673 18.696 1.00109.70 O 1148 | ATOM 1148 OP2 G 0 55 12.862 7.087 16.680 1.00 89.77 O 1149 | ATOM 1149 O5' G 0 55 14.896 8.396 17.239 1.00 75.47 O 1150 | ATOM 1150 C5' G 0 55 15.860 8.990 18.096 1.00 79.96 C 1151 | ATOM 1151 C4' G 0 55 16.992 9.578 17.295 1.00 86.07 C 1152 | ATOM 1152 O4' G 0 55 17.727 8.509 16.641 1.00 83.94 O 1153 | ATOM 1153 C3' G 0 55 16.565 10.493 16.157 1.00 79.20 C 1154 | ATOM 1154 O3' G 0 55 16.316 11.821 16.581 1.00 81.19 O 1155 | ATOM 1155 C2' G 0 55 17.714 10.363 15.167 1.00 83.29 C 1156 | ATOM 1156 O2' G 0 55 18.827 11.139 15.583 1.00 89.42 O 1157 | ATOM 1157 C1' G 0 55 18.069 8.888 15.325 1.00 84.96 C 1158 | ATOM 1158 N9 G 0 55 17.291 8.038 14.406 1.00 88.65 N 1159 | ATOM 1159 C8 G 0 55 16.324 7.146 14.796 1.00 93.87 C 1160 | ATOM 1160 N7 G 0 55 15.772 6.509 13.807 1.00 95.84 N 1161 | ATOM 1161 C5 G 0 55 16.411 7.011 12.686 1.00 81.58 C 1162 | ATOM 1162 C6 G 0 55 16.220 6.683 11.322 1.00 81.25 C 1163 | ATOM 1163 O6 G 0 55 15.428 5.868 10.834 1.00 82.91 O 1164 | ATOM 1164 N1 G 0 55 17.068 7.420 10.504 1.00 76.67 N 1165 | ATOM 1165 C2 G 0 55 17.981 8.348 10.941 1.00 80.37 C 1166 | ATOM 1166 N2 G 0 55 18.704 8.950 9.985 1.00 80.29 N 1167 | ATOM 1167 N3 G 0 55 18.169 8.663 12.214 1.00 82.91 N 1168 | ATOM 1168 C4 G 0 55 17.356 7.959 13.031 1.00 84.08 C 1169 | ATOM 1169 P C 0 56 15.000 12.591 16.078 1.00 83.85 P 1170 | ATOM 1170 OP1 C 0 56 14.740 13.717 17.011 1.00 92.38 O 1171 | ATOM 1171 OP2 C 0 56 13.937 11.582 15.839 1.00 92.91 O 1172 | ATOM 1172 O5' C 0 56 15.434 13.196 14.671 1.00 80.30 O 1173 | ATOM 1173 C5' C 0 56 16.639 13.934 14.538 1.00 82.74 C 1174 | ATOM 1174 C4' C 0 56 17.100 13.976 13.104 1.00 81.92 C 1175 | ATOM 1175 O4' C 0 56 17.527 12.651 12.687 1.00 79.85 O 1176 | ATOM 1176 C3' C 0 56 16.039 14.352 12.084 1.00 80.41 C 1177 | ATOM 1177 O3' C 0 56 15.800 15.744 12.005 1.00 82.50 O 1178 
| ATOM 1178 C2' C 0 56 16.585 13.741 10.804 1.00 83.81 C 1179 | ATOM 1179 O2' C 0 56 17.645 14.531 10.285 1.00 81.17 O 1180 | ATOM 1180 C1' C 0 56 17.173 12.436 11.335 1.00 79.32 C 1181 | ATOM 1181 N1 C 0 56 16.183 11.336 11.282 1.00 75.09 N 1182 | ATOM 1182 C2 C 0 56 15.910 10.728 10.054 1.00 76.39 C 1183 | ATOM 1183 O2 C 0 56 16.510 11.118 9.042 1.00 78.76 O 1184 | ATOM 1184 N3 C 0 56 15.002 9.727 9.999 1.00 82.01 N 1185 | ATOM 1185 C4 C 0 56 14.374 9.332 11.105 1.00 82.69 C 1186 | ATOM 1186 N4 C 0 56 13.489 8.340 11.000 1.00 98.92 N 1187 | ATOM 1187 C5 C 0 56 14.628 9.933 12.370 1.00 83.99 C 1188 | ATOM 1188 C6 C 0 56 15.529 10.920 12.408 1.00 82.29 C 1189 | ATOM 1189 P U 0 57 14.307 16.283 11.756 1.00 81.64 P 1190 | ATOM 1190 OP1 U 0 57 14.317 17.749 11.991 1.00 84.56 O 1191 | ATOM 1191 OP2 U 0 57 13.370 15.427 12.527 1.00 79.87 O 1192 | ATOM 1192 O5' U 0 57 14.060 16.021 10.203 1.00 80.73 O 1193 | ATOM 1193 C5' U 0 57 14.956 16.539 9.230 1.00 80.87 C 1194 | ATOM 1194 C4' U 0 57 14.770 15.885 7.881 1.00 78.71 C 1195 | ATOM 1195 O4' U 0 57 15.120 14.476 7.946 1.00 76.72 O 1196 | ATOM 1196 C3' U 0 57 13.357 15.873 7.324 1.00 76.97 C 1197 | ATOM 1197 O3' U 0 57 12.949 17.110 6.774 1.00 78.49 O 1198 | ATOM 1198 C2' U 0 57 13.423 14.744 6.309 1.00 79.49 C 1199 | ATOM 1199 O2' U 0 57 14.111 15.163 5.140 1.00 81.49 O 1200 | ATOM 1200 C1' U 0 57 14.308 13.742 7.049 1.00 83.47 C 1201 | ATOM 1201 N1 U 0 57 13.494 12.769 7.813 1.00 72.32 N 1202 | ATOM 1202 C2 U 0 57 12.869 11.770 7.094 1.00 72.08 C 1203 | ATOM 1203 O2 U 0 57 12.973 11.658 5.886 1.00 73.91 O 1204 | ATOM 1204 N3 U 0 57 12.119 10.900 7.842 1.00 79.96 N 1205 | ATOM 1205 C4 U 0 57 11.928 10.926 9.205 1.00 83.92 C 1206 | ATOM 1206 O4 U 0 57 11.221 10.067 9.730 1.00 84.18 O 1207 | ATOM 1207 C5 U 0 57 12.601 11.989 9.880 1.00 86.45 C 1208 | ATOM 1208 C6 U 0 57 13.340 12.853 9.176 1.00 78.96 C 1209 | ATOM 1209 P G 0 58 11.435 17.605 6.981 1.00 87.82 P 1210 | ATOM 1210 OP1 G 0 58 11.399 19.075 6.791 1.00 91.66 O 1211 | ATOM 1211 OP2 G 0 58 10.944 17.021 8.255 1.00 77.34 O 1212 | ATOM 1212 O5' G 0 58 10.634 16.913 5.789 1.00 76.71 O 1213 | ATOM 1213 C5' G 0 58 11.079 17.019 4.444 1.00 84.07 C 1214 | ATOM 1214 C4' G 0 58 10.421 15.980 3.571 1.00 88.38 C 1215 | ATOM 1215 O4' G 0 58 10.860 14.658 3.980 1.00 78.98 O 1216 | ATOM 1216 C3' G 0 58 8.903 15.920 3.658 1.00 75.85 C 1217 | ATOM 1217 O3' G 0 58 8.274 16.873 2.817 1.00 78.79 O 1218 | ATOM 1218 C2' G 0 58 8.597 14.476 3.283 1.00 72.78 C 1219 | ATOM 1219 O2' G 0 58 8.646 14.311 1.875 1.00 98.15 O 1220 | ATOM 1220 C1' G 0 58 9.791 13.742 3.894 1.00 69.66 C 1221 | ATOM 1221 N9 G 0 58 9.514 13.241 5.254 1.00 79.82 N 1222 | ATOM 1222 C8 G 0 58 9.967 13.802 6.425 1.00 88.76 C 1223 | ATOM 1223 N7 G 0 58 9.586 13.155 7.490 1.00 83.48 N 1224 | ATOM 1224 C5 G 0 58 8.839 12.095 6.993 1.00 82.62 C 1225 | ATOM 1225 C6 G 0 58 8.167 11.051 7.681 1.00 84.37 C 1226 | ATOM 1226 O6 G 0 58 8.099 10.852 8.901 1.00 78.18 O 1227 | ATOM 1227 N1 G 0 58 7.532 10.189 6.794 1.00 82.83 N 1228 | ATOM 1228 C2 G 0 58 7.540 10.314 5.426 1.00 74.22 C 1229 | ATOM 1229 N2 G 0 58 6.866 9.378 4.745 1.00 80.61 N 1230 | ATOM 1230 N3 G 0 58 8.161 11.281 4.772 1.00 75.04 N 1231 | ATOM 1231 C4 G 0 58 8.786 12.131 5.613 1.00 81.31 C 1232 | ATOM 1232 P C 0 59 6.761 17.337 3.094 1.00 79.58 P 1233 | ATOM 1233 OP1 C 0 59 6.492 18.541 2.265 1.00 79.13 O 1234 | ATOM 1234 OP2 C 0 59 6.558 17.387 4.562 1.00 95.75 O 1235 | ATOM 1235 O5' C 0 59 5.879 16.148 2.507 1.00 97.64 O 1236 | ATOM 1236 C5' C 0 59 5.868 15.872 1.114 1.00 88.44 C 1237 | 
ATOM 1237 C4' C 0 59 5.150 14.583 0.799 1.00 88.60 C 1238 | ATOM 1238 O4' C 0 59 5.792 13.474 1.485 1.00 90.61 O 1239 | ATOM 1239 C3' C 0 59 3.699 14.490 1.243 1.00100.13 C 1240 | ATOM 1240 O3' C 0 59 2.800 15.161 0.378 1.00101.93 O 1241 | ATOM 1241 C2' C 0 59 3.475 12.986 1.308 1.00 98.07 C 1242 | ATOM 1242 O2' C 0 59 3.299 12.450 0.005 1.00 98.77 O 1243 | ATOM 1243 C1' C 0 59 4.825 12.507 1.842 1.00 95.55 C 1244 | ATOM 1244 N1 C 0 59 4.815 12.354 3.316 1.00 87.72 N 1245 | ATOM 1245 C2 C 0 59 4.182 11.240 3.877 1.00 86.12 C 1246 | ATOM 1246 O2 C 0 59 3.642 10.407 3.134 1.00 90.55 O 1247 | ATOM 1247 N3 C 0 59 4.172 11.096 5.221 1.00 90.86 N 1248 | ATOM 1248 C4 C 0 59 4.757 12.002 6.003 1.00 82.83 C 1249 | ATOM 1249 N4 C 0 59 4.718 11.811 7.322 1.00 84.26 N 1250 | ATOM 1250 C5 C 0 59 5.410 13.145 5.461 1.00 89.06 C 1251 | ATOM 1251 C6 C 0 59 5.413 13.277 4.128 1.00 91.85 C 1252 | ATOM 1252 P G 0 60 1.619 16.066 0.989 1.00 96.62 P 1253 | ATOM 1253 OP1 G 0 60 0.706 16.445 -0.119 1.00 84.47 O 1254 | ATOM 1254 OP2 G 0 60 2.245 17.130 1.812 1.00 94.80 O 1255 | ATOM 1255 O5' G 0 60 0.842 15.085 1.978 1.00107.95 O 1256 | ATOM 1256 C5' G 0 60 0.065 13.999 1.491 1.00 87.94 C 1257 | ATOM 1257 C4' G 0 60 -0.500 13.181 2.625 1.00 94.22 C 1258 | ATOM 1258 O4' G 0 60 0.587 12.621 3.409 1.00111.06 O 1259 | ATOM 1259 C3' G 0 60 -1.330 13.953 3.641 1.00104.32 C 1260 | ATOM 1260 O3' G 0 60 -2.666 14.154 3.224 1.00120.51 O 1261 | ATOM 1261 C2' G 0 60 -1.192 13.107 4.896 1.00102.52 C 1262 | ATOM 1262 O2' G 0 60 -2.023 11.958 4.818 1.00113.20 O 1263 | ATOM 1263 C1' G 0 60 0.262 12.657 4.783 1.00112.88 C 1264 | ATOM 1264 N9 G 0 60 1.184 13.607 5.434 1.00 99.87 N 1265 | ATOM 1265 C8 G 0 60 1.843 14.648 4.825 1.00 94.27 C 1266 | ATOM 1266 N7 G 0 60 2.599 15.329 5.638 1.00 92.02 N 1267 | ATOM 1267 C5 G 0 60 2.432 14.699 6.862 1.00 92.47 C 1268 | ATOM 1268 C6 G 0 60 3.004 14.993 8.125 1.00 95.37 C 1269 | ATOM 1269 O6 G 0 60 3.797 15.895 8.421 1.00 98.72 O 1270 | ATOM 1270 N1 G 0 60 2.563 14.102 9.098 1.00100.39 N 1271 | ATOM 1271 C2 G 0 60 1.688 13.065 8.885 1.00108.84 C 1272 | ATOM 1272 N2 G 0 60 1.384 12.315 9.953 1.00125.48 N 1273 | ATOM 1273 N3 G 0 60 1.147 12.782 7.712 1.00102.05 N 1274 | ATOM 1274 C4 G 0 60 1.561 13.635 6.754 1.00 96.90 C 1275 | ATOM 1275 P A 0 61 -3.135 15.610 2.738 1.00149.05 P 1276 | ATOM 1276 OP1 A 0 61 -1.923 16.385 2.369 1.00140.52 O 1277 | ATOM 1277 OP2 A 0 61 -4.085 16.152 3.742 1.00160.82 O 1278 | ATOM 1278 O5' A 0 61 -3.947 15.325 1.401 1.00 91.43 O 1279 | ATOM 1279 C5' A 0 61 -3.319 14.736 0.268 1.00101.22 C 1280 | ATOM 1280 C4' A 0 61 -4.336 14.351 -0.775 1.00113.20 C 1281 | ATOM 1281 O4' A 0 61 -4.863 13.035 -0.460 1.00131.38 O 1282 | ATOM 1282 C3' A 0 61 -5.536 15.288 -0.860 1.00 99.47 C 1283 | ATOM 1283 O3' A 0 61 -5.964 15.421 -2.212 1.00101.21 O 1284 | ATOM 1284 C2' A 0 61 -6.600 14.573 -0.032 1.00117.30 C 1285 | ATOM 1285 O2' A 0 61 -7.926 14.864 -0.414 1.00129.00 O 1286 | ATOM 1286 C1' A 0 61 -6.263 13.102 -0.265 1.00 90.41 C 1287 | ATOM 1287 N9 A 0 61 -6.590 12.252 0.883 1.00109.82 N 1288 | ATOM 1288 C8 A 0 61 -5.727 11.501 1.644 1.00120.11 C 1289 | ATOM 1289 N7 A 0 61 -6.316 10.841 2.614 1.00122.61 N 1290 | ATOM 1290 C5 A 0 61 -7.654 11.180 2.482 1.00125.76 C 1291 | ATOM 1291 C6 A 0 61 -8.802 10.811 3.205 1.00123.11 C 1292 | ATOM 1292 N6 A 0 61 -8.789 9.982 4.248 1.00128.08 N 1293 | ATOM 1293 N1 A 0 61 -9.987 11.327 2.816 1.00106.91 N 1294 | ATOM 1294 C2 A 0 61 -10.002 12.154 1.765 1.00107.26 C 1295 | ATOM 1295 N3 A 0 61 -8.998 12.578 1.004 1.00101.84 N 1296 | ATOM 1296 C4 
A 0 61 -7.838 12.043 1.417 1.00112.47 C 1297 | ATOM 1297 P A 0 62 -6.281 16.885 -2.792 1.00104.90 P 1298 | ATOM 1298 OP1 A 0 62 -5.497 17.094 -4.036 1.00102.83 O 1299 | ATOM 1299 OP2 A 0 62 -6.129 17.840 -1.664 1.00117.71 O 1300 | ATOM 1300 O5' A 0 62 -7.825 16.817 -3.167 1.00 99.86 O 1301 | ATOM 1301 C5' A 0 62 -8.330 15.776 -3.989 1.00102.87 C 1302 | ATOM 1302 C4' A 0 62 -9.831 15.690 -3.893 1.00104.63 C 1303 | ATOM 1303 O4' A 0 62 -10.211 14.824 -2.794 1.00113.72 O 1304 | ATOM 1304 C3' A 0 62 -10.543 17.000 -3.597 1.00117.75 C 1305 | ATOM 1305 O3' A 0 62 -10.719 17.795 -4.756 1.00104.42 O 1306 | ATOM 1306 C2' A 0 62 -11.853 16.541 -2.966 1.00119.06 C 1307 | ATOM 1307 O2' A 0 62 -12.786 16.167 -3.969 1.00116.05 O 1308 | ATOM 1308 C1' A 0 62 -11.415 15.280 -2.214 1.00111.14 C 1309 | ATOM 1309 N9 A 0 62 -11.205 15.502 -0.767 1.00104.14 N 1310 | ATOM 1310 C8 A 0 62 -10.384 16.404 -0.132 1.00110.72 C 1311 | ATOM 1311 N7 A 0 62 -10.425 16.328 1.180 1.00116.14 N 1312 | ATOM 1312 C5 A 0 62 -11.332 15.306 1.434 1.00101.55 C 1313 | ATOM 1313 C6 A 0 62 -11.823 14.730 2.625 1.00 95.26 C 1314 | ATOM 1314 N6 A 0 62 -11.464 15.111 3.854 1.00 92.37 N 1315 | ATOM 1315 N1 A 0 62 -12.724 13.729 2.513 1.00 89.56 N 1316 | ATOM 1316 C2 A 0 62 -13.098 13.337 1.289 1.00 92.50 C 1317 | ATOM 1317 N3 A 0 62 -12.707 13.796 0.103 1.00 94.43 N 1318 | ATOM 1318 C4 A 0 62 -11.811 14.789 0.243 1.00 96.45 C 1319 | ATOM 1319 P U 0 63 -10.542 19.390 -4.684 1.00114.43 P 1320 | ATOM 1320 OP1 U 0 63 -11.033 19.925 -5.982 1.00 99.58 O 1321 | ATOM 1321 OP2 U 0 63 -9.143 19.677 -4.274 1.00106.52 O 1322 | ATOM 1322 O5' U 0 63 -11.518 19.829 -3.498 1.00129.93 O 1323 | ATOM 1323 C5' U 0 63 -11.798 21.202 -3.229 1.00108.91 C 1324 | ATOM 1324 C4' U 0 63 -12.661 21.798 -4.311 1.00117.42 C 1325 | ATOM 1325 O4' U 0 63 -13.842 20.971 -4.505 1.00108.37 O 1326 | ATOM 1326 C3' U 0 63 -13.171 23.221 -4.081 1.00116.90 C 1327 | ATOM 1327 O3' U 0 63 -13.205 23.879 -5.347 1.00121.42 O 1328 | ATOM 1328 C2' U 0 63 -14.600 22.985 -3.604 1.00109.72 C 1329 | ATOM 1329 O2' U 0 63 -15.480 24.071 -3.800 1.00120.24 O 1330 | ATOM 1330 C1' U 0 63 -14.993 21.784 -4.451 1.00118.47 C 1331 | ATOM 1331 N1 U 0 63 -16.109 21.000 -3.914 1.00118.00 N 1332 | ATOM 1332 C2 U 0 63 -17.144 20.774 -4.794 1.00124.42 C 1333 | ATOM 1333 O2 U 0 63 -17.127 21.189 -5.940 1.00121.16 O 1334 | ATOM 1334 N3 U 0 63 -18.186 20.047 -4.284 1.00124.95 N 1335 | ATOM 1335 C4 U 0 63 -18.283 19.542 -3.005 1.00120.11 C 1336 | ATOM 1336 O4 U 0 63 -19.287 18.904 -2.689 1.00118.87 O 1337 | ATOM 1337 C5 U 0 63 -17.167 19.826 -2.153 1.00111.67 C 1338 | ATOM 1338 C6 U 0 63 -16.135 20.534 -2.623 1.00107.51 C 1339 | ATOM 1339 P U 0 64 -13.265 25.480 -5.455 1.00125.48 P 1340 | ATOM 1340 OP1 U 0 64 -11.942 25.954 -5.931 1.00139.34 O 1341 | ATOM 1341 OP2 U 0 64 -13.831 26.023 -4.195 1.00138.51 O 1342 | ATOM 1342 O5' U 0 64 -14.315 25.731 -6.626 1.00136.45 O 1343 | ATOM 1343 C5' U 0 64 -15.610 26.244 -6.351 1.00125.71 C 1344 | ATOM 1344 C4' U 0 64 -16.623 25.748 -7.352 1.00125.35 C 1345 | ATOM 1345 O4' U 0 64 -16.940 24.357 -7.086 1.00121.38 O 1346 | ATOM 1346 C3' U 0 64 -17.973 26.444 -7.323 1.00121.72 C 1347 | ATOM 1347 O3' U 0 64 -17.970 27.677 -8.020 1.00109.09 O 1348 | ATOM 1348 C2' U 0 64 -18.903 25.400 -7.928 1.00124.22 C 1349 | ATOM 1349 O2' U 0 64 -18.800 25.389 -9.344 1.00130.34 O 1350 | ATOM 1350 C1' U 0 64 -18.298 24.105 -7.385 1.00133.60 C 1351 | ATOM 1351 N1 U 0 64 -18.978 23.653 -6.151 1.00131.76 N 1352 | ATOM 1352 C2 U 0 64 -20.212 23.041 -6.271 1.00127.37 C 1353 | ATOM 1353 O2 U 0 64 
-20.767 22.854 -7.341 1.00122.63 O 1354 | ATOM 1354 N3 U 0 64 -20.777 22.652 -5.081 1.00131.18 N 1355 | ATOM 1355 C4 U 0 64 -20.247 22.810 -3.817 1.00119.40 C 1356 | ATOM 1356 O4 U 0 64 -20.877 22.407 -2.840 1.00114.35 O 1357 | ATOM 1357 C5 U 0 64 -18.970 23.451 -3.781 1.00117.64 C 1358 | ATOM 1358 C6 U 0 64 -18.396 23.841 -4.921 1.00124.38 C 1359 | ATOM 1359 P C 0 65 -18.557 28.996 -7.318 1.00128.79 P 1360 | ATOM 1360 OP1 C 0 65 -18.098 30.171 -8.101 1.00142.94 O 1361 | ATOM 1361 OP2 C 0 65 -18.245 28.926 -5.868 1.00119.88 O 1362 | ATOM 1362 O5' C 0 65 -20.133 28.863 -7.514 1.00129.59 O 1363 | ATOM 1363 C5' C 0 65 -20.756 29.369 -8.684 1.00117.81 C 1364 | ATOM 1364 C4' C 0 65 -21.815 28.434 -9.215 1.00120.27 C 1365 | ATOM 1365 O4' C 0 65 -21.510 27.062 -8.848 1.00116.55 O 1366 | ATOM 1366 C3' C 0 65 -23.224 28.637 -8.686 1.00120.69 C 1367 | ATOM 1367 O3' C 0 65 -23.901 29.731 -9.279 1.00113.79 O 1368 | ATOM 1368 C2' C 0 65 -23.871 27.291 -8.973 1.00123.10 C 1369 | ATOM 1369 O2' C 0 65 -24.209 27.176 -10.348 1.00115.95 O 1370 | ATOM 1370 C1' C 0 65 -22.713 26.337 -8.678 1.00120.18 C 1371 | ATOM 1371 N1 C 0 65 -22.784 25.822 -7.292 1.00122.79 N 1372 | ATOM 1372 C2 C 0 65 -23.752 24.856 -6.990 1.00125.75 C 1373 | ATOM 1373 O2 C 0 65 -24.510 24.449 -7.883 1.00123.68 O 1374 | ATOM 1374 N3 C 0 65 -23.841 24.382 -5.728 1.00129.45 N 1375 | ATOM 1375 C4 C 0 65 -23.014 24.836 -4.787 1.00121.89 C 1376 | ATOM 1376 N4 C 0 65 -23.139 24.337 -3.556 1.00125.44 N 1377 | ATOM 1377 C5 C 0 65 -22.023 25.821 -5.065 1.00117.43 C 1378 | ATOM 1378 C6 C 0 65 -21.944 26.285 -6.318 1.00125.06 C 1379 | ATOM 1379 P U 0 66 -24.940 30.592 -8.406 1.00136.19 P 1380 | ATOM 1380 OP1 U 0 66 -25.517 31.658 -9.263 1.00132.49 O 1381 | ATOM 1381 OP2 U 0 66 -24.256 30.964 -7.142 1.00129.87 O 1382 | ATOM 1382 O5' U 0 66 -26.107 29.561 -8.066 1.00132.90 O 1383 | ATOM 1383 C5' U 0 66 -26.953 29.056 -9.089 1.00129.44 C 1384 | ATOM 1384 C4' U 0 66 -28.016 28.141 -8.533 1.00130.72 C 1385 | ATOM 1385 O4' U 0 66 -27.399 26.960 -7.952 1.00121.83 O 1386 | ATOM 1386 C3' U 0 66 -28.858 28.708 -7.401 1.00132.16 C 1387 | ATOM 1387 O3' U 0 66 -29.896 29.566 -7.841 1.00118.83 O 1388 | ATOM 1388 C2' U 0 66 -29.357 27.452 -6.704 1.00137.58 C 1389 | ATOM 1389 O2' U 0 66 -30.415 26.858 -7.442 1.00141.61 O 1390 | ATOM 1390 C1' U 0 66 -28.129 26.549 -6.813 1.00134.07 C 1391 | ATOM 1391 N1 U 0 66 -27.249 26.666 -5.625 1.00134.12 N 1392 | ATOM 1392 C2 U 0 66 -27.690 26.142 -4.421 1.00139.28 C 1393 | ATOM 1393 O2 U 0 66 -28.767 25.588 -4.282 1.00140.14 O 1394 | ATOM 1394 N3 U 0 66 -26.819 26.292 -3.370 1.00142.11 N 1395 | ATOM 1395 C4 U 0 66 -25.579 26.894 -3.392 1.00134.18 C 1396 | ATOM 1396 O4 U 0 66 -24.914 26.949 -2.356 1.00124.82 O 1397 | ATOM 1397 C5 U 0 66 -25.195 27.409 -4.671 1.00136.12 C 1398 | ATOM 1398 C6 U 0 66 -26.020 27.279 -5.714 1.00131.58 C 1399 | ATOM 1399 P G 0 67 -30.284 30.862 -6.972 1.00138.33 P 1400 | ATOM 1400 OP1 G 0 67 -31.218 31.697 -7.770 1.00126.63 O 1401 | ATOM 1401 OP2 G 0 67 -29.019 31.454 -6.468 1.00120.30 O 1402 | ATOM 1402 O5' G 0 67 -31.088 30.274 -5.725 1.00148.40 O 1403 | ATOM 1403 C5' G 0 67 -32.458 29.916 -5.847 1.00148.26 C 1404 | ATOM 1404 C4' G 0 67 -32.962 29.163 -4.639 1.00146.28 C 1405 | ATOM 1405 O4' G 0 67 -32.030 28.110 -4.284 1.00130.73 O 1406 | ATOM 1406 C3' G 0 67 -33.120 29.963 -3.354 1.00130.95 C 1407 | ATOM 1407 O3' G 0 67 -34.321 30.716 -3.320 1.00129.35 O 1408 | ATOM 1408 C2' G 0 67 -33.053 28.883 -2.282 1.00126.32 C 1409 | ATOM 1409 O2' G 0 67 -34.298 28.208 -2.172 1.00122.15 O 1410 | ATOM 1410 
C1' G 0 67 -32.032 27.917 -2.885 1.00124.07 C 1411 | ATOM 1411 N9 G 0 67 -30.668 28.147 -2.374 1.00122.41 N 1412 | ATOM 1412 C8 G 0 67 -29.610 28.709 -3.047 1.00124.42 C 1413 | ATOM 1413 N7 G 0 67 -28.522 28.771 -2.331 1.00123.30 N 1414 | ATOM 1414 C5 G 0 67 -28.883 28.214 -1.112 1.00125.60 C 1415 | ATOM 1415 C6 G 0 67 -28.119 28.009 0.063 1.00126.11 C 1416 | ATOM 1416 O6 G 0 67 -26.931 28.292 0.261 1.00123.13 O 1417 | ATOM 1417 N1 G 0 67 -28.880 27.412 1.066 1.00129.01 N 1418 | ATOM 1418 C2 G 0 67 -30.204 27.057 0.954 1.00132.37 C 1419 | ATOM 1419 N2 G 0 67 -30.767 26.490 2.033 1.00127.91 N 1420 | ATOM 1420 N3 G 0 67 -30.925 27.244 -0.138 1.00135.10 N 1421 | ATOM 1421 C4 G 0 67 -30.205 27.821 -1.123 1.00129.90 C 1422 | ATOM 1422 P C 0 68 -34.449 32.000 -2.362 1.00144.16 P 1423 | ATOM 1423 OP1 C 0 68 -35.711 32.709 -2.696 1.00133.80 O 1424 | ATOM 1424 OP2 C 0 68 -33.157 32.730 -2.405 1.00148.17 O 1425 | ATOM 1425 O5' C 0 68 -34.618 31.383 -0.904 1.00142.09 O 1426 | ATOM 1426 C5' C 0 68 -35.788 30.663 -0.547 1.00136.40 C 1427 | ATOM 1427 C4' C 0 68 -35.659 30.059 0.828 1.00131.09 C 1428 | ATOM 1428 O4' C 0 68 -34.537 29.138 0.853 1.00124.10 O 1429 | ATOM 1429 C3' C 0 68 -35.358 31.039 1.951 1.00129.56 C 1430 | ATOM 1430 O3' C 0 68 -36.512 31.708 2.425 1.00133.37 O 1431 | ATOM 1431 C2' C 0 68 -34.686 30.156 2.993 1.00140.09 C 1432 | ATOM 1432 O2' C 0 68 -35.653 29.404 3.711 1.00141.92 O 1433 | ATOM 1433 C1' C 0 68 -33.890 29.199 2.106 1.00123.06 C 1434 | ATOM 1434 N1 C 0 68 -32.500 29.662 1.888 1.00121.27 N 1435 | ATOM 1435 C2 C 0 68 -31.534 29.428 2.870 1.00124.60 C 1436 | ATOM 1436 O2 C 0 68 -31.862 28.836 3.910 1.00133.59 O 1437 | ATOM 1437 N3 C 0 68 -30.265 29.853 2.661 1.00125.53 N 1438 | ATOM 1438 C4 C 0 68 -29.949 30.486 1.529 1.00121.43 C 1439 | ATOM 1439 N4 C 0 68 -28.687 30.888 1.363 1.00120.92 N 1440 | ATOM 1440 C5 C 0 68 -30.913 30.737 0.512 1.00120.42 C 1441 | ATOM 1441 C6 C 0 68 -32.159 30.310 0.735 1.00121.81 C 1442 | ATOM 1442 P U 0 69 -36.418 33.251 2.857 1.00145.73 P 1443 | ATOM 1443 OP1 U 0 69 -37.786 33.830 2.857 1.00140.63 O 1444 | ATOM 1444 OP2 U 0 69 -35.348 33.878 2.042 1.00161.06 O 1445 | ATOM 1445 O5' U 0 69 -35.906 33.192 4.362 1.00137.75 O 1446 | ATOM 1446 C5' U 0 69 -36.696 32.593 5.377 1.00136.51 C 1447 | ATOM 1447 C4' U 0 69 -35.892 32.361 6.630 1.00136.35 C 1448 | ATOM 1448 O4' U 0 69 -34.792 31.460 6.343 1.00133.88 O 1449 | ATOM 1449 C3' U 0 69 -35.218 33.590 7.220 1.00145.05 C 1450 | ATOM 1450 O3' U 0 69 -36.096 34.378 8.005 1.00140.62 O 1451 | ATOM 1451 C2' U 0 69 -34.063 32.989 8.012 1.00133.52 C 1452 | ATOM 1452 O2' U 0 69 -34.519 32.484 9.258 1.00134.15 O 1453 | ATOM 1453 C1' U 0 69 -33.667 31.807 7.123 1.00130.35 C 1454 | ATOM 1454 N1 U 0 69 -32.547 32.144 6.215 1.00129.45 N 1455 | ATOM 1455 C2 U 0 69 -31.257 32.043 6.705 1.00128.25 C 1456 | ATOM 1456 O2 U 0 69 -31.005 31.691 7.845 1.00131.44 O 1457 | ATOM 1457 N3 U 0 69 -30.268 32.371 5.809 1.00124.12 N 1458 | ATOM 1458 C4 U 0 69 -30.438 32.779 4.502 1.00124.75 C 1459 | ATOM 1459 O4 U 0 69 -29.451 33.041 3.813 1.00123.46 O 1460 | ATOM 1460 C5 U 0 69 -31.800 32.859 4.073 1.00127.69 C 1461 | ATOM 1461 C6 U 0 69 -32.782 32.546 4.922 1.00128.53 C 1462 | TER 1463 | END 1464 | -------------------------------------------------------------------------------- /fig/pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ml4bio/RiboDiffusion/3ac7a557f470c25d95379acedf75a9a49f70ef6e/fig/pipeline.png 
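A note on how a structure file like `example/R1107.pdb` above is consumed: `main.py` (next section) points the inference pipeline at a PDB file, and the model ultimately sees each nucleotide as a small coordinate tensor (the `coords` field assembled in `models/GVP_diff.py` is commented as `num_res x 3 x 3`, i.e. three atoms per residue). The repository's own featurization lives in `datasets/utils.py` and is the reference; the snippet below is only a minimal illustrative sketch using Biopython, and the choice of P, C4' and the glycosidic nitrogen (N9 for purines, N1 for pyrimidines) as the three atoms is an assumption, not a confirmed detail of the official pipeline.

```python
# Illustrative sketch only (not the repository's loader): pull three backbone
# atoms per nucleotide from a PDB file. Assumptions not taken from the repo:
# Biopython is installed, and the three atoms are P, C4' and the glycosidic N;
# datasets/utils.py remains the authoritative featurizer.
import numpy as np
from Bio.PDB import PDBParser

PURINES = {"A", "G"}
RNA_BASES = {"A", "U", "G", "C"}


def backbone_coords(pdb_path: str) -> np.ndarray:
    structure = PDBParser(QUIET=True).get_structure("rna", pdb_path)
    coords = []
    for residue in structure[0].get_residues():
        name = residue.get_resname().strip()
        if name not in RNA_BASES:
            continue  # skip waters, ions and modified residues
        glyco_n = "N9" if name in PURINES else "N1"
        try:
            atoms = (residue["P"], residue["C4'"], residue[glyco_n])
        except KeyError:
            continue  # e.g. a 5'-terminal residue without a phosphate
        coords.append([atom.get_coord() for atom in atoms])
    return np.asarray(coords, dtype=np.float32)  # shape: (num_res, 3, 3)


if __name__ == "__main__":
    print(backbone_coords("example/R1107.pdb").shape)
```

By construction this returns an array of shape `(num_res, 3, 3)`, the per-conformer layout that `geo_batch` in `models/GVP_diff.py` expects for its `coords` field.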
-------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | """Training and evaluation""" 2 | 3 | import run_lib 4 | from absl import app, flags 5 | from ml_collections.config_flags import config_flags 6 | import os 7 | 8 | 9 | FLAGS = flags.FLAGS 10 | 11 | config_flags.DEFINE_config_file( 12 | 'config', 'configs/inference_ribodiffusion.py', 'Training configuration.', lock_config=True 13 | ) 14 | flags.DEFINE_enum('mode', 'inference', ['inference'], 15 | 'Running mode') 16 | flags.DEFINE_string('save_folder', 'exp_inf', 'The folder name for storing inference results') 17 | flags.DEFINE_string('PDB_file', 'example/R1107.pdb', 'The PDB file for inference') 18 | flags.DEFINE_boolean('deterministic', True, 'Set random seed for reproducibility.') 19 | flags.mark_flags_as_required(['PDB_file']) 20 | 21 | 22 | def main(argv): 23 | # Set random seed 24 | if FLAGS.deterministic: 25 | run_lib.set_random_seed(FLAGS.config) 26 | 27 | if FLAGS.mode == 'inference': 28 | run_lib.vpsde_inference(FLAGS.config, FLAGS.save_folder, FLAGS.PDB_file) 29 | else: 30 | raise ValueError(f"Mode {FLAGS.mode} not recognized.") 31 | 32 | 33 | if __name__ == '__main__': 34 | app.run(main) 35 | -------------------------------------------------------------------------------- /models/GVP_diff.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import math 3 | import torch 4 | import torch.nn as nn 5 | import torch.nn.functional as F 6 | import torch_geometric 7 | from torch_geometric.nn import MessagePassing 8 | from torch_geometric.data import Batch 9 | from torch_scatter import scatter_add 10 | 11 | from .utils import register_model 12 | from .esm_block import DihedralFeatures 13 | from .transformer_layer import TransformerEncoderCondLayer, SinusoidalPositionalEmbedding 14 | import random  # used by the drop_struct check in forward() 15 | 16 | @torch.no_grad() 17 | def geo_batch(batch): 18 | data_list = [] 19 | # print(len(batch['z_t'])) 20 | batch_size, length = batch['z_t'].shape[:2] 21 | 22 | for i in range(batch_size): 23 | data_list.append(torch_geometric.data.Data( 24 | z_t=batch['z_t'][i], 25 | seq=batch['seq'][i], # num_res x 1 26 | coords=batch['coords'][i], # num_res x 3 x 3 27 | node_s=batch['node_s'][i], # num_res x num_conf x 4 28 | node_v=batch['node_v'][i], # num_res x num_conf x 4 x 3 29 | edge_s=batch['edge_s'][i], # num_edges x num_conf x 32 30 | edge_v=batch['edge_v'][i], # num_edges x num_conf x 1 x 3 31 | edge_index=batch['edge_index'][i], # 2 x num_edges 32 | mask=batch['mask'][i] # num_res x 1 33 | )) 34 | 35 | return Batch.from_data_list(data_list), batch_size, length 36 | 37 | @register_model(name='GVPTransCond') 38 | class GVPTransCond(torch.nn.Module): 39 | ''' 40 | GVP + Transformer model for RNA design 41 | 42 | :param node_in_dim: node dimensions in input graph 43 | :param node_h_dim: node dimensions to use in GVP-GNN layers 44 | :param edge_in_dim: edge dimensions in input graph 45 | :param edge_h_dim: edge dimensions to embed in GVP-GNN layers 46 | :param num_layers: number of GVP-GNN layers in encoder/decoder 47 | :param drop_rate: rate to use in all dropout layers 48 | :param out_dim: output dimension (4 bases) 49 | ''' 50 | 51 | def __init__(self, config): 52 | super().__init__() 53 | self.node_in_dim = tuple(config.model.node_in_dim) # node_in_dim 54 | self.node_h_dim = tuple(config.model.node_h_dim) # node_h_dim 55 | self.edge_in_dim = tuple(config.model.edge_in_dim) #
edge_in_dim 56 | self.edge_h_dim = tuple(config.model.edge_in_dim) # edge_h_dim 57 | self.num_layers = config.model.num_layers 58 | self.out_dim = config.model.out_dim 59 | self.time_cond = config.model.time_cond 60 | self.dihedral_angle = config.model.dihedral_angle 61 | self.drop_struct = config.model.drop_struct 62 | drop_rate = config.model.drop_rate 63 | activations = (F.relu, None) 64 | 65 | # Node input embedding 66 | self.W_v = torch.nn.Sequential( 67 | LayerNorm(self.node_in_dim), 68 | GVP(self.node_in_dim, self.node_h_dim, 69 | activations=(None, None), vector_gate=True) 70 | ) 71 | 72 | # Edge input embedding 73 | self.W_e = torch.nn.Sequential( 74 | LayerNorm(self.edge_in_dim), 75 | GVP(self.edge_in_dim, self.edge_h_dim, 76 | activations=(None, None), vector_gate=True) 77 | ) 78 | 79 | # Encoder layers (supports multiple conformations) 80 | self.encoder_layers = nn.ModuleList( 81 | GVPConvLayer(self.node_h_dim, self.edge_h_dim, 82 | activations=activations, vector_gate=True, 83 | drop_rate=drop_rate) 84 | for _ in range(self.num_layers)) 85 | 86 | # Output 87 | self.W_out = GVP(self.node_h_dim, (self.node_h_dim[0], 0), activations=(None, None)) 88 | 89 | # Transformer Layers 90 | self.seq_res = nn.Linear(self.node_in_dim[0], self.node_h_dim[0]) 91 | self.mix_lin = nn.Linear(self.node_h_dim[0] * 2, self.node_h_dim[0]) 92 | self.num_trans_layer = config.model.num_trans_layer 93 | self.embed_positions = SinusoidalPositionalEmbedding( 94 | self.node_h_dim[0], 95 | -1, 96 | ) 97 | self.trans_layers = nn.ModuleList( 98 | TransformerEncoderCondLayer(config.model.trans) 99 | for _ in range(self.num_trans_layer)) 100 | self.MLP_out = nn.Sequential( 101 | nn.Linear(self.node_h_dim[0], self.node_h_dim[0]), 102 | nn.ReLU(), 103 | nn.Linear(self.node_h_dim[0], self.out_dim) 104 | ) 105 | 106 | # Time conditioning 107 | if self.time_cond: 108 | learned_sinu_pos_emb_dim = 16 109 | time_cond_dim = config.model.node_h_dim[0] * 2 110 | sinu_pos_emb = LearnedSinusoidalPosEmb(learned_sinu_pos_emb_dim) 111 | sinu_pos_emb_input_dim = learned_sinu_pos_emb_dim + 1 112 | self.to_time_hiddens = nn.Sequential( 113 | sinu_pos_emb, 114 | nn.Linear(sinu_pos_emb_input_dim, time_cond_dim), 115 | nn.SiLU(), 116 | nn.Linear(time_cond_dim, config.model.node_h_dim[0]), 117 | ) 118 | 119 | # Dihedral angle 120 | if self.dihedral_angle: 121 | self.embed_dihedral = DihedralFeatures(config.model.node_h_dim[0]) 122 | 123 | def struct_forward(self, batch, init_seq, batch_size, length, **kwargs): 124 | h_V = (init_seq, batch.node_v) 125 | h_E = (batch.edge_s, batch.edge_v) 126 | edge_index = batch.edge_index 127 | 128 | h_V = self.W_v(h_V) # (n_nodes, n_conf, d_s), (n_nodes, n_conf, d_v, 3) 129 | h_E = self.W_e(h_E) # (n_edges, n_conf, d_se), (n_edges, n_conf, d_ve, 3) 130 | 131 | if self.dihedral_angle: 132 | dihedral_feats = self.embed_dihedral(batch.coords).reshape_as(h_V[0]) 133 | h_V = (h_V[0] + dihedral_feats, h_V[1]) 134 | 135 | for layer in self.encoder_layers: 136 | h_V = layer(h_V, edge_index, h_E) # (n_nodes, n_conf, d_s), (n_nodes, n_conf, d_v, 3) 137 | 138 | gvp_output = self.W_out(h_V).reshape(batch_size, length, -1) 139 | return gvp_output 140 | 141 | def forward(self, batch, cond_drop_prob=0., **kwargs): 142 | # construct extra node and edge features 143 | batch, batch_size, length = geo_batch(batch) 144 | 145 | z_t = batch.z_t 146 | cond_x = kwargs.get('cond_x', None) 147 | if cond_x is None: 148 | cond_x = torch.zeros_like(batch.z_t) 149 | else: 150 | cond_x = cond_x.reshape_as(batch.z_t) 151 | 152 | 
init_seq = torch.cat([z_t, cond_x], -1) 153 | 154 | if self.training: 155 | if self.drop_struct > 0 and random.random() < self.drop_struct: 156 | gvp_output = torch.zeros(batch_size, length, self.node_h_dim[0], device=batch.z_t.device) 157 | else: 158 | gvp_output = self.struct_forward(batch, init_seq, batch_size, length, **kwargs) 159 | else: 160 | if cond_drop_prob == 0.: 161 | gvp_output = self.struct_forward(batch, init_seq, batch_size, length, **kwargs) 162 | elif cond_drop_prob == 1.: 163 | gvp_output = torch.zeros(batch_size, length, self.node_h_dim[0], device=batch.z_t.device) 164 | else: 165 | raise ValueError(f'Invalid cond_drop_prob: {cond_drop_prob}') 166 | 167 | trans_x = torch.cat([gvp_output, self.seq_res(init_seq.reshape(batch_size, length, -1))], dim=-1) 168 | trans_x = self.mix_lin(trans_x) 169 | 170 | if self.time_cond: 171 | noise_level = kwargs.get('noise_level') 172 | time_cond = self.to_time_hiddens(noise_level) # [B, d_s] 173 | time_cond = time_cond.unsqueeze(1).repeat(1, length, 1) # [B, length, d_s] 174 | else: 175 | time_cond = None 176 | 177 | # add position embedding 178 | seq_mask = torch.ones((batch_size, length), device=batch.z_t.device) 179 | pos_emb = self.embed_positions(seq_mask) 180 | 181 | trans_x = trans_x + pos_emb 182 | trans_x = trans_x.transpose(0, 1) 183 | 184 | # transformer layers 185 | for layer in self.trans_layers: 186 | trans_x = layer(trans_x, None, cond=time_cond.transpose(0, 1)) 187 | 188 | logits = self.MLP_out(trans_x.transpose(0, 1)) 189 | # logits = logits.reshape(batch_size, -1, self.out_dim) 190 | return logits 191 | 192 | 193 | class LearnedSinusoidalPosEmb(nn.Module): 194 | """ following @crowsonkb 's lead with learned sinusoidal pos emb """ 195 | """ https://github.com/crowsonkb/v-diffusion-jax/blob/master/diffusion/models/danbooru_128.py#L8 """ 196 | 197 | def __init__(self, dim): 198 | super().__init__() 199 | assert (dim % 2) == 0 200 | half_dim = dim // 2 201 | self.weights = nn.Parameter(torch.randn(half_dim)) 202 | 203 | def forward(self, x): 204 | # x = rearrange(x, 'b -> b 1') 205 | x = x.unsqueeze(-1) 206 | # freqs = x * rearrange(self.weights, 'd -> 1 d') * 2 * math.pi 207 | freqs = x * self.weights.unsqueeze(0) * 2 * math.pi 208 | fouriered = torch.cat((freqs.sin(), freqs.cos()), dim = -1) 209 | fouriered = torch.cat((x, fouriered), dim = -1) 210 | return fouriered 211 | 212 | 213 | ######################################################################### 214 | 215 | class GVPConvLayer(nn.Module): 216 | ''' 217 | Full graph convolution / message passing layer with 218 | Geometric Vector Perceptrons. Residually updates node embeddings with 219 | aggregated incoming messages, applies a pointwise feedforward 220 | network to node embeddings, and returns updated node embeddings. 221 | 222 | To only compute the aggregated messages, see `GVPConv`. 223 | 224 | :param node_dims: node embedding dimensions (n_scalar, n_vector) 225 | :param edge_dims: input edge embedding dimensions (n_scalar, n_vector) 226 | :param n_message: number of GVPs to use in message function 227 | :param n_feedforward: number of GVPs to use in feedforward function 228 | :param drop_rate: drop probability in all dropout layers 229 | :param autoregressive: if `True`, this `GVPConvLayer` will be used 230 | with a different set of input node embeddings for messages 231 | where src >= dst 232 | :param activations: tuple of functions (scalar_act, vector_act) to use in GVPs 233 | :param vector_gate: whether to use vector gating. 
234 | (vector_act will be used as sigma^+ in vector gating if `True`) 235 | ''' 236 | 237 | def __init__( 238 | self, 239 | node_dims, 240 | edge_dims, 241 | n_message=3, 242 | n_feedforward=2, 243 | drop_rate=.1, 244 | autoregressive=False, 245 | activations=(F.relu, torch.sigmoid), 246 | vector_gate=True, 247 | residual=True 248 | ): 249 | 250 | super(GVPConvLayer, self).__init__() 251 | self.conv = GVPConv(node_dims, node_dims, edge_dims, n_message, 252 | aggr="add" if autoregressive else "mean", 253 | activations=activations, vector_gate=vector_gate) 254 | GVP_ = functools.partial(GVP, 255 | activations=activations, vector_gate=vector_gate) 256 | self.norm = nn.ModuleList([LayerNorm(node_dims) for _ in range(2)]) 257 | self.dropout = nn.ModuleList([Dropout(drop_rate) for _ in range(2)]) 258 | 259 | ff_func = [] 260 | if n_feedforward == 1: 261 | ff_func.append(GVP_(node_dims, node_dims, activations=(None, None))) 262 | else: 263 | hid_dims = 4 * node_dims[0], 2 * node_dims[1] 264 | ff_func.append(GVP_(node_dims, hid_dims)) 265 | for i in range(n_feedforward - 2): 266 | ff_func.append(GVP_(hid_dims, hid_dims)) 267 | ff_func.append(GVP_(hid_dims, node_dims, activations=(None, None))) 268 | self.ff_func = nn.Sequential(*ff_func) 269 | self.residual = residual 270 | 271 | def forward(self, x, edge_index, edge_attr, 272 | autoregressive_x=None, node_mask=None): 273 | ''' 274 | :param x: tuple (s, V) of `torch.Tensor` 275 | :param edge_index: array of shape [2, n_edges] 276 | :param edge_attr: tuple (s, V) of `torch.Tensor` 277 | :param autoregressive_x: tuple (s, V) of `torch.Tensor`. 278 | If not `None`, will be used as src node embeddings 279 | for forming messages where src >= dst. The corrent node 280 | embeddings `x` will still be the base of the update and the 281 | pointwise feedforward. 282 | :param node_mask: array of type `bool` to index into the first 283 | dim of node embeddings (s, V). If not `None`, only 284 | these nodes will be updated. 285 | ''' 286 | 287 | if autoregressive_x is not None: 288 | src, dst = edge_index 289 | mask = src < dst 290 | edge_index_forward = edge_index[:, mask] 291 | edge_index_backward = edge_index[:, ~mask] 292 | edge_attr_forward = tuple_index(edge_attr, mask) 293 | edge_attr_backward = tuple_index(edge_attr, ~mask) 294 | 295 | dh = tuple_sum( 296 | self.conv(x, edge_index_forward, edge_attr_forward), 297 | self.conv(autoregressive_x, edge_index_backward, edge_attr_backward) 298 | ) 299 | 300 | count = scatter_add(torch.ones_like(dst), dst, 301 | dim_size=dh[0].size(0)).clamp(min=1).unsqueeze(-1) 302 | 303 | dh = dh[0] / count, dh[1] / count.unsqueeze(-1) 304 | 305 | else: 306 | dh = self.conv(x, edge_index, edge_attr) 307 | 308 | if node_mask is not None: 309 | x_ = x 310 | x, dh = tuple_index(x, node_mask), tuple_index(dh, node_mask) 311 | 312 | x = self.norm[0](tuple_sum(x, self.dropout[0](dh))) if self.residual else dh 313 | 314 | dh = self.ff_func(x) 315 | x = self.norm[1](tuple_sum(x, self.dropout[1](dh))) if self.residual else dh 316 | 317 | if node_mask is not None: 318 | x_[0][node_mask], x_[1][node_mask] = x[0], x[1] 319 | x = x_ 320 | return x 321 | 322 | 323 | class GVPConv(MessagePassing): 324 | ''' 325 | Graph convolution / message passing with Geometric Vector Perceptrons. 326 | Takes in a graph with node and edge embeddings, 327 | and returns new node embeddings. 328 | 329 | This does NOT do residual updates and pointwise feedforward layers 330 | ---see `GVPConvLayer`. 
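Messages are formed by concatenating source-node, edge, and destination-node features and passing them through a small stack of GVPs (see `message` below) before aggregation.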
331 | 332 | :param in_dims: input node embedding dimensions (n_scalar, n_vector) 333 | :param out_dims: output node embedding dimensions (n_scalar, n_vector) 334 | :param edge_dims: input edge embedding dimensions (n_scalar, n_vector) 335 | :param n_layers: number of GVPs in the message function 336 | :param module_list: preconstructed message function, overrides n_layers 337 | :param aggr: should be "add" if some incoming edges are masked, as in 338 | a masked autoregressive decoder architecture, otherwise "mean" 339 | :param activations: tuple of functions (scalar_act, vector_act) to use in GVPs 340 | :param vector_gate: whether to use vector gating. 341 | (vector_act will be used as sigma^+ in vector gating if `True`) 342 | ''' 343 | 344 | def __init__(self, in_dims, out_dims, edge_dims, 345 | n_layers=3, module_list=None, aggr="mean", 346 | activations=(F.relu, torch.sigmoid), vector_gate=True): 347 | super(GVPConv, self).__init__(aggr=aggr) 348 | self.si, self.vi = in_dims 349 | self.so, self.vo = out_dims 350 | self.se, self.ve = edge_dims 351 | 352 | GVP_ = functools.partial(GVP, 353 | activations=activations, vector_gate=vector_gate) 354 | 355 | module_list = module_list or [] 356 | if not module_list: 357 | if n_layers == 1: 358 | module_list.append( 359 | GVP_((2 * self.si + self.se, 2 * self.vi + self.ve), 360 | (self.so, self.vo), activations=(None, None))) 361 | else: 362 | module_list.append( 363 | GVP_((2 * self.si + self.se, 2 * self.vi + self.ve), out_dims) 364 | ) 365 | for i in range(n_layers - 2): 366 | module_list.append(GVP_(out_dims, out_dims)) 367 | module_list.append(GVP_(out_dims, out_dims, 368 | activations=(None, None))) 369 | self.message_func = nn.Sequential(*module_list) 370 | 371 | def forward(self, x, edge_index, edge_attr): 372 | ''' 373 | :param x: tuple (s, V) of `torch.Tensor` 374 | :param edge_index: array of shape [2, n_edges] 375 | :param edge_attr: tuple (s, V) of `torch.Tensor` 376 | ''' 377 | x_s, x_v = x 378 | message = self.propagate(edge_index, 379 | s=x_s, v=x_v.contiguous().view(x_v.shape[0], x_v.shape[1] * 3), 380 | edge_attr=edge_attr) 381 | return _split(message, self.vo) 382 | 383 | def message(self, s_i, v_i, s_j, v_j, edge_attr): 384 | v_j = v_j.view(v_j.shape[0], v_j.shape[1] // 3, 3) 385 | v_i = v_i.view(v_i.shape[0], v_i.shape[1] // 3, 3) 386 | message = tuple_cat((s_j, v_j), edge_attr, (s_i, v_i)) 387 | message = self.message_func(message) 388 | return _merge(*message) 389 | 390 | 391 | class GVP(nn.Module): 392 | ''' 393 | Geometric Vector Perceptron. See manuscript and README.md 394 | for more details. 395 | 396 | :param in_dims: tuple (n_scalar, n_vector) 397 | :param out_dims: tuple (n_scalar, n_vector) 398 | :param h_dim: intermediate number of vector channels, optional 399 | :param activations: tuple of functions (scalar_act, vector_act) 400 | :param vector_gate: whether to use vector gating. 
401 | (vector_act will be used as sigma^+ in vector gating if `True`) 402 | ''' 403 | 404 | def __init__(self, in_dims, out_dims, h_dim=None, 405 | activations=(F.relu, torch.sigmoid), vector_gate=True): 406 | super(GVP, self).__init__() 407 | self.si, self.vi = in_dims 408 | self.so, self.vo = out_dims 409 | self.vector_gate = vector_gate 410 | if self.vi: 411 | self.h_dim = h_dim or max(self.vi, self.vo) 412 | self.wh = nn.Linear(self.vi, self.h_dim, bias=False) 413 | self.ws = nn.Linear(self.h_dim + self.si, self.so) 414 | if self.vo: 415 | self.wv = nn.Linear(self.h_dim, self.vo, bias=False) 416 | if self.vector_gate: self.wsv = nn.Linear(self.so, self.vo) 417 | else: 418 | self.ws = nn.Linear(self.si, self.so) 419 | 420 | self.scalar_act, self.vector_act = activations 421 | self.dummy_param = nn.Parameter(torch.empty(0)) 422 | 423 | def forward(self, x): 424 | ''' 425 | :param x: tuple (s, V) of `torch.Tensor`, 426 | or (if vectors_in is 0), a single `torch.Tensor` 427 | :return: tuple (s, V) of `torch.Tensor`, 428 | or (if vectors_out is 0), a single `torch.Tensor` 429 | ''' 430 | if self.vi: 431 | s, v = x 432 | v = torch.transpose(v, -1, -2) 433 | vh = self.wh(v) 434 | vn = _norm_no_nan(vh, axis=-2) 435 | s = self.ws(torch.cat([s, vn], -1)) 436 | if self.vo: 437 | v = self.wv(vh) 438 | v = torch.transpose(v, -1, -2) 439 | if self.vector_gate: 440 | if self.vector_act: 441 | gate = self.wsv(self.vector_act(s)) 442 | else: 443 | gate = self.wsv(s) 444 | v = v * torch.sigmoid(gate).unsqueeze(-1) 445 | elif self.vector_act: 446 | v = v * self.vector_act( 447 | _norm_no_nan(v, axis=-1, keepdims=True)) 448 | else: 449 | s = self.ws(x) 450 | if self.vo: 451 | v = torch.zeros(s.shape[0], self.vo, 3, device=x.device) 452 | if self.scalar_act: 453 | s = self.scalar_act(s) 454 | 455 | return (s, v) if self.vo else s 456 | 457 | 458 | ######################################################################### 459 | 460 | class _VDropout(nn.Module): 461 | ''' 462 | Vector channel dropout where the elements of each 463 | vector channel are dropped together. 464 | ''' 465 | 466 | def __init__(self, drop_rate): 467 | super(_VDropout, self).__init__() 468 | self.drop_rate = drop_rate 469 | self.dummy_param = nn.Parameter(torch.empty(0)) 470 | 471 | def forward(self, x): 472 | ''' 473 | :param x: `torch.Tensor` corresponding to vector channels 474 | ''' 475 | device = x.device 476 | if not self.training: 477 | return x 478 | mask = torch.bernoulli( 479 | (1 - self.drop_rate) * torch.ones(x.shape[:-1], device=device) 480 | ).unsqueeze(-1) 481 | x = mask * x / (1 - self.drop_rate) 482 | return x 483 | 484 | 485 | class Dropout(nn.Module): 486 | ''' 487 | Combined dropout for tuples (s, V). 488 | Takes tuples (s, V) as input and as output. 489 | ''' 490 | 491 | def __init__(self, drop_rate): 492 | super(Dropout, self).__init__() 493 | self.sdropout = nn.Dropout(drop_rate) 494 | self.vdropout = _VDropout(drop_rate) 495 | 496 | def forward(self, x): 497 | ''' 498 | :param x: tuple (s, V) of `torch.Tensor`, 499 | or single `torch.Tensor` 500 | (will be assumed to be scalar channels) 501 | ''' 502 | if type(x) is torch.Tensor: 503 | return self.sdropout(x) 504 | s, v = x 505 | return self.sdropout(s), self.vdropout(v) 506 | 507 | 508 | class LayerNorm(nn.Module): 509 | ''' 510 | Combined LayerNorm for tuples (s, V). 511 | Takes tuples (s, V) as input and as output. 
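Scalar channels go through a standard `nn.LayerNorm`; vector channels are rescaled by the root-mean-square of their per-channel L2 norms, with no learned affine transform on the vector part.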
512 | ''' 513 | 514 | def __init__(self, dims): 515 | super(LayerNorm, self).__init__() 516 | self.s, self.v = dims 517 | self.scalar_norm = nn.LayerNorm(self.s) 518 | 519 | def forward(self, x): 520 | ''' 521 | :param x: tuple (s, V) of `torch.Tensor`, 522 | or single `torch.Tensor` 523 | (will be assumed to be scalar channels) 524 | ''' 525 | if not self.v: 526 | return self.scalar_norm(x) 527 | s, v = x 528 | vn = _norm_no_nan(v, axis=-1, keepdims=True, sqrt=False) 529 | vn = torch.sqrt(torch.mean(vn, dim=-2, keepdim=True)) 530 | return self.scalar_norm(s), v / vn 531 | 532 | 533 | def tuple_sum(*args): 534 | ''' 535 | Sums any number of tuples (s, V) elementwise. 536 | ''' 537 | return tuple(map(sum, zip(*args))) 538 | 539 | 540 | def tuple_cat(*args, dim=-1): 541 | ''' 542 | Concatenates any number of tuples (s, V) elementwise. 543 | 544 | :param dim: dimension along which to concatenate when viewed 545 | as the `dim` index for the scalar-channel tensors. 546 | This means that `dim=-1` will be applied as 547 | `dim=-2` for the vector-channel tensors. 548 | ''' 549 | dim %= len(args[0][0].shape) 550 | s_args, v_args = list(zip(*args)) 551 | return torch.cat(s_args, dim=dim), torch.cat(v_args, dim=dim) 552 | 553 | 554 | def tuple_index(x, idx): 555 | ''' 556 | Indexes into a tuple (s, V) along the first dimension. 557 | 558 | :param idx: any object which can be used to index into a `torch.Tensor` 559 | ''' 560 | return x[0][idx], x[1][idx] 561 | 562 | 563 | def _norm_no_nan(x, axis=-1, keepdims=False, eps=1e-8, sqrt=True): 564 | ''' 565 | L2 norm of tensor clamped above a minimum value `eps`. 566 | 567 | :param sqrt: if `False`, returns the square of the L2 norm 568 | ''' 569 | out = torch.clamp(torch.sum(torch.square(x), axis, keepdims), min=eps) 570 | return torch.sqrt(out) if sqrt else out 571 | 572 | 573 | def _split(x, nv): 574 | ''' 575 | Splits a merged representation of (s, V) back into a tuple. 576 | Should be used only with `_merge(s, V)` and only if the tuple 577 | representation cannot be used. 578 | 579 | :param x: the `torch.Tensor` returned from `_merge` 580 | :param nv: the number of vector channels in the input to `_merge` 581 | ''' 582 | s = x[..., :-3 * nv] 583 | v = x[..., -3 * nv:].contiguous().view(x.shape[0], nv, 3) 584 | return s, v 585 | 586 | 587 | def _merge(s, v): 588 | ''' 589 | Merges a tuple (s, V) into a single `torch.Tensor`, where the 590 | vector channels are flattened and appended to the scalar channels. 591 | Should be used only if the tuple representation cannot be used. 592 | Use `_split(x, nv)` to reverse. 593 | ''' 594 | v = v.contiguous().view(v.shape[0], v.shape[1] * 3) 595 | return torch.cat([s, v], -1) 596 | 597 | -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- 1 | from .ema import ExponentialMovingAverage 2 | from .utils import create_model 3 | from .GVP_diff import GVPTransCond -------------------------------------------------------------------------------- /models/ema.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | 4 | class ExponentialMovingAverage: 5 | """ 6 | Maintains (exponential) moving average of a set of parameters. 7 | """ 8 | 9 | def __init__(self, parameters, decay, use_num_updates=True): 10 | """ 11 | Args: 12 | parameters: Iterable of `torch.nn.Parameter`; usually the result of `model.parameters()`. 13 | decay: The exponential decay. 
14 | use_num_updates: Whether to use number of updates when computing averages. 15 | """ 16 | if decay < 0.0 or decay > 1.0: 17 | raise ValueError('Decay must be between 0 and 1') 18 | self.decay = decay 19 | self.num_updates = 0 if use_num_updates else None 20 | self.shadow_params = [p.clone().detach() 21 | for p in parameters if p.requires_grad] 22 | self.collected_params = [] 23 | 24 | def update(self, parameters): 25 | """ 26 | Update currently maintained parameters. 27 | 28 | Call this every time the parameters are updated, such as the result of the `optimizer.step()` call. 29 | 30 | Args: 31 | parameters: Iterable of `torch.nn.Parameter`; usually the same set of parameters used to 32 | initialize this object. 33 | """ 34 | decay = self.decay 35 | if self.num_updates is not None: 36 | self.num_updates += 1 37 | decay = min(decay, (1 + self.num_updates) / (10 + self.num_updates)) 38 | one_minus_decay = 1.0 - decay 39 | with torch.no_grad(): 40 | parameters = [p for p in parameters if p.requires_grad] 41 | for s_param, param in zip(self.shadow_params, parameters): 42 | s_param.sub_(one_minus_decay * (s_param - param)) 43 | 44 | def copy_to(self, parameters): 45 | """ 46 | Copy current parameters into given collection of parameters. 47 | 48 | Args: 49 | parameters: Iterable of `torch.nn.Parameter`; the parameters to be 50 | updated with the stored moving averages. 51 | """ 52 | parameters = [p for p in parameters if p.requires_grad] 53 | for s_param, param in zip(self.shadow_params, parameters): 54 | if param.requires_grad: 55 | param.data.copy_(s_param.data) 56 | 57 | def store(self, parameters): 58 | """ 59 | Save the current parameters for restoring later. 60 | 61 | Args: 62 | parameters: Iterable of `torch.nn.Parameter`; the parameters to be temporarily stored. 63 | """ 64 | self.collected_params = [param.clone() for param in parameters] 65 | 66 | def restore(self, parameters): 67 | """ 68 | Restore the parameters stored with the `store` method. 69 | Useful to validate the model with EMA parameters without affecting the original optimization process. 70 | Store the parameters before the `copy_to` method. 71 | After validation (or model saving), use this to restore the former parameters. 72 | 73 | Args: 74 | parameters: Iterable of `torch.nn.Parameter`; the parameters to be updated with the stored parameters. 75 | """ 76 | for c_param, param in zip(self.collected_params, parameters): 77 | param.data.copy_(c_param.data) 78 | 79 | def state_dict(self): 80 | return dict(decay=self.decay, num_updates=self.num_updates, shadow_params=self.shadow_params) 81 | 82 | def load_state_dict(self, state_dict): 83 | self.decay = state_dict['decay'] 84 | self.num_updates = state_dict['num_updates'] 85 | self.shadow_params = state_dict['shadow_params'] 86 | -------------------------------------------------------------------------------- /models/esm_block.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | 5 | 6 | class DihedralFeatures(nn.Module): 7 | def __init__(self, node_embed_dim): 8 | """ Embed dihedral angle features. 
""" 9 | super(DihedralFeatures, self).__init__() 10 | # 3 dihedral angles; sin and cos of each angle 11 | node_in = 6 12 | # Normalization and embedding 13 | self.node_embedding = nn.Linear(node_in, node_embed_dim, bias=True) 14 | self.norm_nodes = Normalize(node_embed_dim) 15 | 16 | def forward(self, X): 17 | """ Featurize coordinates as an attributed graph """ 18 | with torch.no_grad(): 19 | V = self._dihedrals(X) 20 | V = self.node_embedding(V) 21 | V = self.norm_nodes(V) 22 | return V 23 | 24 | @staticmethod 25 | def _dihedrals(X, eps=1e-7, return_angles=False): 26 | # First 3 coordinates are [N, CA, C] / [C4', C1', N1/N9] 27 | if len(X.shape) == 4: 28 | X = X[..., :3, :].reshape(X.shape[0], 3*X.shape[1], 3) 29 | else: 30 | X = X[:, :3, :] 31 | 32 | # Shifted slices of unit vectors 33 | dX = X[:,1:,:] - X[:,:-1,:] 34 | U = F.normalize(dX, dim=-1) 35 | u_2 = U[:,:-2,:] 36 | u_1 = U[:,1:-1,:] 37 | u_0 = U[:,2:,:] 38 | # Backbone normals 39 | n_2 = F.normalize(torch.cross(u_2, u_1, dim=-1), dim=-1) 40 | n_1 = F.normalize(torch.cross(u_1, u_0, dim=-1), dim=-1) 41 | 42 | # Angle between normals 43 | cosD = (n_2 * n_1).sum(-1) 44 | cosD = torch.clamp(cosD, -1+eps, 1-eps) 45 | D = torch.sign((u_2 * n_1).sum(-1)) * torch.acos(cosD) 46 | 47 | # This scheme will remove phi[0], psi[-1], omega[-1] 48 | D = F.pad(D, (1,2), 'constant', 0) 49 | D = D.view((D.size(0), int(D.size(1)/3), 3)) 50 | 51 | # phi, psi, omega = torch.unbind(D,-1) 52 | # 53 | # if return_angles: 54 | # return phi, psi, omega 55 | 56 | # Lift angle representations to the circle 57 | D_features = torch.cat((torch.cos(D), torch.sin(D)), 2) 58 | return D_features 59 | 60 | 61 | class Normalize(nn.Module): 62 | def __init__(self, features, epsilon=1e-6): 63 | super(Normalize, self).__init__() 64 | self.gain = nn.Parameter(torch.ones(features)) 65 | self.bias = nn.Parameter(torch.zeros(features)) 66 | self.epsilon = epsilon 67 | 68 | def forward(self, x, dim=-1): 69 | mu = x.mean(dim, keepdim=True) 70 | sigma = torch.sqrt(x.var(dim, keepdim=True) + self.epsilon) 71 | gain = self.gain 72 | bias = self.bias 73 | # Reshape 74 | if dim != -1: 75 | shape = [1] * len(mu.size()) 76 | shape[dim] = self.gain.size()[0] 77 | gain = gain.view(shape) 78 | bias = bias.view(shape) 79 | return gain * (x - mu) / (sigma + self.epsilon) + bias -------------------------------------------------------------------------------- /models/esm_utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | def get_rotation_frames(coords): 4 | """ 5 | Returns a local rotation frame defined by N, CA, C positions. 6 | 7 | Args: 8 | coords: coordinates, tensor of shape (batch_size x length x 3 x 3) 9 | where the third dimension is in order of N, CA, C 10 | 11 | Returns: 12 | Local relative rotation frames in shape (batch_size x length x 3 x 3) 13 | """ 14 | v1 = coords[:, :, 2] - coords[:, :, 1] 15 | v2 = coords[:, :, 0] - coords[:, :, 1] 16 | e1 = normalize(v1, dim=-1) 17 | u2 = v2 - e1 * torch.sum(e1 * v2, dim=-1, keepdim=True) 18 | e2 = normalize(u2, dim=-1) 19 | e3 = torch.cross(e1, e2, dim=-1) 20 | R = torch.stack([e1, e2, e3], dim=-2) 21 | return R 22 | 23 | 24 | def rotate(v, R): 25 | """ 26 | Rotates a vector by a rotation matrix. 27 | 28 | Args: 29 | v: 3D vector, tensor of shape (length x batch_size x channels x 3) 30 | R: rotation matrix, tensor of shape (length x batch_size x 3 x 3) 31 | 32 | Returns: 33 | Rotated version of v by rotation matrix R. 
34 | """ 35 | R = R.unsqueeze(-3) 36 | v = v.unsqueeze(-1) 37 | return torch.sum(v * R, dim=-2) 38 | 39 | 40 | def normalize(tensor, dim=-1): 41 | """ 42 | Normalizes a tensor along a dimension after removing nans. 43 | """ 44 | return nan_to_num( 45 | torch.div(tensor, norm(tensor, dim=dim, keepdim=True)) 46 | ) 47 | 48 | def nan_to_num(ts, val=0.0): 49 | """ 50 | Replaces nans in tensor with a fixed value. 51 | """ 52 | val = torch.tensor(val, dtype=ts.dtype, device=ts.device) 53 | return torch.where(~torch.isfinite(ts), val, ts) 54 | 55 | def norm(tensor, dim, eps=1e-8, keepdim=False): 56 | """ 57 | Returns L2 norm along a dimension. 58 | """ 59 | return torch.sqrt( 60 | torch.sum(torch.square(tensor), dim=dim, keepdim=keepdim) + eps) -------------------------------------------------------------------------------- /models/transformer_layer.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Meta Platforms, Inc. and affiliates. 2 | # 3 | # Contents of this file were adapted from the open source fairseq repository. 4 | # 5 | # This source code is licensed under the MIT license found in the 6 | # LICENSE file in the root directory of this source tree. 7 | 8 | from typing import Dict, List, Optional 9 | 10 | import torch 11 | import torch.nn as nn 12 | import torch.nn.functional as F 13 | from esm.multihead_attention import MultiheadAttention 14 | from torch import Tensor 15 | import math 16 | 17 | 18 | class SinusoidalPositionalEmbedding(nn.Module): 19 | def __init__(self, embed_dim, padding_idx, learned=False): 20 | super().__init__() 21 | self.embed_dim = embed_dim 22 | self.padding_idx = padding_idx 23 | self.register_buffer("_float_tensor", torch.FloatTensor(1)) 24 | self.weights = None 25 | 26 | def forward(self, x): 27 | bsz, seq_len = x.shape 28 | max_pos = self.padding_idx + 1 + seq_len 29 | if self.weights is None or max_pos > self.weights.size(0): 30 | self.weights = self.get_embedding(max_pos) 31 | self.weights = self.weights.type_as(self._float_tensor) 32 | 33 | positions = self.make_positions(x) 34 | return self.weights.index_select(0, positions.view(-1)).view(bsz, seq_len, -1).detach() 35 | 36 | def make_positions(self, x): 37 | mask = x.ne(self.padding_idx) 38 | range_buf = torch.arange(x.size(1), device=x.device).expand_as(x) + self.padding_idx + 1 39 | positions = range_buf.expand_as(x) 40 | return positions * mask.long() + self.padding_idx * (1 - mask.long()) 41 | 42 | def get_embedding(self, num_embeddings): 43 | half_dim = self.embed_dim // 2 44 | emb = math.log(10000) / (half_dim - 1) 45 | emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb) 46 | emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0) 47 | emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(num_embeddings, -1) 48 | if self.embed_dim % 2 == 1: 49 | # zero pad 50 | emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1) 51 | if self.padding_idx is not None: 52 | emb[self.padding_idx, :] = 0 53 | return emb 54 | 55 | 56 | class TransformerEncoderLayer(nn.Module): 57 | """Encoder layer block. 
58 | `layernorm -> dropout -> add residual` 59 | 60 | Args: 61 | args (argparse.Namespace): parsed command-line arguments 62 | """ 63 | 64 | def __init__(self, args): 65 | super().__init__() 66 | self.args = args 67 | self.embed_dim = args.encoder_embed_dim 68 | self.self_attn = self.build_self_attention(self.embed_dim, args) 69 | self.self_attn_layer_norm = torch.nn.LayerNorm(self.embed_dim) 70 | self.dropout_module = nn.Dropout(args.dropout) 71 | self.activation_fn = F.relu 72 | self.fc1 = self.build_fc1( 73 | self.embed_dim, 74 | args.encoder_ffn_embed_dim, 75 | ) 76 | self.fc2 = self.build_fc2( 77 | args.encoder_ffn_embed_dim, 78 | self.embed_dim, 79 | ) 80 | 81 | self.final_layer_norm = nn.LayerNorm(self.embed_dim) 82 | 83 | def build_fc1(self, input_dim, output_dim): 84 | return nn.Linear(input_dim, output_dim) 85 | 86 | def build_fc2(self, input_dim, output_dim): 87 | return nn.Linear(input_dim, output_dim) 88 | 89 | def build_self_attention(self, embed_dim, args): 90 | return MultiheadAttention( 91 | embed_dim, 92 | args.encoder_attention_heads, 93 | dropout=args.attention_dropout, 94 | self_attention=True, 95 | ) 96 | 97 | def residual_connection(self, x, residual): 98 | return residual + x 99 | 100 | def forward( 101 | self, 102 | x, 103 | encoder_padding_mask: Optional[Tensor], 104 | attn_mask: Optional[Tensor] = None, 105 | ): 106 | """ 107 | Args: 108 | x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)` 109 | encoder_padding_mask (ByteTensor): binary ByteTensor of shape 110 | `(batch, seq_len)` where padding elements are indicated by ``1``. 111 | attn_mask (ByteTensor): binary tensor of shape `(tgt_len, src_len)`, 112 | where `tgt_len` is the length of output and `src_len` is the 113 | length of input, though here both are equal to `seq_len`. 114 | `attn_mask[tgt_i, src_j] = 1` means that when calculating the 115 | embedding for `tgt_i`, we exclude (mask out) `src_j`. This is 116 | useful for strided self-attention. 117 | 118 | Returns: 119 | encoded output of shape `(seq_len, batch, embed_dim)` 120 | """ 121 | # anything in original attn_mask = 1, becomes -1e8 122 | # anything in original attn_mask = 0, becomes 0 123 | # Note that we cannot use -inf here, because at some edge cases, 124 | # the attention weight (before softmax) for some padded element in query 125 | # will become -inf, which results in NaN in model parameters 126 | if attn_mask is not None: 127 | attn_mask = attn_mask.masked_fill( 128 | attn_mask.to(torch.bool), -1e8 if x.dtype == torch.float32 else -1e4 129 | ) 130 | 131 | residual = x 132 | x = self.self_attn_layer_norm(x) 133 | x, _ = self.self_attn( 134 | query=x, 135 | key=x, 136 | value=x, 137 | key_padding_mask=encoder_padding_mask, 138 | need_weights=False, 139 | attn_mask=attn_mask, 140 | ) 141 | x = self.dropout_module(x) 142 | x = self.residual_connection(x, residual) 143 | 144 | residual = x 145 | x = self.final_layer_norm(x) 146 | x = self.activation_fn(self.fc1(x)) 147 | x = self.fc2(x) 148 | x = self.dropout_module(x) 149 | x = self.residual_connection(x, residual) 150 | return x 151 | 152 | def modulate(x, shift, scale): 153 | return x * (1 + scale) + shift 154 | 155 | class TransformerEncoderCondLayer(nn.Module): 156 | """Encoder layer block with extra conditional input. 
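Conditioning follows an adaLN-style scheme: `cond_mlp` maps the condition to six per-channel shift/scale/gate vectors, which modulate the attention and feed-forward sub-blocks via `modulate` and gate their residual branches.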
157 | `layernorm -> dropout -> add residual` 158 | 159 | Args: 160 | args (argparse.Namespace): parsed command-line arguments 161 | """ 162 | 163 | def __init__(self, args): 164 | super().__init__() 165 | self.args = args 166 | self.embed_dim = args.encoder_embed_dim 167 | self.self_attn = self.build_self_attention(self.embed_dim, args) 168 | self.self_attn_layer_norm = torch.nn.LayerNorm(self.embed_dim) 169 | self.dropout_module = nn.Dropout(args.dropout) 170 | self.activation_fn = F.relu 171 | self.fc1 = self.build_fc1( 172 | self.embed_dim, 173 | args.encoder_ffn_embed_dim, 174 | ) 175 | self.fc2 = self.build_fc2( 176 | args.encoder_ffn_embed_dim, 177 | self.embed_dim, 178 | ) 179 | 180 | self.final_layer_norm = nn.LayerNorm(self.embed_dim) 181 | 182 | self.cond_mlp = nn.Sequential( 183 | nn.SiLU(), 184 | nn.Linear(self.embed_dim, self.embed_dim * 6), 185 | ) 186 | 187 | def build_fc1(self, input_dim, output_dim): 188 | return nn.Linear(input_dim, output_dim) 189 | 190 | def build_fc2(self, input_dim, output_dim): 191 | return nn.Linear(input_dim, output_dim) 192 | 193 | def build_self_attention(self, embed_dim, args): 194 | return MultiheadAttention( 195 | embed_dim, 196 | args.encoder_attention_heads, 197 | dropout=args.attention_dropout, 198 | self_attention=True, 199 | ) 200 | 201 | def residual_connection(self, x, residual): 202 | return residual + x 203 | 204 | def forward( 205 | self, 206 | x, 207 | encoder_padding_mask: Optional[Tensor], 208 | attn_mask: Optional[Tensor] = None, 209 | cond: Optional[Tensor] = None 210 | ): 211 | """ 212 | Args: 213 | x (Tensor): input to the layer of shape `(seq_len, batch, embed_dim)` 214 | cond (Tensor)L input to the layer of shape `(seq_len, batch, embed_dim)` 215 | encoder_padding_mask (ByteTensor): binary ByteTensor of shape 216 | `(batch, seq_len)` where padding elements are indicated by ``1``. 217 | attn_mask (ByteTensor): binary tensor of shape `(tgt_len, src_len)`, 218 | where `tgt_len` is the length of output and `src_len` is the 219 | length of input, though here both are equal to `seq_len`. 220 | `attn_mask[tgt_i, src_j] = 1` means that when calculating the 221 | embedding for `tgt_i`, we exclude (mask out) `src_j`. This is 222 | useful for strided self-attention. 
223 | 224 | Returns: 225 | encoded output of shape `(seq_len, batch, embed_dim)` 226 | """ 227 | # anything in original attn_mask = 1, becomes -1e8 228 | # anything in original attn_mask = 0, becomes 0 229 | # Note that we cannot use -inf here, because at some edge cases, 230 | # the attention weight (before softmax) for some padded element in query 231 | # will become -inf, which results in NaN in model parameters 232 | if attn_mask is not None: 233 | attn_mask = attn_mask.masked_fill( 234 | attn_mask.to(torch.bool), -1e8 if x.dtype == torch.float32 else -1e4 235 | ) 236 | 237 | residual = x 238 | 239 | # condition 240 | cond_flag = cond is not None 241 | if cond_flag: 242 | shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.cond_mlp(cond).chunk(6, dim=-1) 243 | x = modulate(self.self_attn_layer_norm(x), shift_msa, scale_msa) 244 | else: 245 | x = self.self_attn_layer_norm(x) 246 | 247 | x, _ = self.self_attn( 248 | query=x, 249 | key=x, 250 | value=x, 251 | key_padding_mask=encoder_padding_mask, 252 | need_weights=False, 253 | attn_mask=attn_mask, 254 | ) 255 | x = self.dropout_module(x) 256 | x = self.residual_connection(gate_msa * x, residual) if cond_flag else self.residual_connection(x, residual) 257 | 258 | residual = x 259 | x = modulate(self.final_layer_norm(x), shift_mlp, scale_mlp) if cond_flag else self.final_layer_norm(x) 260 | x = self.activation_fn(self.fc1(x)) 261 | x = self.fc2(x) 262 | x = self.dropout_module(x) 263 | x = self.residual_connection(gate_mlp * x, residual) if cond_flag else self.residual_connection(x, residual) 264 | return x 265 | -------------------------------------------------------------------------------- /models/utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | _MODELS = {} 4 | 5 | def register_model(cls=None, *, name=None): 6 | """"A decorator for registering model classes.""" 7 | 8 | def _register(cls): 9 | if name is None: 10 | local_name = cls.__name__ 11 | else: 12 | local_name = name 13 | if local_name in _MODELS: 14 | raise ValueError(f"Already registerd model") 15 | _MODELS[local_name] = cls 16 | return cls 17 | 18 | if cls is None: 19 | return _register 20 | else: 21 | return _register(cls) 22 | 23 | def create_model(config): 24 | model = _MODELS[config.model.name](config) 25 | model = model.to(config.device) 26 | model = torch.nn.DataParallel(model) 27 | return model -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | absl_py==0.15.0 2 | biopython==1.80 3 | dm_tree==0.1.7 4 | fair_esm==2.0.0 5 | ml_collections==0.1.1 6 | numpy==1.24.3 7 | scipy>=1.10.0 8 | torch==1.13.1 9 | torch_cluster==1.6.1+pt113cu116 10 | torch_geometric==2.3.1 11 | torch_scatter==2.1.1+pt113cu116 12 | tqdm==4.64.1 13 | -------------------------------------------------------------------------------- /run_lib.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from tqdm import tqdm 3 | import numpy as np 4 | import random 5 | from models import * 6 | from utils import * 7 | from diffusion import NoiseScheduleVP 8 | from sampling import get_sampling_fn 9 | from datasets import utils as du 10 | from torch_geometric.data import Batch 11 | import logging 12 | import pickle 13 | import functools 14 | import tree 15 | import copy 16 | import time 17 | 18 | 19 | def set_random_seed(config): 20 | seed = 
config.seed 21 | 22 | torch.manual_seed(seed) 23 | torch.cuda.manual_seed(seed) 24 | torch.cuda.manual_seed_all(seed) 25 | 26 | np.random.seed(seed) 27 | random.seed(seed) 28 | 29 | torch.backends.cudnn.deterministic = True 30 | torch.backends.cudnn.benchmark = False 31 | 32 | 33 | def get_optimizer(config, params): 34 | """Return a flax optimizer object based on `config`.""" 35 | if config.optim.optimizer == 'Adam': 36 | optimizer = optim.Adam(params, lr=config.optim.lr, betas=(config.optim.beta1, 0.999), eps=config.optim.eps, 37 | weight_decay=config.optim.weight_decay) 38 | elif config.optim.optimizer == 'AdamW': 39 | optimizer = torch.optim.AdamW(params, lr=config.optim.lr, amsgrad=True, weight_decay=1e-12) 40 | else: 41 | raise NotImplementedError( 42 | f'Optimizer {config.optim.optimizer} not supported yet!' 43 | ) 44 | return optimizer 45 | 46 | 47 | def vpsde_inference(config, save_folder, 48 | pdb_file='./example/R1107.pdb'): 49 | """Runs inference for RNA inverse design in a given dir.""" 50 | # Create directory for eval_folder 51 | os.makedirs(save_folder, exist_ok=True) 52 | 53 | # Initialize model 54 | model = create_model(config) 55 | ema = ExponentialMovingAverage(model.parameters(), decay=config.model.ema_decay) 56 | optimizer = get_optimizer(config, model.parameters()) 57 | state = dict(optimizer=optimizer, model=model, ema=ema, step=0) 58 | 59 | model_size = sum(p.numel() for p in model.parameters()) * 4 / 2 ** 20 60 | print('model size: {:.1f}MB'.format(model_size)) 61 | 62 | # Checkpoint name 63 | checkpoint_path = './ckpts/exp_inf.pth' 64 | 65 | # Load checkpoint 66 | state = restore_checkpoint(checkpoint_path, state, device=config.device) 67 | ema.copy_to(model.parameters()) 68 | 69 | # Initialize noise scheduler 70 | noise_scheduler = NoiseScheduleVP(config.sde.schedule, continuous_beta_0=config.sde.continuous_beta_0, 71 | continuous_beta_1=config.sde.continuous_beta_1) 72 | 73 | # Obtain data scalar and inverse scalar 74 | inverse_scaler = get_data_inverse_scaler(config) 75 | 76 | # Setup new sampling function for multi-state 77 | test_sampling_fn = get_sampling_fn(config, noise_scheduler, config.eval.sampling_steps, inverse_scaler) 78 | pdb2data = functools.partial(du.PDBtoData, num_posenc=config.data.num_posenc, 79 | num_rbf=config.data.num_rbf, knn_num=config.data.knn_num) 80 | 81 | fasta_dir = os.path.join(save_folder, 'fasta') 82 | os.makedirs(fasta_dir, exist_ok=True) 83 | 84 | # run inference on a single pdb file 85 | print('Start inference on a single pdb file') 86 | pdb_id = pdb_file.replace('.pdb', '') 87 | if '/' in pdb_id: 88 | pdb_id = pdb_id.split('/')[-1] 89 | struct_data = pdb2data(pdb_file) 90 | struct_data = tree.map_structure(lambda x: 91 | x.unsqueeze(0).repeat_interleave(config.eval.n_samples, dim=0).to(config.device), 92 | struct_data) 93 | samples = test_sampling_fn(model, struct_data) 94 | 95 | # save to fasta dir 96 | for i in range(len(samples)): 97 | du.sample_to_fasta(samples[i], pdb_id, 98 | os.path.join(fasta_dir, pdb_id + '_' + str(i) + '.fasta')) 99 | 100 | recovery_ = samples.eq(struct_data['seq']).float().mean().item() 101 | print(f'{pdb_id}, recovery_rate {recovery_:.4f}') 102 | -------------------------------------------------------------------------------- /sampling.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | from torch.nn import functional as F 4 | import random 5 | import pdb 6 | from torch_geometric.data import Batch 7 | import utils 8 | 9 | 
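# Sampling overview (see AncestralSampler below):
#   * get_sampling_fn builds a no-grad sampling_fn(model, data) that integrates the
#     reverse diffusion from t = T down to t ~ eps over the configured number of steps.
#   * The model predicts the clean sequence representation (config.model.pred_data), and
#     post_process maps it back through the inverse scaler (undoing the centered [-1, 1]
#     scaling when data.seq_centered is set) before an argmax over the 4 nucleotide classes.
#   * When config.eval.cond_scale >= 0, conditional and unconditional predictions are mixed
#     as null + (cond - null) * cond_scale (classifier-free guidance); otherwise the plain
#     conditional prediction is used.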
10 | def post_process(gen_seq, inverse_scaler): 11 | """Post process generated sequences.""" 12 | gen_seq = inverse_scaler(gen_seq) 13 | gen_seq = torch.argmax(gen_seq, dim=-1) 14 | return gen_seq 15 | 16 | def get_sampling_fn(config, noise_scheduler, sampling_steps, inverse_scaler, eps=1e-3): 17 | device = config.device 18 | 19 | time_steps = torch.linspace(noise_scheduler.T, eps, sampling_steps, device=device) 20 | sampler = AncestralSampler(config, noise_scheduler, time_steps, config.model.pred_data, inverse_scaler) 21 | # n_samples = config.eval.n_samples 22 | 23 | @torch.no_grad() 24 | def sampling_fn(model, data): 25 | model.eval() 26 | # extend the sequence into a batch according to n_samples 27 | batch = data 28 | # sample initial noise 29 | z = torch.randn(batch['node_s'].shape, device=device) 30 | gen_seq = sampler.sampling(model, z, batch) 31 | # reshape the batch from a seq to a matrix 32 | gen_seqs = post_process(gen_seq, inverse_scaler) 33 | return gen_seqs 34 | 35 | return sampling_fn 36 | 37 | def expand_dims(v, dims): 38 | return v[(...,) + (None,) * (dims - 1)] 39 | 40 | 41 | class AncestralSampler: 42 | """Ancestral sampling for RNA inverse design.""" 43 | def __init__(self, config, noise_scheduler, time_steps, model_pred_data, inverse_scaler): 44 | self.noise_scheduler = noise_scheduler 45 | self.t_array = time_steps 46 | self.s_array = torch.cat([time_steps[1:], torch.zeros(1, device=time_steps.device)]) 47 | self.model_pred_data = model_pred_data 48 | self.self_cond = config.model.self_cond 49 | self.cond_scale = getattr(config.eval, 'cond_scale', -1.) 50 | self.dynamic_threshold = getattr(config.eval, 'dynamic_threshold', False) 51 | self.dynamic_thresholding_percentile = getattr(config.eval, 'dynamic_thresholding_percentile', 0.95) 52 | 53 | @torch.no_grad() 54 | def forward_with_cond_scale(self, model, *args, cond_scale=1.0, **kwargs): 55 | logits = model(*args, **kwargs) # with condition 56 | if cond_scale == 1.0: 57 | return logits 58 | 59 | null_logits = model(*args, cond_drop_prob=1.0, **kwargs) # without condition 60 | return null_logits + (logits - null_logits) * cond_scale 61 | 62 | @torch.no_grad() 63 | def sampling(self, model, z_T, batch): 64 | batch['z_t'] = x = z_T 65 | bs = z_T.shape[0] 66 | cond_x = None 67 | for i in range(len(self.t_array)): 68 | t = self.t_array[i] 69 | s = self.s_array[i] 70 | 71 | alpha_t, sigma_t = self.noise_scheduler.marginal_prob(t) 72 | alpha_s, sigma_s = self.noise_scheduler.marginal_prob(s) 73 | 74 | alpha_t_given_s = alpha_t / alpha_s 75 | sigma2_t_given_s = sigma_t ** 2 - alpha_t_given_s ** 2 * sigma_s ** 2 76 | sigma_t_given_s = torch.sqrt(sigma2_t_given_s) 77 | sigma = sigma_t_given_s * sigma_s / sigma_t 78 | 79 | # vec_t = torch.ones(bs, device=x.device) * t 80 | # noise_level = torch.ones(bs, device=x.device) * torch.log(alpha_t ** 2 / sigma_t ** 2) 81 | noise_level = torch.log(alpha_t ** 2 / sigma_t ** 2) 82 | 83 | # prediction with model 84 | pred_t = model(batch, time=t.unsqueeze(0), noise_level=noise_level.unsqueeze(0), cond_x=cond_x) \ 85 | if self.cond_scale < 0. 
\ 86 | else self.forward_with_cond_scale(model, batch, cond_scale=self.cond_scale, time=t.unsqueeze(0), 87 | noise_level=noise_level.unsqueeze(0), cond_x=cond_x) 88 | 89 | # dynamic thresholding 90 | if self.dynamic_threshold: 91 | # s is the dynamic threshold, determined by percentile of absolute values of reconstructed sample per batch element 92 | s = torch.quantile( 93 | pred_t.reshape(bs, -1).abs(), 94 | self.dynamic_thresholding_percentile, 95 | dim=-1 96 | ) 97 | s.clamp_(min=1.) 98 | 99 | s = expand_dims(s, pred_t.dim()) 100 | pred_t = pred_t.clamp(-s, s) / s 101 | 102 | if self.self_cond: 103 | assert self.model_pred_data 104 | cond_x = pred_t.detach().clone() 105 | 106 | # seq update 107 | if pred_t.shape != x.shape: 108 | pred_t = pred_t.unsqueeze(-2) 109 | 110 | if self.model_pred_data: 111 | x_mean = expand_dims((alpha_t_given_s * sigma_s ** 2 / sigma_t ** 2).repeat(bs), x.dim()) * x \ 112 | + expand_dims((alpha_s * sigma2_t_given_s / sigma_t ** 2).repeat(bs), pred_t.dim()) * pred_t 113 | else: 114 | x_mean = x / expand_dims(alpha_t_given_s.repeat(bs), x.dim()) \ 115 | - expand_dims((sigma2_t_given_s / alpha_t_given_s / sigma_t).repeat(bs), pred_t.dim()) * pred_t 116 | 117 | batch['z_t'] = x = x_mean + expand_dims(sigma.repeat(bs), x_mean.dim()) * \ 118 | torch.randn(x_mean.shape, device=x.device) 119 | return x_mean.squeeze(-2) 120 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import os 3 | import numpy as np 4 | from sampling import post_process 5 | 6 | 7 | def restore_checkpoint(ckpt_dir, state, device): 8 | if not os.path.exists(ckpt_dir): 9 | if not os.path.exists(os.path.dirname(ckpt_dir)): 10 | os.makedirs(os.path.dirname(ckpt_dir)) 11 | print(f"No checkpoint found at {ckpt_dir}. " 12 | f"Returned the same state as input") 13 | return state 14 | else: 15 | loaded_state = torch.load(ckpt_dir, map_location=device) 16 | state['optimizer'].load_state_dict(loaded_state['optimizer']) 17 | state['model'].load_state_dict(loaded_state['model'], strict=True) # change strict to False? 18 | state['ema'].load_state_dict(loaded_state['ema']) 19 | state['step'] = loaded_state['step'] 20 | return state 21 | 22 | 23 | def save_checkpoint(ckpt_dir, state): 24 | saved_state = { 25 | 'optimizer': state['optimizer'].state_dict(), 26 | 'model': state['model'].state_dict(), 27 | 'ema': state['ema'].state_dict(), 28 | 'step': state['step'] 29 | } 30 | torch.save(saved_state, ckpt_dir) 31 | 32 | 33 | def get_data_scaler(config): 34 | """Data normalizer""" 35 | # not consider bias here 36 | centered = config.data.seq_centered 37 | 38 | def scale_fn(seq): 39 | if centered: 40 | seq = seq * 2. - 1. 41 | return seq 42 | 43 | return scale_fn 44 | 45 | 46 | def get_data_inverse_scaler(config): 47 | """Inverse data normalizer.""" 48 | 49 | centered = config.data.seq_centered 50 | 51 | def inverse_scale_fn(seq): 52 | if centered: 53 | seq = (seq + 1.) / 2. 54 | return seq 55 | 56 | return inverse_scale_fn --------------------------------------------------------------------------------
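As a quick illustration of how the scaler pair above interacts with `sampling.post_process`, here is a minimal round-trip sketch (standalone and hypothetical; it only assumes `config.data.seq_centered = True` and imports the helpers from `utils.py`):

```python
import torch
import ml_collections
from utils import get_data_scaler, get_data_inverse_scaler

config = ml_collections.ConfigDict()
config.data = ml_collections.ConfigDict({'seq_centered': True})

scale_fn = get_data_scaler(config)            # one-hot in [0, 1] -> centered [-1, 1]
inverse_fn = get_data_inverse_scaler(config)  # centered [-1, 1] -> [0, 1]

seq_onehot = torch.eye(4)[torch.tensor([0, 2, 1, 3])]  # toy 4-residue sequence
z0 = scale_fn(seq_onehot)                      # representation the diffusion model works in
recovered = inverse_fn(z0).argmax(dim=-1)      # mirrors post_process in sampling.py
assert torch.equal(recovered, torch.tensor([0, 2, 1, 3]))
```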