├── src ├── __init__.py ├── .python-version ├── .DS_Store └── mil.py ├── examples ├── .DS_Store ├── data │ ├── bag_of_bags │ │ ├── labels.csv │ │ └── ids.csv │ └── musk │ │ ├── labels.csv │ │ └── bagids.csv ├── musk.py └── bag_of_bags.py ├── .gitignore └── README.md /src/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/.python-version: -------------------------------------------------------------------------------- 1 | mil 2 | -------------------------------------------------------------------------------- /src/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jakubmonhart/mil_pytorch/HEAD/src/.DS_Store -------------------------------------------------------------------------------- /examples/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jakubmonhart/mil_pytorch/HEAD/examples/.DS_Store -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Jupyter Notebook 2 | **/.ipynb_checkpoints 3 | 4 | # Python cache 5 | **/__pycache__ 6 | 7 | # pyc extension 8 | **/*.pyc 9 | 10 | # setup files 11 | build/ 12 | dist/ 13 | *.egg-info/ 14 | -------------------------------------------------------------------------------- /examples/data/bag_of_bags/labels.csv: -------------------------------------------------------------------------------- 1 | -1 2 | 1 3 | 1 4 | -1 5 | 1 6 | 1 7 | 1 8 | 1 9 | 1 10 | 1 11 | 1 12 | -1 13 | 1 14 | 1 15 | 1 16 | -1 17 | 1 18 | 1 19 | -1 20 | 1 21 | 1 22 | 1 23 | -1 24 | 1 25 | -1 26 | 1 27 | -1 28 | 1 29 | 1 30 | 1 31 | 1 32 | 1 33 | 1 34 | 1 35 | -1 36 | 1 37 | 1 38 | -1 39 | 1 40 | 1 41 | -1 42 | 1 43 | 1 44 | 1 45 | 1 46 | -1 47 | 1 48 | 1 49 | 1 50 | 1 51 | 1 52 | 1 53 | 1 54 | 1 55 | -1 56 | 1 57 | 1 58 | 1 59 | 1 60 | 1 61 | 1 62 | 1 63 | 1 64 | -1 65 | -1 66 | -1 67 | -1 68 | -1 69 | -1 70 | -1 71 | -1 72 | -1 73 | -1 74 | -1 75 | -1 76 | -1 77 | -1 78 | -1 79 | -1 80 | -1 81 | -1 82 | -1 83 | -1 84 | -1 85 | -1 86 | -1 87 | -1 88 | -1 89 | -1 90 | -1 91 | -1 92 | -1 93 | -1 94 | -1 95 | -1 96 | -1 97 | -1 98 | -1 99 | -1 100 | -1 101 | -------------------------------------------------------------------------------- /examples/data/musk/labels.csv: -------------------------------------------------------------------------------- 1 | 1 2 | 1 3 | 1 4 | 1 5 | 1 6 | 1 7 | 1 8 | 1 9 | 1 10 | 1 11 | 1 12 | 1 13 | 1 14 | 1 15 | 1 16 | 1 17 | 1 18 | 1 19 | 1 20 | 1 21 | 1 22 | 1 23 | 1 24 | 1 25 | 1 26 | 1 27 | 1 28 | 1 29 | 1 30 | 1 31 | 1 32 | 1 33 | 1 34 | 1 35 | 1 36 | 1 37 | 1 38 | 1 39 | 1 40 | 1 41 | 1 42 | 1 43 | 1 44 | 1 45 | 1 46 | 1 47 | 1 48 | 1 49 | 1 50 | 1 51 | 1 52 | 1 53 | 1 54 | 1 55 | 1 56 | 1 57 | 1 58 | 1 59 | 1 60 | 1 61 | 1 62 | 1 63 | 1 64 | 1 65 | 1 66 | 1 67 | 1 68 | 1 69 | 1 70 | 1 71 | 1 72 | 1 73 | 1 74 | 1 75 | 1 76 | 1 77 | 1 78 | 1 79 | 1 80 | 1 81 | 1 82 | 1 83 | 1 84 | 1 85 | 1 86 | 1 87 | 1 88 | 1 89 | 1 90 | 1 91 | 1 92 | 1 93 | 1 94 | 1 95 | 1 96 | 1 97 | 1 98 | 1 99 | 1 100 | 1 101 | 1 102 | 1 103 | 1 104 | 1 105 | 1 106 | 1 107 | 1 108 | 1 109 | 1 110 | 1 111 | 1 112 | 1 113 | 1 114 | 1 115 | 1 116 | 1 117 | 1 118 | 1 119 | 1 120 | 1 121 | 1 122 | 1 123 | 1 124 | 1 125 | 1 126 | 1 127 | 1 128 | 1 129 | 1 130 | 1 131 | 1 132 | 1 133 
| 1 134 | 1 135 | 1 136 | 1 137 | 1 138 | 1 139 | 1 140 | 1 141 | 1 142 | 1 143 | 1 144 | 1 145 | 1 146 | 1 147 | 1 148 | 1 149 | 1 150 | 1 151 | 1 152 | 1 153 | 1 154 | 1 155 | 1 156 | 1 157 | 1 158 | 1 159 | 1 160 | 1 161 | 1 162 | 1 163 | 1 164 | 1 165 | 1 166 | 1 167 | 1 168 | 1 169 | 1 170 | 1 171 | 1 172 | 1 173 | 1 174 | 1 175 | 1 176 | 1 177 | 1 178 | 1 179 | 1 180 | 1 181 | 1 182 | 1 183 | 1 184 | 1 185 | 1 186 | 1 187 | 1 188 | 1 189 | 1 190 | 1 191 | 1 192 | 1 193 | 1 194 | 1 195 | 1 196 | 1 197 | 1 198 | 1 199 | 1 200 | 1 201 | 1 202 | 1 203 | 1 204 | 1 205 | 1 206 | 1 207 | 1 208 | 0 209 | 0 210 | 0 211 | 0 212 | 0 213 | 0 214 | 0 215 | 0 216 | 0 217 | 0 218 | 0 219 | 0 220 | 0 221 | 0 222 | 0 223 | 0 224 | 0 225 | 0 226 | 0 227 | 0 228 | 0 229 | 0 230 | 0 231 | 0 232 | 0 233 | 0 234 | 0 235 | 0 236 | 0 237 | 0 238 | 0 239 | 0 240 | 0 241 | 0 242 | 0 243 | 0 244 | 0 245 | 0 246 | 0 247 | 0 248 | 0 249 | 0 250 | 0 251 | 0 252 | 0 253 | 0 254 | 0 255 | 0 256 | 0 257 | 0 258 | 0 259 | 0 260 | 0 261 | 0 262 | 0 263 | 0 264 | 0 265 | 0 266 | 0 267 | 0 268 | 0 269 | 0 270 | 0 271 | 0 272 | 0 273 | 0 274 | 0 275 | 0 276 | 0 277 | 0 278 | 0 279 | 0 280 | 0 281 | 0 282 | 0 283 | 0 284 | 0 285 | 0 286 | 0 287 | 0 288 | 0 289 | 0 290 | 0 291 | 0 292 | 0 293 | 0 294 | 0 295 | 0 296 | 0 297 | 0 298 | 0 299 | 0 300 | 0 301 | 0 302 | 0 303 | 0 304 | 0 305 | 0 306 | 0 307 | 0 308 | 0 309 | 0 310 | 0 311 | 0 312 | 0 313 | 0 314 | 0 315 | 0 316 | 0 317 | 0 318 | 0 319 | 0 320 | 0 321 | 0 322 | 0 323 | 0 324 | 0 325 | 0 326 | 0 327 | 0 328 | 0 329 | 0 330 | 0 331 | 0 332 | 0 333 | 0 334 | 0 335 | 0 336 | 0 337 | 0 338 | 0 339 | 0 340 | 0 341 | 0 342 | 0 343 | 0 344 | 0 345 | 0 346 | 0 347 | 0 348 | 0 349 | 0 350 | 0 351 | 0 352 | 0 353 | 0 354 | 0 355 | 0 356 | 0 357 | 0 358 | 0 359 | 0 360 | 0 361 | 0 362 | 0 363 | 0 364 | 0 365 | 0 366 | 0 367 | 0 368 | 0 369 | 0 370 | 0 371 | 0 372 | 0 373 | 0 374 | 0 375 | 0 376 | 0 377 | 0 378 | 0 379 | 0 380 | 0 381 | 0 382 | 0 383 | 0 384 | 0 385 | 0 386 | 0 387 | 0 388 | 0 389 | 0 390 | 0 391 | 0 392 | 0 393 | 0 394 | 0 395 | 0 396 | 0 397 | 0 398 | 0 399 | 0 400 | 0 401 | 0 402 | 0 403 | 0 404 | 0 405 | 0 406 | 0 407 | 0 408 | 0 409 | 0 410 | 0 411 | 0 412 | 0 413 | 0 414 | 0 415 | 0 416 | 0 417 | 0 418 | 0 419 | 0 420 | 0 421 | 0 422 | 0 423 | 0 424 | 0 425 | 0 426 | 0 427 | 0 428 | 0 429 | 0 430 | 0 431 | 0 432 | 0 433 | 0 434 | 0 435 | 0 436 | 0 437 | 0 438 | 0 439 | 0 440 | 0 441 | 0 442 | 0 443 | 0 444 | 0 445 | 0 446 | 0 447 | 0 448 | 0 449 | 0 450 | 0 451 | 0 452 | 0 453 | 0 454 | 0 455 | 0 456 | 0 457 | 0 458 | 0 459 | 0 460 | 0 461 | 0 462 | 0 463 | 0 464 | 0 465 | 0 466 | 0 467 | 0 468 | 0 469 | 0 470 | 0 471 | 0 472 | 0 473 | 0 474 | 0 475 | 0 476 | 0 477 | -------------------------------------------------------------------------------- /examples/musk.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | sys.path.append("../src") 5 | sys.path.append("src") 6 | 7 | import mil 8 | import torch 9 | from torch import nn 10 | from torch.utils.data import DataLoader, Subset 11 | import pandas 12 | import numpy as np 13 | from sklearn.model_selection import train_test_split 14 | 15 | # Set seed for reproducibility 16 | torch.manual_seed(42) 17 | np.random.seed(42) 18 | 19 | 20 | # --- CONFIG --- 21 | 22 | # Configurations 23 | n_neurons = 15 24 | lr = 1e-3 25 | n_epochs = 100 26 | batch_size = 4 27 | 28 | 29 | # --- DATA --- 30 | 31 | data = 
pandas.read_csv(filepath_or_buffer='data/musk/data.csv', sep='\t', header=None).values
32 | ids = pandas.read_csv(filepath_or_buffer='data/musk/bagids.csv', sep='\t', header=None).values.reshape(-1)
33 | instance_labels = pandas.read_csv(filepath_or_buffer='data/musk/labels.csv', sep='\t', header=None).values.reshape(-1)
34 | 
35 | data = torch.tensor(data, dtype=torch.float)
36 | ids = torch.tensor(ids)
37 | instance_labels = torch.tensor(instance_labels)
38 | # Create bag labels from instance labels
39 | bagids = torch.unique(ids)
40 | labels = torch.stack([max(instance_labels[ids==i]) for i in bagids]).float()
41 | print('INFO: Data shape \n data: {}\n ids: {}\n labels: {}'.format(data.shape, ids.shape, labels.shape))
42 | # Check if gpu available
43 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
44 | print("INFO: Using device: {}".format(device))
45 | 
46 | # Move data to gpu (if available)
47 | data = data.to(device).T
48 | ids = ids.to(device)
49 | labels = labels.to(device)
50 | 
51 | # Create dataset and divide it into train and test parts
52 | dataset = mil.MilDataset(data, ids, labels, normalize=True)
53 | 
54 | train_indices, test_indices = train_test_split(np.arange(len(dataset)), test_size=0.2, stratify=dataset.labels.cpu())
55 | train, test = Subset(dataset, train_indices), Subset(dataset, test_indices)
56 | train_dl, test_dl = DataLoader(train, batch_size=batch_size, collate_fn=mil.collate, drop_last=True), \
57 |     DataLoader(test, batch_size=batch_size, collate_fn=mil.collate, drop_last=True)
58 | 
59 | 
60 | # --- MODEL ---
61 | 
62 | prepNN = torch.nn.Sequential(
63 |     torch.nn.Linear(len(dataset.data[0]), n_neurons),
64 |     torch.nn.ReLU()
65 | )
66 | 
67 | afterNN = torch.nn.Sequential(
68 |     torch.nn.Linear(n_neurons, 1)
69 | )
70 | 
71 | # Define model, loss function and optimizer
72 | model = mil.BagModel(prepNN, afterNN, torch.mean)
73 | 
74 | criterion = nn.BCEWithLogitsLoss()
75 | optimizer = torch.optim.Adam(model.parameters(), lr=lr)
76 | 
77 | # Move model to gpu if available
78 | model = model.to(device)
79 | 
80 | 
81 | # --- TRAIN ---
82 | 
83 | losses = []
84 | running_loss = 0.0
85 | 
86 | for t in range(n_epochs):
87 |     for data, bagids, labels in train_dl:
88 | 
89 |         pred = model((data, bagids)).squeeze()
90 |         loss = criterion(pred, labels)
91 | 
92 |         # Optimizer step
93 |         optimizer.zero_grad()
94 |         loss.backward()
95 |         optimizer.step()
96 |         running_loss += loss.item()
97 | 
98 |     # Log
99 |     losses.append(running_loss/len(train_dl))
100 |     running_loss = 0.0
101 |     if (t+1) % 10 == 0:
102 |         print('epoch: {} | loss: {:.3f}'.format(t+1, sum(losses[-10:])/10))
103 | 
104 | 
105 | # --- EVAL ---
106 | 
107 | # Train accuracy (the model outputs logits, so classify by thresholding at 0)
108 | correct_count = 0
109 | total_count = 0
110 | for data, bagids, labels in train_dl:
111 |     pred = model((data, bagids)).squeeze() > 0
112 |     correct_count += (pred==labels).sum()
113 |     total_count += len(labels)
114 | 
115 | print('train acc: {:.1f} %'.format((correct_count/total_count)*100))
116 | 
117 | # Test accuracy
118 | correct_count = 0
119 | total_count = 0
120 | for data, bagids, labels in test_dl:
121 |     pred = model((data, bagids)).squeeze() > 0
122 |     correct_count += (pred==labels).sum()
123 |     total_count += len(labels)
124 | 
125 | print('test acc: {:.1f} %'.format((correct_count/total_count)*100))
126 | 
--------------------------------------------------------------------------------
/examples/data/musk/bagids.csv:
--------------------------------------------------------------------------------
1 | 1
2 | 1
3 | 1
4 | 1
5 | 2
6 | 2
7 | 2
8 | 2
9 | 3
10 | 3 11 | 4 12 | 4 13 | 4 14 | 5 15 | 5 16 | 5 17 | 5 18 | 6 19 | 6 20 | 7 21 | 7 22 | 8 23 | 8 24 | 9 25 | 9 26 | 9 27 | 9 28 | 9 29 | 10 30 | 10 31 | 10 32 | 10 33 | 10 34 | 10 35 | 11 36 | 11 37 | 11 38 | 11 39 | 11 40 | 12 41 | 12 42 | 12 43 | 12 44 | 12 45 | 12 46 | 12 47 | 12 48 | 13 49 | 13 50 | 13 51 | 13 52 | 14 53 | 14 54 | 15 55 | 15 56 | 15 57 | 15 58 | 16 59 | 16 60 | 16 61 | 17 62 | 17 63 | 17 64 | 17 65 | 17 66 | 18 67 | 18 68 | 18 69 | 18 70 | 19 71 | 19 72 | 19 73 | 19 74 | 19 75 | 19 76 | 19 77 | 19 78 | 20 79 | 20 80 | 20 81 | 20 82 | 21 83 | 21 84 | 21 85 | 21 86 | 22 87 | 22 88 | 22 89 | 22 90 | 23 91 | 23 92 | 24 93 | 24 94 | 24 95 | 24 96 | 24 97 | 24 98 | 24 99 | 24 100 | 25 101 | 25 102 | 25 103 | 25 104 | 26 105 | 26 106 | 26 107 | 26 108 | 27 109 | 27 110 | 28 111 | 28 112 | 28 113 | 28 114 | 28 115 | 28 116 | 28 117 | 28 118 | 29 119 | 29 120 | 29 121 | 29 122 | 30 123 | 30 124 | 30 125 | 30 126 | 31 127 | 31 128 | 31 129 | 31 130 | 32 131 | 32 132 | 32 133 | 32 134 | 32 135 | 32 136 | 32 137 | 32 138 | 33 139 | 33 140 | 34 141 | 34 142 | 34 143 | 34 144 | 35 145 | 35 146 | 35 147 | 35 148 | 36 149 | 36 150 | 37 151 | 37 152 | 37 153 | 37 154 | 37 155 | 37 156 | 37 157 | 37 158 | 38 159 | 38 160 | 38 161 | 38 162 | 38 163 | 38 164 | 38 165 | 38 166 | 39 167 | 39 168 | 40 169 | 40 170 | 40 171 | 40 172 | 41 173 | 41 174 | 41 175 | 42 176 | 42 177 | 42 178 | 42 179 | 42 180 | 43 181 | 43 182 | 43 183 | 43 184 | 44 185 | 44 186 | 44 187 | 44 188 | 44 189 | 45 190 | 45 191 | 45 192 | 45 193 | 45 194 | 46 195 | 46 196 | 46 197 | 46 198 | 46 199 | 46 200 | 46 201 | 46 202 | 47 203 | 47 204 | 47 205 | 47 206 | 47 207 | 47 208 | 48 209 | 48 210 | 48 211 | 48 212 | 49 213 | 49 214 | 49 215 | 49 216 | 50 217 | 50 218 | 51 219 | 51 220 | 51 221 | 51 222 | 52 223 | 52 224 | 53 225 | 53 226 | 53 227 | 53 228 | 53 229 | 53 230 | 53 231 | 53 232 | 53 233 | 54 234 | 54 235 | 55 236 | 55 237 | 56 238 | 56 239 | 57 240 | 57 241 | 57 242 | 57 243 | 58 244 | 58 245 | 59 246 | 59 247 | 59 248 | 59 249 | 59 250 | 59 251 | 59 252 | 59 253 | 59 254 | 60 255 | 60 256 | 60 257 | 60 258 | 60 259 | 60 260 | 60 261 | 60 262 | 60 263 | 60 264 | 60 265 | 60 266 | 60 267 | 60 268 | 60 269 | 60 270 | 60 271 | 60 272 | 60 273 | 60 274 | 60 275 | 60 276 | 60 277 | 60 278 | 60 279 | 60 280 | 60 281 | 60 282 | 60 283 | 60 284 | 60 285 | 60 286 | 61 287 | 61 288 | 61 289 | 61 290 | 62 291 | 62 292 | 63 293 | 63 294 | 64 295 | 64 296 | 64 297 | 64 298 | 65 299 | 65 300 | 65 301 | 65 302 | 66 303 | 66 304 | 67 305 | 67 306 | 68 307 | 68 308 | 69 309 | 69 310 | 70 311 | 70 312 | 70 313 | 70 314 | 71 315 | 71 316 | 72 317 | 72 318 | 72 319 | 72 320 | 73 321 | 73 322 | 74 323 | 74 324 | 75 325 | 75 326 | 76 327 | 76 328 | 77 329 | 77 330 | 77 331 | 77 332 | 77 333 | 77 334 | 77 335 | 77 336 | 78 337 | 78 338 | 78 339 | 78 340 | 79 341 | 79 342 | 79 343 | 79 344 | 79 345 | 79 346 | 79 347 | 79 348 | 80 349 | 80 350 | 81 351 | 81 352 | 81 353 | 81 354 | 81 355 | 81 356 | 81 357 | 81 358 | 81 359 | 81 360 | 81 361 | 81 362 | 81 363 | 81 364 | 81 365 | 81 366 | 81 367 | 81 368 | 81 369 | 81 370 | 81 371 | 81 372 | 81 373 | 81 374 | 81 375 | 81 376 | 81 377 | 81 378 | 81 379 | 81 380 | 81 381 | 81 382 | 81 383 | 81 384 | 81 385 | 81 386 | 81 387 | 81 388 | 81 389 | 81 390 | 82 391 | 82 392 | 82 393 | 82 394 | 82 395 | 82 396 | 82 397 | 82 398 | 82 399 | 82 400 | 82 401 | 82 402 | 82 403 | 82 404 | 82 405 | 82 406 | 82 407 | 82 408 | 82 409 | 82 410 | 82 411 | 82 412 | 82 413 | 82 414 | 82 415 | 82 416 | 82 
417 | 82
418 | 82
419 | 82
420 | 82
421 | 82
422 | 82
423 | 82
424 | 82
425 | 82
426 | 82
427 | 82
428 | 82
429 | 82
430 | 83
431 | 83
432 | 84
433 | 84
434 | 85
435 | 85
436 | 86
437 | 86
438 | 87
439 | 87
440 | 87
441 | 87
442 | 88
443 | 88
444 | 88
445 | 88
446 | 88
447 | 88
448 | 88
449 | 88
450 | 88
451 | 88
452 | 88
453 | 88
454 | 88
455 | 88
456 | 88
457 | 88
458 | 89
459 | 89
460 | 89
461 | 89
462 | 90
463 | 90
464 | 90
465 | 90
466 | 91
467 | 91
468 | 91
469 | 92
470 | 92
471 | 92
472 | 92
473 | 92
474 | 92
475 | 92
476 | 92
477 |
--------------------------------------------------------------------------------
/src/mil.py:
--------------------------------------------------------------------------------
from torch import nn
from torch.utils.data import Dataset
import torch


class BagModel(nn.Module):
    '''
    Model for solving MIL problems.

    Args:
        prepNN: user-defined network processing the instances before the aggregation function (subclass of torch.nn.Module)
        afterNN: user-defined network processing the output of the aggregation function into the final output of BagModel (subclass of torch.nn.Module)
        aggregation_func: function pooling the instances of each bag; any function taking a 'dim' argument and behaving like torch.mean (e.g. torch.mean, torch.sum) can be used

    Returns:
        Output of the forward function.
    '''

    def __init__(self, prepNN, afterNN, aggregation_func):
        super().__init__()

        self.prepNN = prepNN
        self.aggregation_func = aggregation_func
        self.afterNN = afterNN

    def forward(self, input):
        ids = input[1]
        input = input[0]

        # Modify shape of bagids if only a 1d tensor
        if (len(ids.shape) == 1):
            ids.resize_(1, len(ids))

        # The last row of ids assigns instances to bags of the innermost layer
        inner_ids = ids[len(ids)-1]

        device = input.device

        NN_out = self.prepNN(input)

        # Identify the bags and their sizes, preserving the order of first appearance
        unique, inverse, counts = torch.unique(inner_ids, sorted=True, return_inverse=True, return_counts=True)
        idx = torch.cat([(inverse == x).nonzero()[0] for x in range(len(unique))]).sort()[1]
        bags = unique[idx]
        counts = counts[idx]

        # Aggregate the instances of each bag into a single feature vector
        output = torch.empty((len(bags), len(NN_out[0])), device=device)

        for i, bag in enumerate(bags):
            output[i] = self.aggregation_func(NN_out[inner_ids == bag], dim=0)

        output = self.afterNN(output)

        if (ids.shape[0] == 1):
            return output
        else:
            # Nested bags: also return the ids of the remaining outer layers
            ids = ids[:len(ids)-1]
            mask = torch.empty(0, device=device).long()
            for i in range(len(counts)):
                mask = torch.cat((mask, torch.sum(counts[:i], dtype=torch.int64).reshape(1)))
            return (output, ids[:, mask])
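
# Usage sketch (illustrative): with `prepNN` and `afterNN` built as in the
# README, `instances` of shape (n_instances, n_features) and `bagids` of shape
# (n_bag_layers, n_instances) or (n_instances,):
#
#   model = BagModel(prepNN, afterNN, torch.mean)
#   output = model((instances, bagids))
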
class MilDataset(Dataset):
    '''
    Subclass of torch.utils.data.Dataset holding MIL data; each item is one bag.

    Args:
        data: tensor of instances, shape (number of instances, number of features)
        ids: tensor assigning each instance to a bag, shape (number of bag layers, number of instances) or (number of instances,)
        labels: tensor of bag labels, one label per bag
        normalize: if True, standardize the features to zero mean and unit variance
    '''
    def __init__(self, data, ids, labels, normalize=True):
        self.data = data
        self.labels = labels
        self.ids = ids

        # Modify shape of bagids if only a 1d tensor
        if (len(ids.shape) == 1):
            ids.resize_(1, len(ids))

        self.bags = torch.unique(self.ids[0])

        # Normalize
        if normalize:
            std = self.data.std(dim=0)
            mean = self.data.mean(dim=0)
            self.data = (self.data - mean)/std

    def __len__(self):
        return len(self.bags)

    def __getitem__(self, index):
        data = self.data[self.ids[0] == self.bags[index]]
        bagids = self.ids[:, self.ids[0] == self.bags[index]]
        labels = self.labels[index]

        return data, bagids, labels

    def n_features(self):
        return self.data.size(1)


def collate(batch):
    '''
    Collate function for torch.utils.data.DataLoader: concatenates the instances
    and bagids of the bags in a batch and stacks their labels.
    '''
    batch_data = []
    batch_bagids = []
    batch_labels = []

    for sample in batch:
        batch_data.append(sample[0])
        batch_bagids.append(sample[1])
        batch_labels.append(sample[2])

    out_data = torch.cat(batch_data, dim=0)
    out_bagids = torch.cat(batch_bagids, dim=1)
    out_labels = torch.stack(batch_labels)

    return out_data, out_bagids, out_labels


def collate_np(batch):
    '''
    Same as collate, but builds the labels with torch.tensor, for labels that
    are not torch tensors (e.g. numpy values).
    '''
    batch_data = []
    batch_bagids = []
    batch_labels = []

    for sample in batch:
        batch_data.append(sample[0])
        batch_bagids.append(sample[1])
        batch_labels.append(sample[2])

    out_data = torch.cat(batch_data, dim=0)
    out_bagids = torch.cat(batch_bagids, dim=1)
    out_labels = torch.tensor(batch_labels)

    return out_data, out_bagids, out_labels

--------------------------------------------------------------------------------
/examples/bag_of_bags.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

import sys
sys.path.append("../src")
sys.path.append("src")

import mil
import torch
from torch import nn
from torch.utils.data import DataLoader, Subset
import pandas
import numpy as np
from sklearn.model_selection import train_test_split

# Set seed for reproducibility
torch.manual_seed(42)
np.random.seed(42)


# --- CONFIG ---

n_neurons1 = 15
n_neurons2 = 15
lr = 1e-3
n_epochs = 100
batch_size = 4


# --- DATA ---

# Load data from files
data = pandas.read_csv('data/bag_of_bags/data.csv', header=None).values
ids = pandas.read_csv('data/bag_of_bags/ids.csv', header=None).values
labels = pandas.read_csv('data/bag_of_bags/labels.csv', header=None).values.reshape(-1)

# Create tensors containing data
data = torch.tensor(data, dtype=torch.float)
ids = torch.tensor(ids)
labels = torch.tensor(labels).float()

# Map labels from {-1, 1} to {0, 1} for BCEWithLogitsLoss
labels[labels==-1] = 0

# Check if gpu available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print("INFO: Using device: {}".format(device))

# Move data to gpu (if available)
data = data.to(device)
ids = ids.to(device)
labels = labels.to(device)

# Create dataset and divide it into train and test parts
dataset = mil.MilDataset(data, ids, labels, normalize=True)

train_indices, test_indices = train_test_split(np.arange(len(dataset)), test_size=0.2, stratify=dataset.labels.cpu())
train, test = Subset(dataset, train_indices), Subset(dataset, test_indices)
train_dl, test_dl = DataLoader(train, batch_size=batch_size, collate_fn=mil.collate, drop_last=True), \
    DataLoader(test, batch_size=batch_size, collate_fn=mil.collate, drop_last=True)
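
# Each item of the dataset is one bag: (instances, bagids, label). mil.collate
# concatenates the instances and bagids of all bags in a batch, so the model
# receives one flat tensor of instances with ids marking bag membership.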

# --- MODEL ---

# Define neural networks processing the input before and after the aggregation function
prepNN1 = torch.nn.Sequential(
    torch.nn.Linear(len(dataset.data[0]), n_neurons1, bias=True),
    torch.nn.ReLU()
)

afterNN1 = torch.nn.Identity()

prepNN2 = torch.nn.Sequential(
    torch.nn.Linear(n_neurons1, n_neurons2, bias=True),
    torch.nn.ReLU()
)

afterNN2 = torch.nn.Sequential(
    torch.nn.Linear(n_neurons2, 1, bias=True)
)

# Create model from the custom prepNNs, afterNNs and aggregation function
model = torch.nn.Sequential(
    mil.BagModel(prepNN1, afterNN1, aggregation_func=torch.mean),
    mil.BagModel(prepNN2, afterNN2, aggregation_func=torch.mean)
)

# Loss function and optimizer
criterion = nn.BCEWithLogitsLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=lr)

# Move model to gpu if available
model = model.to(device)


# --- TRAIN ---

losses = []
running_loss = 0.0

for t in range(n_epochs):
    for data, bagids, labels in train_dl:

        pred = model((data, bagids)).squeeze()
        loss = criterion(pred, labels)

        # Optimizer step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        running_loss += loss.item()

    # Log
    losses.append(running_loss/len(train_dl))
    running_loss = 0.0
    if (t+1) % 10 == 0:
        print('epoch: {} | loss: {:.3f}'.format(t+1, sum(losses[-10:])/10))


# --- EVAL ---

# Train accuracy (the model outputs logits, so classify by thresholding at 0)
correct_count = 0
total_count = 0
for data, bagids, labels in train_dl:
    pred = model((data, bagids)).squeeze() > 0
    correct_count += (pred==labels).sum()
    total_count += len(labels)

print('train acc: {:.1f} %'.format((correct_count/total_count)*100))

# Test accuracy
correct_count = 0
total_count = 0
for data, bagids, labels in test_dl:
    pred = model((data, bagids)).squeeze() > 0
    correct_count += (pred==labels).sum()
    total_count += len(labels)

print('test acc: {:.1f} %'.format((correct_count/total_count)*100))
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# mil_pytorch - multiple instance learning model implemented in pytorch

This library consists mainly of **mil.BagModel** and **mil.MilDataset**.

```python
from mil_pytorch.mil import BagModel, MilDataset
```

**BagModel** is a subclass of **torch.nn.Module** (see https://pytorch.org/docs/stable/nn.html#torch.nn.Module).
**MilDataset** is a subclass of **torch.utils.data.Dataset** (see https://pytorch.org/docs/stable/data.html#torch.utils.data.Dataset).

For a description of the multiple instance learning problem, see https://github.com/pevnak/Mill.jl#what-is-multiple-instance-learning-mil-problem.

## Usage

### Data

Each instance is a feature vector of fixed length, and a bag contains a variable number of these instances. Each instance carries an id specifying which bag it belongs to; the ids are stored in a vector whose length equals the number of instances.
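
If your labels come per instance rather than per bag (as in the musk example), a bag label can be derived by taking the maximum instance label of each bag. A minimal sketch of this, assuming binary 0/1 instance labels (the tensors here are toy values):

```python
import torch

instance_labels = torch.tensor([0.0, 1.0, 0.0, 0.0])
ids = torch.tensor([0, 0, 0, 1])

# A bag is positive if it contains at least one positive instance
labels = torch.stack([instance_labels[ids == i].max() for i in torch.unique(ids)])
# labels is now tensor([1., 0.])
```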
Create an instance of MilDataset by passing it the instances, ids and bag labels.

```python
import torch
import mil_pytorch.mil as mil

# Create 4 instances divided into 2 bags in a 3:1 ratio.
# The first bag has a positive label, the second a negative one.
instances = torch.tensor([[1.0, 1.0, 1.0, 1.0],
                          [2.0, 2.0, 2.0, 2.0],
                          [3.0, 3.0, 3.0, 3.0],
                          [4.0, 4.0, 4.0, 4.0]])
ids = torch.tensor([0, 0, 0, 1])
labels = torch.tensor([1.0, 0.0])

# Initialize MilDataset using the created data
dataset = mil.MilDataset(instances, ids, labels)
```

You can access the bags of this dataset using an index ...

```python
instances, ids, label = dataset[index]
```

... or by iteration.

```python
for data, bagids, labels in dataset:
    ...
```

To use **torch.utils.data.DataLoader** (https://pytorch.org/docs/stable/data.html#torch.utils.data.DataLoader) you need to use the custom collate function **mil.collate**.

```python
from torch.utils.data import DataLoader
import mil_pytorch.mil as mil

dataloader = DataLoader(dataset=dataset, batch_size=batch_size, collate_fn=mil.collate)
```

### Creating model

BagModel consists of user-defined neural networks prepNN and afterNN with an aggregation function between them. prepNN processes the individual instances and preserves their number; the aggregation function then pools the instances of each bag into one feature vector per bag, and this output is passed through afterNN.

```python
# Define custom prepNN
prepNN = torch.nn.Sequential(
    torch.nn.Linear(input_len, 10),
    torch.nn.ReLU(),
)

# Define custom afterNN
afterNN = torch.nn.Sequential(
    torch.nn.Linear(10, 1)
)

# Define model with prepNN, afterNN and torch.mean as aggregation function
model = mil.BagModel(prepNN, afterNN, torch.mean)
```

### Form of input

The input to the model must be a tuple of instances and bag ids.

```python
input = (instances, bagids)

output = model(input)
```

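### Training

Putting the pieces together, here is a minimal training sketch modelled on examples/musk.py; the toy data, layer sizes and epoch count are illustrative placeholders.

```python
import torch
from torch import nn
from torch.utils.data import DataLoader
import mil_pytorch.mil as mil

# Toy data: 4 instances with 8 features, divided into 2 bags
instances = torch.randn(4, 8)
ids = torch.tensor([0, 0, 0, 1])
labels = torch.tensor([1.0, 0.0])

dataset = mil.MilDataset(instances, ids, labels, normalize=True)
dataloader = DataLoader(dataset, batch_size=2, collate_fn=mil.collate)

prepNN = torch.nn.Sequential(torch.nn.Linear(8, 10), torch.nn.ReLU())
afterNN = torch.nn.Sequential(torch.nn.Linear(10, 1))
model = mil.BagModel(prepNN, afterNN, torch.mean)

criterion = nn.BCEWithLogitsLoss()  # works on logits, so afterNN ends without a sigmoid
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

for epoch in range(10):
    for data, bagids, batch_labels in dataloader:
        pred = model((data, bagids)).squeeze(-1)
        loss = criterion(pred, batch_labels)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
```
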
### Bag of bags

Data for a MIL problem can also come in the form of a "bag of bags", where each bag consists of a variable number of lower bags, and each lower bag consists of a variable number of instances. In this case, ids is a matrix whose number of columns equals the number of instances and whose number of rows equals the number of "bag layers": one row for a plain "bag of instances" problem, two rows for a "bag of bags" problem, and so on.

The ids matrix is ordered so that its last row assigns instances to the innermost bags, the row above assigns those sub-bags to the next layer, and so on up to the top.

Data for a bag of bags problem would look like this:

```python
import torch
import mil_pytorch.mil as mil

# Create 8 instances divided into 4 lower bags in a 1:3:2:2 ratio.
# The lower bags are divided into 2 top-level bags in a 2:2 ratio.
# The first bag has a positive label, the second a negative one.
instances = torch.tensor([[1.0, 1.0, 1.0, 1.0],
                          [2.0, 2.0, 2.0, 2.0],
                          [3.0, 3.0, 3.0, 3.0],
                          [4.0, 4.0, 4.0, 4.0],
                          [5.0, 5.0, 5.0, 5.0],
                          [6.0, 6.0, 6.0, 6.0],
                          [7.0, 7.0, 7.0, 7.0],
                          [8.0, 8.0, 8.0, 8.0]])

ids = torch.tensor([[0, 0, 0, 0, 1, 1, 1, 1],
                    [0, 1, 1, 1, 2, 2, 3, 3]])

labels = torch.tensor([1.0, 0.0])

# Initialize MilDataset using the created data
dataset = mil.MilDataset(instances, ids, labels)
```

In the case of a "bag of bags" problem, the neural network is created as a sequence of two BagModels.

```python
# Define prepNNs and afterNNs
prepNN1 = torch.nn.Sequential(
    torch.nn.Linear(input_len, 10),
    torch.nn.ReLU(),
)

afterNN1 = torch.nn.Sequential(
    torch.nn.Identity()  # In this case afterNN1 and prepNN2 are interchangeable
)

prepNN2 = torch.nn.Sequential(
    torch.nn.Linear(10, 3),
    torch.nn.ReLU()
)

afterNN2 = torch.nn.Sequential(
    torch.nn.Linear(3, 1),
    torch.nn.Tanh()
)

# Define model with prepNNs, afterNNs and torch.mean as aggregation function
model = torch.nn.Sequential(
    mil.BagModel(prepNN1, afterNN1, torch.mean),
    mil.BagModel(prepNN2, afterNN2, torch.mean)
)
```

## Examples

[musk](https://github.com/jakubmonhart/mil_pytorch/blob/master/examples/musk.py)

[bag\_of\_bags](https://github.com/jakubmonhart/mil_pytorch/blob/master/examples/bag_of_bags.py)
--------------------------------------------------------------------------------
/examples/data/bag_of_bags/ids.csv:
--------------------------------------------------------------------------------
1 | 
0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13,13,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15,15,15,15,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,17,17,17,17,17,17,18,18,18,18,18,18,18,18,18,18,18,18,18,18,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,19,20,20,20,20,20,20,20,20,20,20,20,20,20,20,20,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,21,22,22,22,22,22,22,22,22,22,22,22,22,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,23,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,25,26,26,26,26,26,26,26,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,27,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,28,29,29,29,29,29,29,29,29,30,30,30,30,30,30,30,30,30,30,30,31,31,31,31,31,31,31,32,32,32,32,32,32,32,32,32,32,32,32,32,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,33,34,34,34,34,34,34,34,34,34,34,34,34,34,34,34,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,35,36,36,36,36,36,36,36,37,37,37,37,37,37,37,37,37,37,37,37,37,37,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,38,39,39,39,39,39,39,39,39,39,40,40,40,40,40,40,40,40,40,40,40,40,41,41,41,41,41,41,41,41,41,41,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,42,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,43,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,45,45,45,45,45,45,45,45,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,46,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,47,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,48,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,49,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,50,51,51,51,51,51,51,51,51,51,51,51,51,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,52,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,53,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,54,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,55,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,56,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,57,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,58,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,59,60,60,60,60,60,60,60,60,60,60,60,60,60,60,60,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,61,62,62,62,62,62,62,62,62,62,62
,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,62,63,63,63,63,63,63,63,63,63,63,63,63,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,64,65,65,65,65,65,65,65,65,65,65,65,65,65,65,65,66,66,66,66,66,66,66,66,66,66,67,67,67,67,67,67,67,67,67,67,67,68,68,68,68,68,68,68,68,69,69,69,69,69,69,69,69,69,69,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,70,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,71,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,72,73,73,73,73,73,73,73,73,73,74,74,74,74,74,74,74,75,75,75,75,75,75,75,75,75,75,75,75,75,75,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,76,77,77,77,77,77,77,77,77,77,77,77,77,77,77,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,78,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,79,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,80,81,81,81,81,81,81,81,81,81,81,81,81,82,82,82,82,82,82,82,82,82,82,82,82,82,82,83,83,83,83,83,83,83,83,83,83,83,83,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,84,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,85,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,86,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,87,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,88,89,89,89,89,89,89,89,89,89,89,89,89,90,90,90,90,90,90,90,90,91,91,91,91,91,91,91,92,92,92,92,92,92,92,92,92,92,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,93,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,94,95,95,95,95,95,95,95,95,95,95,95,95,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,96,97,97,97,97,97,97,97,97,97,97,97,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99 2 | 
0,0,0,0,0,1,1,2,2,2,2,2,3,3,4,4,4,4,5,5,5,5,6,6,6,6,6,6,7,7,7,8,8,8,8,8,8,9,9,10,10,10,10,10,10,11,11,11,11,11,11,11,12,12,12,12,13,13,13,13,13,13,14,14,15,15,15,15,15,15,16,16,16,16,17,17,17,18,18,18,18,18,18,19,19,19,19,20,20,21,21,21,22,22,22,22,22,22,22,23,23,23,24,24,25,25,25,25,26,26,26,26,27,27,27,27,27,28,28,28,28,28,28,28,29,29,29,30,30,30,30,30,30,30,31,31,31,31,31,32,32,33,33,34,34,34,34,34,34,34,35,35,35,35,35,36,36,36,36,36,36,37,37,37,37,37,38,38,38,38,38,39,39,39,39,39,39,39,40,40,40,40,40,40,40,41,41,41,41,41,41,42,42,42,42,43,43,43,44,44,44,44,44,44,45,45,45,46,46,46,46,46,46,46,47,47,47,47,48,48,48,48,48,48,48,49,49,49,49,50,50,50,50,51,51,51,51,51,51,52,52,52,52,52,52,53,53,53,53,53,53,53,54,54,54,54,54,54,55,55,55,56,56,56,56,56,57,57,57,58,58,59,59,60,60,60,60,61,61,61,62,62,62,62,62,63,63,64,64,64,64,65,65,65,65,66,66,66,66,66,67,67,67,67,67,68,68,68,68,68,68,68,69,69,69,69,69,69,70,70,71,71,71,72,72,72,72,73,73,73,74,74,74,74,74,75,75,75,75,75,75,75,76,76,76,76,76,76,76,77,77,77,77,77,78,78,78,78,78,78,79,79,79,79,79,80,80,80,80,80,81,81,81,81,81,82,82,82,82,83,83,83,83,84,84,84,84,84,84,85,85,85,86,86,86,87,87,88,88,88,88,88,88,89,89,89,89,89,89,90,90,90,90,90,90,90,91,91,91,91,92,92,92,93,93,93,93,94,94,94,94,94,94,95,95,95,95,95,96,96,96,96,96,96,97,97,98,98,99,99,99,99,100,100,100,100,100,100,101,101,101,101,101,102,102,102,102,103,103,103,103,103,104,104,104,105,105,105,105,106,106,107,107,107,107,107,107,108,108,109,109,109,109,109,109,110,110,110,110,110,110,110,111,111,111,112,112,112,112,112,113,113,113,114,114,114,114,114,114,115,115,115,115,115,115,116,116,116,116,116,117,117,117,117,117,118,118,118,118,119,119,119,119,119,119,119,120,120,120,121,121,121,121,121,121,122,122,122,122,123,123,123,123,123,123,124,124,124,125,125,125,125,125,126,126,127,127,127,127,127,128,128,128,128,128,128,128,129,129,129,129,129,129,130,130,131,131,131,131,132,132,132,132,132,133,133,133,133,133,134,134,134,135,135,136,136,136,136,137,137,137,137,138,138,138,138,138,139,139,139,140,140,140,141,141,141,141,142,142,142,143,143,143,143,144,144,145,145,145,145,145,145,145,146,146,146,146,147,147,147,148,148,148,148,148,148,149,149,150,150,150,150,150,150,151,151,151,151,151,151,152,152,152,152,153,153,153,153,153,153,154,154,154,154,154,154,154,155,155,156,156,156,156,156,156,156,157,157,157,157,158,158,158,158,158,158,159,159,159,159,160,160,161,161,161,161,162,162,162,163,163,164,164,165,165,165,165,165,166,166,167,167,167,167,167,167,167,168,168,168,168,168,168,168,169,169,169,169,169,169,169,170,170,170,170,170,170,171,171,171,171,172,172,172,172,172,173,173,173,173,174,174,174,175,175,176,176,176,176,176,176,176,177,177,177,177,177,178,178,178,178,178,178,178,179,179,179,180,180,180,180,180,180,181,181,181,181,181,181,181,182,182,182,183,183,183,183,183,183,183,184,184,184,184,185,185,185,185,185,186,186,187,187,187,187,188,188,188,188,188,188,189,189,189,189,189,189,190,190,190,191,191,191,192,192,193,193,193,194,194,195,195,196,196,196,197,197,197,197,198,198,198,198,199,199,199,199,199,199,199,200,200,200,200,200,201,201,202,202,202,202,202,202,203,203,203,203,204,204,204,204,204,204,205,205,206,206,206,206,206,207,207,207,207,207,208,208,208,208,208,208,209,209,209,209,209,210,210,211,211,211,212,212,212,212,213,213,214,214,214,214,214,214,214,215,215,216,216,217,217,217,217,217,217,217,218,218,218,218,219,219,219,219,219,219,220,220,220,220,220,220,221,221,221,221,221,221,221,222,222,222,222,222,222,223,223,224,224,224,224,224,225,225,225,226,226,226,226,226,227,227,22
7,227,227,227,228,228,228,228,228,228,229,229,229,229,229,230,230,231,231,231,231,231,231,231,232,232,232,232,232,232,232,233,233,233,233,233,234,234,234,234,235,235,235,235,235,235,236,236,236,236,236,236,236,237,237,237,237,238,238,238,238,238,238,239,239,239,239,240,240,240,241,241,242,242,242,243,243,243,243,243,243,243,244,244,245,245,246,246,247,247,248,248,248,248,248,249,249,250,250,250,250,250,250,251,251,252,252,252,252,252,252,253,253,253,253,254,254,254,255,255,255,256,256,256,256,256,256,256,257,257,258,258,258,258,258,258,259,259,259,259,260,260,260,260,260,261,261,261,261,261,261,262,262,262,263,263,263,263,263,264,264,265,265,265,265,265,265,266,266,266,267,267,267,267,267,268,268,268,268,269,269,269,269,270,270,270,270,271,271,271,271,271,272,272,272,272,272,273,273,273,273,273,273,274,274,274,274,274,274,275,275,275,276,276,276,277,277,277,277,277,277,277,278,278,278,278,278,278,279,279,279,280,280,280,280,280,280,280,281,281,281,281,281,281,282,282,282,282,282,282,283,283,283,283,283,284,284,284,284,284,284,285,285,286,286,286,286,286,286,287,287,287,287,287,287,287,288,288,288,288,288,289,289,289,289,289,289,289,290,290,291,291,292,292,292,292,292,292,292,293,293,293,293,293,293,294,294,295,295,295,295,296,296,296,296,296,296,297,297,297,297,297,298,298,298,298,298,299,299,299,299,299,300,300,300,301,301,301,301,301,301,301,302,302,302,302,303,303,303,303,303,304,304,305,305,305,305,305,306,306,306,307,307,307,308,308,308,308,308,308,308,309,309,310,310,310,310,310,310,310,311,311,311,311,311,311,311,312,312,312,312,312,313,313,313,314,314,314,314,314,314,314,315,315,316,316,316,316,316,316,316,317,317,317,318,318,318,318,318,318,319,319,319,319,319,319,320,320,320,320,320,321,321,321,321,321,322,322,323,323,323,323,323,323,324,324,324,324,325,325,325,325,325,325,325,326,326,326,326,326,326,327,327,327,328,328,328,328,329,329,329,330,330,330,330,330,330,331,331,331,332,332,333,333,333,334,334,334,334,334,334,335,335,335,335,335,335,335,336,336,336,336,336,336,337,337,337,338,338,339,339,340,340,340,341,341,341,341,342,342,342,342,343,343,343,343,344,344,345,345,345,345,345,345,346,346,347,347,347,347,347,348,348,348,348,348,348,349,349,349,349,349,350,350,350,350,350,351,351,351,351,351,352,352,352,352,352,353,353,354,354,354,355,355,355,356,356,356,357,357,357,357,358,358,358,358,359,359,360,360,360,360,360,360,361,361,361,361,361,362,362,362,362,362,363,363,363,364,364,364,364,364,364,365,365,366,366,366,366,367,367,367,367,368,368,369,369,369,370,370,370,370,370,371,371,371,371,371,371,372,372,372,372,372,372,373,373,373,374,374,374,375,375,375,376,376,376,376,376,377,377,377,377,377,377,378,378,378,378,378,379,379,379,380,380,381,381,381,381,381,382,382,382,382,382,382,383,383,383,383,383,383,384,384,384,385,385,385,385,385,385,385,386,386,386,386,386,386,386,387,387,387,387,388,388,388,389,389,389,389,390,390,390,390,390,390,390,391,391,391,392,392,392,393,393,394,394,394,394,394,394,394,395,395,395,395,395,395,395,396,396,396,396,396,396,396,397,397,397,397,397,397,398,398,398,398,398,398,399,399,399,399,400,400,400,400,401,401,401,402,402,402,402,403,403,404,404,404,405,405,405,405,405,406,406,406,407,407,407,407,407,407,408,408,408,408,408,408,408,409,409,409,409,409,409,409,410,410,411,411,411,411,411,411,411,412,412,412,413,413,413,413,414,414,415,415,415,416,416,416,417,417,417,417,417,417,418,418,418,418,418,418,418,419,419,419,419,419,419,420,420,420,421,421,421,422,422,423,423,423,423,423,424,424,424,424,425,425,426,426,426,426,427,427,427,427,427,427,427,4
28,428,428,428,428,428,428,429,429,429,429,430,430,431,431,431,431,432,432,432,432,432,432,433,433 3 | --------------------------------------------------------------------------------