├── .gitignore
├── .idea
│   ├── .gitignore
│   ├── vcs.xml
│   ├── inspectionProfiles
│   │   └── profiles_settings.xml
│   ├── misc.xml
│   ├── modules.xml
│   └── automatic-scoliosis-assessment.iml
├── .DS_Store
├── dataset-preprocessing
│   ├── inLimits.m
│   ├── createDirectories.py
│   ├── scanHorForContour.m
│   ├── scanVertForContour.m
│   ├── createPickleDatasets.py
│   ├── hiResROI.m
│   ├── dataAugmentation.py
│   └── fixLandmarkErrors.py
├── vertebra-segmentation-network
│   ├── UNetTraining_Colab.ipynb
│   ├── UNetTesting.py
│   └── UNetTraining.py
├── performance-evaluation
│   ├── alignSlopeVectors.m
│   ├── results_segmentation.py
│   ├── results_endplates.py
│   ├── results_cobb.py
│   ├── results_lenke.py
│   └── boostnet_data_test.m
├── clinical-assessment-algorithms
│   ├── classifyLenkeCurveType.m
│   ├── predictedSpineMask2Outputs.m
│   ├── processSpineMask.m
│   ├── calculateCobbAngles.m
│   ├── minboundrect.m
│   └── fitEndplates.m
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | data/*
2 | thesis/*
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Default ignored files
3 | /workspace.xml
--------------------------------------------------------------------------------
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scoliosistools/automatic-scoliosis-assessment/HEAD/.DS_Store
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/dataset-preprocessing/inLimits.m:
--------------------------------------------------------------------------------
1 | function [output] = inLimits(desiredVal,maxSize)
2 | %INLIMITS Clamp desiredVal to the valid index range [1, maxSize].
3 | %   Used by the contour-scanning scripts to keep pixel indices inside the image.
4 | if desiredVal < 1
5 | output = 1;
6 | elseif desiredVal > maxSize
7 | output = maxSize;
8 | else
9 | output = desiredVal;
10 | end
11 | end
12 |
13 |
--------------------------------------------------------------------------------
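
Note: MATLAB indexing is 1-based, so this helper clamps to [1, maxSize]. For readers porting the preprocessing to Python, a minimal equivalent (the name `in_limits` is illustrative, not from the repository) could be:

```python
def in_limits(desired_val: int, max_size: int) -> int:
    """Clamp an index to the 1-based range [1, max_size], as inLimits.m does."""
    return max(1, min(desired_val, max_size))

# usage sketch: keep a neighbour index inside a 256-row image
assert in_limits(0, 256) == 1
assert in_limits(300, 256) == 256
assert in_limits(42, 256) == 42
```
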
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/automatic-scoliosis-assessment.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/vertebra-segmentation-network/UNetTraining_Colab.ipynb:
--------------------------------------------------------------------------------
1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"UNetTraining_Colab.ipynb","provenance":[],"collapsed_sections":[],"authorship_tag":"ABX9TyN+zuj6MJtXZf4AdAnYBMf2"},"kernelspec":{"name":"python3","display_name":"Python 3"}},"cells":[{"cell_type":"code","metadata":{"id":"lvh-Z30lxPa-","colab_type":"code","colab":{}},"source":["!git clone https://github.com/darraghmaguire/automatic-scoliosis-assessment.git"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"SGwEo_-8xRpH","colab_type":"code","colab":{}},"source":["!python /content/automatic-scoliosis-assessment/vertebra-segmentation-network/UNetTraining.py"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"HpF2qLvTxSZ3","colab_type":"code","colab":{}},"source":["!rm -rf /content/automatic-scoliosis-assessment/"],"execution_count":null,"outputs":[]}]}
--------------------------------------------------------------------------------
/dataset-preprocessing/createDirectories.py:
--------------------------------------------------------------------------------
1 | import os
2 | from shutil import copy
3 |
4 | for path in {"../data/FixedSpineWebData", "../data/HiResVertebraeMasks", "../data/DataAugmentation/images",
5 | "../data/DataAugmentation/masks", "../data/PredictionsVsGroundTruth/SpineMasks",
6 | "../data/PredictionsVsGroundTruth/SpineMasks_Processed",
7 | "../data/PredictionsVsGroundTruth/SpineMasks_GroundTruthEndplates",
8 | "G:/My Drive/GitHub/automatic-scoliosis-assessment",
9 | "G:/My Drive/GitHub/automatic-scoliosis-assessment/models",
10 | "G:/My Drive/GitHub/automatic-scoliosis-assessment/logs"}:
11 | try:
12 | os.makedirs(path)
13 | except OSError:
14 | print("Creation of the directory %s failed" % path)
15 | else:
16 | print("Successfully created the directory %s" % path)
17 |
18 | copy("../vertebra-segmentation-network/UNetTraining_Colab.ipynb", "G:/My Drive/GitHub/automatic-scoliosis-assessment/UNetTraining_Colab.ipynb")
19 |
--------------------------------------------------------------------------------
/performance-evaluation/alignSlopeVectors.m:
--------------------------------------------------------------------------------
1 | function [predSlopesAligned] = alignSlopeVectors(gtSlopes,predSlopes)
2 | %ALIGNSLOPEVECTORS: function designed to align the predicted and
3 | %ground-truth endplate slopes for comparison, for cases where there are a
4 | %different number of predicted endplates.
5 |
6 | gtLength = length(gtSlopes);
7 | diffLength = length(predSlopes) - length(gtSlopes);
8 | if diffLength > 0
9 | for k = 1:diffLength+1
10 | err = sum(abs(gtSlopes - predSlopes(k:(k+gtLength-1)))); % 'err' avoids shadowing MATLAB's built-in error()
11 | if (k == 1) || (err < minError)
12 | minError = err;
13 | predSlopesAligned = predSlopes(k:(k+gtLength-1));
14 | end
15 | end
16 | elseif diffLength < 0
17 | for k = 1:abs(diffLength)+1
18 | err = sum(abs(gtSlopes(k:(k+length(predSlopes)-1)) - predSlopes));
19 | if (k == 1) || (err < minError)
20 | minError = err;
21 | predSlopesAligned = zeros(1,gtLength);
22 | predSlopesAligned(k:(k+length(predSlopes)-1)) = predSlopes;
23 | predSlopesAligned(1:k) = predSlopes(1);
24 | predSlopesAligned((k+length(predSlopes)-1):gtLength) = predSlopes(length(predSlopes));
25 | end
26 | end
27 | else
28 | predSlopesAligned = predSlopes;
29 | end
30 |
31 | end
32 |
33 |
--------------------------------------------------------------------------------
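
Note: alignSlopeVectors.m slides the shorter slope vector along the longer one, keeps the offset with the smallest total absolute error, and pads missing positions with the nearest predicted slope. A rough NumPy sketch of the same idea (illustrative names, not code from the repository):

```python
import numpy as np

def align_slope_vectors(gt_slopes, pred_slopes):
    """Align predicted endplate slopes to ground truth by minimal total absolute error."""
    gt_slopes, pred_slopes = np.asarray(gt_slopes), np.asarray(pred_slopes)
    n_gt, n_pred = len(gt_slopes), len(pred_slopes)
    if n_pred == n_gt:
        return pred_slopes.copy()
    if n_pred > n_gt:
        # keep the best-matching window of the predictions
        errors = [np.abs(gt_slopes - pred_slopes[k:k + n_gt]).sum()
                  for k in range(n_pred - n_gt + 1)]
        k = int(np.argmin(errors))
        return pred_slopes[k:k + n_gt].copy()
    # fewer predictions than ground truths: try every placement, pad the ends
    best, best_err = None, np.inf
    for k in range(n_gt - n_pred + 1):
        err = np.abs(gt_slopes[k:k + n_pred] - pred_slopes).sum()
        if err < best_err:
            aligned = np.empty(n_gt)
            aligned[:k] = pred_slopes[0]            # pad head with first slope
            aligned[k:k + n_pred] = pred_slopes
            aligned[k + n_pred:] = pred_slopes[-1]  # pad tail with last slope
            best, best_err = aligned, err
    return best
```
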
/dataset-preprocessing/scanHorForContour.m:
--------------------------------------------------------------------------------
1 | function contour = scanHorForContour(image, default, point1, point2, zone1, zone2, left)
2 |
3 | imSize = size(image);
4 | threshold = imSize(1)*imSize(2); % 1 -> default, big -> no correction ... imSize(1)*imSize(2)/75000 for some correction
5 |
6 | n = 4; % order of polynomial to fit
7 |
8 | % length of default line
9 | line_length = size(default);
10 | line_length = line_length(2);
11 |
12 | % variables to store the difference in intensity between two pixels
13 | % also need to store the maximum difference and its index
14 | diff = zeros(1, line_length); % note: this local array shadows MATLAB's built-in diff()
15 | max_diff = diff;
16 | max_diff_ind = diff + default;
17 |
18 |
19 | for y_coord = point1:point2
20 |
21 | ind = y_coord - point1 + 1;
22 |
23 | % scan search zone for max change in intensity
24 | for x_coord = default(ind)-zone1:default(ind)+zone2
25 |
26 | diff(ind) = abs(double(image(y_coord,inLimits(x_coord+1, imSize(2))))-double(image(y_coord,inLimits(x_coord-1, imSize(2)))));
27 |
28 | % searching for left pixel darker than right for left-side of vertebra
29 | % searching for right pixel darker than left for right-side of vertebra
30 | if left && image(y_coord,inLimits(x_coord+1, imSize(2))) < image(y_coord,inLimits(x_coord-1, imSize(2)))
31 | elseif ~left && image(y_coord,inLimits(x_coord+1, imSize(2))) > image(y_coord,inLimits(x_coord-1, imSize(2)))
32 | elseif diff(ind) > max_diff(ind)
33 | max_diff(ind) = diff(ind);
34 | max_diff_ind(ind) = x_coord;
35 | end
36 |
37 | end
38 | end
39 |
40 | y_contour = point1:point2;
41 | x_contour = max_diff_ind;
42 |
43 | % eliminate max changes in intensity that are beyond the threshold from the
44 | % default line
45 | bool_arr = zeros(size(x_contour));
46 | for point=1:line_length
47 | if abs(x_contour(point)- default(point)) > threshold
48 | bool_arr(point) = 1;
49 | end
50 | end
51 | for point=1:line_length
52 | if bool_arr(point) == 1
53 | x_contour(point) = default(point);
54 | end
55 | end
56 |
57 | % fit polynomial to max changes in intensity
58 | p = polyfit(y_contour,x_contour,n);
59 | y1 = linspace(min(y_contour),max(y_contour));
60 | x_contour_smooth = polyval(p,y1);
61 | contour = [x_contour_smooth; y1]';
62 |
63 | end
64 |
65 |
--------------------------------------------------------------------------------
/dataset-preprocessing/scanVertForContour.m:
--------------------------------------------------------------------------------
1 | function contour = scanVertForContour(image, default, point1, point2, zone1, zone2, top)
2 |
3 | imSize = size(image);
4 | threshold = imSize(1)*imSize(2); % 1 -> default, big -> no correction ... imSize(1)*imSize(2)/75000 for some correction
5 |
6 | n = 4; % order of polynomial to fit
7 |
8 | % length of default line
9 | line_length = size(default);
10 | line_length = line_length(2);
11 |
12 | % variables to store the difference in intensity between two pixels
13 | % also need to store the maximum difference and its index
14 | diff = zeros(1, line_length); % note: this local array shadows MATLAB's built-in diff()
15 | max_diff = diff;
16 | max_diff_ind = diff + default;
17 |
18 |
19 | for x_coord = point1:point2
20 |
21 | ind = x_coord - point1 + 1;
22 |
23 | % scan search zone for max change in intensity
24 | for y_coord = default(ind)-zone1:default(ind)+zone2
25 |
26 | diff(ind) = abs(double(image(inLimits(y_coord+1, imSize(1)),x_coord))-double(image(inLimits(y_coord-1, imSize(1)),x_coord)));
27 |
28 | % searching for upper pixel darker than lower for top of vertebra
29 | % searching for lower pixel darker than upper for bottom of vertebra
30 | if top && (image(inLimits(y_coord+1, imSize(1)),x_coord) < image(inLimits(y_coord-1, imSize(1)),x_coord))
31 | elseif ~top && (image(inLimits(y_coord+1, imSize(1)),x_coord) > image(inLimits(y_coord-1, imSize(1)),x_coord))
32 | elseif diff(ind) > max_diff(ind)
33 | max_diff(ind) = diff(ind);
34 | max_diff_ind(ind) = y_coord;
35 | end
36 |
37 | end
38 | end
39 |
40 | x_contour = point1:point2;
41 | y_contour = max_diff_ind;
42 |
43 | % eliminate max changes in intensity that are beyond the threshold from the
44 | % default line
45 |
46 | bool_arr = zeros(size(y_contour));
47 | for point=1:line_length
48 | if abs(y_contour(point)- default(point)) > threshold
49 | bool_arr(point) = 1;
50 | end
51 | end
52 | for point=1:line_length
53 | if bool_arr(point) == 1
54 | y_contour(point) = default(point);
55 | end
56 | end
57 |
58 |
59 |
60 | % fit polynomial to max changes in intensity
61 | p = polyfit(x_contour,y_contour,n);
62 | x1 = linspace(min(x_contour),max(x_contour));
63 | y_contour_smooth = polyval(p,x1);
64 | contour = [x1; y_contour_smooth]';
65 |
66 | end
67 |
68 |
--------------------------------------------------------------------------------
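
Note: scanHorForContour.m and scanVertForContour.m implement the same procedure along different axes: at each position along a default line, search a small zone for the strongest intensity step with the expected polarity, then smooth the detections with a 4th-order polynomial fit. A condensed NumPy sketch of the vertical case (illustrative only; assumes a 2-D grayscale `image`, 0-based indexing, and `default` holding one row index per column):

```python
import numpy as np

def scan_vert_for_contour(image, default, x0, x1, zone1, zone2, top, order=4):
    """Trace a roughly horizontal contour: per column, pick the strongest vertical
    intensity step with the expected polarity, then polyfit-smooth the picks."""
    h = image.shape[0]
    cols = np.arange(x0, x1 + 1)
    ys = np.array(default, dtype=float)
    for i, x in enumerate(cols):
        best = 0.0
        for y in range(int(default[i]) - zone1, int(default[i]) + zone2 + 1):
            lo, hi = np.clip([y - 1, y + 1], 0, h - 1)
            step = float(image[hi, x]) - float(image[lo, x])
            # top edge: expect dark above / bright below (positive step); bottom: the reverse
            if (top and step <= 0) or (not top and step >= 0):
                continue
            if abs(step) > best:
                best, ys[i] = abs(step), y
    coeffs = np.polyfit(cols, ys, order)          # smooth the raw detections
    xs = np.linspace(cols[0], cols[-1], 100)
    return np.column_stack([xs, np.polyval(coeffs, xs)])
```
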
/performance-evaluation/results_segmentation.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import scipy.spatial.distance # needed for the Dice dissimilarity used below
3 | from numpy import genfromtxt
4 | import os
5 | import cv2
6 | from sklearn import metrics
7 |
8 | ########################################### Vertebra segmentation results
9 | IMG_SIZE_X = 128
10 | IMG_SIZE_Y = 256
11 |
12 | # function to generate datasets with segmentation maps of entire spinal column
13 | def create_roi_datasets(roi_dir, fn_dir, IMG_SIZE_X, IMG_SIZE_Y):
14 | # extract filenames and landmark data into arrays
15 | fn_data = genfromtxt(fn_dir, delimiter=',', dtype=str)
16 | roi_data = []
17 |
18 | # extract ROIs in order of filenames - same order as landmarks
19 | for filename in fn_data:
20 | for roi in os.listdir(roi_dir):
21 | if roi == filename:
22 | roi_array = cv2.imread(os.path.join(roi_dir, roi), cv2.IMREAD_GRAYSCALE)
23 | new_array = cv2.resize(roi_array, (IMG_SIZE_X, IMG_SIZE_Y))
24 | roi_data.append([new_array])
25 |
26 |
27 | # stack images in an array and binarize (pixels are uint8 0-255, so any nonzero value becomes foreground)
28 | roi_data = np.array(roi_data).reshape(-1, IMG_SIZE_Y, IMG_SIZE_X, 1)
29 | roi_data[roi_data < 0.5] = 0
30 | roi_data[roi_data >= 0.5] = 1
31 |
32 | roi_data = roi_data.astype(float)
33 | roi_data = np.squeeze(roi_data)
34 |
35 | return roi_data
36 |
37 | gt_roi_dir = "../data/PredictionsVsGroundTruth/SpineMasks_GroundTruthEndplates"
38 | pred_roi_dir = "../data/PredictionsVsGroundTruth/SpineMasks"
39 | pred_processed_roi_dir = "../data/PredictionsVsGroundTruth/SpineMasks_Processed"
40 | fn_dir = "../data/boostnet_labeldata/labels/test/filenames.csv"
41 |
42 | gt_masks = create_roi_datasets(gt_roi_dir, fn_dir, IMG_SIZE_X, IMG_SIZE_Y)
43 | pred_masks = create_roi_datasets(pred_roi_dir, fn_dir, IMG_SIZE_X, IMG_SIZE_Y)
44 | pred_proc_masks = create_roi_datasets(pred_processed_roi_dir, fn_dir, IMG_SIZE_X, IMG_SIZE_Y)
45 |
46 | pred_mask_acc = metrics.accuracy_score(gt_masks.reshape(-1), pred_masks.reshape(-1))
47 | pred_proc_masks_acc = metrics.accuracy_score(gt_masks.reshape(-1), pred_proc_masks.reshape(-1))
48 |
49 | pred_mask_bal_acc = metrics.balanced_accuracy_score(gt_masks.reshape(-1), pred_masks.reshape(-1))
50 | pred_proc_masks_bal_acc = metrics.balanced_accuracy_score(gt_masks.reshape(-1), pred_proc_masks.reshape(-1))
51 |
52 | pred_mask_dice = 1 - scipy.spatial.distance.dice(gt_masks.reshape(-1), pred_masks.reshape(-1))
53 | pred_proc_masks_dice = 1 - scipy.spatial.distance.dice(gt_masks.reshape(-1), pred_proc_masks.reshape(-1))
54 |
--------------------------------------------------------------------------------
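
Note: for binary masks, the Dice score computed above via `scipy.spatial.distance.dice` equals 2·TP / (2·TP + FP + FN). A small self-contained sanity check:

```python
import numpy as np
from scipy.spatial.distance import dice

gt = np.array([1, 1, 0, 0, 1, 0], dtype=bool)
pred = np.array([1, 0, 0, 1, 1, 0], dtype=bool)

tp = np.sum(gt & pred)    # 2
fp = np.sum(~gt & pred)   # 1
fn = np.sum(gt & ~pred)   # 1

dice_from_counts = 2 * tp / (2 * tp + fp + fn)   # 4/6
assert np.isclose(1 - dice(gt, pred), dice_from_counts)
```
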
/performance-evaluation/results_endplates.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | import seaborn as sns
4 | from matplotlib import pyplot as plt
5 | import pingouin as pg
6 | import pandas as pd
7 |
8 | ########################################### Endplate results
9 | gt_slopes_dir = "../data/PredictionsVsGroundTruth/EndplateSlopes_GroundTruthEndplates.csv"
10 | gt_slopes_data = genfromtxt(gt_slopes_dir, delimiter=',')
11 |
12 | pred_slopes_dir = "../data/PredictionsVsGroundTruth/EndplateSlopes.csv"
13 | pred_slopes_data = genfromtxt(pred_slopes_dir, delimiter=',')
14 |
15 | slopesDiff = pred_slopes_data - gt_slopes_data
16 | slopesAbsDiff = abs(pred_slopes_data - gt_slopes_data)
17 |
18 | SD = np.std(slopesDiff)
19 |
20 | slopesDiffMean = np.mean(slopesDiff)
21 | slopesAbsDiffMean = np.mean(slopesAbsDiff)
22 | slopesCorr = pg.corr(pred_slopes_data.reshape(-1),gt_slopes_data.reshape(-1))
23 |
24 | plt.figure()
25 | sns.distplot(slopesDiff.reshape(-1)) # distplot is deprecated in newer seaborn; histplot/kdeplot are the replacements
26 | plt.xlabel("Difference in Endplate Slope (Degrees)")
27 | plt.ylabel("Density")
28 | plt.title("Difference between Predicted and Ground-truth Endplate Slopes")
29 | plt.show()
30 |
31 | plt.figure()
32 | sns.scatterplot(x=gt_slopes_data.reshape(-1), y=pred_slopes_data.reshape(-1))
33 | plt.xlabel("Ground-truth Endplate Slope (Degrees)")
34 | plt.ylabel("Predicted Endplate Slope (Degrees)")
35 | plt.title("Ground-truth vs. Predicted Endplate Slopes")
36 | plt.show()
37 |
38 | ax = pg.plot_blandaltman(gt_slopes_data.flatten(), pred_slopes_data.flatten())
39 |
40 | gt_slopes_data_col = gt_slopes_data.reshape(-1)
41 | pred_slopes_data_col = pred_slopes_data.reshape(-1)
42 | icc_ratings = np.concatenate((gt_slopes_data_col, pred_slopes_data_col), axis=0)
43 |
44 | corr = pg.corr(pred_slopes_data.reshape(-1),gt_slopes_data.reshape(-1))
45 | print(corr.to_string())
46 |
47 | icc_targets = []
48 | icc_raters = []
49 | for k in range(2 * gt_slopes_data.size): # 4352 slopes per rater: 128 test images x 34 endplates
50 | if k < gt_slopes_data.size:
51 | icc_targets.append(str(k))
52 | icc_raters.append('gt')
53 | else:
54 | icc_targets.append(str(k - gt_slopes_data.size))
55 | icc_raters.append('pred')
56 |
57 | icc_df = pd.DataFrame({'Targets': icc_targets, 'Raters': icc_raters, 'Ratings': icc_ratings})
58 |
59 | icc = pg.intraclass_corr(data=icc_df, targets='Targets', raters='Raters', ratings='Ratings')
60 | print(icc.to_string())
61 |
62 | ##################################################### Find outliers
63 | EndplateOutliers = np.zeros(np.shape(slopesAbsDiff))
64 | EndplateOutliers[slopesAbsDiff > 20] = 1
65 | EndplateOutliersInd = np.array(np.where(EndplateOutliers == 1))
--------------------------------------------------------------------------------
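
Note: the ICC block above builds the two-rater long-format table by hand; the same layout can be produced directly from the arrays, which is easier to adapt if the number of images or endplates changes (a sketch following the script's conventions, not code from the repository):

```python
import numpy as np
import pandas as pd
import pingouin as pg

def icc_from_pair(gt, pred):
    """Build the long-format table pingouin expects for two 'raters'
    (ground truth vs. prediction) and compute the ICC."""
    gt, pred = np.asarray(gt).reshape(-1), np.asarray(pred).reshape(-1)
    n = gt.size
    df = pd.DataFrame({
        'Targets': np.tile(np.arange(n), 2),      # each measurement is one target
        'Raters':  np.repeat(['gt', 'pred'], n),
        'Ratings': np.concatenate([gt, pred]),
    })
    return pg.intraclass_corr(data=df, targets='Targets', raters='Raters', ratings='Ratings')
```
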
/clinical-assessment-algorithms/classifyLenkeCurveType.m:
--------------------------------------------------------------------------------
1 | function [curveType, curveTypeProbabilities] = classifyLenkeCurveType(cobbAngles)
2 | %CLASSIFYLENKECURVETYPE: classify the Lenke curve type from a given set of
3 | %Cobb angles, and calculate the probability of each curve type.
4 | pt = cobbAngles(1);
5 | mt = cobbAngles(2);
6 | tl = cobbAngles(3);
7 |
8 | % classify curve type
9 | if (pt < 25) && (mt >= 25) && (tl < 25)
10 | curveType = 1;
11 | elseif (pt >= 25) && (tl < 25)
12 | curveType = 2;
13 | elseif (pt < 25) && (mt >= 25) && (tl >= 25) && (mt >= tl)
14 | curveType = 3;
15 | elseif (pt >= 25) && (tl >= 25)
16 | curveType = 4;
17 | elseif (pt < 25) && (mt < 25) && (tl >= 25)
18 | curveType = 5;
19 | elseif (pt < 25) && (mt >= 25) && (tl >= 25) && (mt < tl)
20 | curveType = 6;
21 | elseif (tl > mt) && (tl > pt)
22 | curveType = 5;
23 | else
24 | curveType = 1;
25 | end
26 |
27 | %%%%%%%% estimate probability of each curve type
28 | %%%%%%%% assuming estimate lies in the center of normal distribution
29 |
30 | sd_est = 6.86; % standard deviation of estimated Cobb angle
31 |
32 | p_tl_major = probAGreaterThanB(tl,mt,sd_est)*probAGreaterThanB(tl,pt,sd_est);
33 | p_mt_major = 1 - p_tl_major;
34 |
35 | p_curvesLess25 = zeros(1, 3);
36 | count = 1;
37 | for ang = [pt mt tl]
38 | p_curvesLess25(count) = normcdf(25,ang,sd_est);
39 | count = count + 1;
40 | end
41 | p_curvesGreater25 = 1 - p_curvesLess25;
42 |
43 | curveTypeProbabilities = zeros(1, 6);
44 |
45 | curveTypeProbabilities(1) = ((p_curvesLess25(1)) * (p_curvesGreater25(2)) * (p_curvesLess25(3))) + ((p_curvesLess25(1)) * (p_curvesLess25(2)) * (p_curvesLess25(3)) * p_mt_major);
46 | curveTypeProbabilities(2) = ((p_curvesGreater25(1)) * (p_curvesGreater25(2)) * (p_curvesLess25(3))) + ((p_curvesGreater25(1)) * (p_curvesLess25(2)) * (p_curvesLess25(3)));
47 | curveTypeProbabilities(3) = ((p_curvesLess25(1)) * (p_curvesGreater25(2)) * (p_curvesGreater25(3)) * p_mt_major);
48 | curveTypeProbabilities(4) = ((p_curvesGreater25(1)) * (p_curvesGreater25(2)) * (p_curvesGreater25(3))) + ((p_curvesGreater25(1)) * (p_curvesLess25(2)) * (p_curvesGreater25(3)) * p_mt_major);
49 | curveTypeProbabilities(5) = ((p_curvesLess25(1)) * (p_curvesLess25(2)) * (p_curvesGreater25(3))) + ((p_curvesLess25(1)) * (p_curvesLess25(2)) * (p_curvesLess25(3)) * p_tl_major);
50 | curveTypeProbabilities(6) = ((p_curvesLess25(1)) * (p_curvesGreater25(2)) * (p_curvesGreater25(3)) * p_tl_major);
51 |
52 | end
53 |
54 |
55 | function [prob] = probAGreaterThanB(angA, angB, sd_est)
56 | step = 0:0.01:180;
57 | angA_pdf = normpdf(step,angA,sd_est);
58 | angB_pdf = normpdf(step,angB,sd_est);
59 | if angA > angB
60 | prob = (1-(trapz(min(angA_pdf, angB_pdf))/(2*trapz(angA_pdf))));
61 | else
62 | prob = (trapz(min(angA_pdf, angB_pdf))/(2*trapz(angA_pdf)));
63 | end
64 | end
65 |
--------------------------------------------------------------------------------
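
Note: probAGreaterThanB estimates P(A > B) from the overlap of the two normal PDFs. Under the same assumptions (independent normal errors with equal standard deviation), a closed form also exists and could serve as a cross-check: P(A > B) = Φ((a − b) / (σ√2)), since A − B ~ N(a − b, σ√2). A sketch (not code from the repository):

```python
from math import sqrt
from scipy.stats import norm

def prob_a_greater_than_b(ang_a, ang_b, sd_est=6.86):
    """P(A > B) for independent A ~ N(ang_a, sd) and B ~ N(ang_b, sd)."""
    return float(norm.cdf((ang_a - ang_b) / (sd_est * sqrt(2))))

# e.g. Cobb angle estimates of 30 and 25 degrees with sd 6.86:
# P(A > B) = Phi(5 / 9.70), roughly 0.70
print(prob_a_greater_than_b(30, 25))
```
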
/vertebra-segmentation-network/UNetTesting.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | import matplotlib.pyplot as plt
4 | import os
5 | import cv2
6 | import tensorflow as tf
7 |
8 | IMG_SIZE_X = 128
9 | IMG_SIZE_Y = 256
10 |
11 | def tversky_loss(y_true, y_pred, beta=0.5): # with beta = 0.5 the Tversky index reduces to the Dice coefficient
12 | def loss(y_true, y_pred):
13 | numerator = tf.reduce_sum(y_true * y_pred, axis=-1)
14 | denominator = y_true * y_pred + beta * (1 - y_true) * y_pred + (1 - beta) * y_true * (1 - y_pred)
15 |
16 | return 1 - (numerator + 1) / (tf.reduce_sum(denominator, axis=-1) + 1)
17 |
18 | return loss(y_true, y_pred)
19 |
20 | def roi2edge(roi): # unused helper (call commented out below); assumes no foreground pixels on the image border
21 | edge = np.zeros((IMG_SIZE_Y, IMG_SIZE_X))
22 | for k in range(IMG_SIZE_Y):
23 | for j in range(IMG_SIZE_X):
24 | if roi[k,j] == 1:
25 | if not (roi[k+1,j]==1 and roi[k-1,j]==1 and roi[k,j+1]==1 and roi[k,j-1]==1 and roi[k+1,j+1]==1 and roi[k+1,j-1]==1 and roi[k-1,j+1]==1 and roi[k-1,j-1]==1):
26 | edge[k,j] = 1
27 | return edge
28 |
29 | def create_test_im_datasets(im_dir, fn_dir, IMG_SIZE_X, IMG_SIZE_Y):
30 | fn_data = genfromtxt(fn_dir, delimiter=',', dtype=str)
31 | im_data = []
32 |
33 | for filename in fn_data:
34 | for img in os.listdir(im_dir):
35 | if img == filename:
36 | img_array = cv2.imread(os.path.join(im_dir, img), cv2.IMREAD_GRAYSCALE)
37 | new_array = cv2.resize(img_array, (IMG_SIZE_X, IMG_SIZE_Y))
38 | im_data.append([new_array])
39 |
40 | im_data = np.array(im_data).reshape(-1, IMG_SIZE_Y, IMG_SIZE_X, 1)
41 | im_data = im_data / 255.0
42 | return im_data
43 |
44 |
45 | test_im_dir = "../data/boostnet_labeldata/data/test"
46 | test_fn_dir = "../data/boostnet_labeldata/labels/test/filenames.csv"
47 |
48 | X = create_test_im_datasets(test_im_dir, test_fn_dir, IMG_SIZE_X, IMG_SIZE_Y)
49 |
50 |
51 | model_vertebrae = tf.keras.models.load_model("G:/My Drive/GitHub/automatic-scoliosis-assessment/models/VertebraSegmentationNetwork-1592842376", compile=False)
52 | model_vertebrae.compile(optimizer='adam', loss=tversky_loss, metrics=['accuracy'])
53 | Y_predicted = model_vertebrae.predict(X)
54 |
55 | kernel = np.ones((2,2),np.uint8)
56 |
57 | count = 0
58 | plot_count = 1
59 | fig = plt.figure()
60 | plt.title("Predictions")
61 | for j in range(128):
62 | # taking a sample
63 | if count % 14 == 0:
64 | plt.subplot(2, 5, plot_count)
65 | plt.imshow(X[j, :, :, 0], cmap="gray")
66 | # roi = Y_predicted_spacing[j, :, :, 0]
67 | # edge = roi2edge(roi)
68 | gradient = cv2.morphologyEx(Y_predicted[j, :, :, 0], cv2.MORPH_GRADIENT, kernel)
69 | plt.imshow(gradient, 'inferno', interpolation='none', alpha=0.3)
70 | plot_count += 1
71 | count += 1
72 | plt.show()
73 |
74 | # save spineMasks
75 | fn_dir = "../data/boostnet_labeldata/labels/test/filenames.csv"
76 | fn_data = genfromtxt(fn_dir, delimiter=',', dtype=str)
77 | for k in range(128):
78 | name = '../data/PredictionsVsGroundTruth/SpineMasks/'+fn_data[k]
79 | plt.imsave(name, Y_predicted[k, :, :, 0], cmap="gray")
80 |
81 |
82 |
--------------------------------------------------------------------------------
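
Note: `plt.imsave` stores the raw sigmoid outputs rescaled to 0-255 grayscale, and the evaluation scripts binarize them again at load time. If hard masks are wanted at this stage instead, a simple threshold on the predictions would do (an optional sketch reusing `Y_predicted` and `fn_data` from the script above, not part of the pipeline):

```python
import numpy as np
import matplotlib.pyplot as plt

# binarize at 0.5 before saving, so the stored masks are already {0, 255}
for k in range(128):
    hard_mask = ((Y_predicted[k, :, :, 0] >= 0.5) * 255).astype(np.uint8)
    plt.imsave('../data/PredictionsVsGroundTruth/SpineMasks/' + fn_data[k],
               hard_mask, cmap="gray")
```
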
/performance-evaluation/results_cobb.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | import seaborn as sns
4 | from matplotlib import pyplot as plt
5 | import pingouin as pg
6 | import pandas as pd
7 |
8 | ########################################### Angle results
9 | gt_angle_dir = "../data/PredictionsVsGroundTruth/Angles_GroundTruthEndplates.csv"
10 | gt_angle_data = genfromtxt(gt_angle_dir, delimiter=',')
11 |
12 | pred_angle_dir = "../data/PredictionsVsGroundTruth/Angles.csv"
13 | pred_angle_data = genfromtxt(pred_angle_dir, delimiter=',')
14 |
15 | AD = abs(gt_angle_data - pred_angle_data)
16 | MAD = np.mean(AD)
17 |
18 | AD1 = abs(gt_angle_data[:,0] - pred_angle_data[:,0])
19 | MAD1 = np.mean(AD1)
20 |
21 | AD2 = abs(gt_angle_data[:,1] - pred_angle_data[:,1])
22 | MAD2 = np.mean(AD2)
23 |
24 | AD3 = abs(gt_angle_data[:,2] - pred_angle_data[:,2])
25 | MAD3 = np.mean(AD3)
26 |
27 | ########## This is shorter
28 | MAD = np.mean(abs(gt_angle_data.reshape(-1) - pred_angle_data.reshape(-1)))
29 |
30 | D = pred_angle_data - gt_angle_data
31 | MD = np.mean(D)
32 |
33 | SD = np.std(D)
34 |
35 | corr = pg.corr(pred_angle_data.reshape(-1),gt_angle_data.reshape(-1))
36 | print(corr.to_string())
37 |
38 | plt.figure()
39 | # sns.distplot(D[:,0], label="Proximal-thoracic")
40 | # sns.distplot(D[:,1], label="Main thoracic")
41 | # sns.distplot(D[:,2], label="Lumbar")
42 | sns.distplot(D.reshape(-1))
43 | plt.xlabel("Difference in Cobb Angle (Degrees)")
44 | plt.ylabel("Density")
45 | # plt.legend()
46 | plt.title("Difference between Predicted and Ground-truth Cobb Angles")
47 | plt.show()
48 |
49 | ########## Shapiro-Wilk test
50 | ShapiroWilk = pg.normality(data=D.reshape(-1))
51 | print(ShapiroWilk.to_string())
52 | pg.qqplot(D.reshape(-1), dist='norm', sparams=(), confidence=0.95, figsize=(5, 4), ax=None)
53 |
54 | plt.figure()
55 | # sns.scatterplot(x=gt_angle_data[:,0], y=pred_angle_data[:,0], label="Proximal-thoracic")
56 | # sns.scatterplot(x=gt_angle_data[:,1], y=pred_angle_data[:,1], label="Main thoracic")
57 | # sns.scatterplot(x=gt_angle_data[:,2], y=pred_angle_data[:,2], label="Lumbar")
58 | sns.scatterplot(x=gt_angle_data.reshape(-1), y=pred_angle_data.reshape(-1))
59 | plt.xlabel("Ground-truth Angle (Degrees)")
60 | plt.ylabel("Predicted Angle (Degrees)")
61 | # plt.legend()
62 | plt.title("Ground-truth vs. Predicted Cobb Angles")
63 | plt.show()
64 |
65 | ax = pg.plot_blandaltman(gt_angle_data.flatten(), pred_angle_data.flatten())
66 |
67 |
68 |
69 | gt_angle_data_col = gt_angle_data.reshape(-1)
70 | pred_angle_data_col = pred_angle_data.reshape(-1)
71 | icc_ratings = np.concatenate((gt_angle_data_col, pred_angle_data_col), axis=0)
72 |
73 | icc_targets = []
74 | icc_raters = []
75 | for k in range(2 * gt_angle_data.size): # 384 angles per rater: 128 test images x 3 Cobb angles
76 | if k < gt_angle_data.size:
77 | icc_targets.append(str(k))
78 | icc_raters.append('gt')
79 | else:
80 | icc_targets.append(str(k - gt_angle_data.size))
81 | icc_raters.append('pred')
82 |
83 | icc_df = pd.DataFrame({'Targets': icc_targets, 'Raters': icc_raters, 'Ratings': icc_ratings})
84 |
85 | icc = pg.intraclass_corr(data=icc_df, targets='Targets', raters='Raters', ratings='Ratings')
86 | print(icc.to_string())
87 |
88 |
89 | ##################################################### Find outliers
90 | CobbOutliers = np.zeros(np.shape(AD))
91 | CobbOutliers[AD > 20] = 1
92 | CobbOutliersInd = np.array(np.where(CobbOutliers == 1))
93 |
--------------------------------------------------------------------------------
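
Note: `pg.plot_blandaltman` visualises the mean difference and the 95% limits of agreement, which follow directly from the MD and SD already computed in the script (a short sketch using those arrays):

```python
import numpy as np

d = (pred_angle_data - gt_angle_data).reshape(-1)
mean_diff = np.mean(d)
sd_diff = np.std(d)

# 95% limits of agreement: mean difference +/- 1.96 * SD of the differences
print(f"MD = {mean_diff:.2f}, LoA = [{mean_diff - 1.96 * sd_diff:.2f}, "
      f"{mean_diff + 1.96 * sd_diff:.2f}]")
```
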
/README.md:
--------------------------------------------------------------------------------
1 | # automatic-scoliosis-assessment
2 | Automatic scoliosis assessment in spinal x-rays using deep learning and a novel vertebral segmentation dataset.
3 |
4 | Publications resulting from any use of this material must cite https://www.scoliosistools.com/
5 |
6 | **The thesis report & presentation are available on request, providing an in-depth description of this project and any further information.**
7 |
8 | ## Getting Started
9 | To run the code in this repository, first obtain SpineWeb Dataset 16 (http://spineweb.digitalimaginggroup.ca/Index.php?n=Main.Datasets#Dataset_16.3A_609_spinal_anterior-posterior_x-ray_images).
10 | After cloning, copy the 'boostnet_labeldata' folder into a new folder named 'data' in the root directory of the local repository.
11 | This 'automatic-scoliosis-assessment/data/' folder will be used to store any additional data generated locally.
12 |
13 | Google Colab was used for training the U-Net in this project. The scripts have been designed to work with Google Drive File Stream for this purpose. By default, the scripts create and work out of the following Google Drive directory:
14 | 'G:/My Drive/GitHub/automatic-scoliosis-assessment/'. This can be adjusted if required.
15 |
16 | The following process outlines the steps required to preprocess the data, train the network, and test the algorithms for automatic assessment of scoliosis.
17 | This process can be adjusted as required (for example, if a different dataset is used or if not using Google Colab and File Stream).
18 |
19 | ### 1: dataset-preprocessing
20 | 1. Run 'createDirectories.py'. This will create the necessary folders in 'automatic-scoliosis-assessment/data/' for storage. This step only needs to be performed once.
21 | 2. Run 'fixLandmarkErrors.py' to correct for errors that have been identified with the ground-truth landmark dataset.
22 | 3. Run 'hiResROI.m' in MATLAB. This script will generate high-resolution ground-truth vertebral segmentations from the ground-truth landmarks. Note: The terms segmentation, mask, and region of interest (ROI) may be used interchangeably in this project.
23 | 4. Run 'dataAugmentation.py', which performs various augmentations on the images to increase the dataset size. This is done in advance of training so that the ground-truth data augmentations match up with the image augmentations.
24 | 5. Run 'createPickleDatasets.py'. This will save the training data into '.pickle' arrays in Google Drive, for use in Google Colab.
25 |
26 | ### 2: vertebra-segmentation-network
27 | 1. Open Google Drive in a browser, and open 'My Drive/GitHub/automatic-scoliosis-assessment/UNetTraining_Colab.ipynb' in Google Colab.
28 | 2. Change the runtime to GPU (Runtime > Change runtime type).
29 | 3. Mount Google Drive (Files > Mount Drive).
30 | 4. Run all cells (Runtime > Run all). This short notebook will clone the automatic-scoliosis-assessment repo and run the 'UNetTraining.py' script, accessing the training data in '/content/drive/My Drive/GitHub/automatic-scoliosis-assessment/'.
31 | 5. Run 'UNetTesting.py' locally, adjusting the timestamp in line 51 to that of the trained network (visible in 'My Drive/GitHub/automatic-scoliosis-assessment/models/'). This script will save the network predictions for the testing data.
32 |
33 | ### 3: performance-evaluation
34 | 1. Run 'boostnet_data_test.m' in MATLAB. This script will generate and save the relevant clinical metrics from the predicted test-set segmentations using the developed functions in '**clinical-assessment-algorithms**'. Note: the 'plotting' variable can also be set to 'true' to visually plot a random sample of test-set performance.
35 | 2. Run 'results_segmentation.py', 'results_endplates.py', 'results_cobb.py', and 'results_lenke.py' to generate various statistics summarising the performance of the system.
36 |
--------------------------------------------------------------------------------
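
Note: for orientation, after step 1 of dataset-preprocessing the local 'data' folder should look roughly like this (reconstructed from the paths used in createDirectories.py and the other scripts; exact contents depend on the dataset download):

```
data/
├── boostnet_labeldata/          # SpineWeb Dataset 16, copied in manually
│   ├── data/{training, test}/
│   └── labels/{training, test}/filenames.csv, ...
├── FixedSpineWebData/
├── HiResVertebraeMasks/
├── DataAugmentation/{images, masks}/
└── PredictionsVsGroundTruth/
    ├── SpineMasks/
    ├── SpineMasks_Processed/
    └── SpineMasks_GroundTruthEndplates/
```
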
/clinical-assessment-algorithms/predictedSpineMask2Outputs.m:
--------------------------------------------------------------------------------
1 | function [cobbAngles,outputOverlay] = predictedSpineMask2Outputs(xray,spineMask)
2 | %PREDICTEDSPINEMASK2OUTPUTS interface with the clinical assessment algorithms
3 | %to provide relevant metrics given an input predicted vertebral segmentation map
4 |
5 | %% call algorithms to perform assessment
6 | xray_size = size(xray);
7 | spineMask = imresize(spineMask, xray_size);
8 | spineMask = processSpineMask(spineMask);
9 |
10 | [endplateLandmarks, centroidsTop2Bottom] = fitEndplates(spineMask);
11 |
12 | nullAngleLocations = zeros(4,1); % used to allow for manual setting of angle locations
13 | [cobbAngles, ~, ~, cobbEndplates] = calculateCobbAngles(endplateLandmarks, centroidsTop2Bottom, nullAngleLocations);
14 |
15 | %% create rgb image to show algorithm process
16 | outputOverlay = zeros([xray_size 3]);
17 | outputOverlayR = xray;
18 | outputOverlayG = xray;
19 | outputOverlayB = xray;
20 |
21 | % segmentation overlay
22 | spineMaskPerim = imdilate(bwperim(spineMask), strel('disk',5));
23 | outputOverlayR(spineMaskPerim) = 255;
24 | outputOverlayG(spineMaskPerim) = 255;
25 | outputOverlayB(spineMaskPerim) = 255;
26 |
27 | % cobb overlay
28 | cobbOverlay = zeros([xray_size 3]);
29 |
30 | cobbEndplatesExtended = cobbEndplates;
31 | for n = 1:3 % pt - mt - tl/l
32 | for m = [0 2]
33 | cobbEndplatesExtended(1+m,1,n) = cobbEndplates(1+m,1,n)-(cobbEndplates(2+m,1,n)-cobbEndplates(1+m,1,n));
34 | cobbEndplatesExtended(2+m,1,n) = cobbEndplates(2+m,1,n)-(cobbEndplates(1+m,1,n)-cobbEndplates(2+m,1,n));
35 | cobbEndplatesExtended(1+m,2,n) = cobbEndplates(1+m,2,n)-(cobbEndplates(2+m,2,n)-cobbEndplates(1+m,2,n));
36 | cobbEndplatesExtended(2+m,2,n) = cobbEndplates(2+m,2,n)-(cobbEndplates(1+m,2,n)-cobbEndplates(2+m,2,n));
37 | end
38 | end
39 |
40 | for n = 1:3 % pt - mt - tl/l
41 | for m = [0 2]
42 | hor = abs(cobbEndplatesExtended(1+m,1,n) - cobbEndplatesExtended(2+m,1,n));
43 | vert = abs(cobbEndplatesExtended(1+m,2,n) - cobbEndplatesExtended(2+m,2,n));
44 | line_length = max(hor,vert);
45 | if cobbEndplatesExtended(1+m,1,n) ~= cobbEndplatesExtended(2+m,1,n)
46 | lineCols = round(cobbEndplatesExtended(1+m,1,n):(cobbEndplatesExtended(2+m,1,n)-cobbEndplatesExtended(1+m,1,n))/line_length:cobbEndplatesExtended(2+m,1,n));
47 | else
48 | lineCols = zeros(1, line_length+1) + cobbEndplatesExtended(1+m,1,n);
49 | end
50 | if cobbEndplatesExtended(1+m,2,n) ~= cobbEndplatesExtended(2+m,2,n)
51 | lineRows = round(cobbEndplatesExtended(1+m,2,n):(cobbEndplatesExtended(2+m,2,n)-cobbEndplatesExtended(1+m,2,n))/line_length:cobbEndplatesExtended(2+m,2,n));
52 | else
53 | lineRows = zeros(1, line_length+1) + cobbEndplatesExtended(1+m,2,n);
54 | end
55 | for k = 1:line_length+1
56 | if lineRows(k) >= 1 && lineCols(k) >= 1 && lineRows(k) < xray_size(1) && lineCols(k) < xray_size(2) % guard against extended endplate lines leaving the image
57 | cobbOverlay(lineRows(k),lineCols(k),n) = 1;
58 | end
59 | end
60 | end
61 | end
62 | ptOverlay = imdilate(cobbOverlay(:,:,1), strel('disk',5));
63 | mtOverlay = imdilate(cobbOverlay(:,:,2), strel('disk',5));
64 | tlOverlay = imdilate(cobbOverlay(:,:,3), strel('disk',5));
65 |
66 | outputOverlayR(ptOverlay == 1) = 255;
67 | outputOverlayG(ptOverlay == 1) = 0;
68 | outputOverlayB(ptOverlay == 1) = 0;
69 |
70 | outputOverlayR(mtOverlay == 1) = 0;
71 | outputOverlayG(mtOverlay == 1) = 255;
72 | outputOverlayB(mtOverlay == 1) = 0;
73 |
74 | outputOverlayR(tlOverlay == 1) = 0;
75 | outputOverlayG(tlOverlay == 1) = 0;
76 | outputOverlayB(tlOverlay == 1) = 255;
77 |
78 | outputOverlay(:,:,1) = outputOverlayR;
79 | outputOverlay(:,:,2) = outputOverlayG;
80 | outputOverlay(:,:,3) = outputOverlayB;
81 |
82 | outputOverlay = outputOverlay/255;
83 | end
84 |
85 |
--------------------------------------------------------------------------------
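
Note: the overlay code above rasterises each extended endplate line by hand. If this were ported to Python, OpenCV's line drawing could replace the manual stepping; a minimal sketch of one colour-coded Cobb line (illustrative names, not from the repository):

```python
import cv2
import numpy as np

def draw_cobb_line(overlay, p1, p2, channel):
    """Draw an endplate line into one colour channel of an RGB overlay
    (0: PT red, 1: MT green, 2: TL/L blue), mirroring the MATLAB colour coding."""
    color = [0, 0, 0]
    color[channel] = 255
    cv2.line(overlay, tuple(map(int, p1)), tuple(map(int, p2)), color, thickness=5)

overlay = np.zeros((256, 128, 3), dtype=np.uint8)
draw_cobb_line(overlay, (10, 40), (110, 55), channel=1)  # a main-thoracic line in green
```
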
/performance-evaluation/results_lenke.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | import seaborn as sns
4 | from matplotlib import pyplot as plt
5 | import pingouin as pg
6 | import pandas as pd
7 | from sklearn.metrics import cohen_kappa_score
8 |
9 | ########################################### Lenke curve type results
10 | gt_lenke_dir = "../data/PredictionsVsGroundTruth/LenkeCurveTypes_GroundTruthEndplates.csv"
11 | gt_lenke_data = genfromtxt(gt_lenke_dir, delimiter=',')
12 |
13 | pred_lenke_dir = "../data/PredictionsVsGroundTruth/LenkeCurveTypes.csv"
14 | pred_lenke_data = genfromtxt(pred_lenke_dir, delimiter=',')
15 |
16 | correct_lenke = np.zeros_like(gt_lenke_data)
17 | correct_lenke[gt_lenke_data == pred_lenke_data] = 1
18 |
19 | lenke_accuracy = np.sum(correct_lenke)/correct_lenke.size # fraction of correctly classified curve types
20 |
21 | plt.figure()
22 | sns.regplot(x=gt_lenke_data.reshape(-1),
23 | y=pred_lenke_data.reshape(-1),
24 | fit_reg=False, # do not fit a regression line
25 | x_jitter=0.1, # could also dynamically set this with range of data
26 | y_jitter=0.1,
27 | scatter_kws={'alpha': 0.5}) # set transparency to 50%
28 | plt.xlabel("Ground-truth Curve Type")
29 | plt.ylabel("Predicted Curve Type")
30 | plt.title("Ground-truth vs. Predicted Curve Types")
31 | plt.show()
32 |
33 | kap = cohen_kappa_score(gt_lenke_data, pred_lenke_data) # , weights='linear'?
34 |
35 | ######################################### Lenke curve type probabilities
36 |
37 | gt_lenke_prob_dir = "../data/PredictionsVsGroundTruth/LenkeCurveTypeProbabilities_GroundTruthEndplates.csv"
38 | gt_lenke_prob_data = genfromtxt(gt_lenke_prob_dir, delimiter=',')
39 |
40 | pred_lenke_prob_dir = "../data/PredictionsVsGroundTruth/LenkeCurveTypeProbabilities.csv"
41 | pred_lenke_prob_data = genfromtxt(pred_lenke_prob_dir, delimiter=',')
42 |
43 |
44 | MAD = np.mean(abs(gt_lenke_prob_data.reshape(-1) - pred_lenke_prob_data.reshape(-1)))
45 |
46 | D = pred_lenke_prob_data - gt_lenke_prob_data
47 | MD = np.mean(D)
48 |
49 | SD = np.std(D)
50 |
51 | corr = pg.corr(pred_lenke_prob_data.reshape(-1),gt_lenke_prob_data.reshape(-1))
52 | print(corr.to_string())
53 |
54 | plt.figure()
55 | # sns.distplot(D[:,0], label="Proximal-thoracic")
56 | # sns.distplot(D[:,1], label="Main thoracic")
57 | # sns.distplot(D[:,2], label="Lumbar")
58 | sns.distplot(D.reshape(-1))
59 | plt.xlabel("Difference in Probability")
60 | plt.ylabel("Density")
61 | # plt.legend()
62 | plt.title("Difference between Predicted and Ground-truth Lenke Curve Type Probabilities")
63 | plt.show()
64 |
65 | plt.figure()
66 | # sns.scatterplot(x=gt_angle_data[:,0], y=pred_angle_data[:,0], label="Proximal-thoracic")
67 | # sns.scatterplot(x=gt_angle_data[:,1], y=pred_angle_data[:,1], label="Main thoracic")
68 | # sns.scatterplot(x=gt_angle_data[:,2], y=pred_angle_data[:,2], label="Lumbar")
69 | sns.scatterplot(x=gt_lenke_prob_data.reshape(-1), y=pred_lenke_prob_data.reshape(-1))
70 | plt.xlabel("Ground-truth Probability")
71 | plt.ylabel("Predicted Probability")
72 | # plt.legend()
73 | plt.title("Ground-truth vs. Predicted Lenke Curve Type Probabilities")
74 | plt.show()
75 |
76 | ax = pg.plot_blandaltman(gt_lenke_prob_data.flatten(), pred_lenke_prob_data.flatten())
77 |
78 | gt_lenke_prob_data_col = gt_lenke_prob_data.reshape(-1)
79 | pred_lenke_prob_data_col = pred_lenke_prob_data.reshape(-1)
80 | icc_ratings = np.concatenate((gt_lenke_prob_data_col, pred_lenke_prob_data_col), axis=0)
81 |
82 | icc_targets = []
83 | icc_raters = []
84 | for k in range(2 * gt_lenke_prob_data.size): # 768 probabilities per rater: 128 test images x 6 curve types
85 | if k < gt_lenke_prob_data.size:
86 | icc_targets.append(str(k))
87 | icc_raters.append('gt')
88 | else:
89 | icc_targets.append(str(k - gt_lenke_prob_data.size))
90 | icc_raters.append('pred')
91 |
92 | icc_df = pd.DataFrame({'Targets': icc_targets, 'Raters': icc_raters, 'Ratings': icc_ratings})
93 |
94 | icc = pg.intraclass_corr(data=icc_df, targets='Targets', raters='Raters', ratings='Ratings')
95 | print(icc.to_string())
96 |
97 | ###### curve type classification from probabilities
98 | gt_prob_class = np.argmax(gt_lenke_prob_data, axis=1)+1
99 | pred_prob_class = np.argmax(pred_lenke_prob_data, axis=1)+1
100 | kap_prob_class = cohen_kappa_score(gt_prob_class, pred_prob_class)
101 |
102 |
103 |
--------------------------------------------------------------------------------
/vertebra-segmentation-network/UNetTraining.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, MaxPooling2D, Conv2DTranspose, concatenate
3 | from tensorflow.keras.callbacks import TensorBoard, ModelCheckpoint
4 | import time
5 | import pickle
6 |
7 | NAME = "VertebraSegmentationNetwork-{}".format(int(time.time()))
8 |
9 | tensorboard = TensorBoard(log_dir="/content/drive/My Drive/GitHub/automatic-scoliosis-assessment/logs/{}".format(NAME))
10 |
11 | pickle_in = open("/content/drive/My Drive/GitHub/automatic-scoliosis-assessment/train_images.pickle", "rb")
12 | X = pickle.load(pickle_in)
13 |
14 | pickle_in = open("/content/drive/My Drive/GitHub/automatic-scoliosis-assessment/train_masks.pickle", "rb")
15 | Y = pickle.load(pickle_in)
16 |
17 | # using tversky_loss for segmentations
18 | def tversky_loss(y_true, y_pred, beta=0.7): # beta > 0.5 penalises false negatives more heavily than false positives
19 | def loss(y_true, y_pred):
20 | numerator = tf.reduce_sum(y_true * y_pred, axis=-1)
21 | denominator = y_true * y_pred + beta * (1 - y_true) * y_pred + (1 - beta) * y_true * (1 - y_pred)
22 |
23 | return 1 - (numerator + 1) / (tf.reduce_sum(denominator, axis=-1) + 1)
24 |
25 | return loss(y_true, y_pred)
26 |
27 |
28 | # Build U-Net model
29 |
30 | IMG_WIDTH = 128
31 | IMG_HEIGHT = 256
32 | IMG_CHANNELS = 1
33 |
34 | inputs = tf.keras.layers.Input((IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS))
35 |
36 | c1 = Conv2D(16, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (inputs)
37 | c1 = Dropout(0.1) (c1)
38 | c1 = Conv2D(16, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c1)
39 | p1 = MaxPooling2D((2, 2)) (c1)
40 |
41 | c2 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (p1)
42 | c2 = Dropout(0.1) (c2)
43 | c2 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c2)
44 | p2 = MaxPooling2D((2, 2)) (c2)
45 |
46 | c3 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (p2)
47 | c3 = Dropout(0.2) (c3)
48 | c3 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c3)
49 | p3 = MaxPooling2D((2, 2)) (c3)
50 |
51 | c4 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (p3)
52 | c4 = Dropout(0.2) (c4)
53 | c4 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c4)
54 | p4 = MaxPooling2D(pool_size=(2, 2)) (c4)
55 |
56 | c5 = Conv2D(256, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (p4)
57 | c5 = Dropout(0.3) (c5)
58 | c5 = Conv2D(256, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c5)
59 |
60 | u6 = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same') (c5)
61 | u6 = concatenate([u6, c4])
62 | c6 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (u6)
63 | c6 = Dropout(0.2) (c6)
64 | c6 = Conv2D(128, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c6)
65 |
66 | u7 = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same') (c6)
67 | u7 = concatenate([u7, c3])
68 | c7 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (u7)
69 | c7 = Dropout(0.2) (c7)
70 | c7 = Conv2D(64, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c7)
71 |
72 | u8 = Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same') (c7)
73 | u8 = concatenate([u8, c2])
74 | c8 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (u8)
75 | c8 = Dropout(0.1) (c8)
76 | c8 = Conv2D(32, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c8)
77 |
78 | u9 = Conv2DTranspose(16, (2, 2), strides=(2, 2), padding='same') (c8)
79 | u9 = concatenate([u9, c1], axis=3)
80 | c9 = Conv2D(16, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (u9)
81 | c9 = Dropout(0.1) (c9)
82 | c9 = Conv2D(16, (3, 3), activation='elu', kernel_initializer='he_normal', padding='same') (c9)
83 |
84 | outputs = Conv2D(1, (1, 1), activation='sigmoid') (c9)
85 |
86 |
87 | model = tf.keras.Model(inputs=[inputs], outputs=[outputs])
88 | model.compile(optimizer='adam', loss=tversky_loss, metrics=['accuracy'])
89 | model.summary()
90 |
91 | checkpointer = ModelCheckpoint("/content/drive/My Drive/GitHub/automatic-scoliosis-assessment/models/{}".format(NAME), verbose=1, save_best_only=True, save_weights_only=False)
92 |
93 | model.fit(X, Y, batch_size=64, epochs=200, validation_split=0.15, callbacks=[tensorboard, checkpointer])
94 |
95 | del X, Y
96 |
--------------------------------------------------------------------------------
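
Note: the Tversky loss generalises the Dice loss: with β = 0.5 the false-positive and false-negative terms are weighted equally and the index collapses to Dice, while β = 0.7 (used here) penalises false negatives more, countering the background-heavy class balance of spine masks. The Keras version above reduces over the trailing axis; a NumPy check of the global β = 0.5 case (illustrative):

```python
import numpy as np

def tversky_index(y_true, y_pred, beta):
    num = np.sum(y_true * y_pred)
    den = np.sum(y_true * y_pred
                 + beta * (1 - y_true) * y_pred          # false positives
                 + (1 - beta) * y_true * (1 - y_pred))   # false negatives
    return (num + 1) / (den + 1)                         # +1 smoothing, as above

y_true = np.array([1.0, 1.0, 0.0, 0.0])
y_pred = np.array([1.0, 0.0, 1.0, 0.0])

# with beta = 0.5 the (smoothed) Tversky index equals the (smoothed) Dice coefficient
dice = (2 * np.sum(y_true * y_pred) + 2) / (np.sum(y_true) + np.sum(y_pred) + 2)
assert np.isclose(tversky_index(y_true, y_pred, beta=0.5), dice)
```
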
/clinical-assessment-algorithms/processSpineMask.m:
--------------------------------------------------------------------------------
1 | function [spineMask] = processSpineMask(spineMask)
2 | %PROCESSSPINEMASK: post-processing to automatically remove errors in
3 | %predicted vertebral segmentation.
4 |
5 | spineMask = imbinarize(spineMask(:,:,1));
6 | numPixels = size(spineMask);
7 | numPixels = numPixels(1)*numPixels(2);
8 |
9 | %%%%%%%%%%%%%%%%%%% remove any tiny objects in image
10 | spineMask = bwareafilt(spineMask, [round(numPixels*0.001) numPixels]);
11 |
12 | %%%%%%%%%%%%%%%%%%% imerode -> watershed -> dilate horizontally to fill watershed holes
13 | SE = strel('rectangle',[3 1]); %%%%%%%%%% does this lessen slopes?
14 | spineMask = imerode(spineMask,SE);
15 |
16 | D = -bwdist(~spineMask, 'cityblock');
17 | L = watershed(D);
18 | spineMask(L == 0) = 0;
19 |
20 | SE = strel('rectangle',[1 2]);
21 | spineMask = imdilate(spineMask,SE);
22 |
23 | %%%%%%%%%%%%%%%%%%% imerode -> imfill any remaining holes
24 | SE = strel('rectangle',[2 1]);
25 | spineMask = imerode(spineMask,SE);
26 |
27 | spineMask = imfill(spineMask,'holes');
28 |
29 |
30 | %%%%%%%%%%%%%%%%%%% find any objects with outlier area i.e. vertebrae that are still joined
31 | %%%%%%%%%%%%%%%%%%% repeat watershed process for the large outliers found
32 | stats = regionprops(spineMask, 'Area');
33 | objectAreas = cat(1,stats.Area);
34 | TF = isoutlier(objectAreas,'movmedian',10);
35 | % ignore small outliers in this step
36 | for k = 1:length(TF)
37 | if TF(k)
38 | if objectAreas(k) < median(objectAreas)
39 | TF(k) = 0;
40 | end
41 | end
42 | end
43 |
44 | stopCount = 0;
45 | while (sum(TF) > 0) && (stopCount < 3)
46 |
47 | labelMat = bwlabel(spineMask);
48 | for k = 1:length(TF)
49 | if TF(k)
50 | mergedVertebraeMask = ismember(labelMat, k);
51 | labelMat(labelMat==k) = 0;
52 |
53 | SE = strel('diamond',5);
54 | mergedVertebraeMask = imerode(mergedVertebraeMask,SE);
55 |
56 | D = -bwdist(~mergedVertebraeMask, 'euclidean');
57 | L = watershed(D);
58 | mergedVertebraeMask(L == 0) = 0;
59 |
60 | SE = strel('square',3);
61 | mergedVertebraeMask = imdilate(mergedVertebraeMask,SE);
62 |
63 | mergedVertebraeMask = imfill(mergedVertebraeMask,'holes');
64 |
65 | labelMat(mergedVertebraeMask==1) = k;
66 |
67 | end
68 | end
69 |
70 | spineMask = labelMat > 0; % binarize the label matrix directly (imbinarize would apply an Otsu threshold to the labels)
71 |
72 | % get stats with region props
73 | stats = regionprops(spineMask, 'Area');
74 | objectAreas = cat(1,stats.Area);
75 | TF = isoutlier(objectAreas,'movmedian',10);
76 | % ignore small outliers in this step
77 | for k = 1:length(TF)
78 | if TF(k)
79 | if objectAreas(k) < median(objectAreas)
80 | TF(k) = 0;
81 | end
82 | end
83 | end
84 |
85 | stopCount = stopCount + 1;
86 | end
87 |
88 | %%%%%%%%%%%%%%%%%%% remove small outliers
89 | stats = regionprops(spineMask, 'Area');
90 | objectAreas = cat(1,stats.Area);
91 | labelMat = bwlabel(spineMask);
92 | for k = 1:length(objectAreas)
93 | if objectAreas(k) < 0.4*median(objectAreas)
94 | labelMat(labelMat==k) = 0;
95 | end
96 | end
97 | spineMask = labelMat > 0;
98 |
99 |
100 | %%%%%%%%%%%%%%%%%%%%%% remove outliers too far from spinal column
101 | flag = true;
102 | while flag
103 | labelMat = bwlabel(spineMask);
104 | stats = regionprops(spineMask, 'Centroid', 'Area');
105 | objectAreas = cat(1,stats.Area);
106 | objectCentroids = cat(1,stats.Centroid);
107 | numObjects = size(objectCentroids,1); % length() would be wrong for a single Nx2 centroid row
108 | [~, sortedCentroidsIndex] = sort(objectCentroids(:,2));
109 |
110 | % calculate distance from expected horizontal coordinate for each centroid
111 | dist2expectedX = zeros(numObjects,1);
112 | dist2expectedX(1) = abs(objectCentroids(sortedCentroidsIndex(1),1) - objectCentroids(sortedCentroidsIndex(2),1));
113 | for k = 2:(numObjects-1)
114 | neighbouringAvgX = (objectCentroids(sortedCentroidsIndex(k+1),1) + objectCentroids(sortedCentroidsIndex(k-1),1))/2;
115 | dist2expectedX(k) = abs(objectCentroids(sortedCentroidsIndex(k),1) - neighbouringAvgX);
116 |
117 | end
118 | dist2expectedX(numObjects) = abs(objectCentroids(sortedCentroidsIndex(numObjects),1) - objectCentroids(sortedCentroidsIndex(numObjects-1),1));
119 |
120 | % if there appears to be an outlier, remove the largest and repeat process
121 | if ~isempty(find(dist2expectedX > sqrt(mean(objectAreas)), 1))
122 | [~, maxDistInd] = max(dist2expectedX);
123 | maxDistInd = maxDistInd(1);
124 | labelMat(labelMat==sortedCentroidsIndex(maxDistInd)) = 0;
125 | else
126 | flag = false;
127 | end
128 | spineMask = labelMat > 0;
129 | end
130 |
131 | end
132 |
--------------------------------------------------------------------------------
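
Note: the erode → distance transform → watershed → dilate sequence used above to split merged vertebrae has a close analogue in Python via scikit-image, should the post-processing ever be ported (a rough sketch under that assumption, not part of the repository):

```python
import numpy as np
from scipy import ndimage as ndi
from skimage.morphology import binary_erosion, binary_dilation
from skimage.segmentation import watershed

def split_merged_objects(mask):
    """Erode, run watershed on the negated distance transform so the ridge
    lines cut merged objects apart, then dilate to restore size."""
    eroded = binary_erosion(mask)
    distance = ndi.distance_transform_edt(eroded)
    labels = watershed(-distance, mask=eroded, watershed_line=True)
    cut = labels > 0              # zero-valued watershed lines split the objects
    return binary_dilation(cut)
```
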
/clinical-assessment-algorithms/calculateCobbAngles.m:
--------------------------------------------------------------------------------
1 | function [cobbAngles, apicalVertebrae, angleLocations, cobbEndplates] = calculateCobbAngles(endplateLandmarks, centroids, angleLocations)
2 | %CALCULATECOBBANGLES: calculate the PT, MT, and TL/L Cobb angles from a
3 | %given set of endplate landmarks.
4 |
5 | numLandmarks = length(endplateLandmarks);
6 | numVertebrae = numLandmarks/4;
7 | numEndplates = numLandmarks/2;
8 |
9 | cobbAngles = zeros(3,1);
10 | apicalVertebrae = zeros(3,1);
11 | cobbEndplates = zeros(4,2,3);
12 |
13 | %%%%%%%%%%%%%%%%% averaging the endplates for each vertebra to find average slopes
14 | midVertebraLandmarks = zeros(numEndplates,2);
15 | for k = 1:2:numEndplates
16 | landmarkPos = (k-1)*2+1;
17 | midVertebraLandmarks(k,:) = (endplateLandmarks(landmarkPos,:) + endplateLandmarks(landmarkPos+2,:))/2;
18 | midVertebraLandmarks(k+1,:) = (endplateLandmarks(landmarkPos+1,:) + endplateLandmarks(landmarkPos+3,:))/2;
19 | end
20 |
21 | %%%%%%%%%%%%%%%%% calculate slope of each vertebra
22 | midSlopes = zeros(numVertebrae,1);
23 | for k = 1:numVertebrae
24 | landmarkPos = (k-1)*2+1;
25 | midSlopes(k,1) = (midVertebraLandmarks(landmarkPos+1,2) - midVertebraLandmarks(landmarkPos,2)) / (midVertebraLandmarks(landmarkPos+1,1) - midVertebraLandmarks(landmarkPos,1));
26 | end
27 |
28 | if all(angleLocations == 0) % angle locations can also be set manually by the caller
29 | %%%%%%%%%%%%%%%%% find mt apex using centroids
30 | [~, indexTopBottom] = sort(centroids(:,2));
31 | thoracicCentroids = centroids(indexTopBottom(6:11),1) - mean(centroids(1:11,1));
32 |
33 | [maxima, maximaInd] = findpeaks(thoracicCentroids);
34 | [~, maxPkInd] = max(maxima);
35 |
36 | thoracicCentroidsInv = max(thoracicCentroids) - thoracicCentroids;
37 | [minima, minimaInd] = findpeaks(thoracicCentroidsInv);
38 | [~, minPkInd] = max(minima);
39 |
40 | pks = [maximaInd(maxPkInd) minimaInd(minPkInd)];
41 |
42 | [~, apexInd] = max(abs(thoracicCentroids(pks)));
43 |
44 | if ~isempty(apexInd)
45 | apicalVertebrae(2) = pks(apexInd)+5;
46 | else
47 | apicalVertebrae(2) = 9;
48 | end
49 |
50 | %%%%%%%%%%%%%%%%% find most tilted vertebra for each angle
51 | [~, ind] = max(abs(midSlopes(4:apicalVertebrae(2)-1)));
52 | angleLocations(2) = ind(1)+3;
53 |
54 | [~, ind] = max(abs(midSlopes(1:angleLocations(2))-midSlopes(angleLocations(2))));
55 | angleLocations(1) = ind(1);
56 |
57 | if numVertebrae >= 14
58 | [~, ind] = max(abs(midSlopes((apicalVertebrae(2)+1):14)-midSlopes(angleLocations(2))));
59 | else
60 | [~, ind] = max(abs(midSlopes((apicalVertebrae(2)+1):numVertebrae)-midSlopes(angleLocations(2))));
61 | end
62 | angleLocations(3) = ind(1)+(apicalVertebrae(2));
63 |
64 | [~, ind] = max(abs(midSlopes(angleLocations(3):numVertebrae)-midSlopes(angleLocations(3))));
65 | angleLocations(4) = ind(1)+(angleLocations(3)-1);
66 |
67 | %%%%%%%%%%%%%%%%% find pt and tl/l apical vertebrae using slopes
68 | apex1vector = midSlopes(angleLocations(1)+1:angleLocations(2)-1) - ((midSlopes(angleLocations(1))+midSlopes(angleLocations(2)))/2);
69 | [~, ind] = min(abs(apex1vector));
70 | if ~isempty(ind)
71 | apicalVertebrae(1) = ind(1)+angleLocations(1);
72 | end
73 |
74 | apex2vector = midSlopes(angleLocations(3)+1:angleLocations(4)-1) - ((midSlopes(angleLocations(3))+midSlopes(angleLocations(4)))/2);
75 | [~, ind] = min(abs(apex2vector));
76 | if ~isempty(ind)
77 | apicalVertebrae(3) = ind(1)+angleLocations(3);
78 | end
79 | end
80 |
81 | %%%%%%%%%%%%%%%%% calculate superior and inferior endplate slopes
82 | superiorLandmarks = zeros(numEndplates,2);
83 | inferiorLandmarks = zeros(numEndplates,2);
84 | for k = 1:2:numEndplates
85 | landmarkPos = (k-1)*2+1;
86 | superiorLandmarks(k,:) = endplateLandmarks(landmarkPos,:);
87 | superiorLandmarks(k+1,:) = endplateLandmarks(landmarkPos+1,:);
88 | inferiorLandmarks(k,:) = endplateLandmarks(landmarkPos+2,:);
89 | inferiorLandmarks(k+1,:) = endplateLandmarks(landmarkPos+3,:);
90 | end
91 |
92 | superiorSlopes = zeros(numVertebrae,1);
93 | inferiorSlopes = zeros(numVertebrae,1);
94 | for k = 1:numVertebrae
95 | landmarkPos = (k-1)*2+1;
96 | superiorSlopes(k,1) = (superiorLandmarks(landmarkPos+1,2) - superiorLandmarks(landmarkPos,2)) / (superiorLandmarks(landmarkPos+1,1) - superiorLandmarks(landmarkPos,1));
97 | inferiorSlopes(k,1) = (inferiorLandmarks(landmarkPos+1,2) - inferiorLandmarks(landmarkPos,2)) / (inferiorLandmarks(landmarkPos+1,1) - inferiorLandmarks(landmarkPos,1));
98 | end
99 |
100 | %%%%%%%%%%%%%%%%% calculate Cobb angles
101 | cobbAngles(1) = abs(rad2deg(atan(superiorSlopes(angleLocations(1)))-atan(inferiorSlopes(angleLocations(2)))));
102 | cobbAngles(2) = abs(rad2deg(atan(superiorSlopes(angleLocations(2)))-atan(inferiorSlopes(angleLocations(3)))));
103 | cobbAngles(3) = abs(rad2deg(atan(superiorSlopes(angleLocations(3)))-atan(inferiorSlopes(angleLocations(4)))));
104 |
105 | %%%%%%%%%%%%%%%%% return endplates used in calculation for plotting
106 | for k = 1:3
107 | landmarkPos = (angleLocations(k)-1)*2+1;
108 | cobbEndplates(1:2,:,k) = superiorLandmarks(landmarkPos:landmarkPos+1,:);
109 | landmarkPos = (angleLocations(k+1)-1)*2+1;
110 | cobbEndplates(3:4,:,k) = inferiorLandmarks(landmarkPos:landmarkPos+1,:);
111 | end
112 |
113 | end
114 |
115 |
--------------------------------------------------------------------------------
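
Note: the final step above reduces each Cobb angle to the difference between the arctangents of two endplate slopes. A one-function check of that formula (illustrative):

```python
import math

def cobb_angle(slope_upper, slope_lower):
    """Cobb angle in degrees between two endplate slopes, as in the MATLAB code:
    abs(rad2deg(atan(m1) - atan(m2)))."""
    return abs(math.degrees(math.atan(slope_upper) - math.atan(slope_lower)))

# endplates tilted +15 and -20 degrees give a 35-degree Cobb angle
assert round(cobb_angle(math.tan(math.radians(15)), math.tan(math.radians(-20)))) == 35
```
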
/dataset-preprocessing/createPickleDatasets.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | import matplotlib.pyplot as plt
4 | from scipy.spatial import ConvexHull
5 | import os
6 | import cv2
7 | import tensorflow as tf
8 | import pickle
9 |
10 | IMG_SIZE_X = 128
11 | IMG_SIZE_Y = 256
12 |
13 |
14 | def create_im_datasets(im_dir, fn_dir, IMG_SIZE_X, IMG_SIZE_Y):
15 | # extract filenames and landmark data into arrays
16 | fn_data = genfromtxt(fn_dir, delimiter=',', dtype=str)
17 | im_data = []
18 |
19 | # extract images in order of filenames - same order as landmarks
20 | for filename in fn_data:
21 | idx = filename.index(".jpg")
22 | filenameList = []
23 | filenameList.append(filename[:idx] + "-mirrored" + filename[idx:])
24 | filenameList.append(filename[:idx] + "-rotated+5" + filename[idx:])
25 | filenameList.append(filename[:idx] + "-rotated-5" + filename[idx:])
26 | filenameList.append(filename[:idx] + "-rotated+10" + filename[idx:])
27 | filenameList.append(filename[:idx] + "-rotated-10" + filename[idx:])
28 | filenameList.append(filename[:idx] + "-gamma05" + filename[idx:])
29 | filenameList.append(filename[:idx] + "-gamma15" + filename[idx:])
30 | filenameList.append(filename[:idx] + "-rotated+5-mirrored" + filename[idx:])
31 | filenameList.append(filename[:idx] + "-rotated-5-mirrored" + filename[idx:])
32 | filenameList.append(filename[:idx] + "-rotated+10-mirrored" + filename[idx:])
33 | filenameList.append(filename[:idx] + "-rotated-10-mirrored" + filename[idx:])
34 | filenameList.append(filename[:idx] + "-gamma05-mirrored" + filename[idx:])
35 | filenameList.append(filename[:idx] + "-gamma15-mirrored" + filename[idx:])
36 | for name in filenameList:
37 | for img in os.listdir(im_dir):
38 | if img == name:
39 | im_array = cv2.imread(os.path.join(im_dir, img), cv2.IMREAD_GRAYSCALE)
40 | new_array = cv2.resize(im_array, (IMG_SIZE_X, IMG_SIZE_Y))
41 | im_data.append([new_array])
42 |
43 | # save images in array and normalise
44 | im_data = np.array(im_data).reshape(-1, IMG_SIZE_Y, IMG_SIZE_X, 1)
45 | im_data = im_data / 255.0
46 |
47 |
48 | return im_data
49 |
50 |
51 | # function to generate datasets with segmentation maps of entire spinal column
52 | def create_roi_datasets(roi_dir, fn_dir, IMG_SIZE_X, IMG_SIZE_Y):
53 | # extract filenames and landmark data into arrays
54 | fn_data = genfromtxt(fn_dir, delimiter=',', dtype=str)
55 | roi_data = []
56 |
57 | # extract ROIs in order of filenames - same order as landmarks
58 | for filename in fn_data:
59 | idx = filename.index(".jpg")
60 | filenameList = []
61 | filenameList.append(filename[:idx] + "-mirrored" + filename[idx:])
62 | filenameList.append(filename[:idx] + "-rotated+5" + filename[idx:])
63 | filenameList.append(filename[:idx] + "-rotated-5" + filename[idx:])
64 | filenameList.append(filename[:idx] + "-rotated+10" + filename[idx:])
65 | filenameList.append(filename[:idx] + "-rotated-10" + filename[idx:])
66 | filenameList.append(filename[:idx] + "-gamma05" + filename[idx:])
67 | filenameList.append(filename[:idx] + "-gamma15" + filename[idx:])
68 | filenameList.append(filename[:idx] + "-rotated+5-mirrored" + filename[idx:])
69 | filenameList.append(filename[:idx] + "-rotated-5-mirrored" + filename[idx:])
70 | filenameList.append(filename[:idx] + "-rotated+10-mirrored" + filename[idx:])
71 | filenameList.append(filename[:idx] + "-rotated-10-mirrored" + filename[idx:])
72 | filenameList.append(filename[:idx] + "-gamma05-mirrored" + filename[idx:])
73 | filenameList.append(filename[:idx] + "-gamma15-mirrored" + filename[idx:])
74 | for name in filenameList:
75 | for roi in os.listdir(roi_dir):
76 | if roi == name:
77 | roi_array = cv2.imread(os.path.join(roi_dir, roi), cv2.IMREAD_GRAYSCALE)
78 | new_array = cv2.resize(roi_array, (IMG_SIZE_X, IMG_SIZE_Y))
79 | roi_data.append([new_array])
80 |
81 |
82 |     # save masks in array and binarize (pixels are uint8 in 0-255, so threshold at the midpoint)
83 |     roi_data = np.array(roi_data).reshape(-1, IMG_SIZE_Y, IMG_SIZE_X, 1)
84 |     roi_data[roi_data < 128] = 0
85 |     roi_data[roi_data >= 128] = 1
86 |
87 | roi_data = roi_data.astype(float)
88 | roi_data = np.squeeze(roi_data)
89 |
90 | return roi_data
91 |
92 |
93 | train_im_dir = "../data/DataAugmentation/images"
94 | train_fn_dir = "../data/boostnet_labeldata/labels/training/filenames.csv"
95 |
96 | # Save image datasets for the u-net that segments each individual vertebra
97 | train_im_data = create_im_datasets(train_im_dir, train_fn_dir, IMG_SIZE_X, IMG_SIZE_Y)
98 |
99 | pickle_out = open("G:/My Drive/GitHub/automatic-scoliosis-assessment/train_images.pickle", "wb")
100 | pickle.dump(train_im_data, pickle_out)
101 | pickle_out.close()
102 |
103 |
104 | train_roi_dir = "../data/DataAugmentation/masks"
105 | train_fn_dir = "../data/boostnet_labeldata/labels/training/filenames.csv"
106 |
107 | # Save the corresponding mask datasets for the u-net
108 | train_roi_data_vertebrae = create_roi_datasets(train_roi_dir, train_fn_dir, IMG_SIZE_X, IMG_SIZE_Y)
109 |
110 | pickle_out = open("G:/My Drive/GitHub/automatic-scoliosis-assessment/train_masks.pickle", "wb")
111 | pickle.dump(train_roi_data_vertebrae, pickle_out)
112 | pickle_out.close()
113 |
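114 | # Note: filenameList above holds only the 13 augmented variants written by
115 | # dataAugmentation.py (mirror, rotations of +/-5 and +/-10 degrees, gamma 0.5/1.5,
116 | # plus mirrored copies of the rotated/gamma images); the unaugmented originals
117 | # are not pickled. A minimal sketch of reading a dataset back:
118 | #   with open("G:/My Drive/GitHub/automatic-scoliosis-assessment/train_images.pickle", "rb") as f:
119 | #       train_images = pickle.load(f)  # shape (13*N, 256, 128, 1), scaled to [0, 1]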
--------------------------------------------------------------------------------
/clinical-assessment-algorithms/minboundrect.m:
--------------------------------------------------------------------------------
1 | function [rectx,recty,area,perimeter] = minboundrect(x,y,metric)
2 | % minboundrect: Compute the minimal bounding rectangle of points in the plane
3 | % usage: [rectx,recty,area,perimeter] = minboundrect(x,y,metric)
4 | %
5 | % arguments: (input)
6 | % x,y - vectors of points, describing points in the plane as
7 | % (x,y) pairs. x and y must be the same lengths.
8 | %
9 | % metric - (OPTIONAL) - single letter character flag which
10 | % denotes the use of minimal area or perimeter as the
11 | % metric to be minimized. metric may be either 'a' or 'p',
12 | % capitalization is ignored. Any other contraction of 'area'
13 | % or 'perimeter' is also accepted.
14 | %
15 | % DEFAULT: 'a' ('area')
16 | %
17 | % arguments: (output)
18 | % rectx,recty - 5x1 vectors of points that define the minimal
19 | % bounding rectangle.
20 | %
21 | % area - (scalar) area of the minimal rect itself.
22 | %
23 | % perimeter - (scalar) perimeter of the minimal rect as found
24 | %
25 | %
26 | % Note: For those individuals who would prefer the rect with minimum
27 | % perimeter or area, careful testing convinces me that the minimum area
28 | % rect was generally also the minimum perimeter rect on most problems
29 | % (with one class of exceptions). This same testing appeared to verify my
30 | % assumption that the minimum area rect must always contain at least
31 | % one edge of the convex hull. The exception I refer to above is for
32 | % problems when the convex hull is composed of only a few points,
33 | % most likely exactly 3. Here one may see differences between the
34 | % two metrics. My thanks to Roger Stafford for pointing out this
35 | % class of counter-examples.
36 | %
37 | % Thanks are also due to Roger for pointing out a proof that the
38 | % bounding rect must always contain an edge of the convex hull, in
39 | % both the minimal perimeter and area cases.
40 | %
41 | %
42 | % Example usage:
43 | % x = rand(50000,1);
44 | % y = rand(50000,1);
45 | % tic,[rx,ry,area] = minboundrect(x,y);toc
46 | %
47 | % Elapsed time is 0.105754 seconds.
48 | %
49 | % [rx,ry]
50 | % ans =
51 | % 0.99994 -4.2515e-06
52 | % 0.99998 0.99999
53 | % 2.6441e-05 1
54 | % -5.1673e-06 2.7356e-05
55 | % 0.99994 -4.2515e-06
56 | %
57 | % area
58 | % area =
59 | % 0.99994
60 | %
61 | %
62 | % See also: minboundcircle, minboundtri, minboundsphere
63 | %
64 | %
65 | % Author: John D'Errico
66 | % E-mail: woodchips@rochester.rr.com
67 | % Release: 3.0
68 | % Release date: 3/7/07
69 |
70 | % default for metric
71 | if (nargin<3) || isempty(metric)
72 | metric = 'a';
73 | elseif ~ischar(metric)
74 | error 'metric must be a character flag if it is supplied.'
75 | else
76 | % check for 'a' or 'p'
77 | metric = lower(metric(:)');
78 | ind = strmatch(metric,{'area','perimeter'});
79 | if isempty(ind)
80 | error 'metric does not match either ''area'' or ''perimeter'''
81 | end
82 |
83 | % just keep the first letter.
84 | metric = metric(1);
85 | end
86 |
87 | % preprocess data
88 | x=x(:);
89 | y=y(:);
90 |
91 | % not many error checks to worry about
92 | n = length(x);
93 | if n~=length(y)
94 | error 'x and y must be the same sizes'
95 | end
96 |
97 | % start out with the convex hull of the points to
98 | % reduce the problem dramatically. Note that any
99 | % points in the interior of the convex hull are
100 | % never needed, so we drop them.
101 | if n>3
102 | edges = convhull(x,y);
103 |
104 | % exclude those points inside the hull as not relevant
105 | % also sorts the points into their convex hull as a
106 | % closed polygon
107 |
108 | x = x(edges);
109 | y = y(edges);
110 |
111 | % probably fewer points now, unless the points are fully convex
112 | nedges = length(x) - 1;
113 | elseif n>1
114 | % n must be 2 or 3
115 | nedges = n;
116 | x(end+1) = x(1);
117 | y(end+1) = y(1);
118 | else
119 | % n must be 0 or 1
120 | nedges = n;
121 | end
122 |
123 | % now we must find the bounding rectangle of those
124 | % that remain.
125 |
126 | % special case small numbers of points. If we trip any
127 | % of these cases, then we are done, so return.
128 | switch nedges
129 | case 0
130 | % empty begets empty
131 | rectx = [];
132 | recty = [];
133 | area = [];
134 | perimeter = [];
135 | return
136 | case 1
137 | % with one point, the rect is simple.
138 | rectx = repmat(x,1,5);
139 | recty = repmat(y,1,5);
140 | area = 0;
141 | perimeter = 0;
142 | return
143 | case 2
144 | % only two points. also simple.
145 | rectx = x([1 2 2 1 1]);
146 | recty = y([1 2 2 1 1]);
147 | area = 0;
148 | perimeter = 2*sqrt(diff(x).^2 + diff(y).^2);
149 | return
150 | end
151 | % 3 or more points.
152 |
153 | % will need a 2x2 rotation matrix through an angle theta
154 | Rmat = @(theta) [cos(theta) sin(theta);-sin(theta) cos(theta)];
155 |
156 | % get the angle of each edge of the hull polygon.
157 | ind = 1:(length(x)-1);
158 | edgeangles = atan2(y(ind+1) - y(ind),x(ind+1) - x(ind));
159 | % move the angle into the first quadrant.
160 | edgeangles = unique(mod(edgeangles,pi/2));
161 |
162 | % now just check each edge of the hull
163 | nang = length(edgeangles);
164 | area = inf;
165 | perimeter = inf;
166 | met = inf;
167 | xy = [x,y];
168 | for i = 1:nang
169 | % rotate the data through -theta
170 | rot = Rmat(-edgeangles(i));
171 | xyr = xy*rot;
172 | xymin = min(xyr,[],1);
173 | xymax = max(xyr,[],1);
174 |
175 | % The area is simple, as is the perimeter
176 | A_i = prod(xymax - xymin);
177 | P_i = 2*sum(xymax-xymin);
178 |
179 | if metric=='a'
180 | M_i = A_i;
181 | else
182 | M_i = P_i;
183 | end
184 |
185 | % new metric value for the current interval. Is it better?
186 | if M_i < met
187 | % keep this one
188 | met = M_i;
189 | area = A_i;
190 | perimeter = P_i;
191 | 
192 | % undo the rotation to recover the rect corners in the original frame
193 | rect = [xymin;[xymax(1),xymin(2)];xymax;[xymin(1),xymax(2)];xymin];
194 | rect = rect*rot';
195 | end
196 | end
197 | 
198 | % get the final rect
199 | rectx = rect(:,1);
200 | recty = rect(:,2);
201 | 
202 | end % mainline end
203 | 
--------------------------------------------------------------------------------
/dataset-preprocessing/hiResROI.m:
--------------------------------------------------------------------------------
49 | % ... small -> default, big -> no correction ... imSize(1)*imSize(2)/75000 for some correction
50 |
51 | % initialise empty mask
52 | BW = false(imSize(1),imSize(2));
53 |
54 | % get landmarks for this image and scale to pixel value
55 | labels = zeros(68,2);
56 | labels(:,:) = landmarks(m,:,:);
57 | labels(:,1) = labels(:,1)*imSize(2);
58 | labels(:,2) = labels(:,2)*imSize(1);
59 |
60 | %{
61 | % add C7 and S1 - these are not marked in the spineweb dataset, so we
62 | % can estimate values using the closest landmarks
63 | T1Length = mean([(labels(3, 2)-labels(1, 2)) (labels(4, 2)-labels(2, 2))]);
64 | T1Spacing = mean([(labels(5, 2)-labels(3, 2)) (labels(6, 2)-labels(4, 2))]);
65 | L5Length = mean([(labels(67, 2)-labels(65, 2)) (labels(68, 2)-labels(66, 2))]);
66 | L5Spacing = mean([(labels(65, 2)-labels(63, 2)) (labels(66, 2)-labels(64, 2))]);
67 | labels = cat(1, labels(1:4, :), labels);
68 | labels = cat(1, labels, labels(69:72, :));
69 | labels(1:4, 2) = max(labels(1:4, 2) - round(T1Length + T1Spacing), 1);
70 | labels(73:74, 2) = min(labels(73:74, 2) + round(L5Length + L5Spacing), imSize(1));
71 | labels(75:76, 2) = min(labels(75:76, 2) + round(L5Length*0.7 + L5Spacing), imSize(1));
72 | %}
73 |
74 | numLabels = size(labels);
75 | numLabels = numLabels(1);
76 |
77 | labels = round(labels);
78 |
79 | % loop through vertebrae in each image
80 | for k = 1:4:numLabels
81 |
82 | vertebra = labels(k:k+3, 1:2);
83 |
84 | % scaling search zone relative to vertebra size
85 | halfCurrentLength = round(mean([(labels(k+2, 2)-labels(k, 2)) (labels(k+3, 2)-labels(k, 2))])*length_multiplier);
86 | if k~=1
87 | CurrentSpacingTop = round(mean([(labels(k, 2)-labels(k-2, 2)) (labels(k+1, 2)-labels(k-1, 2))])*spacing_multiplier);
88 | else
89 | CurrentSpacingTop = round(mean([(labels(k+4, 2)-labels(k+2, 2)) (labels(k+5, 2)-labels(k+3, 2))])*spacing_multiplier);
90 | end
91 | if k~=(numLabels-3)
92 | CurrentSpacingBottom = round(mean([(labels(k+4, 2)-labels(k+2, 2)) (labels(k+5, 2)-labels(k+3, 2))])*spacing_multiplier);
93 | else
94 | CurrentSpacingBottom = round(mean([(labels(k, 2)-labels(k-2, 2)) (labels(k+1, 2)-labels(k-1, 2))])*spacing_multiplier);
95 | end
96 | smallCurrentWidth = round(mean([(labels(k+1, 1)-labels(k, 1)) (labels(k+3, 1)-labels(k+2, 1))])*width_multiplier_small);
97 | bigCurrentWidth = round(mean([(labels(k+1, 1)-labels(k, 1)) (labels(k+3, 1)-labels(k+2, 1))])*width_multiplier_big);
98 |
99 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%% top of vertebra
100 | line_length = vertebra(2, 1)-vertebra(1, 1)+1;
101 |
102 | % default line is calculated as linear connection between the
103 | % relevant landmark coordinates
104 | if vertebra(1,2) == vertebra(2,2)
105 | default = zeros(1, line_length) + vertebra(1,2);
106 | else
107 | default = round(vertebra(1,2):(vertebra(2,2)-vertebra(1,2))/(line_length-1):vertebra(2,2));
108 | end
109 |
110 | contour = scanVertForContour(image, default, vertebra(1, 1), vertebra(2, 1), CurrentSpacingTop, halfCurrentLength, true);
111 | vertebra_points = contour;
112 |
113 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%% bottom of vertebra
114 | line_length = vertebra(4, 1)-vertebra(3, 1)+1;
115 |
116 | if vertebra(3,2) == vertebra(4,2)
117 | default = zeros(1, line_length) + vertebra(3,2);
118 | else
119 | default = round(vertebra(3,2):(vertebra(4,2)-vertebra(3,2))/(line_length-1):vertebra(4,2));
120 | end
121 |
122 | contour = scanVertForContour(image, default, vertebra(3, 1), vertebra(4, 1), halfCurrentLength, CurrentSpacingBottom, false);
123 | vertebra_points = cat(1, vertebra_points, contour);
124 |
125 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%% left of vertebra
126 | line_length = vertebra(3, 2)-vertebra(1, 2)+1;
127 |
128 | if vertebra(1,1) == vertebra(3,1)
129 | default = zeros(1, line_length) + vertebra(1,1);
130 | else
131 | default = round(vertebra(1,1):(vertebra(3,1)-vertebra(1,1))/(line_length-1):vertebra(3,1));
132 | end
133 |
134 | contour = scanHorForContour(image, default, vertebra(1, 2), vertebra(3, 2), bigCurrentWidth, smallCurrentWidth, true);
135 | vertebra_points = cat(1, vertebra_points, contour);
136 |
137 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%% right of vertebra
138 | line_length = vertebra(4, 2)-vertebra(2, 2)+1;
139 |
140 | if vertebra(2,1) == vertebra(4,1)
141 | default = zeros(1, line_length) + vertebra(2,1);
142 | else
143 | default = round(vertebra(2,1):(vertebra(4,1)-vertebra(2,1))/(line_length-1):vertebra(4,1));
144 | end
145 |
146 | contour = scanHorForContour(image, default, vertebra(2, 2), vertebra(4, 2), smallCurrentWidth, bigCurrentWidth, false);
147 | vertebra_points = cat(1, vertebra_points, contour);
148 |
149 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%% construct mask using the contours
150 | vertebra_roi = boundary(vertebra_points, 0.85);
151 | BW_vertebra = poly2mask(vertebra_points(vertebra_roi,1), vertebra_points(vertebra_roi,2),imSize(1),imSize(2));
152 |
153 | % add vertebra roi to mask of entire spine
154 | BW(BW_vertebra ~= 0) = true;
155 |
156 | end
157 |
158 | imwrite(BW, roidir+filenames(m))
159 |
160 | % plot to test outputs
161 | %subplot(2,5,plotcount)
162 | %imshow(image)
163 | %hold on
164 | %visboundaries(BW, 'Color','g', 'LineWidth',1, 'EnhanceVisibility',false)
165 | %hold off
166 | %plotcount = plotcount + 1;
167 |
168 | end
169 | end
170 |
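171 | % Summary of the loop above: for each vertebra, a straight default line is drawn
172 | % between its pair of landmarks on each side (top, bottom, left, right);
173 | % scanVertForContour / scanHorForContour refine each default line within a
174 | % search zone scaled to the local vertebra length/spacing/width, and
175 | % boundary() + poly2mask() turn the refined points into a binary ROI that is
176 | % OR-ed into the whole-spine mask BW.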
--------------------------------------------------------------------------------
/dataset-preprocessing/dataAugmentation.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | import matplotlib.pyplot as plt
4 | from scipy.spatial import ConvexHull
5 | import os
6 | import cv2
7 | import tensorflow as tf
8 | import pickle
9 | from PIL import Image
10 | import argparse
11 |
12 |
13 | def adjust_gamma(image, gamma=1.0):
14 | # build a lookup table mapping the pixel values [0, 255] to
15 | # their adjusted gamma values
16 | invGamma = 1.0 / gamma
17 | table = np.array([((i / 255.0) ** invGamma) * 255
18 | for i in np.arange(0, 256)]).astype("uint8")
19 | # apply gamma correction using the lookup table
20 | return cv2.LUT(image, table)
21 |
22 | def augmentimages(train_im_dir, train_fn_dir, destination):
23 |     # extract the ordered filenames into an array
24 | fn_data = genfromtxt(train_fn_dir, delimiter=',', dtype=str)
25 | im_data = []
26 |
27 | # extract images in order of filenames - same order as landmarks
28 | for filename in fn_data:
29 | for img in os.listdir(train_im_dir):
30 | if img == filename:
31 | image = cv2.imread(os.path.join(train_im_dir, img), cv2.IMREAD_GRAYSCALE)
32 | original_dir = os.path.join(destination, img)
33 | cv2.imwrite(original_dir, image)
34 |
35 | idx = original_dir.index(".jpg")
36 |
37 | image_flipped = cv2.flip(image, 1)
38 | new_dir = original_dir[:idx] + "-mirrored" + original_dir[idx:]
39 | cv2.imwrite(new_dir, image_flipped)
40 |
41 |
42 |
43 | (h, w) = image.shape[:2]
44 | center = (w / 2, h / 2)
45 | M = cv2.getRotationMatrix2D(center, 5, 1)
46 | image_rotated = cv2.warpAffine(image, M, (w, h))
47 | new_dir = original_dir[:idx] + "-rotated+5" + original_dir[idx:]
48 | cv2.imwrite(new_dir, image_rotated)
49 |
50 | image_flipped = cv2.flip(image_rotated, 1)
51 | new_dir = original_dir[:idx] + "-rotated+5-mirrored" + original_dir[idx:]
52 | cv2.imwrite(new_dir, image_flipped)
53 |
54 |
55 |
56 | (h, w) = image.shape[:2]
57 | center = (w / 2, h / 2)
58 | M = cv2.getRotationMatrix2D(center, 10, 1)
59 | image_rotated = cv2.warpAffine(image, M, (w, h))
60 | new_dir = original_dir[:idx] + "-rotated+10" + original_dir[idx:]
61 | cv2.imwrite(new_dir, image_rotated)
62 |
63 | image_flipped = cv2.flip(image_rotated, 1)
64 | new_dir = original_dir[:idx] + "-rotated+10-mirrored" + original_dir[idx:]
65 | cv2.imwrite(new_dir, image_flipped)
66 |
67 |
68 |
69 | (h, w) = image.shape[:2]
70 | center = (w / 2, h / 2)
71 | M = cv2.getRotationMatrix2D(center, -5, 1)
72 | image_rotated = cv2.warpAffine(image, M, (w, h))
73 | new_dir = original_dir[:idx] + "-rotated-5" + original_dir[idx:]
74 | cv2.imwrite(new_dir, image_rotated)
75 |
76 | image_flipped = cv2.flip(image_rotated, 1)
77 | new_dir = original_dir[:idx] + "-rotated-5-mirrored" + original_dir[idx:]
78 | cv2.imwrite(new_dir, image_flipped)
79 |
80 |
81 |
82 | (h, w) = image.shape[:2]
83 | center = (w / 2, h / 2)
84 | M = cv2.getRotationMatrix2D(center, -10, 1)
85 | image_rotated = cv2.warpAffine(image, M, (w, h))
86 | new_dir = original_dir[:idx] + "-rotated-10" + original_dir[idx:]
87 | cv2.imwrite(new_dir, image_rotated)
88 |
89 | image_flipped = cv2.flip(image_rotated, 1)
90 | new_dir = original_dir[:idx] + "-rotated-10-mirrored" + original_dir[idx:]
91 | cv2.imwrite(new_dir, image_flipped)
92 |
93 |
94 |
95 | image_gamma = adjust_gamma(image, gamma=0.5)
96 | new_dir = original_dir[:idx] + "-gamma05" + original_dir[idx:]
97 | cv2.imwrite(new_dir, image_gamma)
98 |
99 | image_flipped = cv2.flip(image_gamma, 1)
100 | new_dir = original_dir[:idx] + "-gamma05-mirrored" + original_dir[idx:]
101 | cv2.imwrite(new_dir, image_flipped)
102 |
103 |
104 |
105 | image_gamma = adjust_gamma(image, gamma=1.5)
106 | new_dir = original_dir[:idx] + "-gamma15" + original_dir[idx:]
107 | cv2.imwrite(new_dir, image_gamma)
108 |
109 | image_flipped = cv2.flip(image_gamma, 1)
110 | new_dir = original_dir[:idx] + "-gamma15-mirrored" + original_dir[idx:]
111 | cv2.imwrite(new_dir, image_flipped)
112 |
113 |
114 | def augmentmasks(train_im_dir, train_fn_dir, destination):
115 |     # extract the ordered filenames into an array
116 | fn_data = genfromtxt(train_fn_dir, delimiter=',', dtype=str)
117 | im_data = []
118 |
119 | # extract images in order of filenames - same order as landmarks
120 | for filename in fn_data:
121 | for img in os.listdir(train_im_dir):
122 | if img == filename:
123 | image = cv2.imread(os.path.join(train_im_dir, img), cv2.IMREAD_GRAYSCALE)
124 | original_dir = os.path.join(destination, img)
125 | cv2.imwrite(original_dir, image)
126 |
127 | idx = original_dir.index(".jpg")
128 |
129 | image_flipped = cv2.flip(image, 1)
130 | new_dir = original_dir[:idx] + "-mirrored" + original_dir[idx:]
131 | cv2.imwrite(new_dir, image_flipped)
132 |
133 |
134 |
135 | (h, w) = image.shape[:2]
136 | center = (w / 2, h / 2)
137 | M = cv2.getRotationMatrix2D(center, 5, 1)
138 | image_rotated = cv2.warpAffine(image, M, (w, h))
139 | new_dir = original_dir[:idx] + "-rotated+5" + original_dir[idx:]
140 | cv2.imwrite(new_dir, image_rotated)
141 |
142 | image_flipped = cv2.flip(image_rotated, 1)
143 | new_dir = original_dir[:idx] + "-rotated+5-mirrored" + original_dir[idx:]
144 | cv2.imwrite(new_dir, image_flipped)
145 |
146 |
147 |
148 | (h, w) = image.shape[:2]
149 | center = (w / 2, h / 2)
150 | M = cv2.getRotationMatrix2D(center, 10, 1)
151 | image_rotated = cv2.warpAffine(image, M, (w, h))
152 | new_dir = original_dir[:idx] + "-rotated+10" + original_dir[idx:]
153 | cv2.imwrite(new_dir, image_rotated)
154 |
155 | image_flipped = cv2.flip(image_rotated, 1)
156 | new_dir = original_dir[:idx] + "-rotated+10-mirrored" + original_dir[idx:]
157 | cv2.imwrite(new_dir, image_flipped)
158 |
159 |
160 |
161 | (h, w) = image.shape[:2]
162 | center = (w / 2, h / 2)
163 | M = cv2.getRotationMatrix2D(center, -5, 1)
164 | image_rotated = cv2.warpAffine(image, M, (w, h))
165 | new_dir = original_dir[:idx] + "-rotated-5" + original_dir[idx:]
166 | cv2.imwrite(new_dir, image_rotated)
167 |
168 | image_flipped = cv2.flip(image_rotated, 1)
169 | new_dir = original_dir[:idx] + "-rotated-5-mirrored" + original_dir[idx:]
170 | cv2.imwrite(new_dir, image_flipped)
171 |
172 |
173 |
174 | (h, w) = image.shape[:2]
175 | center = (w / 2, h / 2)
176 | M = cv2.getRotationMatrix2D(center, -10, 1)
177 | image_rotated = cv2.warpAffine(image, M, (w, h))
178 | new_dir = original_dir[:idx] + "-rotated-10" + original_dir[idx:]
179 | cv2.imwrite(new_dir, image_rotated)
180 |
181 | image_flipped = cv2.flip(image_rotated, 1)
182 | new_dir = original_dir[:idx] + "-rotated-10-mirrored" + original_dir[idx:]
183 | cv2.imwrite(new_dir, image_flipped)
184 |
185 |
186 |
187 | new_dir = original_dir[:idx] + "-gamma05" + original_dir[idx:]
188 | cv2.imwrite(new_dir, image)
189 |
190 | image_flipped = cv2.flip(image, 1)
191 | new_dir = original_dir[:idx] + "-gamma05-mirrored" + original_dir[idx:]
192 | cv2.imwrite(new_dir, image_flipped)
193 |
194 |
195 |
196 | new_dir = original_dir[:idx] + "-gamma15" + original_dir[idx:]
197 | cv2.imwrite(new_dir, image)
198 |
199 | image_flipped = cv2.flip(image, 1)
200 | new_dir = original_dir[:idx] + "-gamma15-mirrored" + original_dir[idx:]
201 | cv2.imwrite(new_dir, image_flipped)
202 |
203 |
204 | fn_dir = "../data/boostnet_labeldata/labels/training/filenames.csv"
205 | train_im_dir = "../data/boostnet_labeldata/data/training"
206 | destination = "../data/DataAugmentation/images"
207 | augmentimages(train_im_dir, fn_dir, destination)
208 |
209 | im_mask_dir = "../data/HiResVertebraeMasks"
210 | destination = "../data/DataAugmentation/masks"
211 | augmentmasks(im_mask_dir, fn_dir, destination)
212 |
213 |
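214 | # Worked example of adjust_gamma's lookup table (uint8 in, uint8 out):
215 | #   gamma=0.5 -> invGamma=2.0:  pixel 128 -> int(255*(128/255)**2.0)  = 64  (darker)
216 | #   gamma=1.5 -> invGamma~0.67: pixel 128 -> int(255*(128/255)**0.67) ~ 161 (brighter)
217 | # Note that augmentmasks writes the "-gamma05"/"-gamma15" masks as unmodified
218 | # copies: a gamma change alters x-ray intensities but not the segmentation
219 | # boundary, so the mask is reused as-is.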
--------------------------------------------------------------------------------
/performance-evaluation/boostnet_data_test.m:
--------------------------------------------------------------------------------
1 | % script to test all functionality of the designed system using the boostnet data
2 | 
3 | clear
4 | close all
5 | 
6 | % toggle this for plotting a random sample vs saving results for all test data
7 | plotting = false;
8 | 
9 | addpath('..\clinical-assessment-algorithms');
10 |
11 | % directory of x-rays
12 | imdir = "..\data\boostnet_labeldata\data\test";
13 |
14 | % directory of spineMasks
15 | maskdir = '..\data\PredictionsVsGroundTruth\SpineMasks\';
16 | files = dir(fullfile(maskdir, '*.jpg'));
17 |
18 | % read spreadsheet of filenames
19 | filenames = readmatrix('..\data\boostnet_labeldata\labels\test\filenames.csv', 'ExpectedNumVariables', 1, 'OutputType', 'string', 'Delimiter',',');
20 |
21 | processedMaskDestinationDir = '..\data\PredictionsVsGroundTruth\SpineMasks_Processed\';
22 | endplatesDestinationDir = '..\data\PredictionsVsGroundTruth\Endplates\';
23 |
24 | gtLandmarks = load('..\data\FixedSpineWebData\fixedTestingLandmarks.mat');
25 | gtLandmarks = gtLandmarks.landmarks;
26 |
27 | numImages = size(filenames);
28 | numImages = numImages(1);
29 |
30 | allEndplateSlopes = zeros(numImages,34);
31 | gtAllEndplateSlopes = zeros(numImages,34);
32 |
33 | allAngles = zeros(numImages,3);
34 | gtAllAngles = zeros(numImages,3);
35 |
36 | allLenkeCurveTypes = zeros(numImages,1);
37 | gtAllLenkeCurveTypes = zeros(numImages,1);
38 |
39 | allLenkeCurveTypeProbabilities = zeros(numImages,6);
40 | gtAllLenkeCurveTypeProbabilities = zeros(numImages,6);
41 |
42 | indicesToProcess = 1:numImages;
43 |
44 | if plotting
45 | plotcount = 1;
46 | randomSample = randi(numImages, 1, 5);
47 | indicesToProcess = randomSample;
48 | figure(1)
49 | sgtitle('Random Sample of Images & Corresponding Vertebral Segmentations')
50 | figure(2)
51 | sgtitle('Random Sample of Vertebral Segmentations & Corresponding Fitted Endplates')
52 | figure(3)
53 | sgtitle('Random Sample of Fitted Endplates & Corresponding Cobb Angles')
54 | figure(4)
55 | sgtitle('Random Sample of Cobb Angles & Corresponding Lenke Curve Type Probabilities')
56 | figure(5)
57 | sgtitle('Random Sample of End-to-End Performance')
58 | end
59 |
60 | for n = indicesToProcess
61 |
62 | % read through files in correct order by checking filename
63 | for filecounter = 1:length(files)
64 | if files(filecounter).name == filenames(n)
65 | xray = imread(fullfile(imdir, files(filecounter).name));
66 | spineMask = imread(fullfile(maskdir, files(filecounter).name));
67 | end
68 | end
69 |
70 | %%%%%%%%%%%%%%%%%%%%%%%%% process spine mask
71 | spineMask = imresize(spineMask, size(xray));
72 | % Plotting
73 | if plotting
74 | figure(5)
75 | subplot(5,5,plotcount)
76 | imshow(xray)
77 | end
78 |
79 | spineMask = processSpineMask(spineMask);
80 |
81 | % Saving
82 | if ~plotting
83 | imwrite(spineMask, processedMaskDestinationDir+filenames(n));
84 | end
85 |
86 | % Plotting
87 | if plotting
88 | figure(5)
89 | subplot(5,5,5+plotcount)
90 | hold on
91 | imshow(xray)
92 | visboundaries(spineMask, 'Color','b', 'LineWidth',0.5, 'EnhanceVisibility',false)
93 | hold off
94 |
95 | figure(1)
96 | subplot(2,5,plotcount)
97 | imshow(xray)
98 | subplot(2,5,plotcount+5)
99 | hold on
100 | imshow(xray)
101 | visboundaries(spineMask, 'Color','b', 'LineWidth',0.5, 'EnhanceVisibility',false)
102 | hold off
103 |
104 | end
105 |
106 |
107 | %%%%%%%%%%%%%%%%%%%%%%%%% fit endplates
108 | [endplateLandmarks, centroidsTop2Bottom] = fitEndplates(spineMask);
109 |
110 | gtLandmarksScaled = squeeze(gtLandmarks(n,:,:)).*fliplr(size(xray));
111 | % Saving
112 | if ~plotting
113 | endplateSlopes = zeros(1,(length(endplateLandmarks)/2));
114 | for k = 1:(length(endplateLandmarks)/2)
115 | landmarkPos = (k-1)*2+1;
116 | endplateSlopes(1,k) = (endplateLandmarks(landmarkPos+1,2) - endplateLandmarks(landmarkPos,2)) / (endplateLandmarks(landmarkPos+1,1) - endplateLandmarks(landmarkPos,1));
117 | end
118 |
119 | gtEndplateSlopes = zeros(1,(length(gtLandmarksScaled)/2));
120 | for k = 1:(length(gtLandmarksScaled)/2)
121 | landmarkPos = (k-1)*2+1;
122 | gtEndplateSlopes(1,k) = (gtLandmarksScaled(landmarkPos+1,2) - gtLandmarksScaled(landmarkPos,2)) / (gtLandmarksScaled(landmarkPos+1,1) - gtLandmarksScaled(landmarkPos,1));
123 | end
124 |
125 | alignedEndplateSlopes = alignSlopeVectors(gtEndplateSlopes, endplateSlopes);
126 |
127 | allEndplateSlopes(n,:) = rad2deg(atan(alignedEndplateSlopes));
128 | gtAllEndplateSlopes(n,:) = rad2deg(atan(gtEndplateSlopes));
129 |
130 | %save(endplatesDestinationDir+filenames(n)+'.mat', 'endplateLandmarks', 'centroidsTop2Bottom');
131 | end
132 |
133 | % Plotting
134 | if plotting
135 | figure(5)
136 | subplot(5,5,10+plotcount)
137 | imshow(xray)
138 | hold on
139 | for k = 1:2:length(endplateLandmarks)-1
140 | plot(endplateLandmarks([k k+1],1), endplateLandmarks([k k+1],2), 'g');
141 | end
142 | hold off
143 |
144 | figure(2)
145 | subplot(2,5,plotcount)
146 | hold on
147 | imshow(xray)
148 | visboundaries(spineMask, 'Color','b', 'LineWidth',0.5, 'EnhanceVisibility',false)
149 | hold off
150 | subplot(2,5,plotcount+5)
151 | hold on
152 | imshow(xray)
153 | for k = 1:2:length(endplateLandmarks)-1
154 | plot(endplateLandmarks([k k+1],1), endplateLandmarks([k k+1],2), 'g');
155 | end
156 | hold off
157 | end
158 |
159 | %%%%%%%%%%%%%%%%%%%%%%%%% calculate Cobb angles
160 | nullAngleLocations = zeros(4,1); % used to allow for manual setting of angle locations
161 | [cobbAngles, apicalVertebrae, angleLocations, cobbEndplates] = calculateCobbAngles(endplateLandmarks, centroidsTop2Bottom, nullAngleLocations);
162 | allAngles(n,:) = cobbAngles';
163 |
164 | [gtCobbAngles, gtApicalVertebrae, gtAngleLocations, gtCobbEndplates] = calculateCobbAngles(gtLandmarksScaled, centroidsTop2Bottom, nullAngleLocations);
165 | gtAllAngles(n,:) = gtCobbAngles';
166 |
167 |
168 | % Plotting
169 | if plotting
170 | figure(5)
171 | subplot(5,5,15+plotcount)
172 | imshow(xray)
173 | hold on
174 | for k = 1:3
175 | if k == 1
176 | style1 = 'c';
177 | elseif k ==2
178 | style1 = 'r';
179 | else
180 | style1 = 'y';
181 | end
182 | plot(cobbEndplates(1:2,1,k), cobbEndplates(1:2,2,k), style1);
183 | plot(cobbEndplates(3:4,1,k), cobbEndplates(3:4,2,k), style1);
184 | end
185 | hold off
186 |
187 | figure(3)
188 | subplot(2,5,plotcount)
189 | hold on
190 | imshow(xray)
191 | for k = 1:2:length(endplateLandmarks)-1
192 | plot(endplateLandmarks([k k+1],1), endplateLandmarks([k k+1],2), 'g');
193 | end
194 | hold off
195 | subplot(2,5,plotcount+5)
196 | hold on
197 | imshow(xray)
198 | for k = 1:3
199 | if k == 1
200 | style1 = 'c';
201 | %style2 = 'c:';
202 | elseif k ==2
203 | style1 = 'r';
204 | %style2 = 'r:';
205 | else
206 | style1 = 'y';
207 | %style2 = 'y:';
208 | end
209 | plot(cobbEndplates(1:2,1,k), cobbEndplates(1:2,2,k), style1);
210 | plot(cobbEndplates(3:4,1,k), cobbEndplates(3:4,2,k), style1);
211 | %plot(gtCobbEndplates(1:2,1,k), gtCobbEndplates(1:2,2,k), style2);
212 | %plot(gtCobbEndplates(3:4,1,k), gtCobbEndplates(3:4,2,k), style2);
213 | end
214 | hold off
215 | end
216 |
217 | %%%%%%%%%%%%%%%%%%%%%%%%% classify Lenke curve type
218 | [curveType, curveTypeProbabilities] = classifyLenkeCurveType(cobbAngles');
219 | [gtCurveType, gtCurveTypeProbabilities] = classifyLenkeCurveType(gtCobbAngles');
220 |
221 | allLenkeCurveTypes(n,1) = curveType;
222 | gtAllLenkeCurveTypes(n,1) = gtCurveType;
223 |
224 | allLenkeCurveTypeProbabilities(n,:) = curveTypeProbabilities;
225 | gtAllLenkeCurveTypeProbabilities(n,:) = gtCurveTypeProbabilities;
226 |
227 | % Plotting
228 | if plotting
229 | figure(5)
230 | subplot(5,5,20+plotcount)
231 | barh(curveTypeProbabilities)
232 | xlim([0 1])
233 | xlabel('Probability')
234 | ylabel('Curve Type')
235 |
236 | figure(4)
237 | subplot(2,5,plotcount)
238 | imshow(xray)
239 | hold on
240 | for k = 1:3
241 | if k == 1
242 | style1 = 'c';
243 | elseif k ==2
244 | style1 = 'r';
245 | else
246 | style1 = 'y';
247 | end
248 | plot(cobbEndplates(1:2,1,k), cobbEndplates(1:2,2,k), style1);
249 | plot(cobbEndplates(3:4,1,k), cobbEndplates(3:4,2,k), style1);
250 | end
251 | hold off
252 |
253 | subplot(2,5,plotcount+5)
254 | barh(curveTypeProbabilities)
255 | xlim([0 1])
256 | xlabel('Probability')
257 | ylabel('Curve Type')
258 |
259 |
260 | plotcount = plotcount + 1;
261 | end
262 |
263 | end
264 |
265 | % write to csv file
266 | if ~plotting
267 | csvwrite('..\data\PredictionsVsGroundTruth\EndplateSlopes.csv',allEndplateSlopes);
268 | csvwrite('..\data\PredictionsVsGroundTruth\EndplateSlopes_GroundTruthEndplates.csv',gtAllEndplateSlopes);
269 |
270 | csvwrite('..\data\PredictionsVsGroundTruth\Angles.csv',allAngles);
271 | csvwrite('..\data\PredictionsVsGroundTruth\Angles_GroundTruthEndplates.csv',gtAllAngles);
272 |
273 | csvwrite('..\data\PredictionsVsGroundTruth\LenkeCurveTypes.csv',allLenkeCurveTypes);
274 | csvwrite('..\data\PredictionsVsGroundTruth\LenkeCurveTypes_GroundTruthEndplates.csv',gtAllLenkeCurveTypes);
275 |
276 | csvwrite('..\data\PredictionsVsGroundTruth\LenkeCurveTypeProbabilities.csv',allLenkeCurveTypeProbabilities);
277 | csvwrite('..\data\PredictionsVsGroundTruth\LenkeCurveTypeProbabilities_GroundTruthEndplates.csv',gtAllLenkeCurveTypeProbabilities);
278 | end
279 |
280 |
281 |
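282 | % Output note: each of the 34 columns in the EndplateSlopes csv files is one
283 | % endplate slope (17 vertebrae x 2 endplates), converted from a gradient to
284 | % degrees via rad2deg(atan(slope)); the Angles csv files hold the three Cobb
285 | % angles per image, and the Lenke csv files the predicted curve type (1-6)
286 | % and its six class probabilities.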
--------------------------------------------------------------------------------
/clinical-assessment-algorithms/fitEndplates.m:
--------------------------------------------------------------------------------
1 | function [endplateLandmarks, centroidsTop2Bottom] = fitEndplates(spineMask)
2 | %FITENDPLATES: fit endplates to a set of vertebral
3 | %segmentations.
4 |
5 | %imshow(spineMask)
6 | %hold on
7 |
8 | endplateLandmarks = [];
9 |
10 | % get stats with region props
11 | stats = regionprops(spineMask, 'Centroid');
12 | % find centroids
13 | centroids = cat(1,stats.Centroid);
14 |
15 | %%%%%%%%%%%%%%%%% loop through vertebrae from top to bottom
16 | L = bwlabel(spineMask);
17 | [~, indexTopBottom] = sort(centroids(:,2));
18 | centroidsTop2Bottom = centroids(indexTopBottom,:);
19 | indexTopBottom = indexTopBottom';
20 | for n = indexTopBottom
21 | vertebraMask = ismember(L, n);
22 |
23 | %%%%%%%%%%%%%%%%%%%%% find perimeter and fit minimum bounding rectangle
24 | perims = bwboundaries(vertebraMask);
25 | perim = perims{1,1};
26 | %plot(perim(:,2), perim(:,1));
27 | [rectx,recty,~,~] = minboundrect(perim(:,2), perim(:,1));
28 | %plot(rectx, recty)
29 |
30 | rectx = rectx(1:4);
31 | recty = recty(1:4);
32 |
33 | [~, indexRect] = sort(rectx);
34 |
35 | [~, TLind] = min(recty(indexRect(1:2)));
36 | [~, TRind] = min(recty(indexRect(3:4)));
37 |
38 | TLind = indexRect(TLind);
39 | TRind = indexRect(TRind+2);
40 |
41 | %plot(rectx(TLind), recty(TLind), '+');
42 | %plot(rectx(TRind), recty(TRind), 'o');
43 |
44 | %%%%%%%%%%%%%%%%%%%%% rotate by the angle of the fitted rectangle
45 | angle = rad2deg(atan2(recty(TRind)-recty(TLind), rectx(TRind)-rectx(TLind)));
46 | vertebraMask = imrotate(vertebraMask,angle);
47 |
48 | %%%%%%%%%%%%%%%%%%%%% find left and right edge at the centroid height
49 | % get stats with region props
50 | stats = regionprops(vertebraMask, 'Centroid');
51 | % find centroids
52 | centroids = cat(1,stats.Centroid);
53 | yCentroid = centroids(2);
54 | centroidRow = round(yCentroid);
55 | % Extract
56 | oneRow = vertebraMask(centroidRow, :);
57 | % Get left and right columns.
58 | leftColumn = find(oneRow, 1, 'first');
59 | rightColumn = find(oneRow, 1, 'last');
60 |
61 | %%%%%%%%%%%%%%%%%%%%%% extract points on the top and bottom endplates
62 | plateLen = rightColumn-leftColumn;
63 | leftCol = round(leftColumn+(plateLen/15));
64 | rightCol = round(rightColumn-(plateLen/15));
65 |
66 | lineLen = rightCol-leftCol;
67 | lineCols = leftCol:rightCol;
68 | lineTopRow = zeros(1, lineLen);
69 | lineBottomRow = lineTopRow;
70 | for k = lineCols
71 | idx = k-leftCol+1;
72 | % Extract
73 | oneColumn = vertebraMask(:, k);
74 | % Get top and bottom row
75 | lineTopRow(idx) = find(oneColumn, 1, 'first');
76 | lineBottomRow(idx) = find(oneColumn, 1, 'last');
77 | end
78 |
79 | %%%%%%%%%%%%%%%%%%%%% fit lines to endplates
80 | p = polyfit(lineCols, lineTopRow, 1);
81 | topLine = polyval(p, lineCols);
82 |
83 | p = polyfit(lineCols, lineBottomRow, 1);
84 | bottomLine = polyval(p, lineCols);
85 |
86 | corners = [lineCols(1) topLine(1);
87 | lineCols(1) bottomLine(1);
88 | lineCols(length(lineCols)) topLine(length(lineCols));
89 | lineCols(length(lineCols)) bottomLine(length(lineCols))]';
90 |
91 | % Create rotation matrix to map the corners back to the original image frame
92 | alpha = angle; % rotate back by the angle used in imrotate above
93 | RotMatrix = [cosd(alpha) -sind(alpha); sind(alpha) cosd(alpha)];
94 | ImCenterA = flipud((size(vertebraMask)/2)'); % Center of the rotated image
95 | ImCenterB = flipud((size(spineMask)/2)'); % Center of the original image
96 |
97 | corners = round(RotMatrix*(corners-ImCenterA)+ImCenterB);
98 | %plot(corners(1,[1 3]),corners(2,[1 3]))
99 | %plot(corners(1,[2 4]),corners(2,[2 4]))
100 |
101 | orderedCorners = corners;
102 | orderedCorners(:,2) = corners(:,3);
103 | orderedCorners(:,3) = corners(:,2);
104 | endplateLandmarks = cat(1, endplateLandmarks, orderedCorners');
105 |
106 | end
107 |
108 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Detect errors in endplates
109 |
110 | numLandmarks = length(endplateLandmarks);
111 | numVertebrae = numLandmarks/4;
112 | numEndplates = numLandmarks/2;
113 |
114 | %%%%%%%%%%%%%%%%%%%% find large changes in slope between successive vertebrae and remove
115 | % averaging the endplates for each vertebra to find average slopes
116 | midVertebraLandmarks = zeros(numEndplates,2);
117 | for k = 1:2:numEndplates
118 | landmarkPos = (k-1)*2+1;
119 | midVertebraLandmarks(k,:) = (endplateLandmarks(landmarkPos,:) + endplateLandmarks(landmarkPos+2,:))/2;
120 | midVertebraLandmarks(k+1,:) = (endplateLandmarks(landmarkPos+1,:) + endplateLandmarks(landmarkPos+3,:))/2;
121 | end
122 | % calculate slope of each vertebra
123 | midSlopes = zeros(numVertebrae,1);
124 | for k = 1:numVertebrae
125 | landmarkPos = (k-1)*2+1;
126 | midSlopes(k,1) = (midVertebraLandmarks(landmarkPos+1,2) - midVertebraLandmarks(landmarkPos,2)) / (midVertebraLandmarks(landmarkPos+1,1) - midVertebraLandmarks(landmarkPos,1));
127 | end
128 | % traverse vertebra slopes to find any unusual changes
129 | for k = 1:numVertebrae-2
130 | % if angle between consecutive vertebrae is greater than 45 degrees
131 | % assume error and replace with weighted avg of neighbours
132 | if abs(rad2deg(atan(midSlopes(k+1))-atan(midSlopes(k)))) > 45
133 | landmarkPos = (k)*4+1; % k instead of k-1 here because it is vertebra k+1 that has the unusual slope
134 | endplateLandmarks(landmarkPos,:) = (3*endplateLandmarks(landmarkPos-2,:) + endplateLandmarks(landmarkPos+4,:)) / 4;
135 | landmarkPos = (k)*4+2;
136 | endplateLandmarks(landmarkPos,:) = (3*endplateLandmarks(landmarkPos-2,:) + endplateLandmarks(landmarkPos+4,:)) / 4;
137 | landmarkPos = (k)*4+3;
138 | endplateLandmarks(landmarkPos,:) = (endplateLandmarks(landmarkPos-4,:) + 3*endplateLandmarks(landmarkPos+2,:)) / 4;
139 | landmarkPos = (k)*4+4;
140 | endplateLandmarks(landmarkPos,:) = (endplateLandmarks(landmarkPos-4,:) + 3*endplateLandmarks(landmarkPos+2,:)) / 4;
141 |
142 |
143 | % recalculate vertebra slopes with changes
144 | % averaging the endplates for each vertebra to find average slopes
145 | midVertebraLandmarks = zeros(numEndplates,2);
146 | for k2 = 1:2:numEndplates
147 | landmarkPos = (k2-1)*2+1;
148 | midVertebraLandmarks(k2,:) = (endplateLandmarks(landmarkPos,:) + endplateLandmarks(landmarkPos+2,:))/2;
149 | midVertebraLandmarks(k2+1,:) = (endplateLandmarks(landmarkPos+1,:) + endplateLandmarks(landmarkPos+3,:))/2;
150 | end
151 |
152 | % calculate slope of each vertebra
153 | midSlopes = zeros(numVertebrae,1);
154 | for k2 = 1:numVertebrae
155 | landmarkPos = (k2-1)*2+1;
156 | midSlopes(k2,1) = (midVertebraLandmarks(landmarkPos+1,2) - midVertebraLandmarks(landmarkPos,2)) / (midVertebraLandmarks(landmarkPos+1,1) - midVertebraLandmarks(landmarkPos,1));
157 | end
158 | end
159 | end
160 |
161 | %%%%%%%%%%%%%%%%%%% calculate length of each endplate and remove if unusual size
162 | midLengths = zeros(numVertebrae,1);
163 | for k = 1:numVertebrae
164 | landmarkPos = (k-1)*2+1;
165 | midLengths(k,1) = norm(midVertebraLandmarks(landmarkPos+1,:) - midVertebraLandmarks(landmarkPos,:));
166 | end
167 | rows2Delete = [];
168 | for k = 1:numVertebrae
169 | if (midLengths(k) < 0.6*median(midLengths)) || (midLengths(k) > 2*median(midLengths))
170 | landmarkPos = (k-1)*4+1;
171 | rows2Delete = [rows2Delete landmarkPos landmarkPos+1 landmarkPos+2 landmarkPos+3];
172 | end
173 | end
174 | endplateLandmarks(rows2Delete,:) = [];
175 |
176 | numLandmarks = length(endplateLandmarks);
177 | numVertebrae = numLandmarks/4;
178 | numEndplates = numLandmarks/2;
179 |
180 | %%%%%%%%%%%%%%%%%%% find large changes in slope between endplates of each vertebra and remove
181 | % calculate slope of each vertebra
182 | endplateSlopes = zeros(numEndplates,1);
183 | for k = 1:numEndplates
184 | landmarkPos = (k-1)*2+1;
185 | endplateSlopes(k,1) = (endplateLandmarks(landmarkPos+1,2) - endplateLandmarks(landmarkPos,2)) / (endplateLandmarks(landmarkPos+1,1) - endplateLandmarks(landmarkPos,1));
186 | end
187 | % for T1 and L5, if the angle between a vertebra's endplates is too big,
188 | % give the more sloped endplate the slope of the other one
189 | for k = [1 numVertebrae]
190 | endplatePos = (k-1)*2+1;
191 | % if angle between the endplates of a vertebra is greater than 10 degrees
192 | if abs(rad2deg(atan(endplateSlopes(endplatePos+1))-atan(endplateSlopes(endplatePos)))) > 10
193 | if abs(endplateSlopes(endplatePos)) > abs(endplateSlopes(endplatePos+1))
194 | landmarkPos = (k-1)*4+1;
195 | endplateLandmarks(landmarkPos,2) = endplateLandmarks(landmarkPos+1,2) + (endplateLandmarks(landmarkPos+2,2)-endplateLandmarks(landmarkPos+3,2));
196 | else
197 | landmarkPos = (k-1)*4+3;
198 | endplateLandmarks(landmarkPos,2) = endplateLandmarks(landmarkPos+1,2) + (endplateLandmarks(landmarkPos-2,2)-endplateLandmarks(landmarkPos-1,2));
199 | end
200 | end
201 | end
202 | % for the rest ...
203 | for k = 2:numVertebrae-1
204 | endplatePos = (k-1)*2+1;
205 | % if angle between the endplates of a vertebra is greater than 10 degrees
206 | if abs(rad2deg(atan(endplateSlopes(endplatePos+1))-atan(endplateSlopes(endplatePos)))) > 10
207 | % find which endplate is most deviated from its neighbouring
208 | % endplate on the adjacent vertebra; and replace the most deviated
209 | % endplate with the weighted avg of its 2 neighbours
210 | if abs(rad2deg(atan(endplateSlopes(endplatePos))-atan(endplateSlopes(endplatePos-1)))) > abs(rad2deg(atan(endplateSlopes(endplatePos+2))-atan(endplateSlopes(endplatePos+1))))
211 | landmarkPos = (k-1)*4+1;
212 | endplateLandmarks(landmarkPos,:) = (3*endplateLandmarks(landmarkPos-2,:) + endplateLandmarks(landmarkPos+2,:)) / 4;
213 | landmarkPos = (k-1)*4+2;
214 | endplateLandmarks(landmarkPos,:) = (3*endplateLandmarks(landmarkPos-2,:) + endplateLandmarks(landmarkPos+2,:)) / 4;
215 | else
216 | landmarkPos = (k-1)*4+3;
217 | endplateLandmarks(landmarkPos,:) = (endplateLandmarks(landmarkPos-2,:) + 3*endplateLandmarks(landmarkPos+2,:)) / 4;
218 | landmarkPos = (k-1)*4+4;
219 | endplateLandmarks(landmarkPos,:) = (endplateLandmarks(landmarkPos-2,:) + 3*endplateLandmarks(landmarkPos+2,:)) / 4;
220 | end
221 | end
222 | end
223 |
224 | end
225 |
226 |
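227 | % Landmark layout produced above: endplateLandmarks holds four (x,y) rows per
228 | % vertebra, ordered top-left, top-right, bottom-left, bottom-right, from the
229 | % top of the spine down; vertebra k occupies rows (k-1)*4+1 to (k-1)*4+4,
230 | % which is the indexing the error-correction passes rely on.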
--------------------------------------------------------------------------------
/dataset-preprocessing/fixLandmarkErrors.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from numpy import genfromtxt
3 | import os
4 | import cv2
5 | import scipy.io
6 | import matplotlib.pyplot as plt
7 |
8 | IMG_SIZE_X = 128
9 | IMG_SIZE_Y = 256
10 |
11 | def create_datasets(im_dir, lm_dir, fn_dir, IMG_SIZE_X, IMG_SIZE_Y):
12 | fn_data = genfromtxt(fn_dir, delimiter=',', dtype=str)
13 | lm_data = genfromtxt(lm_dir, delimiter=',')
14 | im_data = []
15 |
16 | for filename in fn_data:
17 | for img in os.listdir(im_dir):
18 | if img == filename:
19 | img_array = cv2.imread(os.path.join(im_dir, img), cv2.IMREAD_GRAYSCALE)
20 | new_array = cv2.resize(img_array, (IMG_SIZE_X, IMG_SIZE_Y))
21 | im_data.append([new_array])
22 |
23 | im_data = np.array(im_data).reshape(-1, IMG_SIZE_Y, IMG_SIZE_X, 1)
24 | im_data = im_data / 255.0
25 | return im_data, lm_data
26 |
27 |
28 | train_im_dir = "../data/boostnet_labeldata/data/training"
29 | train_lm_dir = "../data/boostnet_labeldata/labels/training/landmarks.csv"
30 | train_fn_dir = "../data/boostnet_labeldata/labels/training/filenames.csv"
31 |
32 | X, lm_data = create_datasets(train_im_dir, train_lm_dir, train_fn_dir, IMG_SIZE_X, IMG_SIZE_Y)
33 |
34 | # reshape landmark array and adjust to be 2 columns for each image corresponding to x and y coordinates
35 | # coordinates converted into pixel values
36 | lm = lm_data.reshape(-1, 2, 68)
37 | lm2 = []
38 | for i in range(lm.shape[0]):
39 | lm[i, 0, :] = lm[i, 0, :] * IMG_SIZE_X
40 | lm[i, 1, :] = lm[i, 1, :] * IMG_SIZE_Y
41 | lm2.append(np.transpose(lm[i, :, :]))
42 | lm = np.array(lm2)
43 |
44 | # ################################## code to visualise landmarks errors and fixed landmarks
45 | # image = 3
46 | # fig = plt.figure()
47 | # plt.subplot(1, 3, 1)
48 | # plt.imshow(X[image, :, :, 0], cmap="gray")
49 | # for k in range(68):
50 | # plt.text(lm[image, k, 0], lm[image, k, 1], str(k), horizontalalignment='center', fontsize=5, color='red')
51 | # plt.title(str(image) + " before fix")
52 | #
53 | # ######## fix goes here
54 | #
55 | #
56 | # plt.subplot(1, 3, 2)
57 | # plt.imshow(X[image, :, :, 0], cmap="gray")
58 | # for k in range(68):
59 | # plt.text(lm[image, k, 0], lm[image, k, 1], str(k), horizontalalignment='center', fontsize=5, color='red')
60 | # plt.title(str(image) + " after fix")
61 | #
62 | # plt.subplot(1, 3, 3)
63 | # plt.imshow(X[0, :, :, 0], cmap="gray")
64 | # for k in range(68):
65 | # plt.text(lm[0, k, 0], lm[0, k, 1], str(k), horizontalalignment='center', fontsize=5, color='red')
66 | # plt.title(str(0) + " - reference")
67 |
68 |
69 | ################### fix issues identified with landmarks
70 | # image 28 - last 2 landmarks were at the top of the spine
71 | lm[28, :, :] = np.roll(lm[28, :, :], 2, axis=0)
72 |
73 | for k in range(64):
74 | lm[28, 67 - k, 0] = lm[28, 67 - k - 2, 0]
75 | lm[28, 67 - k, 1] = lm[28, 67 - k - 2, 1]
76 |
77 | for k in range(12):
78 | lm[28, 67 - k, 0] = lm[28, 67 - k - 2, 0]
79 | lm[28, 67 - k, 1] = lm[28, 67 - k - 2, 1]
80 |
81 | lm[28, 2, 1] = lm[28, 2, 1] - 2
82 | lm[28, 3, 1] = lm[28, 3, 1] - 2
83 | lm[28, 50, 1] = lm[28, 50, 1] - 1
84 | lm[28, 51, 1] = lm[28, 51, 1] - 2
85 | lm[28, 52, 1] = lm[28, 52, 1] - 5
86 | lm[28, 53, 1] = lm[28, 53, 1] - 5
87 | lm[28, 55, 1] = lm[28, 55, 1] - 3
88 | lm[28, 56, 1] = lm[28, 56, 1] + 3
89 |
90 | # image 14 - landmarks 22, 23 were in the middle of vertebrae causing the rest to be a level above
91 | for k in range(22, 66):
92 | lm[14, k, 0] = lm[14, k + 2, 0]
93 | lm[14, k, 1] = lm[14, k + 2, 1]
94 |
95 | lm[14, 66, 0] = lm[14, 64, 0] - 5
96 | lm[14, 66, 1] = lm[14, 64, 1] + 10
97 |
98 | lm[14, 67, 0] = lm[14, 65, 0] - 5
99 | lm[14, 67, 1] = lm[14, 65, 1] + 10
100 |
101 | # image 31 - landmarks 18, 19 were in the middle of vertebrae causing the rest to be a level above
102 | for k in range(18, 66):
103 | lm[31, k, 0] = lm[31, k + 2, 0]
104 | lm[31, k, 1] = lm[31, k + 2, 1]
105 |
106 | lm[31, 20, 1] = lm[31, 20, 1] - 2
107 | lm[31, 21, 1] = lm[31, 21, 1] - 2
108 |
109 | lm[31, 66, 0] = lm[31, 64, 0] - 2
110 | lm[31, 66, 1] = lm[31, 64, 1] + 10
111 |
112 | lm[31, 67, 0] = lm[31, 65, 0] - 2
113 | lm[31, 67, 1] = lm[31, 65, 1] + 10
114 |
115 | # image 33 - missing landmarks between 22/23 and 24/25 and between 56/57 and 58/59
116 | for k in range(42):
117 | lm[33, 67 - k, 0] = lm[33, 67 - k - 2, 0]
118 | lm[33, 67 - k, 1] = lm[33, 67 - k - 2, 1]
119 |
120 | lm[33, 24, 0] = lm[33, 22, 0] + 1
121 | lm[33, 24, 1] = lm[33, 22, 1] + 4
122 | lm[33, 25, 0] = lm[33, 23, 0]
123 | lm[33, 25, 1] = lm[33, 23, 1] + 3
124 |
125 | lm[33, 58, 1] = lm[33, 58, 1] - 1
126 | lm[33, 59, 1] = lm[33, 59, 1] - 1
127 |
128 | for k in range(6):
129 | lm[33, 67 - k, 0] = lm[33, 67 - k - 2, 0]
130 | lm[33, 67 - k, 1] = lm[33, 67 - k - 2, 1]
131 |
132 | lm[33, 60, 1] = lm[33, 60, 1] - 7
133 | lm[33, 61, 1] = lm[33, 61, 1] - 7
134 |
135 | # image 40 - upper end plate too low on some vertebrae
136 | lm[40, 24, 1] = lm[40, 24, 1] - 2
137 | lm[40, 25, 1] = lm[40, 25, 1] - 2
138 |
139 | lm[40, 28, 1] = lm[40, 28, 1] - 2
140 | lm[40, 29, 1] = lm[40, 29, 1] - 2
141 |
142 | lm[40, 32, 1] = lm[40, 32, 1] - 1
143 | lm[40, 33, 1] = lm[40, 33, 1] - 1
144 |
145 | lm[40, 36, 1] = lm[40, 36, 1] - 4
146 | lm[40, 37, 1] = lm[40, 37, 1] - 4
147 |
148 | lm[40, 40, 1] = lm[40, 40, 1] - 4
149 | lm[40, 41, 1] = lm[40, 41, 1] - 4
150 |
151 | lm[40, 44, 1] = lm[40, 44, 1] - 4
152 | lm[40, 45, 1] = lm[40, 45, 1] - 4
153 |
154 | # image 72 - C7 marked instead of L5
155 | for k in range(64):
156 | lm[72, k, 0] = lm[72, k + 4, 0]
157 | lm[72, k, 1] = lm[72, k + 4, 1]
158 |
159 | lm[72, 64, 0] = lm[72, 60, 0] - 5
160 | lm[72, 64, 1] = lm[72, 60, 1] + 20
161 |
162 | lm[72, 65, 0] = lm[72, 61, 0] - 5
163 | lm[72, 65, 1] = lm[72, 61, 1] + 20
164 |
165 | lm[72, 66, 0] = lm[72, 62, 0] - 5
166 | lm[72, 66, 1] = lm[72, 62, 1] + 20
167 |
168 | lm[72, 67, 0] = lm[72, 63, 0] - 5
169 | lm[72, 67, 1] = lm[72, 63, 1] + 18
170 |
171 | # image 84 - missing landmarks between 30/31 and 32/33
172 | for k in range(36):
173 | lm[84, 67 - k, 0] = lm[84, 67 - k - 2, 0]
174 | lm[84, 67 - k, 1] = lm[84, 67 - k - 2, 1]
175 |
176 | lm[84, 28, 1] = lm[84, 26, 1] + 3
177 | lm[84, 29, 1] = lm[84, 27, 1] + 3
178 |
179 | lm[84, 32, 1] = lm[84, 32, 1] + 3
180 | lm[84, 33, 1] = lm[84, 33, 1] + 3
181 |
182 | # image 91 - upper end plate too low on some vertebrae
183 | lm[91, 4, 1] = lm[91, 4, 1] - 2
184 | lm[91, 5, 1] = lm[91, 5, 1] - 2
185 |
186 | lm[91, 8, 1] = lm[91, 8, 1] - 2
187 | lm[91, 9, 1] = lm[91, 9, 1] - 2
188 |
189 | lm[91, 12, 1] = lm[91, 12, 1] - 3
190 | lm[91, 13, 1] = lm[91, 13, 1] - 3
191 |
192 | # image 143 - extra landmarks at 42/43, missing landmarks between 56/57 and 58/59
193 | for k in range(42, 56):
194 | lm[143, k, 0] = lm[143, k + 2, 0]
195 | lm[143, k, 1] = lm[143, k + 2, 1]
196 |
197 | lm[143, 44, 1] = lm[143, 44, 1] - 1
198 | lm[143, 45, 1] = lm[143, 45, 1] - 2
199 |
200 | lm[143, 56, 1] = lm[143, 56, 1] + 6
201 | lm[143, 57, 1] = lm[143, 57, 1] + 7
202 |
203 | # image 193 - L5 landmarks on C7 instead
204 | lm[193, 0, 1] = lm[193, 0, 1] - 10
205 |
206 | lm[193, 64, 0] = lm[193, 60, 0] - 3
207 | lm[193, 64, 1] = lm[193, 60, 1] + 20
208 |
209 | lm[193, 65, 0] = lm[193, 61, 0]
210 | lm[193, 65, 1] = lm[193, 61, 1] + 20
211 |
212 | lm[193, 66, 0] = lm[193, 62, 0]
213 | lm[193, 66, 1] = lm[193, 62, 1] + 20
214 |
215 | lm[193, 67, 0] = lm[193, 63, 0]
216 | lm[193, 67, 1] = lm[193, 63, 1] + 20
217 |
218 | # image 218 - T1 not marked
219 | for k in range(64):
220 | lm[218, 67 - k, 0] = lm[218, 67 - k - 4, 0]
221 | lm[218, 67 - k, 1] = lm[218, 67 - k - 4, 1]
222 |
223 | lm[218, 0, 0] = lm[218, 0, 0]
224 | lm[218, 0, 1] = lm[218, 0, 1] - 10
225 |
226 | lm[218, 1, 0] = lm[218, 1, 0]
227 | lm[218, 1, 1] = lm[218, 1, 1] - 11
228 |
229 | lm[218, 2, 0] = lm[218, 2, 0]
230 | lm[218, 2, 1] = lm[218, 2, 1] - 10
231 |
232 | lm[218, 3, 0] = lm[218, 3, 0]
233 | lm[218, 3, 1] = lm[218, 3, 1] - 10
234 |
235 | # image 242 - landmarks 26 and 28 too far right
236 | lm[242, 26, 0] = lm[242, 26, 0] - 6
237 | lm[242, 28, 0] = lm[242, 28, 0] - 6
238 |
239 | # image 261 - landmarks 22 and 23 too high
240 | lm[261, 22, 0] = lm[261, 24, 0]
241 | lm[261, 22, 1] = lm[261, 24, 1] - 3
242 |
243 | lm[261, 23, 0] = lm[261, 25, 0]
244 | lm[261, 23, 1] = lm[261, 25, 1] - 3
245 |
246 | # image 265 - landmark 63 in wrong position
247 | lm[265, 63, 0] = lm[265, 65, 0]
248 | lm[265, 63, 1] = lm[265, 65, 1] - 4
249 |
250 | # image 268 - landmarks 14 and 15 too high
251 | lm[268, 14, 0] = lm[268, 16, 0] + 2
252 | lm[268, 14, 1] = lm[268, 16, 1] - 3
253 |
254 | lm[268, 15, 0] = lm[268, 17, 0]
255 | lm[268, 15, 1] = lm[268, 17, 1] - 3
256 |
257 | # image 269 - landmark 54 in wrong position
258 | lm[269, 54, 0] = lm[269, 54, 0] + 5
259 | lm[269, 54, 1] = lm[269, 54, 1] - 5
260 |
261 | # image 313 - landmarks 54 and 55 too high
262 | lm[313, 54, 1] = lm[313, 54, 1] + 10
263 | lm[313, 55, 1] = lm[313, 55, 1] + 10
264 |
265 | # image 439 - landmarks 24 and 26 too far right
266 | lm[439, 24, 0] = lm[439, 24, 0] - 7
267 | lm[439, 26, 0] = lm[439, 26, 0] - 7
268 |
269 | lm[439, 28, 1] = lm[439, 28, 1] + 2
270 |
271 | # image 445 - missing 2 vertebrae
272 | lm[445, 42, 0] = lm[445, 42, 0] + 2
273 | lm[445, 42, 1] = lm[445, 42, 1] + 4
274 |
275 | lm[445, 29, 1] = lm[445, 29, 1] - 3
276 |
277 | for k in range(60):
278 | lm[445, k, 0] = lm[445, k + 8, 0]
279 | lm[445, k, 1] = lm[445, k + 8, 1]
280 |
281 | lm[445, 59, 0] = lm[445, 59, 0] - 2
282 |
283 | lm[445, 60, 0] = lm[445, 60, 0] + 6
284 | lm[445, 60, 1] = lm[445, 60, 1] + 27
285 |
286 | lm[445, 61, 0] = lm[445, 61, 0] + 6
287 | lm[445, 61, 1] = lm[445, 61, 1] + 37
288 |
289 | lm[445, 62, 0] = lm[445, 62, 0]
290 | lm[445, 62, 1] = lm[445, 62, 1] + 29
291 |
292 | lm[445, 63, 0] = lm[445, 63, 0]
293 | lm[445, 63, 1] = lm[445, 63, 1] + 37
294 |
295 | lm[445, 64, 0] = lm[445, 64, 0] - 4
296 | lm[445, 64, 1] = lm[445, 64, 1] + 36
297 |
298 | lm[445, 65, 0] = lm[445, 65, 0] - 1
299 | lm[445, 65, 1] = lm[445, 65, 1] + 40
300 |
301 | lm[445, 66, 0] = lm[445, 66, 0] - 6
302 | lm[445, 66, 1] = lm[445, 66, 1] + 39
303 |
304 | lm[445, 67, 0] = lm[445, 67, 0] - 4
305 | lm[445, 67, 1] = lm[445, 67, 1] + 40
306 |
307 | # image 471 - some landmarks too far from corners
308 | lm[471, 53, 1] = lm[471, 53, 1] - 3
309 | lm[471, 56, 1] = lm[471, 56, 1] - 3
310 | lm[471, 57, 1] = lm[471, 57, 1] - 4
311 | lm[471, 60, 1] = lm[471, 60, 1] - 3
312 | lm[471, 61, 1] = lm[471, 61, 1] - 3
313 | lm[471, 64, 1] = lm[471, 64, 1] - 2
314 | lm[471, 65, 1] = lm[471, 65, 1] - 2
315 |
316 | lm[471, 54, 1] = lm[471, 54, 1] + 3
317 | lm[471, 55, 1] = lm[471, 55, 1] + 3
318 | lm[471, 58, 1] = lm[471, 58, 1] + 3
319 | lm[471, 59, 1] = lm[471, 59, 1] + 3
320 | lm[471, 62, 1] = lm[471, 62, 1] + 3
321 | lm[471, 63, 1] = lm[471, 63, 1] + 3
322 | lm[471, 66, 1] = lm[471, 66, 1] + 3
323 | lm[471, 67, 1] = lm[471, 67, 1] + 3
324 |
325 | # image 474 - missing two vertebrae
326 | for k in range(38, 64):
327 | lm[474, k, 0] = lm[474, k + 4, 0]
328 | lm[474, k, 1] = lm[474, k + 4, 1]
329 |
330 | for k in range(54, 66):
331 | lm[474, k, 0] = lm[474, k + 2, 0]
332 | lm[474, k, 1] = lm[474, k + 2, 1]
333 |
334 | lm[474, 62, 1] = lm[474, 62, 1] + 18
335 |
336 | lm[474, 63, 0] = lm[474, 63, 0] + 4
337 | lm[474, 63, 1] = lm[474, 63, 1] + 20
338 |
339 | lm[474, 64, 0] = lm[474, 64, 0] - 2
340 | lm[474, 64, 1] = lm[474, 64, 1] + 18
341 |
342 | lm[474, 65, 1] = lm[474, 65, 1] + 20
343 |
344 | lm[474, 66, 0] = lm[474, 66, 0] - 2
345 | lm[474, 66, 1] = lm[474, 66, 1] + 30
346 |
347 | lm[474, 67, 1] = lm[474, 67, 1] + 32
348 |
349 | # image 476 - missing landmarks between 18/19 and 20/21
350 | for k in range(48):
351 | lm[476, 67 - k, 0] = lm[476, 67 - k - 2, 0]
352 | lm[476, 67 - k, 1] = lm[476, 67 - k - 2, 1]
353 |
354 | lm[476, 51, 0] = lm[476, 51, 0] - 3
355 |
356 | lm[476, 20, 1] = lm[476, 20, 1] + 2
357 | lm[476, 21, 1] = lm[476, 21, 1] + 2
358 |
359 | lm[476, 22, 1] = lm[476, 22, 1] + 2
360 | lm[476, 23, 1] = lm[476, 23, 1] + 2
361 |
362 | # image 3 - mixed up top and bottom landmarks
363 | temp1 = lm[3, 60, 0]
364 | temp2 = lm[3, 60, 1]
365 |
366 | lm[3, 60, 0] = lm[3, 62, 0]
367 | lm[3, 60, 1] = lm[3, 62, 1]
368 |
369 | lm[3, 62, 0] = temp1
370 | lm[3, 62, 1] = temp2
371 |
372 | temp1 = lm[3, 61, 0]
373 | temp2 = lm[3, 61, 1]
374 |
375 | lm[3, 61, 0] = lm[3, 63, 0]
376 | lm[3, 61, 1] = lm[3, 63, 1]
377 |
378 | lm[3, 63, 0] = temp1
379 | lm[3, 63, 1] = temp2
380 |
381 | # image 10 - mixed up top and bottom landmarks, last 4 landmarks at the top of spine
382 | temp1 = lm[10, 64, 0]
383 | temp2 = lm[10, 64, 1]
384 |
385 | lm[10, 64, 0] = lm[10, 66, 0]
386 | lm[10, 64, 1] = lm[10, 66, 1]
387 |
388 | lm[10, 66, 0] = temp1
389 | lm[10, 66, 1] = temp2
390 |
391 | temp1 = lm[10, 65, 0]
392 | temp2 = lm[10, 65, 1]
393 |
394 | lm[10, 65, 0] = lm[10, 67, 0]
395 | lm[10, 65, 1] = lm[10, 67, 1]
396 |
397 | lm[10, 67, 0] = temp1
398 | lm[10, 67, 1] = temp2
399 |
400 | lm[10, :, :] = np.roll(lm[10, :, :], 4, axis=0)
401 |
402 | # image 394 - landmark >1 when normalised
403 | lm[394, 63, 1] = IMG_SIZE_Y
404 |
405 | # image 480 - a lot of overlapping points... unsure where L5 is, so repeating L4 instead
406 | lm[480, 64, 0] = lm[480, 60, 0]
407 | lm[480, 64, 1] = lm[480, 60, 1]
408 |
409 | lm[480, 65, 0] = lm[480, 61, 0]
410 | lm[480, 65, 1] = lm[480, 61, 1]
411 |
412 | lm[480, 66, 0] = lm[480, 62, 0]
413 | lm[480, 66, 1] = lm[480, 62, 1]
414 |
415 | lm[480, 67, 0] = lm[480, 63, 0]
416 | lm[480, 67, 1] = lm[480, 63, 1]
417 |
418 | # last 4 landmarks at the top of spine for all of the following images
419 | for k in [77, 103, 105, 107, 110, 122, 124, 144, 179, 180, 181, 182, 185, 221, 342, 368]:
420 | lm[k, :, :] = np.roll(lm[k, :, :], 4, axis=0)
421 |
422 |
423 | # normalise data
424 | lm[:, :, 0] /= IMG_SIZE_X
425 | lm[:, :, 1] /= IMG_SIZE_Y
426 |
427 | scipy.io.savemat('../data/FixedSpineWebData/fixedTrainingLandmarks.mat', dict(landmarks=lm))
428 |
429 |
430 |
431 | ########################## repeat same process for test set
432 | test_im_dir = "../data/boostnet_labeldata/data/test"
433 | test_lm_dir = "../data/boostnet_labeldata/labels/test/landmarks.csv"
434 | test_fn_dir = "../data/boostnet_labeldata/labels/test/filenames.csv"
435 |
436 | X, lm_data = create_datasets(test_im_dir, test_lm_dir, test_fn_dir, IMG_SIZE_X, IMG_SIZE_Y)
437 |
438 |
439 | # reshape landmark array and adjust to be 2 columns for each image corresponding to x and y coordinates
440 | # coordinates converted into pixel values
441 | lm = lm_data.reshape(-1, 2, 68)
442 | lm2 = []
443 | for i in range(lm.shape[0]):
444 | lm[i, 0, :] = lm[i, 0, :] * IMG_SIZE_X
445 | lm[i, 1, :] = lm[i, 1, :] * IMG_SIZE_Y
446 | lm2.append(np.transpose(lm[i, :, :]))
447 | lm = np.array(lm2)
448 |
449 |
450 | # ################################## check fix
451 | # image = 3
452 | # fig = plt.figure()
453 | # plt.subplot(1, 3, 1)
454 | # plt.imshow(X[image, :, :, 0], cmap="gray")
455 | # for k in range(68):
456 | # plt.text(lm[image, k, 0], lm[image, k, 1], str(k), horizontalalignment='center', fontsize=5, color='red')
457 | # plt.title(str(image) + " before fix")
458 | #
459 | # ######## fix goes here
460 | #
461 | #
462 | # plt.subplot(1, 3, 2)
463 | # plt.imshow(X[image, :, :, 0], cmap="gray")
464 | # for k in range(68):
465 | # plt.text(lm[image, k, 0], lm[image, k, 1], str(k), horizontalalignment='center', fontsize=5, color='red')
466 | # plt.title(str(image) + " after fix")
467 | #
468 | # plt.subplot(1, 3, 3)
469 | # plt.imshow(X[0, :, :, 0], cmap="gray")
470 | # for k in range(68):
471 | # plt.text(lm[0, k, 0], lm[0, k, 1], str(k), horizontalalignment='center', fontsize=5, color='red')
472 | # plt.title(str(0) + " - reference")
473 |
474 |
475 |
476 | ################### fix issues identified with landmarks
477 | # image 3 - L5 not marked
478 | for k in range(64):
479 | lm[3, k, 0] = lm[3, k + 4, 0]
480 | lm[3, k, 1] = lm[3, k + 4, 1]
481 |
482 | lm[3, 64, 1] = lm[3, 64, 1] + 20
483 | lm[3, 65, 1] = lm[3, 65, 1] + 22
484 | lm[3, 66, 1] = lm[3, 66, 1] + 20
485 | lm[3, 67, 1] = lm[3, 67, 1] + 22
486 |
487 |
488 | # image 15 - landmarks 44/45 and 46/47 on the same endplate and T1 is not marked
489 | for k in range(46, 66):
490 | lm[15, k, 0] = lm[15, k + 2, 0]
491 | lm[15, k, 1] = lm[15, k + 2, 1]
492 |
493 | lm[15, 64, 0] = lm[15, 0, 0]
494 | lm[15, 64, 1] = lm[15, 0, 1] - 10
495 |
496 | lm[15, 65, 0] = lm[15, 1, 0]
497 | lm[15, 65, 1] = lm[15, 1, 1] - 10
498 |
499 | lm[15, 66, 0] = lm[15, 2, 0]
500 | lm[15, 66, 1] = lm[15, 2, 1] - 10
501 |
502 | lm[15, 67, 0] = lm[15, 3, 0]
503 | lm[15, 67, 1] = lm[15, 3, 1] - 10
504 |
505 | lm[15, :, :] = np.roll(lm[15, :, :], 4, axis=0)
506 |
507 |
508 | # image 45 - landmarks 0 and 2 swapped
509 | lm[45, 0, 0] = lm[45, 2, 0] + 2
510 | lm[45, 0, 1] = lm[45, 2, 1] - 3
511 |
512 | lm[45, 2, 0] = lm[45, 4, 0]
513 | lm[45, 2, 1] = lm[45, 4, 1] - 2
514 |
515 |
516 | # image 79 - missing landmarks between 40/41 and 42/43
517 | for k in range(26):
518 | lm[79, 67 - k, 0] = lm[79, 67 - k - 2, 0]
519 | lm[79, 67 - k, 1] = lm[79, 67 - k - 2, 1]
520 |
521 | lm[79, 40, 0] = lm[79, 40, 0] - 1
522 | lm[79, 40, 1] = lm[79, 40, 1] - 3
523 | lm[79, 41, 0] = lm[79, 41, 0] - 1
524 | lm[79, 41, 1] = lm[79, 41, 1] - 3
525 |
526 | lm[79, 42, 0] = lm[79, 42, 0] + 2
527 | lm[79, 42, 1] = lm[79, 42, 1] + 4
528 | lm[79, 43, 0] = lm[79, 43, 0] + 2
529 | lm[79, 43, 1] = lm[79, 43, 1] + 4
530 |
531 |
532 | # image 85 - landmark 0 misplaced
533 | lm[85, 0, 1] = lm[85, 0, 1] - 8
534 |
535 |
536 | # image 86 - landmarks 54/55 misplaced
537 | for k in range(54, 62):
538 | lm[86, k, 0] = lm[86, k + 2, 0]
539 | lm[86, k, 1] = lm[86, k + 2, 1]
540 |
541 | lm[86, 62, 0] = lm[86, 64, 0]
542 | lm[86, 62, 1] = lm[86, 64, 1] - 3
543 |
544 | lm[86, 63, 0] = lm[86, 65, 0]
545 | lm[86, 63, 1] = lm[86, 65, 1] - 3
546 |
547 |
548 | # image 88 - missing landmarks between 10/11 and 12/13, marked C7 instead of L5
549 | for k in range(66):
550 | lm[88, k, 0] = lm[88, k + 2, 0]
551 | lm[88, k, 1] = lm[88, k + 2, 1]
552 |
553 | for k in range(8):
554 | lm[88, k, 0] = lm[88, k + 2, 0]
555 | lm[88, k, 1] = lm[88, k + 2, 1]
556 |
557 | lm[88, 6, 1] = lm[88, 6, 1] - 2
558 | lm[88, 7, 1] = lm[88, 7, 1] - 2
559 |
560 | lm[88, 8, 1] = lm[88, 8, 1] + 2
561 | lm[88, 9, 1] = lm[88, 9, 1] + 2
562 |
563 | lm[88, 66, 1] = lm[88, 66, 1] + 12
564 | lm[88, 67, 1] = lm[88, 67, 1] + 12
565 |
566 |
567 | # image 89 - landmarks 0 and 67 misplaced
568 | lm[89, 0, 1] = lm[89, 0, 1] - 9
569 | lm[89, 67, 1] = lm[89, 67, 1] - 4
570 |
571 |
572 | # image 92 - landmarks 34/35 were in the middle of vertebra
573 | lm[92, 34, 1] = lm[92, 36, 1]
574 | lm[92, 35, 1] = lm[92, 37, 1]
575 |
576 | for k in range(36, 66):
577 | lm[92, k, 0] = lm[92, k + 2, 0]
578 | lm[92, k, 1] = lm[92, k + 2, 1]
579 |
580 | lm[92, 36, 1] = lm[92, 36, 1] - 2
581 | lm[92, 37, 1] = lm[92, 37, 1] - 3
582 |
583 | lm[92, 66, 1] = lm[92, 66, 1] + 8
584 | lm[92, 67, 1] = lm[92, 67, 1] + 8
585 |
586 |
587 | # image 104 - landmarks 0 and 1 misplaced
588 | lm[104, 0, 1] = lm[104, 0, 1] - 8
589 | lm[104, 1, 1] = lm[104, 1, 1] - 8
590 |
591 |
592 | # last 4 landmarks at the top of spine for all of the following images
593 | for k in [5, 21, 44]:
594 | lm[k, :, :] = np.roll(lm[k, :, :], 4, axis=0)
595 |
596 |
597 |
598 | # normalise data
599 | lm[:, :, 0] /= IMG_SIZE_X
600 | lm[:, :, 1] /= IMG_SIZE_Y
601 |
602 | scipy.io.savemat('../data/FixedSpineWebData/fixedTestingLandmarks.mat', dict(landmarks=lm))
603 |
604 |
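605 | # Layout assumed by all of the fixes above: lm has shape (num_images, 68, 2),
606 | # i.e. 17 vertebrae x 4 corner landmarks (two top-edge, then two bottom-edge
607 | # points per vertebra), ordered from the top of the spine down; column 0 is x
608 | # and column 1 is y, in pixel coordinates until the final re-normalisation.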
--------------------------------------------------------------------------------