├── decisiontree.m ├── Functions.pdf ├── line plot.pdf ├── looping.pdf ├── knn.m ├── Introduction to MATLAB.pdf ├── linear_regression.m ├── regressiontree.m ├── kmedoids.m ├── kmeans.m ├── dbscan.m ├── svm.m ├── naivebayes.m └── README.md /decisiontree.m: -------------------------------------------------------------------------------- 1 | load ionosphere; 2 | tc = fitctree(X,Y); 3 | view(tc,'Mode','graph'); 4 | -------------------------------------------------------------------------------- /Functions.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/santoshpanda1995/MATLAB-for-all/HEAD/Functions.pdf -------------------------------------------------------------------------------- /line plot.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/santoshpanda1995/MATLAB-for-all/HEAD/line plot.pdf -------------------------------------------------------------------------------- /looping.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/santoshpanda1995/MATLAB-for-all/HEAD/looping.pdf -------------------------------------------------------------------------------- /knn.m: -------------------------------------------------------------------------------- 1 | load fisheriris 2 | X = meas; 3 | Y = species; 4 | Mdl = fitcknn(X,Y,'NumNeighbors',5,'Standardize',1) 5 | -------------------------------------------------------------------------------- /Introduction to MATLAB.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/santoshpanda1995/MATLAB-for-all/HEAD/Introduction to MATLAB.pdf -------------------------------------------------------------------------------- /linear_regression.m: -------------------------------------------------------------------------------- 1 | load carsmall 2 | X = [Weight,Horsepower,Acceleration]; 3 | 4 | mdl = fitlm(X,MPG) 5 | plot(mdl) 6 | -------------------------------------------------------------------------------- /regressiontree.m: -------------------------------------------------------------------------------- 1 | load carsmall; 2 | tree = fitrtree([Weight, Cylinders],MPG,... 3 | 'CategoricalPredictors',2,'MinParentSize',20,... 4 | 'PredictorNames',{'W','C'}); 5 | view(tree,'Mode','graph') 6 | -------------------------------------------------------------------------------- /kmedoids.m: -------------------------------------------------------------------------------- 1 | rng('default'); % For reproducibility 2 | X = [randn(100,2)*0.75+ones(100,2); 3 | randn(100,2)*0.55-ones(100,2)]; 4 | figure; 5 | plot(X(:,1),X(:,2),'.'); 6 | title('Randomly Generated Data'); 7 | 8 | opts = statset('Display','iter'); 9 | [idx,C,sumd,d,midx,info] = kmedoids(X,2,'Distance','cityblock','Options',opts); 10 | 11 | figure; 12 | plot(X(idx==1,1),X(idx==1,2),'r.','MarkerSize',7) 13 | hold on 14 | plot(X(idx==2,1),X(idx==2,2),'b.','MarkerSize',7) 15 | plot(C(:,1),C(:,2),'co',... 16 | 'MarkerSize',7,'LineWidth',1.5) 17 | legend('Cluster 1','Cluster 2','Medoids',... 
18 | 'Location','NW'); 19 | title('Cluster Assignments and Medoids'); 20 | hold off 21 | -------------------------------------------------------------------------------- /kmeans.m: -------------------------------------------------------------------------------- 1 | rng default; % For reproducibility 2 | X = [randn(100,2)*0.75+ones(100,2); 3 | randn(100,2)*0.5-ones(100,2)]; 4 | 5 | figure; 6 | plot(X(:,1),X(:,2),'.'); 7 | title 'Randomly Generated Data'; 8 | 9 | opts = statset('Display','final'); 10 | [idx,C] = kmeans(X,2,'Distance','cityblock',... 11 | 'Replicates',5,'Options',opts); 12 | 13 | figure; 14 | plot(X(idx==1,1),X(idx==1,2),'r.','MarkerSize',12) 15 | hold on 16 | plot(X(idx==2,1),X(idx==2,2),'b.','MarkerSize',12) 17 | plot(C(:,1),C(:,2),'kx',... 18 | 'MarkerSize',15,'LineWidth',3) 19 | legend('Cluster 1','Cluster 2','Centroids',... 20 | 'Location','NW') 21 | title 'Cluster Assignments and Centroids' 22 | hold off 23 | -------------------------------------------------------------------------------- /dbscan.m: -------------------------------------------------------------------------------- 1 | rng('default') % For reproducibility 2 | 3 | % Parameters for data generation 4 | N = 300; % Size of each cluster 5 | r1 = 0.5; % Radius of first circle 6 | r2 = 5; % Radius of second circle 7 | theta = linspace(0,2*pi,N)'; 8 | 9 | X1 = r1*[cos(theta),sin(theta)]+ rand(N,1); 10 | X2 = r2*[cos(theta),sin(theta)]+ rand(N,1); 11 | X = [X1;X2]; % Noisy 2-D circular data set 12 | 13 | figure; 14 | scatter(X(:,1),X(:,2)) 15 | 16 | idx = dbscan(X,1,5); % The default distance metric is Euclidean distance 17 | figure; 18 | gscatter(X(:,1),X(:,2),idx); 19 | title('DBSCAN Using Euclidean Distance Metric') 20 | 21 | idx2 = dbscan(X,1,5,'Distance','squaredeuclidean'); 22 | figure; 23 | gscatter(X(:,1),X(:,2),idx2); 24 | title('DBSCAN Using Squared Euclidean Distance Metric') 25 | -------------------------------------------------------------------------------- /svm.m: -------------------------------------------------------------------------------- 1 | load fisheriris; 2 | inds = ~strcmp(species,'versicolor'); 3 | X = meas(inds,1:2); 4 | s = species(inds); 5 | SVMModel = fitcsvm(X,s); 6 | sv = SVMModel.SupportVectors; % Support vectors 7 | beta = SVMModel.Beta; % Linear predictor coefficients 8 | b = SVMModel.Bias; % Bias term 9 | hold on 10 | gscatter(X(:,1),X(:,2),s) 11 | plot(sv(:,1),sv(:,2),'ko','MarkerSize',10) 12 | X1 = linspace(min(X(:,1)),max(X(:,1)),100); 13 | X2 = -(beta(1)/beta(2)*X1)-b/beta(2); 14 | plot(X1,X2,'-') 15 | 16 | m = 1/sqrt(beta(1)^2 + beta(2)^2); % Margin half-width 17 | X1margin_low = X1+beta(1)*m^2; 18 | X2margin_low = X2+beta(2)*m^2; 19 | X1margin_high = X1-beta(1)*m^2; 20 | X2margin_high = X2-beta(2)*m^2; 21 | plot(X1margin_high,X2margin_high,'b--') 22 | plot(X1margin_low,X2margin_low,'r--') 23 | xlabel('X_1 (Sepal Length in cm)') 24 | ylabel('X_2 (Sepal Width in cm)') 25 | legend('setosa','virginica','Support Vector', ... 
26 | 'Boundary Line','Upper Margin','Lower Margin') 27 | hold off 28 | -------------------------------------------------------------------------------- /naivebayes.m: -------------------------------------------------------------------------------- 1 | load fisheriris 2 | X = meas(:,3:4); 3 | Y = species; 4 | tabulate(Y) 5 | Mdl = fitcnb(X,Y,'ClassNames',{'setosa','versicolor','virginica'}) 6 | setosaIndex = strcmp(Mdl.ClassNames,'setosa'); 7 | estimates = Mdl.DistributionParameters{setosaIndex,1} 8 | figure 9 | gscatter(X(:,1),X(:,2),Y); 10 | h = gca; 11 | cxlim = h.XLim; 12 | cylim = h.YLim; 13 | hold on 14 | Params = cell2mat(Mdl.DistributionParameters); 15 | Mu = Params(2*(1:3)-1,1:2); % Extract the means 16 | Sigma = zeros(2,2,3); 17 | for j = 1:3 18 | Sigma(:,:,j) = diag(Params(2*j,:)).^2; % Create diagonal covariance matrix 19 | xlim = Mu(j,1) + 4*[-1 1]*sqrt(Sigma(1,1,j)); 20 | ylim = Mu(j,2) + 4*[-1 1]*sqrt(Sigma(2,2,j)); 21 | f = @(x,y) arrayfun(@(x0,y0) mvnpdf([x0 y0],Mu(j,:),Sigma(:,:,j)),x,y); 22 | fcontour(f,[xlim ylim]) % Draw contours for the multivariate normal distributions 23 | end 24 | h.XLim = cxlim; 25 | h.YLim = cylim; 26 | title('Naive Bayes Classifier -- Fisher''s Iris Data') 27 | xlabel('Petal Length (cm)') 28 | ylabel('Petal Width (cm)') 29 | legend('setosa','versicolor','virginica') 30 | hold off 31 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

# MATLAB For All

2 | 3 | **This comprehensive collection covers everything from the foundational basics to advanced MATLAB techniques, including an introduction to the language, conditional and looping statements, functions, plotting, image processing, and machine learning.** 4 | 5 | ## Table of Contents 6 | 7 | - Introduction to MATLAB
8 | * Workspace 9 | * Basic operations 10 | * Arrays 11 | * Matrices 12 | * Operators 13 | - Conditional and Looping Statements in MATLAB
14 | * if 15 | * if-else 16 | * elseif 17 | * switch 18 | * for 19 | * while 20 | * continue 21 | * break 22 | * Nested loops 23 | - Functions in MATLAB
24 | * Built-in functions 25 | * User-defined functions 26 | - Graphics with MATLAB
27 | * Line plots 28 | * Customizing plots with markers 29 | * Circle plot 30 | * Histogram 31 | * Box plot 32 | * Swarm chart 33 | * Heat map 34 | * Word cloud 35 | * Pie chart 36 | * Scatter plot 37 | * Bubble chart 38 | * Bar graph 39 | * Surface plot 40 | * Ribbon plot 41 | * Ellipsoid 42 | * Sphere 43 | * Cylinder 44 | - Image Processing using MATLAB
45 | * Basic image manipulation techniques 46 | * Image enhancement 47 | * Image segmentation 48 | * Morphological operations 49 | - Machine Learning using MATLAB
50 | * Linear Regression 51 | * Decision Tree 52 | * Regression Tree 53 | * k-Nearest Neighbours (KNN) 54 | * Naive Bayes 55 | * Support Vector Machine (SVM) 56 | * k-means 57 | * k-medoids 58 | * DBSCAN 59 | 60 | ## Some Applications 61 | - Circle Detection using CHT (Circular Hough Transform); a minimal sketch is shown below
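A minimal sketch of CHT-based circle detection, using the Image Processing Toolbox function `imfindcircles` on its bundled `coins.png` demo image (the radius range and sensitivity are rough guesses for that image and will need tuning for other data):

```matlab
% Detect circular objects (coins) with the Circular Hough Transform (CHT).
img = imread('coins.png');        % grayscale demo image shipped with the Image Processing Toolbox
figure;
imshow(img)

% Look for bright circles with radii between 15 and 30 pixels (assumed range for coins.png).
[centers,radii] = imfindcircles(img,[15 30], ...
    'ObjectPolarity','bright','Sensitivity',0.9);

viscircles(centers,radii);        % overlay the detected circles on the displayed image
title('Circle Detection using CHT')
```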
62 | 63 | ## To be continued (work in progress). If you would like to add something, you are most welcome to contribute. 64 | --------------------------------------------------------------------------------