%% Machine Learning Online Class - Exercise 3 | Part 1: One-vs-all

%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions
%  in this exercise:
%
%     lrCostFunction.m (logistic regression cost function)
%     oneVsAll.m
%     predictOneVsAll.m
%     predict.m
%
%  For this exercise, you will not need to change any code in this
%  file, or any files other than those mentioned above.
%
%% Initialization
clear; close all; clc

%% Setup the parameters you will use for this part of the exercise
input_layer_size = 400;   % 20x20 input images of digits
num_labels = 10;          % 10 labels, from 1 to 10
                          % (note that we have mapped "0" to label 10)
%% =========== Part 1: Loading and Visualizing Data =============
%  We start the exercise by first loading and visualizing the dataset.
%  You will be working with a dataset that contains handwritten digits.
%

% Load training data
fprintf('Loading and Visualizing Data ...\n')
load('ex3data1.mat'); % training data stored in arrays X, y
m = size(X, 1);

% Randomly select 100 data points to display
rand_indices = randperm(m);
sel = X(rand_indices(1:100), :);
displayData(sel);

fprintf('Program paused. Press enter to continue.\n');
pause;
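
% Optional sketch (not part of the exercise): a single example can also
% be rendered without displayData.m. Each row of X is a 20x20 grayscale
% image unrolled into a 400-element vector, so reshaping one row
% recovers the image (transpose the reshaped matrix if the digit
% appears sideways):
%
%   imagesc(reshape(X(rand_indices(1), :), 20, 20));
%   colormap(gray); axis image off;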

%% ============ Part 2a: Vectorize Logistic Regression ============
%  In this part of the exercise, you will reuse your logistic regression
%  code from the last exercise. Your task here is to make sure that your
%  regularized logistic regression implementation is vectorized. After
%  that, you will implement one-vs-all classification for the handwritten
%  digit dataset.
%
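% For reference, a minimal sketch of the vectorized cost and gradient
% that lrCostFunction.m is expected to compute (an illustration, not
% the graded implementation; it assumes the sigmoid.m helper provided
% with the exercise):
%
%   function [J, grad] = lrCostFunction(theta, X, y, lambda)
%     m = length(y);
%     h = sigmoid(X * theta);          % predictions for all m examples
%     reg = theta; reg(1) = 0;         % the bias term is not regularized
%     J = (-y' * log(h) - (1 - y)' * log(1 - h)) / m ...
%         + lambda / (2 * m) * (reg' * reg);
%     grad = (X' * (h - y)) / m + (lambda / m) * reg;
%   end
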
% Test case for lrCostFunction
fprintf('\nTesting lrCostFunction() with regularization');

theta_t = [-2; -1; 1; 2];
X_t = [ones(5,1) reshape(1:15,5,3)/10];
y_t = ([1;0;1;0;1] >= 0.5);
lambda_t = 3;
[J, grad] = lrCostFunction(theta_t, X_t, y_t, lambda_t);

fprintf('\nCost: %f\n', J);
fprintf('Expected cost: 2.534819\n');
fprintf('Gradients:\n');
fprintf(' %f \n', grad);
fprintf('Expected gradients:\n');
fprintf(' 0.146561\n -0.548558\n 0.724722\n 1.398003\n');

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============ Part 2b: One-vs-All Training ============
fprintf('\nTraining One-vs-All Logistic Regression...\n')

lambda = 0.1;
[all_theta] = oneVsAll(X, y, num_labels, lambda);

fprintf('Program paused. Press enter to continue.\n');
pause;
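
% For reference, a minimal sketch of the one-vs-all training loop that
% oneVsAll.m implements: train one regularized binary classifier per
% label, each treating its class as the positives and everything else
% as the negatives (assumes the fmincg.m optimizer supplied with the
% exercise; fminunc would also work):
%
%   function all_theta = oneVsAll(X, y, num_labels, lambda)
%     [m, n] = size(X);
%     X = [ones(m, 1) X];                      % add intercept column
%     all_theta = zeros(num_labels, n + 1);
%     options = optimset('GradObj', 'on', 'MaxIter', 50);
%     for c = 1:num_labels
%       initial_theta = zeros(n + 1, 1);
%       % train one binary classifier: class c vs. all other classes
%       theta = fmincg(@(t) lrCostFunction(t, X, (y == c), lambda), ...
%                      initial_theta, options);
%       all_theta(c, :) = theta';
%     end
%   end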

%% ================ Part 3: Predict for One-Vs-All ================

pred = predictOneVsAll(all_theta, X);
fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);
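
% For reference, a minimal sketch of predictOneVsAll.m: score every
% example against all num_labels classifiers at once and pick the
% highest-scoring class per row (the sigmoid is monotonic, so the raw
% scores X * all_theta' rank classes the same way the probabilities do):
%
%   function p = predictOneVsAll(all_theta, X)
%     m = size(X, 1);
%     X = [ones(m, 1) X];                   % add intercept column
%     [~, p] = max(X * all_theta', [], 2);  % most confident class per row
%   end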