ex2_reg.m

%% Machine Learning Online Class - Exercise 2: Logistic Regression
%
%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the second part
%  of the exercise, which covers regularization with logistic regression.
%
%  You will need to complete the following functions in this exercise:
%
%     sigmoid.m
%     costFunction.m
%     predict.m
%     costFunctionReg.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% Initialization
clear ; close all; clc
%% Load Data
%  The first two columns contain the X values and the third column
%  contains the label (y).

data = load('ex2data2.txt');
X = data(:, [1, 2]); y = data(:, 3);

plotData(X, y);

% Put some labels
hold on;

% Labels and Legend
xlabel('Microchip Test 1')
ylabel('Microchip Test 2')

% Specified in plot order
legend('y = 1', 'y = 0')
hold off;
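% ------------------------------------------------------------------------
% Sketch (not part of the original assignment): plotData.m is a helper
% completed earlier in this exercise. An equivalent inline version, assuming
% the usual convention of '+' markers for the positive class and 'o' markers
% for the negative class, is shown below. It draws into a separate figure so
% the plot produced above is left untouched.
% ------------------------------------------------------------------------
figure; hold on;
pos = find(y == 1);                     % indices of positive examples
neg = find(y == 0);                     % indices of negative examples
plot(X(pos, 1), X(pos, 2), 'k+', 'LineWidth', 2, 'MarkerSize', 7);
plot(X(neg, 1), X(neg, 2), 'ko', 'MarkerFaceColor', 'y', 'MarkerSize', 7);
xlabel('Microchip Test 1'); ylabel('Microchip Test 2');
legend('y = 1', 'y = 0');
hold off;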
%% =========== Part 1: Regularized Logistic Regression ============
%  In this part, you are given a dataset with data points that are not
%  linearly separable. However, you would still like to use logistic
%  regression to classify the data points.
%
%  To do so, you introduce more features to use -- in particular, you add
%  polynomial features to the data matrix (similar to polynomial
%  regression).
%

% Add Polynomial Features

% Note that mapFeature also adds a column of ones for us, so the intercept
% term is handled
X = mapFeature(X(:,1), X(:,2));
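% ------------------------------------------------------------------------
% Sketch (not part of the original assignment): the provided mapFeature.m
% expands the two raw test scores into all polynomial terms of the two
% features up to a fixed degree, plus a leading column of ones for the
% intercept. Assuming degree 6 (the value used in this exercise), an
% equivalent inline construction is shown below; X_check should match X.
% ------------------------------------------------------------------------
degree = 6;
X_check = ones(size(data, 1), 1);           % intercept column
for i = 1:degree
    for j = 0:i
        X_check(:, end + 1) = (data(:, 1) .^ (i - j)) .* (data(:, 2) .^ j);
    end
end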
% Initialize fitting parameters
initial_theta = zeros(size(X, 2), 1);

% Set regularization parameter lambda to 1
lambda = 1;

% Compute and display initial cost and gradient for regularized logistic
% regression
[cost, grad] = costFunctionReg(initial_theta, X, y, lambda);

fprintf('Cost at initial theta (zeros): %f\n', cost);
fprintf('Expected cost (approx): 0.693\n');
fprintf('Gradient at initial theta (zeros) - first five values only:\n');
fprintf(' %f \n', grad(1:5));
fprintf('Expected gradients (approx) - first five values only:\n');
fprintf(' 0.0085\n 0.0188\n 0.0001\n 0.0503\n 0.0115\n');
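% ------------------------------------------------------------------------
% Sketch (not part of the original assignment): costFunctionReg.m is one of
% the functions you implement. For reference, the regularized cost and
% gradient it is expected to return can be reproduced inline as below, on
% the assumption that sigmoid.m is already implemented and that the
% intercept term theta(1) is excluded from the regularization penalty.
% ------------------------------------------------------------------------
m = length(y);
h = sigmoid(X * initial_theta);                 % hypothesis values
theta_reg = [0; initial_theta(2:end)];          % zero out the intercept term
J_check = (1 / m) * (-y' * log(h) - (1 - y)' * log(1 - h)) ...
          + (lambda / (2 * m)) * sum(theta_reg .^ 2);
grad_check = (1 / m) * (X' * (h - y)) + (lambda / m) * theta_reg;
fprintf('Inline cost check (should be close to 0.693): %f\n', J_check);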
fprintf('\nProgram paused. Press enter to continue.\n');
pause;

% Compute and display cost and gradient
% with all-ones theta and lambda = 10
test_theta = ones(size(X,2),1);
[cost, grad] = costFunctionReg(test_theta, X, y, 10);

fprintf('\nCost at test theta (with lambda = 10): %f\n', cost);
fprintf('Expected cost (approx): 3.16\n');
fprintf('Gradient at test theta - first five values only:\n');
fprintf(' %f \n', grad(1:5));
fprintf('Expected gradients (approx) - first five values only:\n');
fprintf(' 0.3460\n 0.1614\n 0.1948\n 0.2269\n 0.0922\n');

fprintf('\nProgram paused. Press enter to continue.\n');
pause;
%% ============= Part 2: Regularization and Accuracies =============
%  Optional Exercise:
%  In this part, you will get to try different values of lambda and
%  see how regularization affects the decision boundary.
%
%  Try the following values of lambda (0, 1, 10, 100); a sketch of such a
%  sweep is appended at the end of this file.
%
%  How does the decision boundary change when you vary lambda? How does
%  the training set accuracy vary?
%

% Initialize fitting parameters
initial_theta = zeros(size(X, 2), 1);

% Set regularization parameter lambda to 1 (you should vary this)
lambda = 1;
% Set Options
options = optimset('GradObj', 'on', 'MaxIter', 400);

% Optimize
[theta, J, exit_flag] = ...
    fminunc(@(t)(costFunctionReg(t, X, y, lambda)), initial_theta, options);

% Plot Boundary
plotDecisionBoundary(theta, X, y);
hold on;
title(sprintf('lambda = %g', lambda))

% Labels and Legend
xlabel('Microchip Test 1')
ylabel('Microchip Test 2')

legend('y = 1', 'y = 0', 'Decision boundary')
hold off;

% Compute accuracy on our training set
p = predict(theta, X);

fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
fprintf('Expected accuracy (with lambda = 1): 83.1 (approx)\n');
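% ------------------------------------------------------------------------
% Sketch (not part of the original assignment): one way to carry out the
% optional exercise above is to sweep over the suggested lambda values and
% report the training accuracy for each fit. This reuses costFunctionReg.m,
% predict.m (expected to threshold the hypothesis at 0.5), and
% plotDecisionBoundary.m exactly as they are used earlier in this script;
% the loop itself is only an illustration.
% ------------------------------------------------------------------------
for lambda_try = [0, 1, 10, 100]
    init = zeros(size(X, 2), 1);
    opts = optimset('GradObj', 'on', 'MaxIter', 400);
    theta_try = fminunc(@(t)(costFunctionReg(t, X, y, lambda_try)), init, opts);

    % Decision boundary for this lambda (one figure per value)
    figure;
    plotDecisionBoundary(theta_try, X, y);
    title(sprintf('lambda = %g', lambda_try));

    % Training accuracy for this lambda
    acc = mean(double(predict(theta_try, X) == y)) * 100;
    fprintf('lambda = %3g: train accuracy = %.1f%%\n', lambda_try, acc);
end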