% ex1_multi.m

%% Machine Learning Online Class
%  Exercise 1: Linear regression with multiple variables
%
%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  linear regression exercise.
%
%  You will need to complete the following functions in this
%  exercise:
%
%     warmUpExercise.m
%     plotData.m
%     gradientDescent.m
%     computeCost.m
%     gradientDescentMulti.m
%     computeCostMulti.m
%     featureNormalize.m
%     normalEqn.m
%
%  For this part of the exercise, you will need to change some
%  parts of the code below for various experiments (e.g., changing
%  learning rates).
%

%% Initialization

%% ================ Part 1: Feature Normalization ================

%% Clear and Close Figures
clear ; close all; clc

fprintf('Loading data ...\n');

%% Load Data
data = load('ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);

% Print out some data points
fprintf('First 10 examples from the dataset: \n');
fprintf(' x = [%.0f %.0f], y = %.0f \n', [X(1:10,:) y(1:10,:)]');

fprintf('Program paused. Press enter to continue.\n');
pause;

% Scale features and set them to zero mean
fprintf('Normalizing Features ...\n');
[X, mu, sigma] = featureNormalize(X);
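% (featureNormalize is expected to return the normalized matrix together
% with the per-column mean mu and standard deviation sigma, i.e. each
% column is transformed as X_norm = (X - mu) ./ sigma, so that the same
% mu and sigma can be reused to normalize new examples at prediction time.)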
% Add intercept term to X
X = [ones(m, 1) X];
%% ================ Part 2: Gradient Descent ================

% ====================== YOUR CODE HERE ======================
% Instructions: We have provided you with the following starter
%               code that runs gradient descent with a particular
%               learning rate (alpha).
%
%               Your task is to first make sure that your functions -
%               computeCost and gradientDescent already work with
%               this starter code and support multiple variables.
%
%               After that, try running gradient descent with
%               different values of alpha and see which one gives
%               you the best result (see the commented sweep sketch
%               below).
%
%               Finally, you should complete the code at the end
%               to predict the price of a 1650 sq-ft, 3 br house.
%
% Hint: By using the 'hold on' command, you can plot multiple
%       graphs on the same figure.
%
% Hint: At prediction, make sure you do the same feature normalization.
%
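% One way to compare learning rates (a hedged sketch, not part of the
% required flow; uncomment once gradientDescentMulti works). It overlays
% one convergence curve per alpha, using 'hold on' as the hint above
% suggests:
%
%   alphas = [0.3 0.1 0.03 0.01];
%   figure; hold on;
%   for a = alphas
%       [~, J] = gradientDescentMulti(X, y, zeros(3, 1), a, 50);
%       plot(1:numel(J), J, 'LineWidth', 2);
%   end
%   hold off;
%   xlabel('Number of iterations'); ylabel('Cost J');
%   legend('0.3', '0.1', '0.03', '0.01');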
fprintf('Running gradient descent ...\n');

% Choose some alpha value
alpha = 0.01;
num_iters = 400;

% Init Theta and Run Gradient Descent
theta = zeros(3, 1);
[theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters);
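% (For reference, a vectorized update that gradientDescentMulti could
% apply on each iteration -- a sketch only, the function body is yours
% to write:
%    theta = theta - (alpha / m) * X' * (X * theta - y);
%    J_history(iter) = computeCostMulti(X, y, theta);
% )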
% Plot the convergence graph
figure;
plot(1:numel(J_history), J_history, '-b', 'LineWidth', 2);
xlabel('Number of iterations');
ylabel('Cost J');

% Display gradient descent's result
fprintf('Theta computed from gradient descent: \n');
fprintf(' %f \n', theta);
fprintf('\n');

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
% Recall that the first column of X is all-ones. Thus, it does
% not need to be normalized.
price = 0; % You should change this
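% One possible completion (a sketch, assuming mu and sigma are the row
% vectors returned by featureNormalize above):
%   x_pred = ([1650 3] - mu) ./ sigma;   % normalize the raw features
%   price  = [1, x_pred] * theta;        % prepend intercept, apply theta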
% ============================================================

fprintf(['Predicted price of a 1650 sq-ft, 3 br house ' ...
         '(using gradient descent):\n $%f\n'], price);

fprintf('Program paused. Press enter to continue.\n');
pause;
%% ================ Part 3: Normal Equations ================

fprintf('Solving with normal equations...\n');

% ====================== YOUR CODE HERE ======================
% Instructions: The following code computes the closed form
%               solution for linear regression using the normal
%               equations. You should complete the code in
%               normalEqn.m
%
%               After doing so, you should complete this code
%               to predict the price of a 1650 sq-ft, 3 br house.
%
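% (For reference, the closed-form solution is
%    theta = pinv(X' * X) * X' * y;
%  so a minimal normalEqn.m is essentially that one line -- a sketch,
%  using pinv to stay well-defined even when X' * X is singular.)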
%% Load Data
data = csvread('ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);

% Add intercept term to X
X = [ones(m, 1) X];

% Calculate the parameters from the normal equation
theta = normalEqn(X, y);

% Display normal equation's result
fprintf('Theta computed from the normal equations: \n');
fprintf(' %f \n', theta);
fprintf('\n');

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
price = 0; % You should change this
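% One possible completion (a sketch; no normalization is needed here,
% since the normal-equation fit above used the raw, unscaled features):
%   price = [1, 1650, 3] * theta;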
% ============================================================

fprintf(['Predicted price of a 1650 sq-ft, 3 br house ' ...
         '(using normal equations):\n $%f\n'], price);