%% Machine Learning Online Class
%  Exercise 7 | Principal Component Analysis and K-Means Clustering
%
%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions:
%
%     pca.m
%     projectData.m
%     recoverData.m
%     computeCentroids.m
%     findClosestCentroids.m
%     kMeansInitCentroids.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% Initialization
clear ; close all; clc

%% ================== Part 1: Load Example Dataset  ===================
%  We start this exercise by using a small dataset that is easy to
%  visualize.
%
fprintf('Visualizing example dataset for PCA.\n\n');

%  The following command loads the dataset. You should now have the
%  variable X in your environment
load ('ex7data1.mat');

%  Visualize the example dataset
plot(X(:, 1), X(:, 2), 'bo');
axis([0.5 6.5 2 8]); axis square;

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =============== Part 2: Principal Component Analysis ===============
%  You should now implement PCA, a dimension reduction technique. You
%  should complete the code in pca.m
%
fprintf('\nRunning PCA on example dataset.\n\n');

%  Before running PCA, it is important to first normalize X
[X_norm, mu, sigma] = featureNormalize(X);
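
%  featureNormalize is expected to z-score each column of X; a minimal
%  sketch of that normalization (assuming the zero-mean, unit-variance
%  scaling described in the exercise; your graded version may differ):
%
%    mu = mean(X);
%    sigma = std(X);
%    X_norm = bsxfun(@rdivide, bsxfun(@minus, X, mu), sigma);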

%  Run PCA
[U, S] = pca(X_norm);
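
%  Optional sanity check (a sketch, not part of the assignment): the
%  exercise defines pca.m as the SVD of the covariance matrix
%  Sigma = (1/m) * X' * X, so recomputing it directly should agree with
%  U and S (up to the sign of each column of U).
[U_chk, S_chk] = svd((1 / size(X_norm, 1)) * (X_norm' * X_norm));
fprintf('Max singular value difference (should be ~0): %g\n', ...
        max(abs(diag(S) - diag(S_chk))));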

%  mu, the mean of each feature, was returned by featureNormalize above.
%  Draw the eigenvectors centered at the mean of the data. These lines
%  show the directions of maximum variation in the dataset.
hold on;
drawLine(mu, mu + 1.5 * S(1,1) * U(:,1)', '-k', 'LineWidth', 2);
drawLine(mu, mu + 1.5 * S(2,2) * U(:,2)', '-k', 'LineWidth', 2);
hold off;

fprintf('Top eigenvector: \n');
fprintf(' U(:,1) = %f %f \n', U(1,1), U(2,1));
fprintf('\n(you should expect to see -0.707107 -0.707107)\n');

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =================== Part 3: Dimension Reduction ===================
%  You should now implement the projection step to map the data onto the
%  first k eigenvectors. The code will then plot the data in this reduced
%  dimensional space. This will show you what the data looks like when
%  using only the corresponding eigenvectors to reconstruct it.
%
%  You should complete the code in projectData.m
%
fprintf('\nDimension reduction on example dataset.\n\n');

%  Plot the normalized dataset (returned from featureNormalize)
plot(X_norm(:, 1), X_norm(:, 2), 'bo');
axis([-4 3 -4 3]); axis square

%  Project the data onto K = 1 dimension
K = 1;
Z = projectData(X_norm, U, K);
fprintf('Projection of the first example: %f\n', Z(1));
fprintf('\n(this value should be about 1.481274)\n\n');
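
%  Illustrative check (a sketch; projectData.m is the graded version):
%  projecting onto the top K eigenvectors is just a matrix product.
Z_chk = X_norm * U(:, 1:K);
fprintf('Max projection difference (should be ~0): %g\n', max(abs(Z - Z_chk)));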

X_rec = recoverData(Z, U, K);
fprintf('Approximation of the first example: %f %f\n', X_rec(1, 1), X_rec(1, 2));
fprintf('\n(this value should be about -1.047419 -1.047419)\n\n');
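
%  Likewise, a sketch of the recovery step (assuming the approximate
%  reconstruction X_rec = Z * U(:, 1:K)' described in the exercise):
X_rec_chk = Z * U(:, 1:K)';
fprintf('Max reconstruction difference (should be ~0): %g\n', ...
        max(max(abs(X_rec - X_rec_chk))));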

%  Draw lines connecting the projected points to the original points
hold on;
plot(X_rec(:, 1), X_rec(:, 2), 'ro');
for i = 1:size(X_norm, 1)
    drawLine(X_norm(i,:), X_rec(i,:), '--k', 'LineWidth', 1);
end
hold off

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =============== Part 4: Loading and Visualizing Face Data =============
%  We start the exercise by first loading and visualizing the dataset.
%  The following code will load the dataset into your environment
%
fprintf('\nLoading face dataset.\n\n');

%  Load Face dataset
load ('ex7faces.mat')

%  Display the first 100 faces in the dataset
displayData(X(1:100, :));

fprintf('Program paused. Press enter to continue.\n');
pause;

%% =========== Part 5: PCA on Face Data: Eigenfaces  ===================
%  Run PCA and visualize the eigenvectors, which in this case are
%  eigenfaces. We display the first 36 eigenfaces.
%
fprintf(['\nRunning PCA on face dataset.\n' ...
         '(this might take a minute or two ...)\n\n']);

%  Before running PCA, it is important to first normalize X by subtracting
%  the mean value from each feature
[X_norm, mu, sigma] = featureNormalize(X);

%  Run PCA
[U, S] = pca(X_norm);

%  Visualize the top 36 eigenvectors found
displayData(U(:, 1:36)');

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============= Part 6: Dimension Reduction for Faces =================
%  Project images to the eigenspace using the top K eigenvectors.
%  If you are applying a machine learning algorithm, you can use the
%  projected data Z as input features instead of the original,
%  high-dimensional X.
fprintf('\nDimension reduction for face dataset.\n\n');

K = 100;
Z = projectData(X_norm, U, K);

fprintf('The projected data Z has a size of: ')
fprintf('%d ', size(Z));
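
%  Optional (a sketch): the fraction of variance retained by the top K
%  components can be read off the singular values returned by pca.m.
s = diag(S);
fprintf('\nVariance retained by the top %d components: %.4f\n', ...
        K, sum(s(1:K)) / sum(s));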

fprintf('\n\nProgram paused. Press enter to continue.\n');
pause;

%% ==== Part 7: Visualization of Faces after PCA Dimension Reduction ====
%  Project images to the eigenspace using the top K eigenvectors and
%  visualize them using only those K dimensions.
%  Compare to the original input, which is also displayed.
fprintf('\nVisualizing the projected (reduced dimension) faces.\n\n');

K = 100;
X_rec = recoverData(Z, U, K);

%  Display normalized data
subplot(1, 2, 1);
displayData(X_norm(1:100,:));
title('Original faces');
axis square;

%  Display reconstructed data from only K eigenfaces
subplot(1, 2, 2);
displayData(X_rec(1:100,:));
title('Recovered faces');
axis square;

fprintf('Program paused. Press enter to continue.\n');
pause;

%% === Part 8(a): Optional (ungraded) Exercise: PCA for Visualization ===
%  One useful application of PCA is to use it to visualize high-dimensional
%  data. In the last K-Means exercise you ran K-Means on 3-dimensional
%  pixel colors of an image. We first visualize this output in 3D, and then
%  apply PCA to obtain a visualization in 2D.

close all; clc

%  Reload the image from the previous exercise and run K-Means on it
%  (for this to work, you need to complete the K-Means assignment first)
A = double(imread('bird_small.png'));

%  If imread does not work for you, you can try instead
%    load ('bird_small.mat');

A = A / 255;
img_size = size(A);
X = reshape(A, img_size(1) * img_size(2), 3);
K = 16;
max_iters = 10;
initial_centroids = kMeansInitCentroids(X, K);
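
%  For reference, a minimal sketch of what kMeansInitCentroids.m is
%  expected to do (pick K distinct random examples as the initial
%  centroids; your graded version may differ):
%
%    randidx = randperm(size(X, 1));
%    centroids = X(randidx(1:K), :);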

[centroids, idx] = runkMeans(X, initial_centroids, max_iters);

%  Sample 1000 random indexes (since working with all the data is too
%  expensive). If you have a fast computer, you may increase this.
sel = floor(rand(1000, 1) * size(X, 1)) + 1;
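
%  An equivalent draw without duplicate indexes (a sketch; the two-argument
%  form randperm(n, k) exists in Octave and in MATLAB R2011b and later):
%    sel = randperm(size(X, 1), 1000)';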

%  Setup Color Palette
palette = hsv(K);
colors = palette(idx(sel), :);

%  Visualize the data and centroid memberships in 3D
figure;
scatter3(X(sel, 1), X(sel, 2), X(sel, 3), 10, colors);
title('Pixel dataset plotted in 3D. Color shows centroid memberships');

fprintf('Program paused. Press enter to continue.\n');
pause;

%% === Part 8(b): Optional (ungraded) Exercise: PCA for Visualization ===
%  Use PCA to project this cloud to 2D for visualization

%  Subtract the mean to use PCA
[X_norm, mu, sigma] = featureNormalize(X);

%  PCA and project the data to 2D
[U, S] = pca(X_norm);
Z = projectData(X_norm, U, 2);

%  Plot in 2D
figure;
plotDataPoints(Z(sel, :), idx(sel), K);
title('Pixel dataset plotted in 2D, using PCA for dimensionality reduction');

fprintf('Program paused. Press enter to continue.\n');
pause;