ex8.m
%% Machine Learning Online Class
%  Exercise 8 | Anomaly Detection and Collaborative Filtering
%
%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions:
%
%     estimateGaussian.m
%     selectThreshold.m
%     cofiCostFunc.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% Initialization
clear ; close all; clc
%% ================== Part 1: Load Example Dataset ===================
% We start this exercise by using a small dataset that is easy to
% visualize.
%
% Our example case consists of 2 network server statistics across
% several machines: the latency and throughput of each machine.
% This exercise will help us find possibly faulty (or very fast) machines.
%

fprintf('Visualizing example dataset for outlier detection.\n\n');

% The following command loads the dataset. You should now have the
% variables X, Xval, yval in your environment
load('ex8data1.mat');

% Visualize the example dataset
plot(X(:, 1), X(:, 2), 'bx');
axis([0 30 0 30]);
xlabel('Latency (ms)');
ylabel('Throughput (mb/s)');

fprintf('Program paused. Press enter to continue.\n');
pause;
%% ================== Part 2: Estimate the dataset statistics ===================
% For this exercise, we assume a Gaussian distribution for the dataset.
%
% We first estimate the parameters of our assumed Gaussian distribution,
% then compute the probabilities for each of the points, and then visualize
% both the overall distribution and where each of the points falls in
% terms of that distribution.
%

fprintf('Visualizing Gaussian fit.\n\n');

% Estimate mu and sigma2
[mu, sigma2] = estimateGaussian(X);
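% For reference, estimateGaussian is expected to return the per-feature
% mean and variance of X. The cross-check below is a minimal sketch of
% that computation (it assumes a biased, 1/m-normalized variance);
% mu_check and sigma2_check are illustrative names only and are not used
% elsewhere in this script.
mu_check     = mean(X);      % 1 x n vector of feature means
sigma2_check = var(X, 1);    % 1 x n vector of variances normalized by m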
% Returns the density of the multivariate normal at each data point (row)
% of X
p = multivariateGaussian(X, mu, sigma2);
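% The density being evaluated for each example x is
%   p(x; mu, Sigma) = (2*pi)^(-n/2) * det(Sigma)^(-1/2)
%                     * exp(-(1/2) * (x - mu)' * inv(Sigma) * (x - mu))
% Since sigma2 holds one variance per feature, we take Sigma = diag(sigma2).
% The inline cross-check below is a sketch of that formula under this
% assumption (Sigma_chk, Xc, and p_check are illustrative names only):
Sigma_chk = diag(sigma2);
Xc        = bsxfun(@minus, X, mu(:)');
p_check   = (2*pi)^(-size(X, 2)/2) * det(Sigma_chk)^(-1/2) ...
            .* exp(-0.5 * sum((Xc / Sigma_chk) .* Xc, 2));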
% Visualize the fit
visualizeFit(X, mu, sigma2);
xlabel('Latency (ms)');
ylabel('Throughput (mb/s)');

fprintf('Program paused. Press enter to continue.\n');
pause;
%% ================== Part 3: Find Outliers ===================
% Now you will find a good epsilon threshold using the cross-validation
% set probabilities given the estimated Gaussian distribution.
%

pval = multivariateGaussian(Xval, mu, sigma2);

[epsilon, F1] = selectThreshold(yval, pval);
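% selectThreshold is expected to pick the epsilon with the best F1 score
% on the labeled cross-validation set. The loop below is a minimal sketch
% of one common approach (sweeping candidate thresholds between min(pval)
% and max(pval)); the names ending in _check are illustrative only, and
% the sweep granularity of 1000 steps is an assumption.
bestEpsilon_check = 0;
bestF1_check = 0;
stepsize = (max(pval) - min(pval)) / 1000;
for epsCand = min(pval):stepsize:max(pval)
    pred = (pval < epsCand);                 % 1 = predicted anomaly
    tp = sum((pred == 1) & (yval == 1));     % true positives
    fp = sum((pred == 1) & (yval == 0));     % false positives
    fn = sum((pred == 0) & (yval == 1));     % false negatives
    if (tp + fp) > 0 && (tp + fn) > 0
        prec = tp / (tp + fp);
        rec  = tp / (tp + fn);
        if (prec + rec) > 0
            F1cand = 2 * prec * rec / (prec + rec);
            if F1cand > bestF1_check
                bestF1_check = F1cand;
                bestEpsilon_check = epsCand;
            end
        end
    end
end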
fprintf('Best epsilon found using cross-validation: %e\n', epsilon);
fprintf('Best F1 on Cross Validation Set: %f\n', F1);
fprintf(' (you should see a value epsilon of about 8.99e-05)\n');
fprintf(' (you should see a Best F1 value of 0.875000)\n\n');

% Find the outliers in the training set and plot them
outliers = find(p < epsilon);

% Draw a red circle around those outliers
hold on
plot(X(outliers, 1), X(outliers, 2), 'ro', 'LineWidth', 2, 'MarkerSize', 10);
hold off

fprintf('Program paused. Press enter to continue.\n');
pause;
%% ================== Part 4: Multidimensional Outliers ===================
% We will now use the code from the previous part and apply it to a
% harder problem in which more features describe each datapoint and only
% some features indicate whether a point is an outlier.
%

% Loads the second dataset. You should now have the
% variables X, Xval, yval in your environment
load('ex8data2.mat');

% Apply the same steps to the larger dataset
[mu, sigma2] = estimateGaussian(X);

% Training set
p = multivariateGaussian(X, mu, sigma2);

% Cross-validation set
pval = multivariateGaussian(Xval, mu, sigma2);

% Find the best threshold
[epsilon, F1] = selectThreshold(yval, pval);
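% With many features, sigma2 is still a vector of per-feature variances,
% which (assuming the provided multivariateGaussian.m treats a vector
% sigma2 as the diagonal of the covariance matrix) models the features as
% independent Gaussians. The one-line check below makes that assumption
% explicit; p_diag_check is an illustrative name only.
p_diag_check = multivariateGaussian(X, mu, diag(sigma2));   % should match p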
fprintf('Best epsilon found using cross-validation: %e\n', epsilon);
fprintf('Best F1 on Cross Validation Set: %f\n', F1);
fprintf(' (you should see a value epsilon of about 1.38e-18)\n');
fprintf(' (you should see a Best F1 value of 0.615385)\n');
fprintf('# Outliers found: %d\n\n', sum(p < epsilon));