nnCostFunction.m

function [J, grad] = nnCostFunction(nn_params, input_layer_size, hidden_layer_size, ...
                                    num_labels, X, y, lambda)

m = size(X, 1);

% Reshape nn_params back into the weight matrices Theta1 and Theta2
Theta1 = reshape(nn_params(1:hidden_layer_size * (input_layer_size + 1)), ...
                 hidden_layer_size, (input_layer_size + 1));
Theta2 = reshape(nn_params((1 + (hidden_layer_size * (input_layer_size + 1))):end), ...
                 num_labels, (hidden_layer_size + 1));

% You need to return the following variables correctly
ThetaGrad1 = zeros(size(Theta1));
ThetaGrad2 = zeros(size(Theta2));

% Feedforward: activations are stored one column per training example
a1 = [ones(m, 1) X]';                       % (input_layer_size + 1) x m
z2 = Theta1 * a1;                           % hidden_layer_size x m
a2 = [ones(1, size(z2, 2)); sigmoid(z2)];   % (hidden_layer_size + 1) x m
z3 = Theta2 * a2;                           % num_labels x m
a3 = sigmoid(z3);                           % hypothesis, num_labels x m

% Map the label vector y (values 1..num_labels) to one-hot rows
y_map = zeros(m, num_labels);
for i = 1:m                                 % loop over the m training examples
    y_map(i, y(i)) = 1;
end

% Backpropagation: output-layer error, then hidden-layer error (bias row dropped)
d3 = a3 - y_map';                           % num_labels x m
t = Theta2' * d3;                           % (hidden_layer_size + 1) x m
d2 = t(2:end, :) .* sigmoidGradient(z2);    % hidden_layer_size x m

% Accumulate gradients and add regularization (bias columns are not regularized)
ThetaGrad2 = ThetaGrad2 + d3 * a2';
ThetaGrad1 = ThetaGrad1 + d2 * a1';
ThetaGrad1 = ThetaGrad1 + lambda * [zeros(size(Theta1, 1), 1) Theta1(:, 2:end)];
ThetaGrad2 = ThetaGrad2 + lambda * [zeros(size(Theta2, 1), 1) Theta2(:, 2:end)];

% Unroll the gradients into a single vector and average over the training set
grad = [ThetaGrad1(:); ThetaGrad2(:)];
grad = grad / m;

% Cross-entropy cost plus L2 regularization of the non-bias weights
J = mean(sum(-y_map' .* log(a3) + (y_map' - 1) .* log(1 - a3)));
J = J + lambda * 0.5 / m * (sum(sum(Theta1(:, 2:end) .^ 2)) + sum(sum(Theta2(:, 2:end) .^ 2)));
% ====================== YOUR CODE HERE ======================
% Instructions: You should complete the code by working through the
%               following parts.
%
% Part 1: Feedforward the neural network and return the cost in the
%         variable J. After implementing Part 1, you can verify that your
%         cost function computation is correct by verifying the cost
%         computed in ex4.m
%
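%         For reference, the cost computed above is the one-vs-all
%         cross-entropy from ex4 (sketched here in plain notation):
%             J = (1/m) * sum over i = 1..m, k = 1..K of
%                 [ -y_k(i) * log(h_k(x(i))) - (1 - y_k(i)) * log(1 - h_k(x(i))) ]
%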
% Part 2: Implement the backpropagation algorithm to compute the gradients
%         Theta1_grad and Theta2_grad (named ThetaGrad1 and ThetaGrad2 in this
%         file). You should return the partial derivatives of the cost function
%         with respect to Theta1 and Theta2 in Theta1_grad and Theta2_grad,
%         respectively. After implementing Part 2, you can check that your
%         implementation is correct by running checkNNGradients
%
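%         A quick numerical check (a sketch; it assumes the course-provided
%         checkNNGradients.m from ex4 is on the path):
%             checkNNGradients;      % unregularized gradients
%             checkNNGradients(3);   % regularized gradients with lambda = 3
%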
% Note: The vector y passed into the function is a vector of labels
%       containing values from 1..K. You need to map this vector into a
%       binary vector of 1's and 0's to be used with the neural network
%       cost function.
%
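%       For example, with num_labels = 3 and y = [2; 1; 3], the mapping gives
%       y_map = [0 1 0; 1 0 0; 0 0 1]. A vectorized sketch equivalent to the
%       loop used above:
%           I = eye(num_labels);
%           y_map = I(y, :);
%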
% Hint: We recommend implementing backpropagation using a for-loop
%       over the training examples if you are implementing it for the
%       first time.
%
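%       A per-example loop sketch (equivalent to the vectorized code above,
%       shown only for reference):
%           for t = 1:m
%               a1t = [1; X(t, :)'];
%               z2t = Theta1 * a1t;
%               a2t = [1; sigmoid(z2t)];
%               a3t = sigmoid(Theta2 * a2t);
%               d3t = a3t - y_map(t, :)';
%               tt  = Theta2' * d3t;
%               d2t = tt(2:end) .* sigmoidGradient(z2t);
%               ThetaGrad2 = ThetaGrad2 + d3t * a2t';
%               ThetaGrad1 = ThetaGrad1 + d2t * a1t';
%           end
%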
% Part 3: Implement regularization with the cost function and gradients.
%
%         Hint: You can implement this around the code for
%               backpropagation. That is, you can compute the gradients for
%               the regularization separately and then add them to Theta1_grad
%               and Theta2_grad from Part 2.
%
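%         In the code above this is handled by adding
%             lambda * [zeros(size(Theta1, 1), 1)  Theta1(:, 2:end)]
%         (and likewise for Theta2) to the accumulated gradients; the final
%         division of grad by m scales this to (lambda/m) * Theta with the
%         bias column left unregularized.
%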
% -------------------------------------------------------------
% =========================================================================

end
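
% Example usage (a minimal sketch, not part of the graded file; it assumes the
% other ex4 helpers (sigmoid.m, sigmoidGradient.m, randInitializeWeights.m,
% fmincg.m) are on the path, and uses the ex4 layer sizes 400-25-10):
%
%   initial_Theta1 = randInitializeWeights(400, 25);
%   initial_Theta2 = randInitializeWeights(25, 10);
%   initial_nn_params = [initial_Theta1(:); initial_Theta2(:)];
%   costFunction = @(p) nnCostFunction(p, 400, 25, 10, X, y, 1);
%   options = optimset('MaxIter', 50);
%   [nn_params, cost] = fmincg(costFunction, initial_nn_params, options);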