cofiCostFunc.m

function [J, grad] = cofiCostFunc(params, Y, R, num_users, num_movies, ...
                                  num_features, lambda)
%COFICOSTFUNC Collaborative filtering cost function
%   [J, grad] = COFICOSTFUNC(params, Y, R, num_users, num_movies, ...
%   num_features, lambda) returns the cost and gradient for the
%   collaborative filtering problem.
%

% Unfold the X and Theta matrices from params
X = reshape(params(1:num_movies*num_features), num_movies, num_features);
Theta = reshape(params(num_movies*num_features+1:end), ...
                num_users, num_features);
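
% Note: params stacks the two matrices column-wise, X first and Theta second,
% which mirrors how grad is packed at the end of this function as
% [X_grad(:); Theta_grad(:)].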

% You need to return the following values correctly
J = 0;
X_grad = zeros(size(X));
Theta_grad = zeros(size(Theta));

% ====================== YOUR CODE HERE ======================
% Instructions: Compute the cost function and gradient for collaborative
%               filtering. Concretely, you should first implement the cost
%               function (without regularization) and make sure it
%               matches our costs. After that, you should implement the
%               gradient and use the checkCostFunction routine to check
%               that the gradient is correct. Finally, you should implement
%               regularization.
%
% Notes: X - num_movies x num_features matrix of movie features
%        Theta - num_users x num_features matrix of user features
%        Y - num_movies x num_users matrix of user ratings of movies
%        R - num_movies x num_users matrix, where R(i, j) = 1 if the
%            i-th movie was rated by the j-th user
%
% You should set the following variables correctly:
%
%        X_grad - num_movies x num_features matrix, containing the
%                 partial derivatives w.r.t. each element of X
%        Theta_grad - num_users x num_features matrix, containing the
%                     partial derivatives w.r.t. each element of Theta
%
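% Cost (vectorized): the R mask zeroes out unrated entries, so only observed
% ratings contribute to the squared-error term:
%
%   J = (1/2) * sum over (i,j) with R(i,j) = 1 of (x^(i) * (theta^(j))' - Y(i,j))^2
%       + (lambda/2) * (sum(Theta(:).^2) + sum(X(:).^2))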
J = sum(sum(0.5 * (((X * Theta' - Y) .* R) .^ 2))) + ...
    0.5 * lambda * (sum(sum(Theta .^ 2)) + sum(sum(X .^ 2)));
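
% Gradient w.r.t. X, one movie at a time: idx selects the users who rated
% movie i, so unrated entries never enter the product; the final term is the
% regularization contribution lambda * X(i, :).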
for i = 1:size(R, 1)
    idx = find(R(i, :) == 1);
    Theta_tmp = Theta(idx, :);
    Y_tmp = Y(i, idx);
    X_grad(i, :) = (X(i, :) * Theta_tmp' - Y_tmp) * Theta_tmp + lambda * X(i, :);
end
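
% Gradient w.r.t. Theta, one user at a time: idx selects the movies user j
% has rated; Y_tmp is a column vector here, hence the transpose in the
% error term.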
for j = 1:size(R, 2)
    idx = find(R(:, j) == 1);
    X_tmp = X(idx, :);
    Y_tmp = Y(idx, j);
    Theta_grad(j, :) = (Theta(j, :) * X_tmp' - Y_tmp') * X_tmp + lambda * Theta(j, :);
end
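
% For reference, an equivalent fully vectorized form (a sketch, not part of
% the loop-based solution above) computes the same gradients without loops:
%
%   E = (X * Theta' - Y) .* R;              % masked prediction error
%   X_grad = E * Theta + lambda * X;
%   Theta_grad = E' * X + lambda * Theta;
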
% =============================================================

grad = [X_grad(:); Theta_grad(:)];

end
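
% Example usage (a sketch; the variables are placeholders and
% checkCostFunction is the gradient-check helper mentioned in the
% instructions above, whose exact signature may differ):
%
%   params = [X(:); Theta(:)];
%   [J, grad] = cofiCostFunc(params, Y, R, num_users, num_movies, ...
%                            num_features, lambda);
%   checkCostFunction(lambda);   % compare analytic vs. numerical gradients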