% Multivariate calculus demo script
% This script file is designed to be used in cell mode
% from the MATLAB editor, or, best of all, use the publish
% to HTML feature from the MATLAB editor. Older versions
% of MATLAB can copy and paste entire blocks of code into
% the MATLAB command window.
% Typical usage of the gradient and Hessian might be in
% optimization problems, where one might compare an analytically
% derived gradient for correctness, or use the Hessian matrix
% to compute confidence interval estimates on parameters in a
% maximum likelihood estimation.
%% Gradient of the Rosenbrock function at [1,1], the global minimizer
% A scaled Rosenbrock test function (note the 105 scale factor, a
% variant of the classic 100); its global minimum lies at x = [1,1].
rosen = @(x) 105*(x(2) - x(1).^2).^2 + (1 - x(1)).^2;
% At the minimizer the estimated gradient should vanish, to within
% floating point noise in the numerical differentiation.
[grad,err] = gradest(rosen,[1 1])
20%% The Hessian matrix at the minimizer should be positive definite
21H = hessian(rosen,[1 1])
22% The eigenvalues of h should be positive
%% Gradient estimation using gradest - a function of 5 variables
% Sum of squares in R^5; the exact gradient at a point v is 2*v.
[grad,err] = gradest(@(v) sum(v.^2),[1 2 3 4 5])
%% Simple Hessian matrix of a problem with 3 independent variables
% The function is separable in its variables, so the Hessian is
% diagonal: diag([0, 2, 6*u(3)]).
[H,err] = hessian(@(u) u(1) + u(2)^2 + u(3)^3,[1 2 3])
%% A semi-definite Hessian matrix
% cos(u - v) is flat along the direction u = v, so the Hessian at the
% origin is only positive SEMI-definite.
H = hessian(@(uv) cos(uv(1) - uv(2)),[0 0])
% Expect one of the eigenvalues of H to be (approximately) zero.
%% Directional derivative of the Rosenbrock function at the solution
% The gradient vanishes at [1,1], so the derivative along any direction
% is zero too. Ok, its a trivial test case.
[dd,err] = directionaldiff(rosen,[1 1],[1 2])
%% Directional derivative at other locations
[dd,err] = directionaldiff(rosen,[2 3],[1 -1])

% Cross-check this example against the gradient at the same point.
g = gradest(rosen,[2 3]);
% The directional derivative equals the dot product of the gradient
% with the unit-normalized direction vector, i.e.
%   dot(g,[1 -1]/norm([1 -1]))
% so its difference from dd should be (approximately) zero.
% NOTE(review): the comparison expression itself falls on lines not
% shown in this chunk -- confirm against the full script.
%% Jacobian matrix of a scalar function is just the gradient
% For a scalar-valued function the (1-row) Jacobian and the gradient
% estimate should agree.
[jac,err] = jacobianest(rosen,[2 3])
grad = gradest(rosen,[2 3])
%% Jacobian matrix of a linear system will reduce to the design matrix
% NOTE(review): fun and x are defined on lines not shown in this chunk
% (presumably fun = @(x) A*x for some design matrix A, with the
% jac - A comparison also omitted here) -- verify against the full
% script before relying on this cell in isolation.
[jac,err] = jacobianest(fun, x)
disp('This should be essentially zero at any location x')
%% The jacobian matrix of a nonlinear transformation of variables
% Two outputs as a function of two inputs, differentiated at the
% arbitrary location [-2, -3].
fun = @(uv) [uv(1).^2, cos(uv(1) - uv(2))];
[jac,err] = jacobianest(fun,[-2 -3])