LINE Solver
MATLAB API documentation
multivariable_calc_demo.m
% Multivariate calculus demo script

% This script is designed to be run in cell mode from the MATLAB
% editor, or better still, published to HTML via the editor's publish
% feature. In older versions of MATLAB, you can copy and paste entire
% blocks of code into the command window.

% Typical uses of the gradient and Hessian arise in optimization
% problems, where one might check an analytically derived gradient
% for correctness, or use the Hessian matrix to compute confidence
% interval estimates on parameters in a maximum likelihood estimation.
% A short sketch of the latter appears below.

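%% A minimal sketch of Hessian-based confidence intervals
% For a hypothetical normal sample, the inverse Hessian of the negative
% log-likelihood at the MLE approximates the parameter covariance
% matrix, yielding standard errors and rough 95% intervals.
data = randn(100,1)*2 + 5;       % hypothetical sample
% Negative log-likelihood of N(mu,sigma), with p = [mu, log(sigma)]
nll = @(p) sum(0.5*((data - p(1))./exp(p(2))).^2 + p(2) + 0.5*log(2*pi));
phat = [mean(data), log(std(data,1))];   % closed-form MLE for this model
C = inv(hessian(nll,phat));      % approximate parameter covariance
se = sqrt(diag(C))               % standard errors
ci = [phat(:) - 1.96*se, phat(:) + 1.96*se]   % ~95% confidence intervals
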
%% Gradient of the Rosenbrock function at [1,1], the global minimizer
rosen = @(x) (1-x(1)).^2 + 105*(x(2)-x(1).^2).^2;
% The gradient should be zero (within floating point noise)
[grad,err] = gradest(rosen,[1 1])

%% The Hessian matrix at the minimizer should be positive definite
H = hessian(rosen,[1 1])
% The eigenvalues of H should be positive
eig(H)
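% A Cholesky factorization gives an equivalent test: the second output
% of chol is zero exactly when the matrix is numerically positive
% definite
[R,p] = chol(H);
p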

%% Gradient estimation using gradest - a function of 5 variables
[grad,err] = gradest(@(x) sum(x.^2),[1 2 3 4 5])
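% The analytic gradient of sum(x.^2) is 2*x, so this difference should
% be negligible
grad - 2*[1 2 3 4 5]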

%% Simple Hessian matrix of a problem with 3 independent variables
[H,err] = hessian(@(x) x(1) + x(2)^2 + x(3)^3,[1 2 3])
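% The analytic Hessian is diag([0 2 6*x(3)]), i.e. diag([0 2 18]) at
% this point, so this difference should be ~zero
H - diag([0 2 18])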

%% A semi-definite Hessian matrix
H = hessian(@(xy) cos(xy(1) - xy(2)),[0 0])
% One of these eigenvalues will be zero (approximately)
eig(H)
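% The zero eigenvalue corresponds to the direction [1;1]: moving along
% the line xy(1) == xy(2) leaves cos(xy(1) - xy(2)) unchanged, so this
% product should be ~zero
H*[1;1]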

%% Directional derivative of the Rosenbrock function at the solution
% This should be zero. OK, it's a trivial test case.
[dd,err] = directionaldiff(rosen,[1 1],[1 2])

%% Directional derivative at other locations
[dd,err] = directionaldiff(rosen,[2 3],[1 -1])

% We can verify this result against the gradient
v = [1 -1];
v = v/norm(v);
g = gradest(rosen,[2 3]);

% The directional derivative is the dot product of the gradient with
% the unit-normalized direction vector, so this difference should be
% (approximately) zero
dot(g,v) - dd

%% Jacobian matrix of a scalar function is just the gradient
[jac,err] = jacobianest(rosen,[2 3])

grad = gradest(rosen,[2 3])
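% The two estimates should agree to within their reported error bounds
jac - grad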

%% Jacobian matrix of a linear system will reduce to the design matrix
A = rand(5,3);
b = rand(5,1);
fun = @(x) (A*x-b);

x = rand(3,1);
[jac,err] = jacobianest(fun,x)

disp 'This should be essentially zero at any location x'
jac - A

%% The Jacobian matrix of a nonlinear transformation of variables
% evaluated at some arbitrary location [-2, -3]
fun = @(xy) [xy(1).^2, cos(xy(1) - xy(2))];
[jac,err] = jacobianest(fun,[-2 -3])
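% The analytic Jacobian is [2*xy(1), 0; -sin(xy(1)-xy(2)), sin(xy(1)-xy(2))],
% which at [-2, -3] is [-4 0; -sin(1) sin(1)], so this difference should
% be ~zero
jac - [-4 0; -sin(1) sin(1)]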