%% Optimization of a simple (Rosenbrock) function
rosen = @(x) (1-x(1)).^2 + 105*(x(2)-x(1).^2).^2;
% With no constraints, operation simply passes through
% directly to fminsearch. The solution should be [1 1].
xsol = fminsearchcon(rosen,[3 3])
%% Only lower bound constraints
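% The lower bounds make the unconstrained minimum [1 1] infeasible;
% the solution should be [2 4].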
xsol = fminsearchcon(rosen,[3 3],[2 2])
%% Only upper bound constraints
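% Both components are bounded above by 0, so the solution should be [0 0].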
xsol = fminsearchcon(rosen,[-5 -5],[],[0 0])
%% Both lower and upper bound constraints
xsol = fminsearchcon(rosen,[2.5 2.5],[2 2],[3 3])
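% The lower bound on x(1) and the upper bound on x(2) should both be
% active, giving xsol = [2 3].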
%% Mixed constraints, x(1) >= 2 and x(2) <= 3
xsol = fminsearchcon(rosen,[0 0],[2 -inf],[inf 3])
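% Again, both constraints should be active at the solution [2 3].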
%% Fix a variable as constant, x(2) == 3
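% Equal lower and upper bounds on x(2) pin it to 3.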
fminsearchcon(rosen,[3 3],[-inf 3],[inf 3])
%% Linear inequality, x(1) + x(2) <= 1
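% The linear inequality A*x <= b is passed in the 5th and 6th arguments
% (here A = [1 1], b = 1).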
fminsearchcon(rosen,[0 0],[],[],[1 1],1)
%% Nonlinear inequality, norm(x) <= 1
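% The nonlinear constraint function must return values <= 0 at feasible points.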
fminsearchcon(rosen,[0 0],[],[],[],[],@(x) norm(x) - 1)
%% Minimize a linear objective, subject to nonlinear constraints
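% Two nonlinear inequality constraints: norm(x) <= 1 and sin(sum(x)) <= 0.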
fun = @(x) x*[2;1];   % example linear objective: 2*x(1) + x(2)
nonlcon = @(x) [norm(x) - 1; sin(sum(x))];
fminsearchcon(fun,[0 0],[],[],[],[],nonlcon)
%% Provide your own fminsearch options
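% optimset('fminsearch') returns the default fminsearch options structure,
% which can then be edited before it is passed in.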
opts = optimset('fminsearch');
opts.MaxFunEvals = 100;
% Example construction of a positive semidefinite H for the quadratic objective.
H = randn(10,5);
H = H'*H;
Quadraticfun = @(x) x*H*x';
% Global minimizer is at [0 0 0 0 0].
% Set all lower bound constraints, all of which will
% be active in this test.
LB = [.5 .5 .5 .5 .5];   % example positive lower bounds, all of which should be active
xsol = fminsearchcon(Quadraticfun,[1 2 3 4 5],LB,[],[],[],[],opts)
%% Exactly fix one variable, constrain some others, and set a tolerance
opts = optimset('fminsearch');
opts.TolX = 1e-12;   % example: tighten the termination tolerance on x
% Example bounds: x(3) is fixed exactly at 1 (equal lower and upper bounds),
% while x(2) and x(4) are merely constrained.
LB = [-inf 2 1 -10];
UB = [ inf inf 1 inf];
xsol = fminsearchcon(@(x) norm(x),[1 3 1 1],LB,UB,[],[],[],opts)
%% All the standard outputs from fminsearch are still returned
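% fval, exitflag, and the output structure have the same meanings as for fminsearch.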
[xsol,fval,exitflag,output] = fminsearchcon(@(x) norm(x),[1 3 1 1],LB,UB)