LINE Solver
MATLAB API documentation
converged.m
function bool = converged(self, it)
% BOOL = CONVERGED(IT)
%
% Apply the convergence test to the SolverLN iterations. As the solver keeps
% iterating, this method maintains a moving average of the recent results,
% from which it computes the maximum queue-length error averaged across the
% layers. Convergence is tested by resetting all layers (to avoid caching)
% and performing an extra iteration. If that iteration still fulfils the
% error requirements for convergence, the solver completes.
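%
% Note: convergence effectively requires the error test to pass on two
% consecutive iterations; the first pass triggers a hard reset of all layers
% and sets hasconverged, and only the second pass (after the reset) returns
% bool = true.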
bool = false;
iter_min = max([2*length(self.model.ensemble),ceil(self.options.iter_max/4)]);
E = self.nlayers;
results = self.results; %#ok<NASGU> % faster in matlab

%% Start moving average to help convergence

if false % disabled: it>self.averagingstart %<self.averagingstart+50
    % In the first 50 averaging iterations use Cesaro summation
    if ~isempty(self.averagingstart)
        if it>=iter_min % assume steady-state
            for e=1:E
                wnd_size_max = (it-self.averagingstart+1);
                sk_q = cell(1,wnd_size_max);
                sk_u = cell(1,wnd_size_max);
                sk_r = cell(1,wnd_size_max);
                sk_t = cell(1,wnd_size_max);
                sk_a = cell(1,wnd_size_max);
                sk_w = cell(1,wnd_size_max);
                % compute all partial sums of up to wnd_size_max elements
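                % The recursion below builds the running Cesaro (arithmetic)
                % means sk{k} = (1/k)*sum_{j=1..k} results{self.averagingstart+j-1,e},
                % using sk{k} = x_k/k + sk{k-1}*(k-1)/k. cellsum is assumed to
                % return the element-wise sum of the matrices stored in a cell
                % array, so cellsum(sk)/wnd_size_max below averages the partial
                % means (Cesaro smoothing of the iteration sequence).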
                for k=1:wnd_size_max
                    if k==1
                        sk_q{k} = results{self.averagingstart,e}.QN;
                        sk_u{k} = results{self.averagingstart,e}.UN;
                        sk_r{k} = results{self.averagingstart,e}.RN;
                        sk_t{k} = results{self.averagingstart,e}.TN;
                        sk_a{k} = results{self.averagingstart,e}.AN;
                        sk_w{k} = results{self.averagingstart,e}.WN;
                    else
                        sk_q{k} = results{self.averagingstart+k-1,e}.QN/k + sk_q{k-1}*(k-1)/k;
                        sk_u{k} = results{self.averagingstart+k-1,e}.UN/k + sk_u{k-1}*(k-1)/k;
                        sk_r{k} = results{self.averagingstart+k-1,e}.RN/k + sk_r{k-1}*(k-1)/k;
                        sk_t{k} = results{self.averagingstart+k-1,e}.TN/k + sk_t{k-1}*(k-1)/k;
                        sk_a{k} = results{self.averagingstart+k-1,e}.AN/k + sk_a{k-1}*(k-1)/k;
                        sk_w{k} = results{self.averagingstart+k-1,e}.WN/k + sk_w{k-1}*(k-1)/k;
                    end
                end
                results{end,e}.QN = cellsum(sk_q)/wnd_size_max;
                results{end,e}.UN = cellsum(sk_u)/wnd_size_max;
                results{end,e}.RN = cellsum(sk_r)/wnd_size_max;
                results{end,e}.TN = cellsum(sk_t)/wnd_size_max;
                results{end,e}.AN = cellsum(sk_a)/wnd_size_max;
                results{end,e}.WN = cellsum(sk_w)/wnd_size_max;
            end
        end
    end
else
    wnd_size = max(5,ceil(iter_min/5)); % moving window size
    mov_avg_weight = 1/wnd_size;
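    % The loop below overwrites the latest result of each layer with the
    % unweighted mean of its last wnd_size iterates, e.g. for queue lengths
    %   QN_avg = (1/wnd_size) * sum_{k=0..wnd_size-1} results{end-k,e}.QN,
    % which damps oscillations between successive layer solutions.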
    results = self.results; % faster in matlab
    if it>=iter_min % assume steady-state
        for e=1:E
            results{end,e}.QN = mov_avg_weight*results{end,e}.QN;
            results{end,e}.UN = mov_avg_weight*results{end,e}.UN;
            results{end,e}.RN = mov_avg_weight*results{end,e}.RN;
            results{end,e}.TN = mov_avg_weight*results{end,e}.TN;
            results{end,e}.AN = mov_avg_weight*results{end,e}.AN;
            results{end,e}.WN = mov_avg_weight*results{end,e}.WN;
            for k=1:(wnd_size-1)
                results{end,e}.QN = results{end,e}.QN + results{end-k,e}.QN * mov_avg_weight;
                results{end,e}.UN = results{end,e}.UN + results{end-k,e}.UN * mov_avg_weight;
                results{end,e}.RN = results{end,e}.RN + results{end-k,e}.RN * mov_avg_weight;
                results{end,e}.TN = results{end,e}.TN + results{end-k,e}.TN * mov_avg_weight;
                results{end,e}.AN = results{end,e}.AN + results{end-k,e}.AN * mov_avg_weight;
                results{end,e}.WN = results{end,e}.WN + results{end-k,e}.WN * mov_avg_weight;
            end
        end
    end
end
self.results = results;

%% Take as error metric the max queue-length error averaged across layers
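% For each layer e with population N_e > 0, the per-layer error is
%   err_e = max(|QN_e(it) - QN_e(it-1)|) / N_e
% and maxitererr(it) accumulates err_e over the layers; the convergence check
% below compares this value against options.iter_tol.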
if it>1
    self.maxitererr(it) = 0;
    for e = 1:E
        metric = results{end,e}.QN;
        metric_1 = results{end-1,e}.QN;
        N = sum(self.ensemble{e}.getNumberOfJobs);
        if N>0
            try
                IterErr = max(abs(metric(:) - metric_1(:)))/N;
            catch
                IterErr = 0;
            end
            self.maxitererr(it) = self.maxitererr(it) + IterErr;
        end
        % if self.options.verbose
        % if self.solvers{e}.options.verbose
        % line_printf(sprintf('QLen change: %f.\n',self.maxitererr(it)/E));
        % elseif e==1
        % line_printf('\n');
        % end
        % end
    end
    if it==iter_min
        if self.options.verbose
            line_printf('\b Started averaging to aid convergence.');
        end
        self.averagingstart = it;
    end

    %% Update relaxation factor for adaptive/auto modes
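    % Assumption: relax_omega acts as a standard under-relaxation weight applied
    % elsewhere in SolverLN, blending successive iterates roughly as
    %   x <- omega*x_new + (1-omega)*x_old,
    % so reducing omega damps oscillations at the cost of slower progress.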
    relax_mode = self.options.config.relax;
    if strcmpi(relax_mode, 'adaptive') || strcmpi(relax_mode, 'auto')
        % Track error history
        self.relax_err_history = [self.relax_err_history, self.maxitererr(it)];
        wnd = self.options.config.relax_history;
        if length(self.relax_err_history) > wnd
            self.relax_err_history = self.relax_err_history(end-wnd+1:end);
        end

        if length(self.relax_err_history) >= 3
            % Detect oscillation by counting sign changes in error differences
            err = self.relax_err_history;
            diff_err = diff(err);
            sign_changes = sum(diff_err(1:end-1) .* diff_err(2:end) < 0);
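            % A sign change between consecutive error differences means the error
            % went up then down (or vice versa); if at least half of the
            % consecutive pairs change sign, the trajectory is treated as
            % oscillating rather than converging.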

            if strcmpi(relax_mode, 'auto') && self.relax_omega == 1.0
                % For 'auto' mode: enable relaxation when oscillation detected
                if sign_changes >= length(diff_err) * 0.5
                    self.relax_omega = self.options.config.relax_factor;
                    if self.options.verbose
                        line_printf(sprintf(' [enabling relaxation, omega=%.2f]', self.relax_omega));
                    end
                end
            elseif strcmpi(relax_mode, 'adaptive')
                % For 'adaptive' mode: adjust omega based on error trajectory
                if sign_changes >= length(diff_err) * 0.5
                    % Oscillating - reduce omega
                    self.relax_omega = max(self.options.config.relax_min, self.relax_omega * 0.8);
                    if self.options.verbose
                        line_printf(sprintf(' [omega=%.2f]', self.relax_omega));
                    end
                elseif sign_changes == 0 && self.maxitererr(it) < self.maxitererr(it-1)
                    % Monotonically decreasing - can increase omega slightly
                    self.relax_omega = min(1.0, self.relax_omega * 1.05);
                end
            end
        end
    end
end

%% Check convergence. Do not allow convergence in fewer than 2 iterations.
if it==0 && self.options.verbose
    line_printf('SolverLN initialization completed. Starting iteration on ensemble models.');
elseif it>2 && self.maxitererr(it) < self.options.iter_tol && self.maxitererr(it-1) < self.options.iter_tol
    if ~self.hasconverged % if potential convergence has just been detected
        % do a hard reset of every layer to check that this is really the fixed point
        for e=1:E
            self.ensemble{e}.reset();
        end
        if self.options.verbose
            line_printf(sprintf('\b Testing convergence.')); % Deep reset.
        end
        self.hasconverged = true; % if it passes the check again next time then complete
    else
        if self.options.verbose
            if self.solvers{end}.options.verbose
                line_printf(sprintf('\nSolverLN completed in %d iterations. Running final checks.\n',size(results,1)));
            end
        end
        bool = true;
    end
else
    self.hasconverged = false;
end
end