157 function [ai , sf ] =
regress(fxi,initialai) {
159 error(
" This SVR implementation requires fxi values in [-1,1] ");
164 [ai, sf] = this.regress1D(fxi, initialai);
167 [ai, sf] = this.regress2D(fxi, initialai);
173 copy = general.regression.ScalarEpsSVR_SMO;
174 copy =
clone@general.regression.BaseScalarSVR(
this, copy);
188 function [ai , sf ] = regress1D(fxi,initialai) {
190 if ~isempty(initialai)
192 [a, dW, T] = I2_WarmStart(
this, fxi, initialai);
195 [a, dW, T] = I0_ColdStart(
this, fxi);
216 while S > stop && cnt < this.
MaxCount+1
219 afxi = (a(1:n)-a(n+1:end))*this.
K;
222 plot(1:n,fxi,
" r ",1:n,[fxi-this.
Eps; fxi+this.
Eps],
" r-- ",1:n,afxi,
" b ");
223 legend(
" f(x_i) training values ",
" +\epsilon ",
" -\epsilon ",
" approximation values ");
228 [idx, M] = this.WSS0_1D_GetMaxGainIndex(a, dW);
232 r = max(-a(idx),min(this.C-a(idx),dW(idx)));
236 idx1 = idx - n*(idx > n);
241 plot(idx1,afxi(idx1),
" . ",
" MarkerSize ",5);
249 T = T + r*(r - 2*dW(idx) + sgn(idx)*fxi(idx1) - this.
Eps);
252 Ki = this.
K(idx1, :);
253 dW = dW + sign(idx-n-.5) * r * [Ki -Ki];
256 E = this.C * sum(max(0,min(2-this.
Eps,dW)));
262 disp(sprintf(
" Finished after %d/%d iterations.\n ",cnt,this.
MaxCount));
267 ai = (a(1:n)-a(n+1:end))^
t;
277 function [ai , sf ] = regress2D(fxi,initialai) {
278 if ~isempty(initialai)
280 [a, dW, T] = I2_WarmStart(
this, fxi, initialai);
283 [a, dW, T] = I0_ColdStart(
this, fxi);
305 while abs(S) > stop && cnt < this.
MaxCount+1
320 ij = [ij; this.WSS7_Combi(a, dW, i)];
322 [r, s, idx] = this.getMax2DGainUpdatesForIndices(a, dW, ij);
333 afxi = (a(1:n)-a(n+1:end))*this.
K;
336 plot(1:n,fxi,
" r ",1:n,[fxi-this.
Eps; fxi+this.
Eps]
" , "r--
" ,1:n,afxi, "b^
t);
339 plot(i,afxi(i),
" o ",
" MarkerEdgeColor ",
" k ",...
340 " MarkerFaceColor ",[.1 .8 .1],...
343 plot(i1,afxi(i1),
" o ",
" MarkerEdgeColor ",
" k ",...
344 " MarkerFaceColor ",[.8 .1 .1],...
348 plot(j,afxi(j),
" o ",
" MarkerEdgeColor ",
" k ",...
349 " MarkerFaceColor ",[.1 .8 .1],...
352 plot(j1,afxi(j1),
" o ",
" MarkerEdgeColor ",
" k ",...
353 " MarkerFaceColor ",[.8 .1 .1],...
360 fprintf(
" alpha_{%d} change: %e, alpha_{%d} change: %e\n ",i,r,j,s);
371 pm = sign(i - n - .5)*sign(j - n - .5);
373 T = T + r*(r - 2*dW(i) - this.
Eps + sgn(i)*fxi(i1)) ...
374 + s*(s - 2*dW(j) - this.
Eps + sgn(j)*fxi(j1)) ...
375 + 2*pm*r*s*this.
K(i1,j1);
380 dW = dW + sign(i-n-.5) * r * [Ki -Ki] ...
381 + sign(j-n-.5) * s * [Kj -Kj];
384 E = this.C * sum(max(0,min(2-this.
Eps,dW)));
391 fprintf(
" ScalarEpsSVR_SMO: Finished after %d/%d iterations.\n ",cnt,this.
MaxCount);
395 ai = (a(1:n)-a(n+1:end))^
t;
405 function [i , M ] = WSS0_1D_GetMaxGainIndex(a,dW) {
410 ch = [ind(a(n+1:end) == 0) ind(a(1:n) == 0)+n];
413 r = max(-a(ch),min(this.C-a(ch),dW(ch)));
416 g = r .* (dW(ch) - .5*r);
425 plot(1:2*n,r2," b ");
429 plot(1:2*n,g2," g ");
430 plot(i,g(idxch)," r. "," MarkerSize ",6);
431 plot([n n+eps],[0, max([r g])]," black ");
433 title(sprintf(" Best gain index: %d, gain: %e, \\alpha_{%d} change: %e
",i, M, i, r(idxch)));
434 legend(" \alpha difference
"," gain
");
440 function [i , M ] = WSS1_1D_GetMaxGradientIndex(a,dW) {
441 [M, i] = max(max([dW .* (a < this.C); -dW .* (a > 0)],[],1));
451 function ij = WSS0_AllIndices(a) {
455 apch = ind(a(n+1:end) == 0);
456 amch = ind(a(1:n) == 0);
458 /* Create unique combinations of indices */
459 A = repmat(ch,length(ch),1);
462 ij = [I(I~=0) J(J~=0)];
463 /* Remove indices that would look at moving the same alpha up and down at the same time */
464 ij(ij(:,1)-ij(:,2)-n == 0,:) = [];
474 function ij = WSS1_1DMaxGainPlusLast(a,dW,i) {
475 in = this.WSS0_1D_GetMaxGainIndex(a, dW);
480 [dWs, sortidx] = sort(abs(dW));
482 while sortidx(pick) == in
499 function ij = WSS2_TwoSetMax1DGain(a,dW) {
503 set2 = set1(end)+1:n;
504 i = this.WSS0_1D_GetMaxGainIndex(a([set1 set1+n]), dW([set1 set1+n]));
506 i = i+(n-len); /* Move to \am index (add number of set2 elements), i.e. i = i+numel(set2) */
509 j = this.WSS0_1D_GetMaxGainIndex(a([set2 set2+n]), dW([set2 set2+n]));
511 j = j+len; /* If larger than set2 size add number of set1 elements */
514 /* Add +len as this is the offset for the second set in each case */
519 function ij = WSS4_1DMaxGainPluskNN(a,dW) {
520 i = this.WSS0_1D_GetMaxGainIndex(a, dW);
527 [dist, idx] = sort(sqrt(1-this.K(ki,:)));
528 /* Extract indices of alpha^+ that are "close
" wrt to the kernel metric \sqrt{1-\K(x_i,x_j)}
529 * Start at 2 as i itself is not an option for second index */
530 nidx = idx(2: min(this.NNk+1,numel(idx)));
531 /* Of course also consider the \alpha^- */
532 ij = [i*ones(size(nidx,2)*2,1) [nidx nidx+n]^t];
536 function ij = WSS7_Combi(a,dW,i) {
537 ij = this.WSS2_TwoSetMax1DGain(a, dW);
538 ij = [ij; this.WSS4_1DMaxGainPluskNN(a, dW)];
540 /* Implement Max1DGainPlusLast directly to save a call to the 1D method. */
546 [dWs, sortidx] = sort(abs(dW));
548 while sortidx(pick) == in
558 function ij = WSS128_ApproxBestGainIndices(a,dW) {
560 /* Only consider changeable \alpha^{+,-}, i.e. the ones whos partner \alpha equals zero. */
563 apch = ind(a(n+1:end) == 0);
564 amch = ind(a(1:n) == 0);
566 /* Create unique combinations of indices */
567 A = repmat(ch,length(ch),1);
570 ij = [I(I~=0) J(J~=0)];
571 /* Remove indices that would look at moving the same alpha up and down at the same time */
572 ij(ij(:,1)-ij(:,2)-n == 0,:) = [];
575 kijidxup = kijidx > n;
576 kijidx(kijidxup) = kijidx(kijidxup)-n;
577 kijidx = sub2ind(size(this.K),kijidx(:,1),kijidx(:,2));
578 Kij = this.K(kijidx);
580 sgn = 1-mod(sum(kijidxup,2),2)*2;
584 div = 1 ./ (1-Kij.^2);
586 /* Compute update r */
587 rs = [(dwi(:,1) - sgn .* Kij .* dwi(:,2)) .* div ...
588 (dwi(:,2) - sgn .* Kij .* dwi(:,1)) .* div];
589 ub = rs > this.C - aij;
591 rs(ub) = this.C-aij(ub);
594 /* % Compute gain for r_i,s_j combinations */
595 g = sum(rs .* (dwi - .5*rs),2) - sgn.*rs(:,1).*rs(:,2).*Kij;
597 /* Extract i,j indices */
601 fprintf(" Updates found inside WSS128: r=%f, s=%f\n
",rs(idx,1),rs(idx,2));
606 plot(1:length(g),max(0,g));
608 plot(idx,g(idx)," black.
"," MarkerSize
",8);
610 title(sprintf(" Best gain index: (%d,%d), gain: %e
",ij(1),ij(2), maxg));
622 function ij = WSS1024_RandomAdmissibleIndices(a) {
625 ch = [ind(a(n+1:end) == 0) ind(a(1:n) == 0)+n];
626 i = ch(randi(length(ch)));
627 j = ch(randi(length(ch)));
628 while j==i || abs(j-i) == n
629 j = ch(randi(length(ch)));
635 function [a , dW , T ] = I0_ColdStart(fxi) {
637 a = zeros(1,2*length(fxi)); /* 1..n = a^+, n+1..2n=a^- */
639 dW = [fxi -fxi] - this.Eps;
643 function [ai , dW , T ] = I1_ColdStartKernelRule(fxi) {
645 /* ai(1:n) = 1; ai(n+1:2*n) = 0; */
646 ai = max(0,min(this.C,[fxi.*(fxi>=0) -fxi.*(fxi<0)]));
648 a = ai(1:n)-ai(n+1:end);
649 hlp = -a*this.K + fxi;
650 dW = [hlp -hlp] - this.Eps;
651 /* T = a*this.K*a' + this.Eps*sum(ai) - fxi*a'; */
657 function [ai , dW , T ] = I2_WarmStart(fxi,initialai) {
661 ap(initialai > 0) = initialai(initialai>0);
662 am(initialai < 0) = initialai(initialai<0);
664 hlp = -(ap-am)*this.K + fxi;
665 dW = [hlp -hlp] - this.Eps;
670 function [r , s , idx ] = getMax2DGainUpdatesForIndices(a,dW,ij) {
673 kijidxup = kijidx > n;
674 kijidx(kijidxup) = kijidx(kijidxup)-n;
675 kijidx = sub2ind([n n],kijidx(:,1),kijidx(:,2));
676 Kij = this.K(kijidx);
678 sgn = 1-mod(sum(kijidxup,2),2)*2;
682 div = 1 ./ (1-Kij.^2);
684 /* Compute update r */
685 rs = [(dwi(:,1) - sgn .* Kij .* dwi(:,2)) .* div ...
686 (dwi(:,2) - sgn .* Kij .* dwi(:,1)) .* div];
688 /* Handle constraints */
689 b = rs > this.C - aij | rs < - aij;
691 ronly = find(b(:,1) & ~b(:,2));
692 sonly = find(~b(:,1) & b(:,2));
693 both = find(b(:,1) & b(:,2));
695 /* r is constrained only */
697 rs(ronly,1) = min(this.C - aij(ronly,1),max(- aij(ronly,1), rs(ronly,1)));
698 rs(ronly,2) = min(this.C-aij(ronly,2), max(-aij(ronly,2), dwi(ronly,2) - rs(ronly,1) .* Kij(ronly)));
701 /* s is constrained only */
703 rs(sonly,2) = min(this.C - aij(sonly,2),max(-aij(sonly,2), rs(sonly,2)));
704 rs(sonly,1) = min(this.C-aij(sonly,1), max(-aij(sonly,1), dwi(sonly,1) - rs(sonly,2) .* Kij(sonly)));
707 /* Case of both vars constrained
708 * Update s2 and r2 for the cases of the other one is constrained */
711 r1 = min(this.C - aij(both,1),max(- aij(both,1), rs(both,1)));
712 s1 = min(this.C-aij(both,2), max(-aij(both,2), dwi(both,1) - r1 .* Kijb));
713 s2 = min(this.C - aij(both,2),max(- aij(both,2), rs(both,2)));
714 r2 = min(this.C-aij(both,1), max(-aij(both,1), dwi(both,2) - s2 .* Kijb));
716 /* Determine update via get higher gain
717 * Gain if r is constrained and s accordingly updated */
718 g1 = r1 .* (dwi(both,1) -.5*r1) + s1 .*(dwi(both,2) -.5 * s1) + sgn(both) .* r1 .* s1.* Kijb;
719 /* Gain if s is constrained and r accordingly updated */
720 g2 = r2 .* (dwi(both,1) -.5*r2) + s2 .*(dwi(both,2) -.5 * s2) + sgn(both) .* r2 .* s2 .* Kijb;
722 /* % See which gain is higher. */
724 rs(both(cmp),1) = r1(cmp);
725 rs(both(cmp),2) = s1(cmp);
726 rs(both(~cmp),1) = r2(~cmp);
727 rs(both(~cmp),2) = s2(~cmp);
731 g = sum(rs .* (dwi - .5*rs),2) + sgn.*rs(:,1).*rs(:,2).*Kij;
733 /* Extract r,s updates */
734 [dummy, idx] = max(g);
740 public: /* ( Static ) */
742 static function res = test_ScalarEpsSVR_SMO() {
745 fx = fx ./ max(abs(fx));
747 svr = general.regression.ScalarEpsSVR_SMO;
752 kernel = kernels.GaussKernel(.8);
753 svr.K= kernel.evaluate(x,x);
756 res = res & compute(2);
758 function res = compute(version)
759 svr.Version= version;
760 [ai, svidx] = svr.computeKernelCoefficients(fx,[]);
762 svfun = @(x)ai" *(kernel.evaluate(x,sv)
");
764 fdiff = abs(fsvr(svidx)-fx(svidx));
765 res = isempty(find(fdiff > 1.01*svr.Eps,1));
MaxCount
The maximum number of iterations.
static const TOL_OK
SVR-SMO specific flag. Indicates that the E+T values are smaller than the prescribed tolerance...
Version
Which version of the SMO algorithm to use (1D or 2D working-set updates).
StopEps
Stopping criterion.
Eps
The epsilon value to use for the loss function.
data.FileMatrix K
The kernel matrix to use.
disp
Handle object disp method which is called by the display method. See the MATLAB disp function...
double C
The weighting of the slack variables.
integer LastIterations
The number of iterations used at the last run.
function [ ai , sf ] = regress(fxi, initialai)
SCALARSVR Scalar support vector regression.
Global configuration class for all KerMor run-time settings.
static function KerMor theinstance = App()
The singleton KerMor instance.
NNk
The number of nearest neighbors to search for in the WSS4 working-set-selection strategy.
double Lambda
The regularization parameter for the primary minimization problem.
static const MAX_ITER
Maximum number of iterations reached.
StopFlag: Flags that algorithms can use to specify the reason for their termination.