45 fx = [sin(x)+.2*x; cos(x)-.1*x; exp(-x.^2).*sin(x); cos(2*x).*sin(x)];
58 atd = data.ApproxTrainData(x,[],[]);
62 alg = approx.algorithms.VKOGA;
63 alg.MaxExpansionSize= 50;
65 alg.UsefPGreedy= fPGreedy;
66 ec = kernels.config.ExpansionConfig;
67 ec.Prototype.Kernel= kernels.GaussKernel(.8);
68 ec.StateConfig= kernels.config.GaussConfig(
" G ",linspace(.5,2,nG));
71 kexp = alg.computeApproximation(atd);
78 h1 = pm.nextPlot(
" fun ",
" Function ",
" x ",
" f(x) ");
79 plot(h1,x,[fx; kexp.evaluate(x)]^
t);
81 h2 = pm.nextPlot(
" nfun ",
" Newton Basis Functions on training data ",
" x ",
" N(x) ");
82 plot(h2,x,alg.bestNewtonBasisValuesOnATD);
84 alg.plotSummary(pm,
" VKOGA Demo ");
87 alg.getApproximationSummary;
123 if res.alg.UsefPGreedy
124 fprintf(2,
" Warning: f/P-Greedy selection criteria was used. Error plots are for f-Greedy case.\n ");
127 fx = res.atd.fxi.toMemoryMatrix;
128 x = res.atd.xi.toMemoryMatrix;
132 h0 = doPlot(0,zeros(size(fx)));
136 k = res.kexp.getSubExpansion(s);
146 function h1 = doPlot(s,fxi)
147 h1 = pm.nextPlot(sprintf(
" step%d ",s),...
148 sprintf(
" Iteration %d ",s),
" x ",
" f(x) ");
149 plot(h1,x,fx
" , "LineWidth^
t,2);
154 plot(h1,x,allerr,
" r ");
155 plot(h1,x,fxi
" , "--^t);
157 plot(h1,k.Centers.xi,k.evaluate(k.Centers.xi)
" , "k.
" , "MarkerSize^
t,17);
161 [v, idx] = max(allerr);
162 plot(h1,x(idx),v,
" ro ",
" MarkerSize ",ms);
163 [~, idx] = max(abs(err),[],2);
166 plot(h1,[x(idx(
l)) x(idx(
l))+eps],[fx(
l,idx(
l)) fxi(
l,idx(
l))],
" k-- ");
167 plot(h1,[x(idx(
l)) x(idx(
l))+eps],[fx(
l,idx(
l)) fxi(
l,idx(
l))],
" kx ",
" MarkerSize ",ms);
171 axis(h1,axis(h1)*1.03);
196 [
X, ind, Xemesh, Yemesh] = cutcircle(.05);
199 atd = data.ApproxTrainData(
X,[],[]);
200 fxi = exp(abs(
X(1,:)-
X(2,:)))-1;
204 alg = approx.algorithms.VKOGA;
206 alg.MaxExpansionSize= 200;
208 alg.MaxAbsResidualErr= 1e-6;
210 alg.MaxRelErr= 1e-10;
211 alg.UsefPGreedy=
false;
212 ec = kernels.config.ExpansionConfig;
218 wc = kernels.config.WendlandConfig(
" G ",[1 2 3],
" S ",[1 1 1]);
224 kexp = alg.computeApproximation(atd);
226 h = pm.nextPlot(
" circ ",
" Selected data points ");
227 plot(h,
X(1,:),
X(2,:),
" . ",
X(1,alg.Used),
X(2,alg.Used),
" ro ");
229 Z = zeros(size(Xemesh));
231 h = pm.nextPlot(
" fun ",
" Original function ");
232 surf(h,Xemesh,Yemesh,Z);
234 h = pm.nextPlot(
" afun ",
" Approximation ");
236 aZ(ind) = kexp.evaluate(
X);
237 surf(h,Xemesh,Yemesh,aZ);
239 h = pm.nextPlot(
" err ",
" Error ");
240 surf(h,Xemesh,Yemesh,abs(Z-aZ));
247 function plotBasis(idx)
248 h = pm.nextPlot(sprintf(
" bfun%d ",idx),sprintf(
" Newton basis function %d ",idx),
" x ",
" y ");
249 nv = zeros(size(Xemesh));
250 nv(ind) = alg.bestNewtonBasisValuesOnATD(:,idx);
251 surf(h,Xemesh,Yemesh,nv);
254 function [Xe, ind, xm, ym]=cutcircle(he)
264 [xm, ym]=meshgrid(-1:he:1,-1:he:1);
267 ind=find((xee.^2+yee.^2<=1)&((xee>=0)|(yee>=0)) );
270 Xe=[xee(ind), yee(ind)]^
t;
VKOGA: Contains some demo functions for the VKOGA algorithm.
PlotManager: Small class that allows the same plots generated by some script to be either organized as subplots or in separate figures.
static function struct res = VKOGA_1D_nD(integer n,logical fPGreedy,integer nG)
Starts a demo of the approx.algorithms.VKOGA algorithm.
static function IterationPlots(struct res,integer steps,PlotManager pm)
Demonstrates the VKOGA iterations during approximation computations.
static function rowvec< double > n = L2(matrix< double > x)
Returns the discrete L2 norm for each column vector in x.
static function NewtonBasis_Schaback()
The demo of the Schaback paper for the function-dependent Newton basis.
Norm: Static class for commonly used norms on sets of vectors.