rbmatlab 1.16.09
h_refinement_err_ext.m
function hp_model = h_refinement_err_ext(hp_model, model_data)
%function hp_model = h_refinement_err_ext(hp_model, model_data)
%
% Computes the h-refinement of the parameter space,
% extended with an error-based distance function.
% Stores the POD basis of each anchor in the little tree.
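%
% Minimal usage sketch (illustrative only): the field values below are
% placeholders, not rbmatlab defaults, and my_base_model / my_distance are
% hypothetical user-supplied functions; only the hp_model fields read
% directly in this file are shown.
%
%   hp_model.base_model = my_base_model();        % any rbmatlab model struct
%   hp_model.rho        = 2;
%   hp_model.rho_POD    = 2;
%   hp_model.error_tol1 = 1e-3;
%   hp_model.find_nr_nodes = 'automatic';
%   hp_model.trainingset_generation_mode = 'boundary_box';
%   hp_model.boundarybox_minimum = 0.1;
%   hp_model.boundarybox_factor  = 0.5;
%   hp_model.distance_function   = @my_distance;  % see the sketch after the splitting loop
%   hp_model = h_refinement_err_ext(hp_model, model_data);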
% Pass the refinement parameters down to the base model.
hp_model.base_model.rho = hp_model.rho;
hp_model.base_model.rho_POD = hp_model.rho_POD;
hp_model.base_model.error_tol1 = hp_model.error_tol1;

% Generate the initial training set with a reproducible random seed.
rand('state', hp_model.base_model.RB_train_rand_seed);
hp_model.base_model.training = rand_uniform(hp_model.base_model.RB_train_size, ...
                                            hp_model.base_model.mu_ranges);

% Root of the h-refinement tree: anchor parameter, model and training set.
hp_model.tree.anchor = get_mu(hp_model);
hp_model.tree.model = hp_model.base_model;
%hp_model.tree.model.h_part = 1;
hp_model.tree.training = hp_model.base_model.training;
%hp_model.tree.model_data = model_data;

% Generate the basis of the initial parameter (root anchor):
training_backup = hp_model.tree.model.training;
hp_model.tree.model.RB_stop_Nmax = 1;
hp_model.tree.model.training = hp_model.tree.anchor;

hp_model.tree = create_basis_automatic(hp_model.tree);

hp_model.tree.model.training = training_backup;

% Little tree used for the training set generation
ltreedata.anchor = hp_model.tree.anchor;
ltreedata.detailed_data = hp_model.tree.detailed_data;
ltreedata.reduced_data = hp_model.tree.reduced_data;
ltreedata.model = hp_model.tree.model;
littletree = ltreedata;

% Recursively build the h-partition starting from the root anchor.
hp_model.tree = hp1(hp_model.tree);

  function tree_out = hp1(tree_in)

  % function tree_out = hp1(tree_in)
  % Recursively generates the h-partition for parabolic problems.
  % tree_in must already contain a basis for its anchor!

  % Find the parameter with the largest error -> mu1
  disp('--calculating errors of current domain');
  % Compute the errors over the training set:
  post_errs = rb_test_indicator(tree_in.model, ...
                                tree_in.detailed_data, tree_in.reduced_data, ...
                                tree_in.model.training, '');

  [max_error, mu_ind] = max(post_errs);

  % Error of the anchor approximation (only relevant for static basis generation):
  error_anchor = max(rb_test_indicator(tree_in.model, ...
                                        tree_in.detailed_data, tree_in.reduced_data, ...
                                        tree_in.anchor, ''));

  disp(['--Maximum error in current domain: ' num2str(max_error) ...
        ' - required: < ' num2str(hp_model.error_tol1)]);
  disp(['--Maximum error of anchor: ' num2str(error_anchor) ...
        ' - required: < ' num2str(hp_model.error_tol1/tree_in.model.rho)]);

  % Warn if the anchor error exceeds error_tol1/rho
  if error_anchor > hp_model.error_tol1/tree_in.model.rho
    warning('more RB nodes required!');
  end;

  if max_error < hp_model.error_tol1
    disp('--NO SPLIT--');
    % Done, no further subdivision needed.
    tree_out = tree_in;
  else
    % If error > error_tol1 -> split!
    disp('--SPLIT--');

    % Create new leaves
    child_right = leaf();
    child_left = leaf();
    % Inherit properties from the parent
    child_right.model = tree_in.model;
    child_left.model = tree_in.model;
    % child_right.model.h_part = 1;
    % child_left.model.h_part = 1;
    % child_right.model_data = tree_in.model_data;
    % child_left.model_data = tree_in.model_data;

    % The left branch inherits the anchor from the parent
    child_left.anchor = tree_in.anchor;
    % The right branch gets the first new parameter found by the greedy search
    child_right.anchor = tree_in.model.training(:, mu_ind(1));

    % Inherit the basis of the first parameter (mu0)
    child_left.detailed_data = tree_in.detailed_data;
    child_left.reduced_data = tree_in.reduced_data;

    % Generate the basis for the new parameter (mu1)
    backup = child_right.model.training;
    child_right.model.use_generated_RB_basis = 0;

    child_right.model.RB_stop_Nmax = 1;
    child_right.model.training = child_right.anchor;
    % Alternative: add modes until the error drops below
    % error_tol/(rho*rho2) with rho2 >= coercivity constant of mu

    % Basis of the anchor mu1
    child_right = create_basis_automatic(child_right);

    child_right.model.training = backup;

    child_left.model.training = [];
    child_right.model.training = [];

    disp('--dividing training set');

    % Distribute the parent training set to the children;
    % this is where the distance function d comes into play.
    leng = length(tree_in.model.training(1,:));

    for i = 1:leng
      if ~isequal(tree_in.model.training(:,i), child_right.anchor) && ...
         ~isequal(tree_in.model.training(:,i), child_left.anchor)
        distance_l = hp_model.distance_function(child_left.model, child_left.anchor, ...
                       tree_in.model.training(:,i), ...
                       child_left.detailed_data, child_left.reduced_data);
        distance_r = hp_model.distance_function(child_right.model, child_right.anchor, ...
                       tree_in.model.training(:,i), ...
                       child_right.detailed_data, child_right.reduced_data);
        % Assign the parameter to the closer child
        if distance_l < distance_r
          child_left.model.training = [child_left.model.training, tree_in.model.training(:,i)];
        else
          child_right.model.training = [child_right.model.training, tree_in.model.training(:,i)];
        end;
      end;
    end;
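
    % Sketch of a distance function matching the interface used above
    % (purely illustrative; my_distance is a hypothetical name, and rbmatlab's
    % own error-based distance functions make use of the RB data instead).
    % This fallback simply returns the Euclidean distance in parameter space:
    %
    %   function d = my_distance(model, anchor, mu, detailed_data, reduced_data)
    %     d = norm(anchor - mu, 2);  % ignores the reduced-basis data
    %   end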

    % Record the change in the little tree.
    ltreedatanew.anchor = child_right.anchor;
    ltreedatanew.detailed_data = child_right.detailed_data;
    ltreedatanew.reduced_data = child_right.reduced_data;
    ltreedatanew.model = child_right.model;
    littletree = change_littletree(littletree, tree_in.anchor, ltreedatanew);
    disp('--refining training set (needs some time)');

    % Refine the training sets via accept-reject Monte Carlo sampling

    % LEFT SIDE
    % Determine the bounds for the random number generator:
    switch hp_model.trainingset_generation_mode
      case 'boundary_box'
        % Build the bounding box:
        param_min = min(child_left.model.training');
        param_max = max(child_left.model.training');
        l = max(param_max - param_min);
        t = size(child_left.model.training);
        if isempty(l) || min(l) == 0 || t(2) <= 1
          box_ranges = child_left.model.mu_ranges;
        else
          param_max = param_max + max(l, hp_model.boundarybox_minimum)*hp_model.boundarybox_factor;
          param_min = param_min - max(l, hp_model.boundarybox_minimum)*hp_model.boundarybox_factor;
          box_ranges = cell2mat(child_left.model.mu_ranges')';

          % Clip the box to the global parameter domain:
          param_min = max([param_min; box_ranges(1,:)]);
          param_max = min([param_max; box_ranges(2,:)]);

          box_ranges = mat2cell([param_min; param_max]', ones(1, length(param_min)));
        end;

      case 'mu_ranges'
        box_ranges = child_left.model.mu_ranges;
      otherwise
        box_ranges = child_left.model.mu_ranges;
    end;
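
    % Worked example of the bounding box (illustrative numbers): for training
    % parameters with param_min = [0.2 0.4] and param_max = [0.6 0.5], the
    % largest extent is l = 0.4; with boundarybox_minimum = 0.1 and
    % boundarybox_factor = 0.5 the box grows by max(0.4,0.1)*0.5 = 0.2 on each
    % side, giving param_min = [0.0 0.2] and param_max = [0.8 0.7], which is
    % then clipped to the global mu_ranges.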

    % Refinement: accept-reject sampling inside the child's domain
    while length(child_left.model.training) < child_left.model.RB_train_size
      parameter = rand_uniform(1, box_ranges);
      if part_of_domain(littletree, child_left.anchor, parameter, child_left.model) == 1
        child_left.model.training = [child_left.model.training, parameter];
        disp([num2str(length(child_left.model.training)) ' of ' num2str(child_left.model.RB_train_size)]);
      end;
    end;

    % RIGHT SIDE
    switch hp_model.trainingset_generation_mode
      case 'boundary_box'
        % Build the bounding box:
        param_min = min(child_right.model.training');
        param_max = max(child_right.model.training');
        l = max(param_max - param_min);
        t = size(child_right.model.training);
        if isempty(l) || min(l) == 0 || t(2) <= 1
          box_ranges = child_right.model.mu_ranges;
        else
          param_max = param_max + max(l, hp_model.boundarybox_minimum)*hp_model.boundarybox_factor;
          param_min = param_min - max(l, hp_model.boundarybox_minimum)*hp_model.boundarybox_factor;
          box_ranges = cell2mat(child_right.model.mu_ranges')';

          % Clip the box to the global parameter domain:
          param_min = max([param_min; box_ranges(1,:)]);
          param_max = min([param_max; box_ranges(2,:)]);

          box_ranges = mat2cell([param_min; param_max]', ones(1, length(param_min)));
        end;

      case 'mu_ranges'
        box_ranges = child_right.model.mu_ranges;
      otherwise
        box_ranges = child_right.model.mu_ranges;
    end;

    while length(child_right.model.training) < child_right.model.RB_train_size
      parameter = rand_uniform(1, box_ranges);
      if part_of_domain(littletree, child_right.anchor, parameter, child_right.model) == 1
        child_right.model.training = [child_right.model.training, parameter];
        disp([num2str(length(child_right.model.training)) ' of ' num2str(child_right.model.RB_train_size)]);
      end;
    end;

    child_left.training = child_left.model.training;
    child_right.training = child_right.model.training;

    % Recursive call on both children
    tree_out = {hp1(child_left), hp1(child_right)};

  end

  end
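
  % The resulting hp_model.tree is either a single leaf struct (fields:
  % anchor, model, detailed_data, reduced_data, training) or a nested 1x2
  % cell array of such subtrees, e.g. after the root and then its left
  % child have been split (sketch):
  %
  %   { {leaf_mu0, leaf_mu2}, leaf_mu1 }
  %
  % where leaf_mu0 keeps the original anchor and every other leaf carries
  % the anchor picked by the error search in its parent's domain.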


  function tree_out = create_basis_automatic(tree_in)

  switch (hp_model.find_nr_nodes)

    case 'automatic'
      % Generate the anchor basis with an automatically determined number
      % of nodes; basis generation driven by the POD error:
      RB_ex_algo_backup = tree_in.model.RB_extension_algorithm;  % = RB_extension_PCA_fixspace_flexible
      tree_in.model.RB_extension_algorithm = tree_in.model.RB_extension_algorithm_hp;  % = RB_extension_PCA_fixspace_flexible_hp
      tree_in.detailed_data = gen_detailed_data(tree_in.model, model_data);
      tree_in.reduced_data = gen_reduced_data(tree_in.model, tree_in.detailed_data);
      tree_in.model.RB_extension_algorithm = RB_ex_algo_backup;

      % Basis generation settings
      tree_in.model.RB_stop_epsilon = tree_in.model.error_tol1/tree_in.model.rho;
      tree_in.model.RB_stop_Nmax = inf;
      tree_in.model.nr_extension_modes = 1;
      % Use the POD modes:
      tree_in.model.use_generated_RB_basis = 1;

      % Generate the basis (standard greedy)
      tree_in.detailed_data = rb_basis_generation(tree_in.model, tree_in.detailed_data);
      % first parameter
      tree_in.reduced_data = gen_reduced_data(tree_in.model, tree_in.detailed_data);

      % Old version (loop driven by the RB error):
      % error_RB = max(rb_test_indicator(tree_in.model, ...
      %                tree_in.detailed_data, tree_in.reduced_data, ...
      %                tree_in.anchor, ''));
      % while error_RB > tree_in.model.error_tol1/tree_in.model.rho
      %   tree_in.model.use_generated_RB_basis = 1;
      %   tree_in.detailed_data = rb_basis_generation(tree_in.model, tree_in.detailed_data);
      %   tree_in.reduced_data = gen_reduced_data(tree_in.model, tree_in.detailed_data);
      %   error_RB = max(rb_test_indicator(tree_in.model, ...
      %                  tree_in.detailed_data, tree_in.reduced_data, ...
      %                  tree_in.anchor, ''));
      % end;

    case 'static'
      % model.nr_extension_modes is used - it must be set!
      tree_in.detailed_data = gen_detailed_data(tree_in.model, model_data);
      tree_in.reduced_data = gen_reduced_data(tree_in.model, tree_in.detailed_data);

  end;

  tree_out = tree_in;

  end


  % Records the changes in the tree carrying the anchor structure.
  % Helper function.
  function tree_out = change_littletree(tree_in, para_expand, tree_data_add)
  t = iscell(tree_in);
  if t == 1
    tree_in{1} = change_littletree(tree_in{1}, para_expand, tree_data_add);
    tree_in{2} = change_littletree(tree_in{2}, para_expand, tree_data_add);
    tree_out = tree_in;
  else
    if isequal(tree_in.anchor, para_expand)
      tree_out = {tree_in, tree_data_add};
    else
      tree_out = tree_in;
    end;
  end;
  end
  % Checks whether a parameter param belongs to the domain of the given anchor.
  % Requires a little tree.
  % Helper function.
  function is = part_of_domain(ltree, anchor, param, model)
  test = iscell(ltree);
  % Navigation
  if test == 1
    % Determine the representatives of both subtrees
    first_l = littletree_first(ltree{1});
    first_r = littletree_first(ltree{2});
    % Navigate using the distance function d
    distance_l = hp_model.distance_function(model, first_l.anchor, param, ...
                   first_l.detailed_data, first_l.reduced_data);  % norm(first_l-param,2);
    distance_r = hp_model.distance_function(model, first_r.anchor, param, ...
                   first_r.detailed_data, first_r.reduced_data);  % norm(first_r-param,2);
    % Find the matching subtree and continue recursively
    if distance_l < distance_r
      is = part_of_domain(ltree{1}, anchor, param, model);
    else
      is = part_of_domain(ltree{2}, anchor, param, model);
    end;
  else
    % Arrived at the matching domain
    if isequal(ltree.anchor, anchor)
      is = 1;
    else
      is = 0;
    end;
  end;
  end

  % Returns the representative leaf (carrying the anchor data) of the variable tree.
  function value = littletree_first(tree)
  t = iscell(tree);
  if t == 1
    value = littletree_first(tree{1});
  else
    value = tree;
  end;
  end

end