rbmatlab  1.16.09
h_refinement.m
function hp_model = h_refinement( hp_model,model_data )
%function hp_model = h_refinement( hp_model,model_data )
%
% compute the h-refinement of the parameter space

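% The parameter domain is partitioned recursively: starting from an anchor
% parameter with its own reduced basis, the error indicator is evaluated on
% the training set, the domain is split at the parameter with the largest
% error, the training points are distributed to the two subdomains via the
% distance function, and each subdomain's training set is refilled by
% accept-reject sampling; this repeats until error_tol1 is met everywhere.
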
if hp_model.error_distance_extionsion == 1
 hp_model = h_refinement_err_ext(hp_model,model_data );
else

hp_model.base_model.rho = hp_model.rho;
hp_model.base_model.rho_POD = hp_model.rho_POD;
hp_model.base_model.error_tol1 = hp_model.error_tol1;

rand('state',hp_model.base_model.RB_train_rand_seed);
hp_model.base_model.training = rand_uniform(hp_model.base_model.RB_train_size,...
 hp_model.base_model.mu_ranges);

hp_model.tree.anchor = get_mu(hp_model);
hp_model.tree.model = hp_model.base_model;
%hp_model.tree.model.h_part=1;
hp_model.tree.training = hp_model.base_model.training;
%hp_model.tree.model_data = model_data;

% generate the basis of the initial anchor parameter:
training_backup = hp_model.tree.model.training;
hp_model.tree.model.RB_stop_Nmax = 1;
hp_model.tree.model.training = hp_model.tree.anchor;

hp_model.tree = create_basis_automatic(hp_model.tree);

hp_model.tree.model.training = training_backup;

% littletree is used for the training set generation
littletree = hp_model.tree.anchor;

hp_model.tree = hp1(hp_model.tree);

end


 function tree_out = hp1(tree_in)

 % function tree_out = hp1(tree_in)
 % recursively generates the h-partitioning for parabolic problems
 % tree_in must already contain a basis for its anchor!

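 % Outline: evaluate the error indicator on the current training set; if the
 % maximum error exceeds error_tol1, split the domain at the worst parameter,
 % distribute the training set between the two children via the distance
 % function, refill both training sets by accept-reject sampling, build a
 % basis for the new anchor, and recurse on both children.
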
 % find the parameter with the largest error -> mu1
 disp('--calculating errors of current domain');
 % compute the errors over the training set:
 post_errs = rb_test_indicator(tree_in.model, ...
 tree_in.detailed_data, tree_in.reduced_data,...
 tree_in.model.training,'');

 [max_error, mu_ind] = max(post_errs);

 % error of the anchor approximation (only relevant for static basis generation):
 error_anchor = max(rb_test_indicator(tree_in.model, ...
 tree_in.detailed_data, tree_in.reduced_data,...
 tree_in.anchor,...
 ''));

 disp(['--Maximum error in current domain: ' num2str(max_error) ' - required: < ' num2str(hp_model.error_tol1)] );
 disp(['--Maximum error of anchor : ' num2str(error_anchor) ' - required: < ' num2str(hp_model.error_tol1/tree_in.model.rho)]);

 % warn if the anchor error exceeds error_tol1/rho
 if error_anchor>hp_model.error_tol1/tree_in.model.rho
 warning('more RB nodes required!');
 end;

 if max_error<hp_model.error_tol1
 disp('--NO SPLIT--');
 % done, no further splitting required
 tree_out = tree_in;
 else
 % if error > error tolerance -> split!
 disp('--SPLIT--');

 % create new leaves
 child_right = leaf();
 child_left = leaf();
 % inherit properties from the parent
 child_right.model = tree_in.model;
 child_left.model = tree_in.model;
 % child_right.model.h_part =1;
 % child_left.model.h_part =1;
 % child_right.model_data = tree_in.model_data;
 % child_left.model_data = tree_in.model_data;

 % the left child inherits the anchor from the parent
 child_left.anchor = tree_in.anchor;
 % the right child gets the first newly greedy-selected parameter
 child_right.anchor = tree_in.model.training(:, mu_ind(1));

 child_left.model.training=[];
 child_right.model.training=[];

 % distribute the parent training set among the children;
 % this is where the distance function comes into play
 leng = length(tree_in.model.training(1,:));

 for i = 1:leng
 if ~isequal(tree_in.model.training(:,i),child_right.anchor) && ~isequal(tree_in.model.training(:,i),child_left.anchor)
 distance_l = hp_model.distance_function(child_left.model,child_left.anchor,tree_in.model.training(:,i));
 distance_r = hp_model.distance_function(child_right.model,child_right.anchor,tree_in.model.training(:,i));
 % assign the point to the nearer child
 if distance_l<distance_r
 child_left.model.training = [child_left.model.training, tree_in.model.training(:,i)];
 else
 child_right.model.training = [child_right.model.training, tree_in.model.training(:,i)];
 end;
 end;
 end;

 % record the change in the little tree (variable shared with the enclosing function!)
 littletree = change_littletree(littletree,tree_in.anchor,child_right.anchor);

 % refine the training sets by accept-reject Monte Carlo sampling

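 % (candidates are drawn uniformly from box_ranges and kept only if
 % part_of_domain assigns them to this child's anchor, until the child
 % training set reaches RB_train_size points)
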
 % LEFT SIDE
 % determine the bounds for the random number generator:
 switch hp_model.trainingset_generation_mode
 case 'boundary_box'
 % construct the bounding box:
 param_min = min(child_left.model.training');
 param_max = max(child_left.model.training');
 l = max(param_max-param_min);
 t = size(child_left.model.training);
 if isempty(l) || min(l) == 0 || t(2) <= 1
 box_ranges = child_left.model.mu_ranges;
 else

 param_max = param_max+max(l,hp_model.boundarybox_minimum)*hp_model.boundarybox_factor;
 param_min = param_min-max(l,hp_model.boundarybox_minimum)*hp_model.boundarybox_factor;
 box_ranges = cell2mat(child_left.model.mu_ranges')';

 % clip the box to the admissible parameter ranges:
 param_min = max([param_min;box_ranges(1,:)]);
 param_max = min([param_max;box_ranges(2,:)]);

 box_ranges = mat2cell([param_min;param_max]',ones(1,length(param_min)));
 end;

 case 'mu_ranges'
 box_ranges = child_left.model.mu_ranges;
 otherwise
 box_ranges = child_left.model.mu_ranges;
 end;

 % refinement:
 while length(child_left.model.training) < child_left.model.RB_train_size

 parameter = rand_uniform(1,box_ranges);
 if part_of_domain(littletree,child_left.anchor,parameter,child_left.model)==1
 child_left.model.training = [child_left.model.training,parameter ];
 end;

 end;

 % RIGHT SIDE

 switch hp_model.trainingset_generation_mode
 case 'boundary_box'
 % construct the bounding box:
 param_min = min(child_right.model.training');
 param_max = max(child_right.model.training');
 l = max(param_max-param_min);
 t = size(child_right.model.training);
 if isempty(l) || min(l) == 0 || t(2) <= 1
 box_ranges = child_right.model.mu_ranges;
 else

 param_max = param_max+max(l,hp_model.boundarybox_minimum)*hp_model.boundarybox_factor;
 param_min = param_min-max(l,hp_model.boundarybox_minimum)*hp_model.boundarybox_factor;
 box_ranges = cell2mat(child_right.model.mu_ranges')';

 % clip the box to the admissible parameter ranges:
 param_min = max([param_min;box_ranges(1,:)]);
 param_max = min([param_max;box_ranges(2,:)]);

 box_ranges = mat2cell([param_min;param_max]',ones(1,length(param_min)));
 end;

 case 'mu_ranges'
 box_ranges = child_right.model.mu_ranges;
 otherwise
 box_ranges = child_right.model.mu_ranges;
 end;

 while length(child_right.model.training) < child_right.model.RB_train_size

 parameter = rand_uniform(1,box_ranges);
 if part_of_domain(littletree,child_right.anchor,parameter,child_right.model)==1
 child_right.model.training = [child_right.model.training,parameter ];
 end;

 end;

 child_left.training = child_left.model.training;
 child_right.training = child_right.model.training;

 % inherit the basis of the first parameter (mu0)
 child_left.detailed_data = tree_in.detailed_data;
 child_left.reduced_data = tree_in.reduced_data;
 % generate a basis for the new parameter (mu1)
 backup = child_right.model.training;
 child_right.model.use_generated_RB_basis = 0;
 child_right.model.RB_stop_Nmax = 1;
 child_right.model.training = child_right.anchor;
 % basis of the anchor mu1
 child_right = create_basis_automatic(child_right);
 child_right.model.training = backup;

 % recursive call
 tree_out = {hp1(child_left),hp1(child_right)};

 end

 end

 function tree_out = create_basis_automatic(tree_in)

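 % function tree_out = create_basis_automatic(tree_in)
 % generates a reduced basis for the anchor of tree_in; depending on
 % hp_model.find_nr_nodes, the basis size is either determined automatically
 % by the stopping tolerance error_tol1/rho ('automatic') or taken from
 % model.nr_extension_modes ('static').
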
 switch (hp_model.find_nr_nodes)

 case 'automatic'

 % generate the basis of the anchor - determine the number of nodes automatically;
 % basis generation with the POD error:
 RB_ex_algo_backup = tree_in.model.RB_extension_algorithm; %= RB_extension_PCA_fixspace_flexible%
 tree_in.model.RB_extension_algorithm = tree_in.model.RB_extension_algorithm_hp; %= RB_extension_PCA_fixspace_flexible_hp%
 tree_in.detailed_data = gen_detailed_data(tree_in.model,model_data);
 tree_in.reduced_data = gen_reduced_data(tree_in.model,tree_in.detailed_data);
 tree_in.model.RB_extension_algorithm = RB_ex_algo_backup;

 % basis generation
 tree_in.model.RB_stop_epsilon = tree_in.model.error_tol1/tree_in.model.rho;
 tree_in.model.RB_stop_Nmax = inf;
 tree_in.model.nr_extension_modes = 1;
 % use the POD modes:
 tree_in.model.use_generated_RB_basis = 1;

 % generate the basis (standard greedy)
 tree_in.detailed_data = rb_basis_generation(tree_in.model,tree_in.detailed_data);
 % first parameter
 tree_in.reduced_data = gen_reduced_data(tree_in.model,tree_in.detailed_data);

 % old version:

 % RB_error:

 % error_RB = max(rb_test_indicator(tree_in.model, ...
 % tree_in.detailed_data, tree_in.reduced_data,...
 % tree_in.anchor,...
 % ''));

 %while error_RB > tree_in.model.error_tol1/tree_in.model.rho

 % tree_in.model.use_generated_RB_basis = 1;
 % tree_in.detailed_data = rb_basis_generation(tree_in.model,tree_in.detailed_data);
 % tree_in.reduced_data = gen_reduced_data(tree_in.model,tree_in.detailed_data);
 % error_RB = max(rb_test_indicator(tree_in.model, ...
 % tree_in.detailed_data, tree_in.reduced_data,...
 % tree_in.anchor,...
 % ''));

 %end;

 case 'static'
 % model.nr_extension_modes basis vectors are used - must be set
 tree_in.detailed_data = gen_detailed_data(tree_in.model,model_data);
 tree_in.reduced_data = gen_reduced_data(tree_in.model,tree_in.detailed_data);

 end;

 tree_out = tree_in;

 end


 % records the changes in the tree of anchors
 % helper function
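 % (replaces the leaf that equals para_expand by the pair {para_expand, para_add},
 % descending recursively through all cells)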
 function tree_out = change_littletree(tree_in,para_expand,para_add)
 t = iscell(tree_in);
 if t==1
 tree_in{1} = change_littletree(tree_in{1},para_expand,para_add);
 tree_in{2} = change_littletree(tree_in{2},para_expand,para_add);
 tree_out = tree_in;
 else
 if isequal(tree_in,para_expand)
 tree_out = {tree_in,para_add};
 else
 tree_out = tree_in;
 end;

 end;
 end

 % Checks whether a parameter param belongs to the domain of the given anchor.
 % Requires a little tree.
 % helper function
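 % (navigates down the little tree: at each split the parameter is handed to
 % the subtree whose representative anchor is closer w.r.t.
 % hp_model.distance_function; returns 1 iff the reached leaf equals anchor)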
 function is = part_of_domain(ltree, anchor,param,model)
 test = iscell(ltree);
 % navigation
 if test ==1
 % determine the representatives
 first_l = littletree_first(ltree{1});
 first_r = littletree_first(ltree{2});
 % navigate using the distance function
 distance_l = hp_model.distance_function(model,first_l,param); %norm(first_l-param,2);
 distance_r = hp_model.distance_function(model,first_r,param); %norm(first_r-param,2);
 % find the subtree and continue recursively
 if distance_l<distance_r
 is = part_of_domain(ltree{1}, anchor,param,model);
 else
 is = part_of_domain(ltree{2}, anchor,param,model);
 end;
 else
 % arrived at the matching subdomain

 if isequal(ltree,anchor)
 is = 1;
 else
 is = 0;
 end;
 end;

 end


 % returns the anchor associated with the variable tree.
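 % (descends the leftmost branch of the cell tree until a non-cell leaf,
 % i.e. an anchor parameter, is reached)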
 function value = littletree_first (tree)
 t = iscell(tree);
 if t==1
 value = littletree_first(tree{1});
 else
 value = tree;
 end;

 end

end
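
% Example usage (illustrative sketch only; the numeric values and the base
% model below are assumptions, only the field names are taken from the code
% above; base_model must additionally provide mu_ranges, RB_train_size,
% RB_train_rand_seed and the RB extension algorithms):
%
%   hp_model.base_model                   = my_base_model;
%   hp_model.rho                          = 2;
%   hp_model.rho_POD                      = 2;
%   hp_model.error_tol1                   = 1e-3;
%   hp_model.error_distance_extionsion    = 0;
%   hp_model.find_nr_nodes                = 'automatic';
%   hp_model.trainingset_generation_mode  = 'boundary_box';
%   hp_model.boundarybox_minimum          = 0.1;
%   hp_model.boundarybox_factor           = 0.5;
%   hp_model.distance_function            = @(model,mu1,mu2) norm(mu1-mu2,2);
%
%   hp_model = h_refinement(hp_model, model_data);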