rbmatlab  1.16.09
lin_evol_opt_get_Jacobian.m
function [J,Delta_J,varargout] = lin_evol_opt_get_Jacobian(model, varargin)
%function [J, Delta_J, varargout] = lin_evol_opt_get_Jacobian(model, varargin)
%
% Function returning the Jacobian of the output functional.
%
% For detailed optimization the Jacobian is calculated numerically.
% For reduced optimization the function checks whether the @f$ \partial_{\mu_i}
% \Theta @f$ are available in the model. If not, the Jacobian is also
% calculated numerically.
%
% varargin{1} is either 'model_data' in the detailed case or 'reduced_data'
% in the reduced case. In the reduced finite difference fallback,
% detailed_data is additionally expected as varargin{3} (see below).
%
% varargout{1}: output (value of the objective functional)
% varargout{2}: data (struct of additional data)
%
% In the reduced case an error bound is given by Delta_J (as a vector, one
% entry per mu-direction).
%
% Markus Dihlmann 30.04.2010
% Oliver Zeeb
%
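%
% A minimal calling sketch (hypothetical parameter names and ranges,
% assuming a model struct prepared as in the rbmatlab optimization demos):
%
%   model.mu_names  = {'c_init','k'};       % free parameters
%   model.mu_ranges = {[0,1], [0,5e-8]};    % admissible parameter intervals
%   model.optimization.params_to_optimize = [1 1];
%   [J, Delta_J, output, data] = lin_evol_opt_get_Jacobian(model, reduced_data);
%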
data = [];

if (model.verbose >= 8)
  disp('entered lin_evol_opt_get_Jacobian');
end

Delta_J = 0;

if isfield(varargin{1},'grid')
  model_data = varargin{1};

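  % One-sided difference scheme used below: for each free parameter mu_k
  % with step h = (range width)/h_range,
  %   dJ/dmu_k ~ ( J(mu + h*e_k) - J(mu) ) / h   if mu_k + h stays in range,
  %   dJ/dmu_k ~ ( J(mu) - J(mu - h*e_k) ) / h   otherwise (backward quotient).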
  if (~model.optimization.derivatives_available || ~model.compute_derivative_info)
    % calculate the derivative via finite differences
    h_range = 10000; % step-size control for the difference quotient: h is 1/h_range of the parameter range
    func_mu = model.optimization.objective_function(model, model_data); % functional value at the current parameter set, needed in every iteration
    output = func_mu;
    J = [];
    for k = 1:length(model.mu_names)
      h = (model.mu_ranges{k}(2) - model.mu_ranges{k}(1))/h_range; % ranges must be intervals
      if model.optimization.params_to_optimize(k) == 1
        if model.mu_ranges{k}(2) - model.(model.mu_names{k}) >= h % is a forward difference quotient possible?
          model.(model.mu_names{k}) = model.(model.mu_names{k}) + h;
          func = model.optimization.objective_function(model, model_data); % functional value after perturbing one parameter
          grad_comp = (func - func_mu)/h; % forward difference quotient
          model.(model.mu_names{k}) = model.(model.mu_names{k}) - h; % restore the model for the next iteration
        else
          model.(model.mu_names{k}) = model.(model.mu_names{k}) - h;
          func = model.optimization.objective_function(model, model_data);
          grad_comp = (func_mu - func)/h; % backward difference quotient
          model.(model.mu_names{k}) = model.(model.mu_names{k}) + h; % restore the model for the next iteration
        end
        J = [J, grad_comp]; % append component to the gradient
      end
    end
    %-------------------------------------------
    % End of detailed calculation of the gradient
    %-------------------------------------------
    if (model.verbose >= 8)
      disp('calculated gradient is: ');
      J
    end
  else
    % calculate the derivative via a derivative (sensitivity) pde simulation
    sim_data_der = detailed_simulation(model, model_data);
    % the last column of y_der holds the output derivatives at the final
    % time step, one entry per mu-direction
    J = sim_data_der.y_der(:, end)';
    output = sim_data_der.y(end);
  end

else
  reduced_data = varargin{1};
  if (model.verbose >= 8)
    disp('reduced gradient calculation');
  end
  % TODO: rewrite lin_evol_opt_get_Jacobian to improve performance (output calculation)
  % The finite difference fallback below additionally needs detailed_data;
  % following the previously commented-out convention it is expected as
  % varargin{3}:
  if length(varargin) >= 3
    detailed_data = varargin{3};
  end

  % Here: calculation of the reduced Jacobian.
  % Are derivatives available?
  %   yes -> calculation of the gradient by the derivative evolution equations
  %   no  -> finite difference calculation of the gradient using reduced
  %          simulations
  if (model.optimization.derivatives_available)
    sim_data = rb_simulation(model, reduced_data);
    % sim_data = model.rb_reconstruction(model, model_data, detailed_data, sim_data);
    J = [];
    for i = 1:size(sim_data.s_der, 2)
      J = [J, sim_data.s_der{i}(end)]; % final-time value of the i-th sensitivity output
    end
    output = sim_data.s(end);

    Delta_J = sim_data.Delta_s_grad;
    data.nRB = sim_data.nRB;
    data.nRB_der = sim_data.nRB_der;

  else
    disp('Attention: no derivatives available in lin_evol_opt_get_Jacobian, falling back to finite differences!');
    h_range = 1000; % step-size control for the difference quotient: h is 1/h_range of the parameter range
    func_mu = model.optimization.objective_function(model, reduced_data, detailed_data); % functional value at the current parameter set, needed in every iteration
    output = func_mu;
    J = [];
    for k = 1:length(model.mu_names)
      h = (model.mu_ranges{k}(2) - model.mu_ranges{k}(1))/h_range; % ranges must be intervals
      if model.optimization.params_to_optimize(k) == 1
        if model.mu_ranges{k}(2) - model.(model.mu_names{k}) >= h % is a forward difference quotient possible?
          model.(model.mu_names{k}) = model.(model.mu_names{k}) + h;
          func = model.optimization.objective_function(model, reduced_data, detailed_data); % functional value after perturbing one parameter
          grad_comp = (func - func_mu)/h; % forward difference quotient
          model.(model.mu_names{k}) = model.(model.mu_names{k}) - h; % restore the model for the next iteration
        else
          model.(model.mu_names{k}) = model.(model.mu_names{k}) - h;
          func = model.optimization.objective_function(model, reduced_data, detailed_data);
          grad_comp = (func_mu - func)/h; % backward difference quotient
          model.(model.mu_names{k}) = model.(model.mu_names{k}) + h; % restore the model for the next iteration
        end
        J = [J, grad_comp]; % append component to the gradient
      end
    end
  end

end

varargout = {output, data};

end
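
% ---------------------------------------------------------------------
% The following local function is an illustrative, self-contained sketch
% (not part of the original rbmatlab code) of the one-sided finite
% difference pattern used above, for a plain function handle f, a
% parameter row vector mu and ranges given as an n-by-2 matrix. All names
% here are hypothetical.
function g = fd_gradient_sketch(f, mu, mu_ranges, h_range)
  g = zeros(1, numel(mu));
  for k = 1:numel(mu)
    h = (mu_ranges(k,2) - mu_ranges(k,1)) / h_range; % step: 1/h_range of the range
    mu_pert = mu;
    if mu_ranges(k,2) - mu(k) >= h
      mu_pert(k) = mu(k) + h;              % forward quotient
      g(k) = (f(mu_pert) - f(mu)) / h;
    else
      mu_pert(k) = mu(k) - h;              % backward quotient near the upper bound
      g(k) = (f(mu) - f(mu_pert)) / h;
    end
  end
end
% Example: fd_gradient_sketch(@(mu) mu(1)^2 + mu(2), [0.5, 1], [0 1; 0 2], 1000)
% returns approximately [1, 1].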