% rbmatlab 1.16.09
% (doxygen listing header: All Classes / Namespaces / Files / Functions / Variables / Modules / Pages)
% porsche_get_Jacobian.m
function [jacobian,Delta_J] = porsche_get_Jacobian(model, data)
%function [jacobian,Delta_J] = porsche_get_Jacobian(model, data)
%
% Function returning the Jacobian (gradient) of the output functional.
%
% The derivative is approximated numerically by one-sided finite
% differences of model.optimization.objective_function with respect to
% each parameter listed in model.mu_names for which
% model.optimization.params_to_optimize is set to 1.
%
% Parameters:
%   model : model struct; must provide the fields read below
%           (mu_names, mu_ranges, optimization.objective_function,
%           optimization.params_to_optimize,
%           optimization.derivatives_available,
%           compute_derivative_info, verbose).
%   data  : either 'model_data' (detailed case) or 'reduced_data'
%           (reduced case); passed through unchanged to the
%           objective function.
%
% Return values:
%   jacobian : row vector with one finite-difference component per
%              optimized parameter, in model.mu_names order.
%   Delta_J  : error bound on the Jacobian. Only meaningful in the
%              reduced case with derivative simulations; the finite
%              difference path implemented here always returns 0.
%
% Markus Dihlmann 30.04.2010
% Oliver Zeeb

if (model.verbose >= 8)
  disp('entered porsche_get_Jacobian');
end

Delta_J = 0;
% Step-size divisor for the difference quotient: each parameter is
% perturbed by 1/h_range of its admissible interval length.
h_range = 100000;

if (~model.optimization.derivatives_available || ~model.compute_derivative_info)
  % Finite-difference approximation of the gradient.
  % Objective value at the unperturbed parameter vector mu; reused for
  % every one-sided quotient below.
  func_mu = model.optimization.objective_function(model, data);
  jacobian = [];
  for k = 1:length(model.mu_names)
    % mu_ranges{k} must be an interval [min, max].
    h = (model.mu_ranges{k}(2) - model.mu_ranges{k}(1)) / h_range;
    if model.optimization.params_to_optimize(k) == 1
      old_mu = model.(model.mu_names{k});
      if model.mu_ranges{k}(2) - old_mu >= h
        % Enough room towards the upper bound: right-handed quotient.
        model.(model.mu_names{k}) = old_mu + h;
        func = model.optimization.objective_function(model, data);
        grad_comp = (func - func_mu) / h;
      else
        % Too close to the upper bound: fall back to the left-handed
        % quotient so the perturbed parameter stays inside the range.
        model.(model.mu_names{k}) = old_mu - h;
        func = model.optimization.objective_function(model, data);
        grad_comp = (func_mu - func) / h;
      end
      % Restore the parameter for the next loop iteration.
      model.(model.mu_names{k}) = old_mu;
      % Append component (number of optimized parameters is small, so
      % growing the vector is acceptable here).
      jacobian = [jacobian, grad_comp];
    end
  end

  if (model.verbose >= 8)
    % bug fix: previously printed undefined variable 'J'
    disp(['calculated gradient is: ' num2str(jacobian)]);
  end
else
  % Derivative information is declared available, but no derivative
  % simulation path is implemented in this function. Fail loudly instead
  % of returning an undefined 'jacobian'.
  error(['porsche_get_Jacobian: derivative simulation path not ', ...
         'implemented; use finite differences by disabling ', ...
         'model.optimization.derivatives_available or ', ...
         'model.compute_derivative_info.']);
end
164