function [J,Delta_J,varargout] = lin_evol_opt_get_Jacobian(model, varargin)
%function [J,Delta_J,varargout] = lin_evol_opt_get_Jacobian(model, varargin)
% Function returning the Jacobian of the output functional.
%
% For detailed optimization the Jacobian is calculated numerically via
% finite differences. For reduced optimization the function checks whether
% the @f$ \partial_{\mu_i}\Theta @f$ are available in the model; if not,
% the Jacobian is likewise approximated by finite differences.
%
% varargin{1} is either "model_data" (detailed case, detected by the
% presence of a 'grid' field) or "reduced_data" (reduced case).
%
% Return values:
%   J           : Jacobian (row vector of partial derivatives)
%   Delta_J     : error bound on the gradient (reduced case only; empty
%                 in the detailed case)
%   varargout{1}: output (value of the output functional)
%   varargout{2}: data (struct of additional data, e.g. RB sizes)
%
% Markus Dihlmann 30.04.2010

disp('entered lin_evol_opt_get_Jacobian')

% Initialize all outputs so every branch returns a consistent set.
J = [];
Delta_J = [];
output = [];
data = [];

if isfield(varargin{1}, 'grid')
  %---------------------------------------------------------------
  % Detailed case: varargin{1} is model_data
  %---------------------------------------------------------------
  model_data = varargin{1};

  if (~model.optimization.derivatives_available||~model.compute_derivative_info)
    % Calculate derivative via finite differences.
    h_range = 10000; % for constructing the difference quotient: how small is the variation of the parameter
    % Value of the functional at the current parameter set, needed in every cycle run:
    func_mu = model.optimization.objective_function(model, model_data);

    for k=1:length(model.mu_names)
      h = (model.mu_ranges{k}(2)-model.mu_ranges{k}(1))/h_range; % ranges must be intervals
      if model.optimization.params_to_optimize(k) == 1
        if model.mu_ranges{k}(2)-model.(model.mu_names{k}) >= h
          % Right-handed difference quotient is possible.
          model.(model.mu_names{k}) = model.(model.mu_names{k}) + h;
          func = model.optimization.objective_function(model, model_data);
          grad_comp = (func - func_mu)/h; % right-handed difference quotient
          model.(model.mu_names{k}) = model.(model.mu_names{k}) - h; % restore model for the next cycle run
        else
          % Too close to the upper bound: use left-handed difference quotient.
          model.(model.mu_names{k}) = model.(model.mu_names{k}) - h;
          func = model.optimization.objective_function(model, model_data);
          grad_comp = (func_mu - func)/h; % left-handed difference quotient
          model.(model.mu_names{k}) = model.(model.mu_names{k}) + h; % restore model for the next cycle run
        end
        J=[J,grad_comp]; % add component to the end of the gradient
      end
    end
    %-------------------------------------------
    % End of detailed calculation of the gradient
    %-------------------------------------------
    disp('calculated gradient is: ')
    disp(J)
  else
    % Calculate derivative via derivative PDE simulation.
    sim_data_der = detailed_simulation(model, model_data);
    y_der_end = size(sim_data_der.y_der,2);
    y_end = length(sim_data_der.y);
    J = sim_data_der.y_der(:,y_der_end)';
    output = sim_data_der.y(y_end);
  end

else
  %---------------------------------------------------------------
  % Reduced case: varargin{1} is reduced_data
  %---------------------------------------------------------------
  reduced_data = varargin{1};
  disp('reduced_gradient calculation')
  %disp('Rewrite lin_evol_opt_get_Jacobian to augment performance (output calculation');
  %model_data = varargin{2};
  %detailed_data = varargin{3};

  % Are derivatives available?
  %   yes -> calculation of gradient by evolution equation
  %   no  -> finite-difference calculation of gradient using reduced simulations
  if (model.optimization.derivatives_available)
    sim_data = rb_simulation(model, reduced_data);
    %sim_data = model.rb_reconstruction(model, model_data, detailed_data, sim_data);
    for i=1:size(sim_data.s_der,2)
      y = sim_data.s_der{i}(end);
      J = [J,y];
    end
    output = sim_data.s(end);
    Delta_J = sim_data.Delta_s_grad;
    data.nRB = sim_data.nRB;
    data.nRB_der = sim_data.nRB_der;
  else
    disp('!!! Attention: In lin_evol_opt_get_Jacobian there are no derivatives available!!!');
    h_range = 1000; % for constructing the difference quotient: how small is the variation of the parameter
    % NOTE(review): detailed_data is never assigned in this function (the
    % assignment via varargin{3} is commented out above), so this branch
    % errors when reached — confirm intended calling convention.
    % Value of the functional at the current parameter set, needed in every cycle run:
    func_mu = model.optimization.objective_function(model, reduced_data, detailed_data);

    for k=1:length(model.mu_names)
      h = (model.mu_ranges{k}(2)-model.mu_ranges{k}(1))/h_range; % ranges must be intervals
      if model.optimization.params_to_optimize(k) == 1
        if model.mu_ranges{k}(2)-model.(model.mu_names{k}) >= h
          % Right-handed difference quotient is possible.
          model.(model.mu_names{k}) = model.(model.mu_names{k}) + h;
          func = model.optimization.objective_function(model, reduced_data, detailed_data);
          grad_comp = (func - func_mu)/h; % right-handed difference quotient
          model.(model.mu_names{k}) = model.(model.mu_names{k}) - h; % restore model for the next cycle run
        else
          % Too close to the upper bound: use left-handed difference quotient.
          model.(model.mu_names{k}) = model.(model.mu_names{k}) - h;
          func = model.optimization.objective_function(model, reduced_data, detailed_data);
          grad_comp = (func_mu - func)/h; % left-handed difference quotient
          model.(model.mu_names{k}) = model.(model.mu_names{k}) + h; % restore model for the next cycle run
        end
        J=[J,grad_comp]; % add component to the end of the gradient
      end
    end
  end
end

varargout = {output,data};