rbmatlab  1.16.09
 All Classes Namespaces Files Functions Variables Modules Pages
matrix_diffusion.m
function res = matrix_diffusion(x, model, df_info, i, j)
%function res = matrix_diffusion(x, model, df_info, i, j)
%
% Evaluates the diffusion integrand
%
%    grad(hat_phi_j)(x)' * ( JIT' * A * JIT ) * grad(hat_phi_i)(x)
%
% over all grid elements at the reference-element point 'x', where
% 'JIT' is taken from 'df_info.grid.JIT' (presumably the transposed
% inverse Jacobians of the element maps — confirm against grid docs)
% and A is the 2x2 diffusivity tensor returned by
% 'model.diffusivity_tensor', with columns ordered [A11, A12, A21, A22].
%
% Parameters:
%   x       : reference-element evaluation point, forwarded to
%             'evaluate_basis_function_derivative' and to the
%             diffusivity tensor
%   model   : model providing 'diffusivity_tensor(grid, eindices, x, model)'
%   df_info : discrete-function space information with fields
%             'grid.JIT' and 'grid.nelements'
%   i, j    : indices of the local (reference) basis functions
%
% Return values:
%   res : the evaluated integrand per element. If the diffusivity
%         tensor is affinely decomposed (returned as a cell array of
%         components), 'res' is a cell array with one entry per
%         component; otherwise a plain numeric result.

% IM 12.04.2013

gradient_hat_phi_i = evaluate_basis_function_derivative(df_info, x, i);
gradient_hat_phi_j = evaluate_basis_function_derivative(df_info, x, j);

JIT = df_info.grid.JIT;

% cache_function memoizes on the argument pair (x, model), avoiding
% repeated evaluation of the (possibly expensive) diffusivity tensor
% for identical quadrature points.
d = cache_function(@(xarg, xmodel)xmodel.diffusivity_tensor(df_info.grid, ...
                                                  1:df_info.grid.nelements, ...
                                                  xarg, xmodel), x, ...
                   model);

% Unify the plain and the affinely-decomposed (cell) case: wrap a
% plain tensor into a one-element cell, process all components with
% the same contraction, and unwrap afterwards. This removes the
% previously duplicated ATJ/JTATJ code for the two branches.
was_cell = iscell(d);
if ~was_cell
  d = {d};
end

res = cell(1, length(d));
for q = 1:length(d)
  res{q} = contract_component(d{q});
end

if ~was_cell
  res = res{1};
end

  function r = contract_component(dq)
  % Computes grad(hat_phi_j)' * (JIT' * Aq * JIT) * grad(hat_phi_i)
  % elementwise over all grid elements for one tensor component 'dq'
  % (columns [A11, A12, A21, A22]). Shares JIT and the basis-function
  % gradients with the parent workspace.

    % ATJ = Aq * JIT, evaluated per element
    ATJ_11 = dq(:, 1) .* JIT(:, 1, 1) + dq(:, 2) .* JIT(:, 2, 1);
    ATJ_21 = dq(:, 3) .* JIT(:, 1, 1) + dq(:, 4) .* JIT(:, 2, 1);
    ATJ_12 = dq(:, 1) .* JIT(:, 1, 2) + dq(:, 2) .* JIT(:, 2, 2);
    ATJ_22 = dq(:, 3) .* JIT(:, 1, 2) + dq(:, 4) .* JIT(:, 2, 2);

    % JTATJ = JIT' * ATJ, evaluated per element
    JTATJ_11 = JIT(:, 1, 1) .* ATJ_11 + JIT(:, 2, 1) .* ATJ_21;
    JTATJ_12 = JIT(:, 1, 1) .* ATJ_12 + JIT(:, 2, 1) .* ATJ_22;
    JTATJ_21 = JIT(:, 1, 2) .* ATJ_11 + JIT(:, 2, 2) .* ATJ_21;
    JTATJ_22 = JIT(:, 1, 2) .* ATJ_12 + JIT(:, 2, 2) .* ATJ_22;

    % contract the transformed tensor with the reference gradients
    r = ...
      gradient_hat_phi_j(:, 1)' * gradient_hat_phi_i(:, 1) * JTATJ_11 + ...
      gradient_hat_phi_j(:, 1)' * gradient_hat_phi_i(:, 2) * JTATJ_12 + ...
      gradient_hat_phi_j(:, 2)' * gradient_hat_phi_i(:, 1) * JTATJ_21 + ...
      gradient_hat_phi_j(:, 2)' * gradient_hat_phi_i(:, 2) * JTATJ_22;
  end

end
function varargout = cache_function(func_ptr, varargin)
Simple caching of function calls; the call inputs are cached as the memoization key.