PageRenderTime 46ms CodeModel.GetById 23ms RepoModel.GetById 0ms app.codeStats 0ms

/FitFunc/fit_ML_laplace.m

http://github.com/aludnam/MATLAB
MATLAB | 94 lines | 89 code | 5 blank | 0 comment | 7 complexity | 394097338d2f9772fd63ccfb4e706cde MD5 | raw file
Possible License(s): BSD-3-Clause
  1. function result = fit_ML_laplace( x,hAx )
  2. % fit_ML_normal - Maximum Likelihood fit of the laplace distribution of i.i.d. samples!.
  3. % Given the samples of a laplace distribution, the PDF parameter is found
  4. %
  5. % fits data to the probability of the form:
  6. % p(x) = 1/(2*b)*exp(-abs(x-u)/b)
  7. % with parameters: u,b
  8. %
  9. % format: result = fit_ML_laplace( x,hAx )
  10. %
  11. % input: x - vector, samples with laplace distribution to be parameterized
  12. % hAx - handle of an axis, on which the fitted distribution is plotted
  13. % if h is given empty, a figure is created.
  14. %
  15. % output: result - structure with the fields
  16. % u,b - fitted parameters
  17. % CRB_b - Cram?r-Rao Bound for the estimator value
  18. % RMS - RMS error of the estimation
  19. % type - 'ML'
  20. %
  21. %
  22. % Algorithm
  23. % ===========
  24. %
  25. % We use the ML algorithm to estimate the PDF from the samples.
  26. % The laplace destribution is given by:
  27. %
  28. % p(x;u,b) = 1/(2*b)*exp(-abs(x-u)/b)
  29. %
  30. % where x are the samples which distribute by the function p(x;u,b)
  31. % and are assumed to be i.i.d !!!
  32. %
  33. % The ML estimator is given by:
  34. %
  35. % a = parameters vector = [u,b]
  36. % f(Xn,a) = 1/(2*b)*exp(-abs(Xn-u)/b)
  37. % L(a) = f(X,a) = product_by_n( f(Xn,a) )
  38. % = (2*b)^(-N) * exp( - sum( abs(Xn-u) )/b )
  39. % log(L(a)) = -N*log(2*b) - sum( abs(Xn-u) )/b
  40. %
  41. % The maximum likelihood point is found by the derivative of log(L(a)) with respect to "a":
  42. %
  43. % diff(log(L(a)),b) = N/(b^2) * ( sum( abs(Xn-u) )/N - b )
  44. % = J(b) * (b_estimation - b)
  45. % diff(log(L(a)),m) = (1/b) * sum( diff( abs(Xn-u),u ) ) => can't obtain a derivative
  46. % But, u is the mean of the distribution, and therefore => u = mean(Xn)
  47. %
  48. %
  49. % Therefore, the (efficient) estimators are given by:
  50. %
  51. % u = sum( Xn )/N
  52. % b = sum( abs(Xn-u) )/N
  53. %
  54. % The Cram?r-Rao Bounds for these estimator are:
  55. %
  56. % VAR( m ) = ?
  57. % VAR( b ) = 1/J(b) = b^2 / N
  58. %
  59. % NOTE: the ML estimator does not detect a deviation from the model.
  60. % therefore, check the RMS value !
  61. %
  62. if (nargin<1)
  63. error( 'fit_ML_laplace - insufficient input arguments' );
  64. end
  65. % Estimation
  66. % =============
  67. x = x(:); % should be column vectors !
  68. N = length(x);
  69. u = sum( x )/N;
  70. b = sum(abs(x-u))/N;
  71. CRB_b = b^2 / N;
  72. [n,x_c] = hist( x,100 );
  73. n = n / sum(n*abs(x_c(2)-x_c(1)));
  74. y = 1/(2*b)*exp(-abs(x_c-u)/b);
  75. RMS = sqrt( (y-n)*((y-n)')/ (x_c(2)-x_c(1))^2 / (length(x_c)-1) );
  76. % finish summarizing results
  77. % ============================
  78. result = struct( 'u',u,'b',b,'CRB_b',CRB_b,'RMS',RMS,'type','ML' );
  79. % plot distribution if asked for
  80. % ===============================
  81. if (nargin>1)
  82. xspan = linspace(min(x),max(x),100);
  83. if ishandle( hAx )
  84. plot_laplace( xspan,result,hAx,1 );
  85. else
  86. figure;
  87. plot_laplace( xspan,result,gca,1 );
  88. end
  89. end