Class: Statsample::GLM::MLE::Base
- Inherits: Object
- Defined in: lib/statsample-glm/glm/mle/base.rb
Constant Summary
- MIN_DIFF_PARAMETERS = 1e-2
Instance Attribute Summary
- #coefficients ⇒ Object (readonly)
  Returns the value of attribute coefficients.
- #degree_of_freedom ⇒ Object (readonly)
  Returns the value of attribute degree_of_freedom.
- #fitted_mean_values ⇒ Object (readonly)
  Returns the value of attribute fitted_mean_values.
- #iterations ⇒ Object (readonly)
  Returns the value of attribute iterations.
- #log_likelihood ⇒ Object (readonly)
  Returns the value of attribute log_likelihood.
- #residuals ⇒ Object (readonly)
  Returns the value of attribute residuals.
Instance Method Summary
- #initialize(data_set, dependent, opts) ⇒ Base (constructor)
  A new instance of Base.
- #newton_raphson(x, y, start_values = nil) ⇒ Object
  Newton-Raphson with automatic stopping criteria.
- #standard_error ⇒ Object
Constructor Details
#initialize(data_set, dependent, opts) ⇒ Base
Returns a new instance of Base.
# File 'lib/statsample-glm/glm/mle/base.rb', line 12

def initialize data_set, dependent, opts
  @opts      = opts
  @data_set  = data_set
  @dependent = dependent

  @stop_criteria  = :parameters
  @var_cov_matrix = nil
  @iterations     = nil
  @parameters     = nil

  x = @data_set.to_matrix
  y = @dependent.to_matrix(:vertical)

  @coefficients       = newton_raphson x, y
  @log_likelihood     = _log_likelihood x, y, @coefficients
  @fitted_mean_values = create_vector measurement(x, @coefficients).to_a.flatten
  @residuals          = @dependent - @fitted_mean_values
  @degree_of_freedom  = @dependent.count - x.column_size

  # For Normal, the last entry of the coefficient vector is sigma^2,
  # which we don't want to return to the user, so it is dropped here.
  @coefficients = create_vector(
    self.is_a?(Statsample::GLM::MLE::Normal) ? @coefficients.to_a.flatten[0..-2] : @coefficients.to_a.flatten
  )
end
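Base is typically not instantiated directly; the distribution-specific subclasses (for example Statsample::GLM::MLE::Logistic or Statsample::GLM::MLE::Normal) are constructed with a data set of predictors and a dependent vector. The sketch below is a hypothetical illustration: the Daru data structures, the option values for :iterations and :epsilon, and direct construction of the subclass are assumptions rather than documented usage (models are normally obtained through the Statsample::GLM.compute front end).

# A hypothetical sketch only: the data, the option values and direct
# construction of the Logistic subclass are illustrative assumptions.
require 'statsample-glm'

x = Daru::DataFrame.new(
  a:        [0.5, 1.2, -0.3, 2.1, 0.8, -1.4],
  constant: [1, 1, 1, 1, 1, 1]
)
y    = Daru::Vector.new([0, 1, 0, 1, 1, 0])
opts = { iterations: 100, epsilon: 1e-7 }

model = Statsample::GLM::MLE::Logistic.new(x, y, opts)

model.coefficients       # estimated coefficients
model.log_likelihood     # log-likelihood at the estimates
model.fitted_mean_values # fitted probabilities for each observation
model.degree_of_freedom  # observations minus number of predictors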
Instance Attribute Details
#coefficients ⇒ Object (readonly)
Returns the value of attribute coefficients.
# File 'lib/statsample-glm/glm/mle/base.rb', line 6

def coefficients
  @coefficients
end
#degree_of_freedom ⇒ Object (readonly)
Returns the value of attribute degree_of_freedom.
# File 'lib/statsample-glm/glm/mle/base.rb', line 6

def degree_of_freedom
  @degree_of_freedom
end
#fitted_mean_values ⇒ Object (readonly)
Returns the value of attribute fitted_mean_values.
# File 'lib/statsample-glm/glm/mle/base.rb', line 6

def fitted_mean_values
  @fitted_mean_values
end
#iterations ⇒ Object (readonly)
Returns the value of attribute iterations.
# File 'lib/statsample-glm/glm/mle/base.rb', line 6

def iterations
  @iterations
end
#log_likelihood ⇒ Object (readonly)
Returns the value of attribute log_likelihood.
# File 'lib/statsample-glm/glm/mle/base.rb', line 6

def log_likelihood
  @log_likelihood
end
#residuals ⇒ Object (readonly)
Returns the value of attribute residuals.
# File 'lib/statsample-glm/glm/mle/base.rb', line 6

def residuals
  @residuals
end
Instance Method Details
#newton_raphson(x, y, start_values = nil) ⇒ Object
Newton-Raphson with automatic stopping criteria. Based on: Von Tessin, P. (2005). Maximum Likelihood Estimation With Java and Ruby.
Parameters:
- x - matrix of independent (explanatory) variables. Should have nxk dimensions.
- y - matrix of dependent values. Should have nx1 dimensions.
- @m - class used for modeling. Could be Normal or Logistic.
- start_values - matrix of starting coefficients. Should have kx1 dimensions.
# File 'lib/statsample-glm/glm/mle/base.rb', line 55

def newton_raphson(x, y, start_values = nil)
  # deep copy?
  if start_values.nil?
    parameters = set_default_parameters(x)
  else
    parameters = start_values.dup
  end
  k = parameters.row_size

  raise "n on y != n on x" if x.row_size != y.row_size

  h  = nil
  fd = nil

  if @stop_criteria == :mle
    old_likelihood = _log_likelihood(x, y, parameters)
  else
    old_parameters = parameters
  end

  @opts[:iterations].times do |i|
    @iterations = i + 1

    h = second_derivative(x, y, parameters)
    if h.singular?
      raise "Hessian is singular!"
    end
    fd = first_derivative(x, y, parameters)
    parameters = parameters - (h.inverse * (fd))

    if @stop_criteria == :parameters
      flag = true
      k.times do |j|
        diff = (parameters[j, 0] - old_parameters[j, 0]) / parameters[j, 0]
        flag = false if diff.abs >= MIN_DIFF_PARAMETERS
      end

      if flag
        @var_cov_matrix = h.inverse * -1.0
        return parameters
      end
      old_parameters = parameters
    else
      begin
        new_likelihood = _log_likelihood(x, y, parameters)

        if (new_likelihood < old_likelihood) or
           ((new_likelihood - old_likelihood) / new_likelihood).abs < @opts[:epsilon]
          @var_cov_matrix = h.inverse * -1.0
          break
        end
        old_likelihood = new_likelihood
      rescue => e
        puts "#{e}"
      end
    end
  end
  @parameters = parameters
  parameters
end
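Each pass through the loop applies the Newton-Raphson update parameters = parameters - h.inverse * fd, where h is the Hessian (second_derivative) and fd the gradient (first_derivative) of the log-likelihood, and stops once either every coefficient's relative change drops below MIN_DIFF_PARAMETERS or the relative change in the log-likelihood drops below opts[:epsilon]. The self-contained sketch below (not library code; the data and the 1e-2 tolerance are illustrative) applies the same update to a one-parameter Poisson likelihood, where the gradient and Hessian reduce to scalars.

# Toy Newton-Raphson run for the MLE of a Poisson rate (illustrative only).
#   log-likelihood: l(lambda)   = sum(y) * log(lambda) - n * lambda + const
#   gradient:       l'(lambda)  = sum(y) / lambda - n
#   Hessian:        l''(lambda) = -sum(y) / lambda**2
y   = [2, 3, 4, 1, 5, 3]
n   = y.size
sum = y.sum

lambda_hat = 1.0
100.times do
  gradient = sum / lambda_hat - n
  hessian  = -sum / lambda_hat**2
  updated  = lambda_hat - gradient / hessian   # Newton-Raphson step
  # same stopping idea as the :parameters criterion above
  converged  = ((updated - lambda_hat) / updated).abs < 1e-2
  lambda_hat = updated
  break if converged
end

puts lambda_hat # approaches the sample mean, 3.0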
#standard_error ⇒ Object
# File 'lib/statsample-glm/glm/mle/base.rb', line 38

def standard_error
  out = []

  @data_set.vectors.to_a.each_index do |i|
    out << Math::sqrt(@var_cov_matrix[i, i])
  end

  out
end
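Each standard error is the square root of the corresponding diagonal entry of the variance-covariance matrix, i.e. the negated inverse Hessian stored by #newton_raphson. A small self-contained illustration with made-up numbers:

# A made-up 2x2 variance-covariance matrix (the negated inverse Hessian);
# the values are illustrative only.
require 'matrix'

var_cov = Matrix[[0.04, 0.01],
                 [0.01, 0.09]]

standard_errors = (0...var_cov.row_size).map { |i| Math.sqrt(var_cov[i, i]) }
puts standard_errors.inspect # => approximately [0.2, 0.3]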