Class: SVMKit::LinearModel::LogisticRegression

Inherits:
Object
Includes:
Base::BaseEstimator, Base::Classifier
Defined in:
lib/svmkit/linear_model/logistic_regression.rb

Overview

LogisticRegression is a class that implements Logistic Regression with mini-batch stochastic gradient descent optimization. For multiclass classification problems, it uses the one-vs-the-rest strategy.
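As a rough sketch of the objective such a solver minimizes (based on the Pegasos-style formulation cited below, not taken from the library source; logistic_loss is a hypothetical helper), the binary subproblem is the L2-regularized logistic loss over labels in {-1, +1}:

require 'numo/narray'

# Hypothetical illustration only: mean logistic loss plus an L2 penalty, for a weight
# vector w (Numo::DFloat), samples x (shape [n_samples, n_features]), and labels y in {-1, +1}.
def logistic_loss(w, x, y, reg_param)
  margins = x.dot(w) * y
  Numo::NMath.log(1.0 + Numo::NMath.exp(-margins)).mean + 0.5 * reg_param * (w * w).sum
end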

Reference

    1. S. Shalev-Shwartz, Y. Singer, N. Srebro, and A. Cotter, “Pegasos: Primal Estimated sub-GrAdient SOlver for SVM,” Mathematical Programming, vol. 127 (1), pp. 3–30, 2011.

Examples:

estimator =
  SVMKit::LinearModel::LogisticRegression.new(reg_param: 1.0, max_iter: 1000, batch_size: 20, random_seed: 1)
estimator.fit(training_samples, training_labels)
results = estimator.predict(testing_samples)

Instance Attribute Summary

Attributes included from Base::BaseEstimator

#params

Instance Method Summary

Methods included from Base::Classifier

#score

Constructor Details

#initialize(reg_param: 1.0, fit_bias: false, bias_scale: 1.0, max_iter: 1000, batch_size: 20, optimizer: nil, random_seed: nil) ⇒ LogisticRegression

Create a new classifier with Logistic Regression using mini-batch SGD optimization.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 54

def initialize(reg_param: 1.0, fit_bias: false, bias_scale: 1.0,
               max_iter: 1000, batch_size: 20, optimizer: nil, random_seed: nil)
  check_params_float(reg_param: reg_param, bias_scale: bias_scale)
  check_params_integer(max_iter: max_iter, batch_size: batch_size)
  check_params_boolean(fit_bias: fit_bias)
  check_params_type_or_nil(Integer, random_seed: random_seed)
  check_params_positive(reg_param: reg_param, bias_scale: bias_scale, max_iter: max_iter, batch_size: batch_size)
  @params = {}
  @params[:reg_param] = reg_param
  @params[:fit_bias] = fit_bias
  @params[:bias_scale] = bias_scale
  @params[:max_iter] = max_iter
  @params[:batch_size] = batch_size
  @params[:optimizer] = optimizer
  @params[:optimizer] ||= Optimizer::Nadam.new
  @params[:random_seed] = random_seed
  @params[:random_seed] ||= srand
  @weight_vec = nil
  @bias_term = nil
  @classes = nil
  @rng = Random.new(@params[:random_seed])
end
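A hedged construction sketch (the values are illustrative, not recommendations): fit_bias: true makes the estimator fit a bias term, and an optimizer instance can be injected explicitly; when optimizer is nil, the source above falls back to Optimizer::Nadam. This assumes the Nadam optimizer is exposed as SVMKit::Optimizer::Nadam in the installed version.

require 'svmkit'

estimator = SVMKit::LinearModel::LogisticRegression.new(
  reg_param: 0.01, fit_bias: true, bias_scale: 1.0,
  max_iter: 500, batch_size: 50,
  optimizer: SVMKit::Optimizer::Nadam.new, random_seed: 1)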

Instance Attribute Details

#bias_term ⇒ Numo::DFloat (readonly)

Return the bias term (a.k.a. intercept) for Logistic Regression.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 33

def bias_term
  @bias_term
end

#classes ⇒ Numo::Int32 (readonly)

Return the class labels.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 37

def classes
  @classes
end

#rng ⇒ Random (readonly)

Return the random generator for performing random sampling.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 41

def rng
  @rng
end

#weight_vec ⇒ Numo::DFloat (readonly)

Return the weight vector for Logistic Regression.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 29

def weight_vec
  @weight_vec
end

Instance Method Details

#decision_function(x) ⇒ Numo::DFloat

Calculate confidence scores for samples.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 113

def decision_function(x)
  check_sample_array(x)
  x.dot(@weight_vec.transpose) + @bias_term
end
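Reading from the source above, the score for each sample is the linear function x.dot(w) + b of the learned weights, computed per class in the one-vs-the-rest case. A brief sketch (df is an illustrative variable name):

df = estimator.decision_function(testing_samples)
# Binary problem: df has shape [n_samples]. Multiclass problem: [n_samples, n_classes],
# one one-vs-the-rest score per class, ordered as in estimator.classes.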

#fit(x, y) ⇒ LogisticRegression

Fit the model with the given training data.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 82

def fit(x, y)
  check_sample_array(x)
  check_label_array(y)
  check_sample_label_size(x, y)

  @classes = Numo::Int32[*y.to_a.uniq.sort]
  n_classes = @classes.size
  _n_samples, n_features = x.shape

  if n_classes > 2
    @weight_vec = Numo::DFloat.zeros(n_classes, n_features)
    @bias_term = Numo::DFloat.zeros(n_classes)
    n_classes.times do |n|
      bin_y = Numo::Int32.cast(y.eq(@classes[n])) * 2 - 1
      weight, bias = binary_fit(x, bin_y)
      @weight_vec[n, true] = weight
      @bias_term[n] = bias
    end
  else
    negative_label = y.to_a.uniq.min
    bin_y = Numo::Int32.cast(y.ne(negative_label)) * 2 - 1
    @weight_vec, @bias_term = binary_fit(x, bin_y)
  end

  self
end
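A minimal end-to-end sketch with made-up data (array contents are illustrative): samples are passed as a Numo::DFloat of shape [n_samples, n_features] and labels as a Numo::Int32 vector.

require 'svmkit'

x = Numo::DFloat[[1.0, 2.0], [1.2, 1.8], [-1.0, -2.0], [-1.1, -1.9]]
y = Numo::Int32[1, 1, -1, -1]
estimator = SVMKit::LinearModel::LogisticRegression.new(random_seed: 1)
estimator.fit(x, y)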

#marshal_dumpHash

Dump marshal data.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 151

def marshal_dump
  { params: @params,
    weight_vec: @weight_vec,
    bias_term: @bias_term,
    classes: @classes,
    rng: @rng }
end

#marshal_load(obj) ⇒ nil

Load marshal data.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 161

def marshal_load(obj)
  @params = obj[:params]
  @weight_vec = obj[:weight_vec]
  @bias_term = obj[:bias_term]
  @classes = obj[:classes]
  @rng = obj[:rng]
  nil
end

#predict(x) ⇒ Numo::Int32

Predict class labels for samples.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 122

def predict(x)
  check_sample_array(x)

  return Numo::Int32.cast(predict_proba(x)[true, 1].ge(0.5)) * 2 - 1 if @classes.size <= 2

  n_samples, = x.shape
  decision_values = predict_proba(x)
  Numo::Int32.asarray(Array.new(n_samples) { |n| @classes[decision_values[n, true].max_index] })
end
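A short sketch of reading the outputs (variable names are illustrative): predict returns one label per sample, chosen by thresholding or maximizing the probabilities returned by predict_proba (documented below).

labels = estimator.predict(testing_samples)       # Numo::Int32, one label per sample
probs  = estimator.predict_proba(testing_samples) # Numo::DFloat, shape [n_samples, n_classes]
# probs[i, j] estimates the probability that sample i belongs to estimator.classes[j];
# for a binary problem, column 1 corresponds to the positive (larger) label.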

#predict_proba(x) ⇒ Numo::DFloat

Predict probabilities for samples.



# File 'lib/svmkit/linear_model/logistic_regression.rb', line 136

def predict_proba(x)
  check_sample_array(x)

  proba = 1.0 / (Numo::NMath.exp(-decision_function(x)) + 1.0)
  return (proba.transpose / proba.sum(axis: 1)).transpose if @classes.size > 2

  n_samples, = x.shape
  probs = Numo::DFloat.zeros(n_samples, 2)
  probs[true, 1] = proba
  probs[true, 0] = 1.0 - proba
  probs
end