Class: Tensorflow::Train::Optimizer

Inherits: Object
Defined in:
lib/tensorflow/train/optimizer.rb

Direct Known Subclasses

GradientDescentOptimizer

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(name: nil, use_locking: false) ⇒ Optimizer

Returns a new instance of Optimizer; raises Error::InvalidArgumentError unless a name is supplied.



# File 'lib/tensorflow/train/optimizer.rb', line 7

def initialize(name: nil, use_locking: false)
  # An optimizer must be given a name by its subclass (e.g. "GradientDescent").
  raise(Error::InvalidArgumentError, "Must specify the optimizer name") unless name

  @name = name
  @use_locking = use_locking
  @slots = {}     # per-variable auxiliary state, keyed by slot name
  @non_slots = {} # optimizer-wide auxiliary state
end
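
Optimizer is an abstract base; construction fails without a name, which concrete subclasses supply. For example:

optimizer = Tensorflow::Train::Optimizer.new
# => raises Error::InvalidArgumentError, "Must specify the optimizer name"

optimizer = Tensorflow::Train::Optimizer.new(name: "MyOptimizer")
optimizer.name # => "MyOptimizer"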

Instance Attribute Details

#name ⇒ Object (readonly)

Returns the optimizer name given at construction.



# File 'lib/tensorflow/train/optimizer.rb', line 5

def name
  @name
end

Instance Method Details

#apply_gradients(grads_and_vars, global_step: nil, name: nil) ⇒ Object



# File 'lib/tensorflow/train/optimizer.rb', line 28

def apply_gradients(grads_and_vars, global_step: nil, name: nil)
  prepare
  apply_ops = grads_and_vars.map do |grad, var|
    apply_dense(grad, var)
  end

  if global_step.nil?
    finish(apply_ops, name)
  else
    # Increment global_step only after all of the update ops have run.
    global_step.handle.graph.control_dependencies([finish(apply_ops, "update")]) do
      global_step.assign_add(Tensorflow.constant(1, dtype: global_step.dtype))
    end
  end
end
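
apply_gradients consumes the [gradient, variable] pairs produced by #compute_gradients and returns the final update op. A sketch, assuming loss and global_step were built beforehand in the current graph:

grads_and_vars = optimizer.compute_gradients(loss)
train_op = optimizer.apply_gradients(grads_and_vars, global_step: global_step)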

#compute_gradients(loss, var_list: nil, grad_loss: nil) ⇒ Object



# File 'lib/tensorflow/train/optimizer.rb', line 49

def compute_gradients(loss, var_list: nil, grad_loss: nil)
  # Default to every variable registered as trainable in the current graph.
  trainable_vars = var_list || self.graph.get_collection_ref(Tensorflow::Graph::GraphKeys::TRAINABLE_VARIABLES)

  if trainable_vars.nil? || trainable_vars.empty?
    raise(Error::InvalidArgumentError, 'There are no variables to train for the loss function')
  end

  gradients = Graph::Gradients.new(graph)
  grads = gradients.gradients(loss, trainable_vars, grad_ys: grad_loss)

  # Pair each gradient with the variable it updates.
  grads.zip(trainable_vars)
end
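
The result is an array of [gradient, variable] pairs, suitable for passing straight to #apply_gradients. A sketch, assuming w and b are the graph's trainable variables:

optimizer.compute_gradients(loss)
# => [[grad_w, w], [grad_b, b]]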

#get_slot(var, name) ⇒ Object



# File 'lib/tensorflow/train/optimizer.rb', line 61

def get_slot(var, name)
  named_slots = @slots.fetch(name, nil)
  return nil if named_slots.nil?

  named_slots.fetch(var_key(var), nil)
end
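
Slots hold per-variable optimizer state such as accumulators. A sketch, assuming a subclass that registered a "momentum" slot for var (this base class itself creates no slots):

optimizer.get_slot(var, "momentum")  # => the slot variable, or nil if absent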

#get_slot_names ⇒ Object



# File 'lib/tensorflow/train/optimizer.rb', line 68

def get_slot_names
  @slots.keys.sort
end
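
For example, continuing the hypothetical momentum slot above:

optimizer.get_slot_names # => ["momentum"]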

#graph ⇒ Object



# File 'lib/tensorflow/train/optimizer.rb', line 16

def graph
  ExecutionContext.current
end
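
Note that the graph is resolved from ExecutionContext at call time rather than captured when the optimizer is constructed, so a single optimizer instance operates on whichever graph is current when its methods run.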

#minimize(loss, var_list: nil, grad_loss: nil, global_step: nil, name: nil) ⇒ Object



# File 'lib/tensorflow/train/optimizer.rb', line 20

def minimize(loss, var_list: nil, grad_loss: nil, global_step: nil, name: nil)
  grads_and_vars = compute_gradients(loss, var_list: var_list, grad_loss: grad_loss)
  if grads_and_vars.empty?
    raise(Error::InvalidArgumentError, "No gradients provided for any variable, check your graph for ops that do not support gradients")
  end
  apply_gradients(grads_and_vars, global_step: global_step, name: name)
end
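
minimize is the usual entry point; it simply chains #compute_gradients into #apply_gradients. A minimal sketch using the GradientDescentOptimizer subclass (its learning-rate argument is an assumption, not documented on this page):

optimizer = Tensorflow::Train::GradientDescentOptimizer.new(0.01)
train_op = optimizer.minimize(loss, global_step: global_step)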