Module: TensorStream::Utils
- Included in:
- TensorStream, Images, Train::LearningRateDecay, Train::SlotCreator
- Defined in:
- lib/tensor_stream/utils.rb
Instance Method Summary collapse
- #__v_scope_name ⇒ Object
- #assign(ref, value, name: nil) ⇒ Object
- #check_allowed_types(input, types) ⇒ Object
- #check_data_types(input_a, input_b) ⇒ Object
-
#check_if_dense(value, expected_shape = nil) ⇒ Object
Check to make sure passed array is dense.
- #colocate_with(op, ignore_existing: false) ⇒ Object
- #constant(value, dtype: nil, shape: nil, internal: false, name: 'Const') ⇒ Object
- #control_dependencies(control_inputs, &block) ⇒ Object
- #convert_to_tensor(value, dtype: nil, name: nil) ⇒ Object
- #device(device_uri, &block) ⇒ Object
- #disable_eager_execution ⇒ Object
- #dynamic_stitch(indices, data, name: nil) ⇒ Object
- #enable_eager_execution ⇒ Object
- #executing_eagerly? ⇒ Boolean
- #float32 ⇒ Object
- #get_collection(name, options = {}) ⇒ Object
- #get_default_graph ⇒ Object
- #get_variable(name, dtype: nil, shape: nil, initializer: nil, trainable: true, collections: nil, validate_shape: false) ⇒ Object
- #get_variable_scope ⇒ Object
- #global_variables_initializer ⇒ Object
- #graph ⇒ Object
- #group(inputs, name: nil) ⇒ Object
- #image ⇒ Object
- #layers ⇒ Object
-
#list_local_devices ⇒ Object
List available evaluators + devices in the current local environment Returns: - An array containing the names of those devices.
- #name_scope(name, default_name = nil, default: nil, values: nil) ⇒ Object
- #norm_dtype(dtype) ⇒ Object
-
#placeholder(dtype, shape: nil, name: nil) ⇒ Object
Inserts a placeholder for a tensor that will be always fed.
- #program {|_self| ... } ⇒ Object
- #reset_default_graph ⇒ Object
-
#session(evaluator = nil, thread_pool_class: Concurrent::ImmediateExecutor, log_device_placement: false, profile_enabled: false) {|session| ... } ⇒ Object
Creates a session context where operations can be executed.
-
#set_random_seed(seed) ⇒ Object
Sets random seed to use for the default graph.
- #train ⇒ Object
- #trainable_variables ⇒ Object
-
#variable(value, name: nil, initializer: nil, graph: nil, dtype: nil, trainable: true) ⇒ Object
Creates a variable A variable maintains state across sessions.
-
#variable_scope(scope = nil, default_name = nil, reuse: nil, initializer: nil) ⇒ Object
Defines a variable context manager.
Instance Method Details
#__v_scope_name ⇒ Object
129 130 131 |
# File 'lib/tensor_stream/utils.rb', line 129 def __v_scope_name Thread.current[:tensor_stream_variable_scope].map(&:name).compact.reject(&:empty?).join('/') end |
#assign(ref, value, name: nil) ⇒ Object
203 204 205 206 |
# File 'lib/tensor_stream/utils.rb', line 203 def assign(ref, value, name: nil) raise "#{ref.name} not a variable" unless ref.is_a?(Variable) ref.assign(value, name: name) end |
#check_allowed_types(input, types) ⇒ Object
274 275 276 277 278 279 |
# File 'lib/tensor_stream/utils.rb', line 274 def check_allowed_types(input, types) return input unless input.is_a?(Tensor) return input if input.data_type.nil? raise "#{input.source}: Parameter data type #{input.data_type} passed not in #{types.join(',')}" unless types.include?(input.data_type.to_sym) end |
#check_data_types(input_a, input_b) ⇒ Object
281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 |
# File 'lib/tensor_stream/utils.rb', line 281 def check_data_types(input_a, input_b) if !input_a.is_a?(Tensor) && input_b.is_a?(Tensor) input_a = convert_to_tensor(input_a, dtype: input_b.data_type) elsif !input_b.is_a?(Tensor) && input_a.is_a?(Tensor) input_b = convert_to_tensor(input_b, dtype: input_a.data_type) else input_a = convert_to_tensor(input_a) input_b = convert_to_tensor(input_b) end if norm_dtype(input_a.data_type) != norm_dtype(input_b.data_type) raise TensorStream::ValueError, "Value Error: Tensor conversion requested dtype #{input_a.data_type} for tensor type #{input_b.data_type}" end [input_a, input_b] end |
#check_if_dense(value, expected_shape = nil) ⇒ Object
Check to make sure passed array is dense.
258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 |
# File 'lib/tensor_stream/utils.rb', line 258 def check_if_dense(value, expected_shape = nil) return unless value.is_a?(Array) return if value.empty? expected_shape ||= shape_eval(value) s = expected_shape.shift raise TensorStream::ValueError, "Argument must be a dense tensor: #{value}, expected size #{s} got #{value.size}" if value.size != s return if expected_shape.empty? value.each do |item| check_if_dense(item, expected_shape.dup) end end |
#colocate_with(op, ignore_existing: false) ⇒ Object
150 151 152 153 |
# File 'lib/tensor_stream/utils.rb', line 150 def colocate_with(op, ignore_existing: false) # noop for now yield end |
#constant(value, dtype: nil, shape: nil, internal: false, name: 'Const') ⇒ Object
163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 |
# File 'lib/tensor_stream/utils.rb', line 163 def constant(value, dtype: nil, shape: nil, internal: false, name: 'Const') options = { const: true, value: value, name: name, internal: internal } if value.is_a?(Float) TensorStream::Constant.new(dtype || :float32, 0, shape || [], options) elsif value.is_a?(Integer) TensorStream::Constant.new(dtype || :int32, 0, shape || [], options) elsif value.is_a?(String) TensorStream::Constant.new(dtype || :string, 0, shape || [], options) elsif !!value == value TensorStream::Constant.new(dtype || :boolean, 0, shape || [], options) elsif value.is_a?(Array) dimension = shape || shape_eval(value) rank = dimension.size TensorStream.check_if_dense(value) cur_dtype = dtype || Tensor.detect_type(value.flatten.last) value = Tensor.cast_dtype(value, cur_dtype) unless dtype.nil? options[:value] = value TensorStream::Constant.new(cur_dtype, rank, dimension, options) end end |
#control_dependencies(control_inputs, &block) ⇒ Object
236 237 238 |
# File 'lib/tensor_stream/utils.rb', line 236 def control_dependencies(control_inputs, &block) TensorStream.get_default_graph.control_dependencies(control_inputs, &block) end |
#convert_to_tensor(value, dtype: nil, name: nil) ⇒ Object
240 241 242 243 244 245 246 247 248 249 250 251 252 253 |
# File 'lib/tensor_stream/utils.rb', line 240 def convert_to_tensor(value, dtype: nil, name: nil) return value if value.is_a?(Tensor) return convert_to_tensor(value.call) if value.is_a?(Proc) # raise "Invalid tensor value" if value.nil? if value.is_a?(Array) && value[0].is_a?(Tensor) return TensorStream.stack(value) if value.size > 1 return TensorStream.expand_dims(value[0], 0) end check_if_dense(value) i_cons(value, dtype: dtype || Tensor.detect_type(value), name: name) end |
#device(device_uri, &block) ⇒ Object
104 105 106 |
# File 'lib/tensor_stream/utils.rb', line 104 def device(device_uri, &block) get_default_graph.device(device_uri, &block) end |
#disable_eager_execution ⇒ Object
23 24 25 |
# File 'lib/tensor_stream/utils.rb', line 23 def disable_eager_execution TensorStream::Graph.get_default_graph.disable_eager_execution end |
#dynamic_stitch(indices, data, name: nil) ⇒ Object
191 192 193 |
# File 'lib/tensor_stream/utils.rb', line 191 def dynamic_stitch(indices, data, name: nil) TensorStream::DynamicStitch.new(:dynamic_stitch, [indices, data], name: name) end |
#enable_eager_execution ⇒ Object
19 20 21 |
# File 'lib/tensor_stream/utils.rb', line 19 def enable_eager_execution TensorStream::Graph.get_default_graph.enable_eager_execution end |
#executing_eagerly? ⇒ Boolean
27 28 29 |
# File 'lib/tensor_stream/utils.rb', line 27 def executing_eagerly? TensorStream::Graph.get_default_graph.executing_eagerly? end |
#get_collection(name, options = {}) ⇒ Object
199 200 201 |
# File 'lib/tensor_stream/utils.rb', line 199 def get_collection(name, options = {}) Graph.get_default_graph.get_collection(name, options) end |
#get_default_graph ⇒ Object
11 12 13 |
# File 'lib/tensor_stream/utils.rb', line 11 def get_default_graph TensorStream::Graph.get_default_graph end |
#get_variable(name, dtype: nil, shape: nil, initializer: nil, trainable: true, collections: nil, validate_shape: false) ⇒ Object
195 196 197 |
# File 'lib/tensor_stream/utils.rb', line 195 def get_variable(name, dtype: nil, shape: nil, initializer: nil, trainable: true, collections: nil, validate_shape: false) get_variable_scope.get_variable(name, dtype: dtype, shape: shape, initializer: initializer, trainable: trainable, collections: collections) end |
#get_variable_scope ⇒ Object
119 120 121 122 123 124 125 126 127 |
# File 'lib/tensor_stream/utils.rb', line 119 def get_variable_scope if !Thread.current[:tensor_stream_variable_scope] variable_scope = VariableScope.new Thread.current[:tensor_stream_variable_scope] = [variable_scope] return variable_scope end Thread.current[:tensor_stream_variable_scope].last end |
#global_variables_initializer ⇒ Object
214 215 216 |
# File 'lib/tensor_stream/utils.rb', line 214 def global_variables_initializer TensorStream::Variable.global_variables_initializer end |
#graph ⇒ Object
7 8 9 |
# File 'lib/tensor_stream/utils.rb', line 7 def graph TensorStream::Graph.new end |
#group(inputs, name: nil) ⇒ Object
187 188 189 |
# File 'lib/tensor_stream/utils.rb', line 187 def group(inputs, name: nil) TensorStream::ControlFlow.new(:group, inputs, nil, name: name) end |
#image ⇒ Object
222 223 224 |
# File 'lib/tensor_stream/utils.rb', line 222 def image TensorStream::Images end |
#layers ⇒ Object
159 160 161 |
# File 'lib/tensor_stream/utils.rb', line 159 def layers TensorStream::Layers end |
#list_local_devices ⇒ Object
List available evaluators + devices in the current local environment Returns:
-
An array containing the names of those devices.
35 36 37 38 39 40 41 42 |
# File 'lib/tensor_stream/utils.rb', line 35 def list_local_devices local_name = 'job:localhost' TensorStream::Evaluator.evaluators.collect do |k, v| v[:class].query_supported_devices.collect do |device_str| [local_name, "ts:#{k}:#{device_str.name}"].join('/') end end.flatten end |
#name_scope(name, default_name = nil, default: nil, values: nil) ⇒ Object
108 109 110 111 112 113 114 115 116 117 |
# File 'lib/tensor_stream/utils.rb', line 108 def name_scope(name, default_name = nil, default: nil, values: nil) if values graph_count = values.select { |v| v.is_a?(Tensor) }.map(&:graph).map(&:object_id).uniq.size raise "values are not on the same graph" if graph_count > 1 end get_default_graph.name_scope(name || default_name || default) do |scope| yield scope if block_given? end end |
#norm_dtype(dtype) ⇒ Object
298 299 300 301 302 303 304 305 306 307 308 |
# File 'lib/tensor_stream/utils.rb', line 298 def norm_dtype(dtype) dtype = dtype.to_sym case dtype when :int :int32 when :float :float32 else dtype end end |
#placeholder(dtype, shape: nil, name: nil) ⇒ Object
Inserts a placeholder for a tensor that will be always fed.
210 211 212 |
# File 'lib/tensor_stream/utils.rb', line 210 def placeholder(dtype, shape: nil, name: nil) TensorStream::Placeholder.new(dtype, nil, shape, name: name) end |
#program {|_self| ... } ⇒ Object
155 156 157 |
# File 'lib/tensor_stream/utils.rb', line 155 def program yield self end |
#reset_default_graph ⇒ Object
15 16 17 |
# File 'lib/tensor_stream/utils.rb', line 15 def reset_default_graph TensorStream::Graph.get_default_graph.reset end |
#session(evaluator = nil, thread_pool_class: Concurrent::ImmediateExecutor, log_device_placement: false, profile_enabled: false) {|session| ... } ⇒ Object
Creates a session context where operations can be executed.
Args:
evaluator: Specific evaluator to use, otherwise the best evaluator will automatically be determined
Options:
thread_pool_class: Class to use to manage thread pooling
log_device_placement: Show assigned device/evaluator for each tensor op
profile_enabled: Log performance metrics for each operation
143 144 145 146 147 148 |
# File 'lib/tensor_stream/utils.rb', line 143 def session(evaluator = nil, thread_pool_class: Concurrent::ImmediateExecutor, log_device_placement: false, profile_enabled: false) session = TensorStream::Session.new(evaluator, thread_pool_class: thread_pool_class, log_device_placement: log_device_placement, profile_enabled: profile_enabled) yield session if block_given? session end |
#set_random_seed(seed) ⇒ Object
Sets random seed to use for the default graph.
232 233 234 |
# File 'lib/tensor_stream/utils.rb', line 232 def set_random_seed(seed) TensorStream.get_default_graph.random_seed = seed end |
#train ⇒ Object
218 219 220 |
# File 'lib/tensor_stream/utils.rb', line 218 def train TensorStream::Trainer end |
#trainable_variables ⇒ Object
226 227 228 |
# File 'lib/tensor_stream/utils.rb', line 226 def trainable_variables TensorStream.get_default_graph.get_collection(TensorStream::GraphKeys::TRAINABLE_VARIABLES) end |
#variable(value, name: nil, initializer: nil, graph: nil, dtype: nil, trainable: true) ⇒ Object
Creates a variable. A variable maintains state across sessions.
47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 |
# File 'lib/tensor_stream/utils.rb', line 47 def variable(value, name: nil, initializer: nil, graph: nil, dtype: nil, trainable: true) op = Graph.get_default_graph.add_op(:assign, nil, value) options = { initializer: initializer || op, name: name, graph: graph, dtype: dtype, trainable: trainable } tensor = if value.is_a?(String) i_var(dtype || :string, 0, [], get_variable_scope, options) elsif value.is_a?(Integer) i_var(dtype || :int32, 0, [], get_variable_scope, options) elsif value.is_a?(Float) i_var(dtype || :float32, 0, [], get_variable_scope, options) else i_var(dtype || :float32, 0, nil, get_variable_scope, options) end op.set_input(0, tensor.op) Graph.get_default_graph.add_node(op) tensor end |
#variable_scope(scope = nil, default_name = nil, reuse: nil, initializer: nil) ⇒ Object
Defines a variable context manager.
72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 |
# File 'lib/tensor_stream/utils.rb', line 72 def variable_scope(scope = nil, default_name = nil, reuse: nil, initializer: nil) Thread.current[:tensor_stream_variable_scope] ||= [ VariableScope.new ] # uniquenifier if scope.nil? && default_name same_names = get_variable_scope.used_names.select { |s| s.start_with?(default_name) } new_name = default_name index = 1 while same_names.include?(new_name) new_name = "#{default_name}_#{index}" index += 1 end scope = new_name end variable_scope = VariableScope.new(name: scope, reuse: reuse, initializer: initializer) get_variable_scope.register_name(scope || "") Thread.current[:tensor_stream_variable_scope] << variable_scope scope_name = __v_scope_name if block_given? begin TensorStream.get_default_graph.name_scope(scope) do yield(scope_name) end ensure Thread.current[:tensor_stream_variable_scope].pop end else variable_scope end end |