Class: TensorStream::NN
- Inherits: Object
- Defined in: lib/tensor_stream/nn/nn_ops.rb
Overview
High-level machine learning functions
Class Method Summary
- .relu(features, name: nil) ⇒ Object
- .sigmoid_cross_entropy_with_logits(labels: nil, logits: nil, name: nil) ⇒ Object
- .softmax(logits, _options = {}) ⇒ Object
Class Method Details
.relu(features, name: nil) ⇒ Object
# File 'lib/tensor_stream/nn/nn_ops.rb', line 8

def self.relu(features, name: nil)
  TensorStream.max(features, 0, name: "relu_#{name}")
end
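A minimal usage sketch (not part of the generated documentation): it assumes the standard TensorStream entry points constant, session and Session#run. relu clips every negative entry to zero via TensorStream.max, so the run below yields [0.0, 0.0, 2.0].

require 'tensor_stream'

tf = TensorStream

# Build a small input tensor and apply the rectifier.
features = tf.constant([-1.0, 0.0, 2.0])
activated = TensorStream::NN.relu(features, name: 'example') # op named "relu_example"

sess = tf.session
sess.run(activated) # => [0.0, 0.0, 2.0]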
.sigmoid_cross_entropy_with_logits(labels: nil, logits: nil, name: nil) ⇒ Object
# File 'lib/tensor_stream/nn/nn_ops.rb', line 12

def self.sigmoid_cross_entropy_with_logits(labels: nil, logits: nil, name: nil)
  TensorStream.name_scope(name, default: 'logistic_loss', values: [logits, labels]) do |name|
    tf = TensorStream
    logits = tf.convert_to_tensor(logits, name: 'logits')
    labels = tf.convert_to_tensor(labels, name: 'labels')
    zeros = tf.zeros_like(logits, dtype: logits.dtype)
    cond = (logits >= zeros)
    relu_logits = tf.where(cond, logits, zeros)
    neg_abs_logits = tf.where(cond, -logits, logits)
    return tf.add(
      relu_logits - logits * labels,
      tf.log1p(tf.exp(neg_abs_logits)),
      name: name)
  end
end
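As the source above shows, the loss is computed in the numerically stable form max(x, 0) - x * z + log(1 + exp(-|x|)) for logits x and labels z, which avoids overflowing exp for large-magnitude logits. A usage sketch, again assuming the standard TensorStream constant/session helpers:

require 'tensor_stream'

tf = TensorStream

# Binary labels and raw (pre-sigmoid) scores of the same shape.
labels = tf.constant([1.0, 0.0, 1.0])
logits = tf.constant([2.0, -1.0, 0.5])

loss = TensorStream::NN.sigmoid_cross_entropy_with_logits(labels: labels, logits: logits)

sess = tf.session
sess.run(loss) # element-wise logistic loss, one value per logit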
.softmax(logits, _options = {}) ⇒ Object
# File 'lib/tensor_stream/nn/nn_ops.rb', line 4

def self.softmax(logits, _options = {})
  TensorStream.exp(logits) / TensorStream.reduce_sum(TensorStream.exp(logits))
end
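Note that reduce_sum is called without an axis, so this softmax normalizes over the entire tensor rather than per row. A usage sketch under the same assumptions about the session API:

require 'tensor_stream'

tf = TensorStream

logits = tf.constant([1.0, 2.0, 3.0])
probs = TensorStream::NN.softmax(logits)

sess = tf.session
sess.run(probs) # probabilities over all elements, summing to 1.0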