TensorFlow Intro

TensorBoard

TensorBoard is used for model visualization and logging, and is opened with the terminal command:

tensorboard --logdir=path/to/log_dir

Create Summary Log

  1. Summaries are operations:
loss_summary = tf.summary.scalar('loss', loss)
  2. A summary writer writes summaries to a log file:
summary_writer = tf.summary.FileWriter('logs/', session.graph)
  3. Example:
pred, summary = sess.run([out, loss_summary], feed_dict={x: x_, labels: y_})
summary_writer.add_summary(summary, global_step)
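
Putting the pieces together, a minimal end-to-end sketch (TF1 API; the scalar loss here is a hypothetical stand-in for a real model's loss):

import tensorflow as tf

x = tf.placeholder(tf.float32, shape=[None, 1])
loss = tf.reduce_mean(tf.square(x))  # hypothetical loss, for illustration only
loss_summary = tf.summary.scalar('loss', loss)

with tf.Session() as sess:
    summary_writer = tf.summary.FileWriter('logs/', sess.graph)
    for step in range(100):
        summary = sess.run(loss_summary, feed_dict={x: [[1.0]]})
        summary_writer.add_summary(summary, step)  # step becomes the x-axis in TensorBoard
    summary_writer.close()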

Name Scoping

with tf.variable_scope("foo"):
    with tf.variable_scope("bar"):
        v = tf.Variable("v", [1])

v.name
>>> "foo/bar/v:0"

1. Name scoping is useful for sharing weights; tf.get_variable creates or retrieves a variable under the current scope (see the reuse sketch after this snippet):

with tf.variable_scope("foo"):
    with tf.variable_scope("bar"):
        v = tf.get_variable("v", [1])
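
Sharing takes effect when the same scope is re-entered with reuse enabled; a minimal sketch (the scope name "shared" is just for illustration):

with tf.variable_scope("shared"):
    v1 = tf.get_variable("v", [1])

with tf.variable_scope("shared", reuse=True):
    v2 = tf.get_variable("v")  # returns the existing "shared/v" rather than creating a new one

assert v1 is v2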

2. Name scoping is useful for cleaner code when similar structures can be reused:

def layer(input, input_size, output_size, scope_name):
    with tf.variable_scope(scope_name):
        W = tf.get_variable("W", initializer=tf.random_normal([input_size, output_size]))
        b = tf.get_variable("b", initializer=tf.zeros([output_size]))
        z = tf.matmul(input, W) + b
        return z


input = .......
h0 = layer(input, 10, 20, "h0")
h1 = layer(h0, 20, 20, "h1")
tf.get_variable("h0/W")
tf.get_variable("h1/b")

Build a basic Neural Network Model

graph = tf.Graph()  # create a graph instance

with graph.as_default():
    X = tf.placeholder(tf.float32, [None, input_size])
    labels = tf.placeholder(tf.float32, [None, num_classes])
    # weights initialized from a truncated normal, bias set to 0
    W = tf.Variable(tf.truncated_normal([input_size, num_classes]))
    b = tf.Variable(tf.zeros([num_classes]))
    logits = tf.matmul(X, W) + b
    # loss: take the mean cross-entropy over all training examples
    loss = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=labels))
    # gradient computation + optimization
    optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)
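
A sketch of training this graph in a session, assuming hypothetical NumPy arrays train_x and train_y that match the placeholder shapes, with input_size, num_classes, and learning_rate set beforehand:

with tf.Session(graph=graph) as sess:
    sess.run(tf.global_variables_initializer())  # initializes W and b
    for step in range(1000):
        _, loss_value = sess.run([optimizer, loss],
                                 feed_dict={X: train_x, labels: train_y})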