Wednesday, February 14, 2018

[TensorFlow] Linear Regression sample


Tags: TensorFlow, Python, Linear Regression


Introduction


We will create a sample here that fits a standard linear regression model of the form

Y = X * 0.1 (weight) + 0.3 (bias)

We generate a series of training data [X, Y], define an MSE (Mean Squared Error) loss, and let TensorFlow estimate the weight and bias by gradient descent.
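
As a quick illustration of what the MSE loss measures, here is a minimal NumPy-only sketch (not part of the TensorFlow model; the guessed weight and bias values are made up for illustration):

import numpy as np

# Training data from the true model Y = X * 0.1 + 0.3
X = np.random.rand(100).astype(np.float32)
Y = X * 0.1 + 0.3

# A hypothetical guess for weight and bias
W_guess, b_guess = 0.05, 0.25
Y_pred = W_guess * X + b_guess

# MSE = mean of the squared differences between prediction and target
mse = np.mean(np.square(Y_pred - Y))
print(mse)  # A positive number; training drives this toward 0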



Environment


Python 3.6.2
TensorFlow 1.5.0
matplotlib 2.1.2



Implementation


Before we start

Here are the TensorFlow APIs we will use later:

1. tf.placeholder
2. tf.Variable
3. tf.random_uniform
4. tf.zeros
5. tf.reduce_mean
6. tf.reduce_sum
7. tf.global_variables_initializer
8. tf.pow
9. tf.square



tf.placeholder

A placeholder is for a tensor that will be fed later.

import tensorflow as tf
num1 = tf.placeholder(tf.int32)
num2 = tf.placeholder(tf.int32)
result = num1 + num2
with tf.Session() as sess:
    value = sess.run(result, {num1: 10, num2: 20})
    print(value)  # Output: 30
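
A placeholder can also be fed a whole NumPy array at run time; a minimal sketch (the shape=[None] and the names x/doubled are my own choices for illustration):

import tensorflow as tf
import numpy as np

x = tf.placeholder(tf.float32, shape=[None])  # a vector of any length
doubled = x * 2
with tf.Session() as sess:
    print(sess.run(doubled, {x: np.array([1., 2., 3.])}))  # Output: [2. 4. 6.]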


tf.Variable

The Variable() constructor requires an initial value, which defines the type and shape of the variable. A variable maintains state in the graph across calls to run().

import tensorflow as tf
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='weight')  # See https://www.tensorflow.org/api_docs/python/tf/random_uniform
b = tf.Variable(tf.zeros([1]), name='bias')  # See https://www.tensorflow.org/api_docs/python/tf/zeros
init_op = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init_op)
    print(sess.run(W))
    print(sess.run(b))
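
To see that a variable really keeps its state across calls to run(), here is a small counter sketch (the counter variable and the use of tf.assign_add are my own illustration, not part of the regression model):

import tensorflow as tf
counter = tf.Variable(0, name='counter')
increment = tf.assign_add(counter, 1)  # add 1 to counter and return the new value
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(3):
        print(sess.run(increment))  # Output: 1, then 2, then 3 -- the state persists between run() calls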


tf.random_uniform

Outputs random values from a uniform distribution.

import tensorflow as tf

rnd = tf.random_uniform([2], 100, 200)  # two random values in the range [100, 200)
with tf.Session() as sess:
    print(sess.run(rnd))  # Output (random), e.g. [141.38602 158.75304]


tf.square

Computes the square of a tensor element-wise.

import tensorflow as tf
s = tf.square(10)
with tf.Session() as sess:
    print(sess.run(s))  # Output: 100
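
tf.pow, which shows up in the loss function later, raises elements to a power; with exponent 2 it matches tf.square. A minimal sketch:

import tensorflow as tf
p = tf.pow(tf.constant([2., 3.]), 2)  # element-wise power: [2^2, 3^2]
with tf.Session() as sess:
    print(sess.run(p))  # Output: [4. 9.]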


tf.reduce_mean

Computes the mean of elements across dimensions of a tensor.

import tensorflow as tf

x = tf.constant([[2., 3.], [4., 1.]])

with tf.Session() as sess:
    all_mean = tf.reduce_mean(x)     # mean over all elements: (2+3+4+1)/4
    dm0 = tf.reduce_mean(x, axis=0)  # mean along axis 0: [(2+4)/2, (3+1)/2]
    dm1 = tf.reduce_mean(x, axis=1)  # mean along axis 1: [(2+3)/2, (4+1)/2]

    print(sess.run(all_mean)) # Result: 2.5
    print(sess.run(dm0)) # Result: [3. 2.]
    print(sess.run(dm1)) # Result: [2.5 2.5]


tf.reduce_sum

Computes the sum of elements across dimensions of a tensor.

# See https://www.tensorflow.org/api_docs/python/tf/reduce_sum
import tensorflow as tf

x = tf.constant([[2, 3], [4, 1]])

with tf.Session() as sess:
    print(sess.run(tf.reduce_sum(x))) # Result: 2+3+4+1=10
    print(sess.run(tf.reduce_sum(x, 0))) # Result: [6 4]
    print(sess.run(tf.reduce_sum(x, 1))) # Result: [5 5]
    print(sess.run(tf.reduce_sum(x, 1, keepdims=True))) # Result: [[5], [5]]
    print(sess.run(tf.reduce_sum(x, [0, 1]))) # Result: 10
   




tf.global_variables_initializer

Returns an Op that initializes global variables; this Op must be run before the variables can be used.
PS. An Op is an Operation, i.e. a node in the graph that performs computation and produces tensors as output.

import tensorflow as tf
rnd = tf.Variable(tf.random_uniform([2], 100, 200))
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    print(sess.run(rnd))  # Output (random), e.g. [141.38602 158.75304]



Basic model: Linear Regression



"""Linear Regression
"""
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Parameters
learning_rate = 0.2
training_epochs = 201
display_step = 20

# Create 100 training data points
train_X = np.random.rand(100).astype(np.float32)
train_Y = train_X * 0.1 + 0.3

# Try to find values for W and b that compute train_Y = W * train_X + b
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
b = tf.Variable(tf.zeros([1]))
y = W * train_X + b

# Minimize the mean squared errors.
loss = tf.reduce_sum(tf.pow(y-train_Y, 2))/train_X.shape[0]
# loss = tf.reduce_mean(tf.square(y - train_Y)) # Or use reduce_mean
optimizer = tf.train.GradientDescentOptimizer(learning_rate)
train = optimizer.minimize(loss)

# Initialize the variables (i.e. assign their default value)
init = tf.global_variables_initializer()

# Start training
with tf.Session() as sess:

    # Run the initializer
    sess.run(init)

    # Fit all training data
    for step in range(training_epochs):
        sess.run(train)
        if step % display_step == 0:
            stepStr = str(int(step/display_step) + 1) + '.'
            print(stepStr, sess.run(W), sess.run(b))
            plt.plot(train_X, train_Y, 'go', label='Original data')
            plt.plot(train_X, sess.run(W) * train_X + sess.run(b), label='Fitted line')
            plt.legend()
            plt.show()
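
The model above builds the graph directly on top of the NumPy array train_X. The same training loop can also be written with tf.placeholder and feed_dict, as introduced earlier; a minimal sketch under the same hyperparameters (the names X, Y and pred are my own):

import tensorflow as tf
import numpy as np

train_X = np.random.rand(100).astype(np.float32)
train_Y = train_X * 0.1 + 0.3

X = tf.placeholder(tf.float32, shape=[None])
Y = tf.placeholder(tf.float32, shape=[None])
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
b = tf.Variable(tf.zeros([1]))
pred = W * X + b

loss = tf.reduce_mean(tf.square(pred - Y))
train = tf.train.GradientDescentOptimizer(0.2).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(201):
        # Feed the training data through the placeholders at each step
        sess.run(train, feed_dict={X: train_X, Y: train_Y})
        if step % 20 == 0:
            print(step, sess.run(W), sess.run(b))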



Output:

(Image omitted: the console output of W and b printed at each display step, converging toward 0.1 and 0.3.)


Graph:

(Image omitted: scatter plot of the training data with the fitted line, replotted at each display step.)

Github






Reference


