TensorFlow Multilayer Perceptron for MNIST Classification
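The script below trains a single-hidden-layer perceptron on MNIST with TensorFlow 1.x: 784 flattened pixels feed a 30-unit ReLU hidden layer and a 10-unit linear output layer, the softmax cross-entropy loss is minimized with Adam at a learning rate of 0.001, and training runs for 50 epochs with a batch size of 200, printing test-set accuracy after every epoch.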

import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import tensorflow as tf
import tensorflow.contrib.layers as layers
from tensorflow.examples.tutorials.mnist import input_data

mnist = input_data.read_data_sets('./data/mnist_data', one_hot=True)

batch_size = 200   # examples per training batch
eta = 0.001        # Adam learning rate
max_epoch = 50     # passes over the training set
n_hidden = 30      # units in the single hidden layer
n_classes = 10     # MNIST digit classes
n_input = 784      # 28 x 28 flattened pixels

def multilayer_perceptron(x):
    # one fully connected ReLU hidden layer followed by a linear output layer (logits)
    fc1 = layers.fully_connected(x, n_hidden, activation_fn=tf.nn.relu)
    out = layers.fully_connected(fc1, n_classes, activation_fn=None)
    return out

x = tf.placeholder(tf.float32, [None, n_input])
y = tf.placeholder(tf.float32, [None, n_classes])
y_hat = multilayer_perceptron(x)

# softmax cross-entropy loss, Adam optimizer, and accuracy of the arg-max prediction
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y_hat, labels=y))
train = tf.train.AdamOptimizer(learning_rate=eta).minimize(loss)
correct_prediction = tf.equal(tf.argmax(y_hat, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, dtype=tf.float32))
init = tf.global_variables_initializer()


with tf.Session() as sess:
    sess.run(init)
    for epoch in range(max_epoch):
        epoch_loss = 0.0
        batch_steps = int(mnist.train.num_examples/batch_size)
        for i in range(batch_steps):
            batch_x, batch_y = mnist.train.next_batch(batch_size)
            _, c = sess.run([train, loss], feed_dict={x: batch_x, y: batch_y})
            epoch_loss += c / batch_steps  # accumulate the mean batch loss inside the batch loop
        accur = sess.run(accuracy, feed_dict={x: mnist.test.images, y: mnist.test.labels})
        print('Epoch %02d, Loss = %.6f, Accuracy = %.6f' % (epoch, epoch_loss, accur))
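
The listing above depends on tf.contrib.layers and the tensorflow.examples.tutorials.mnist loader, both of which exist only in TensorFlow 1.x. As a rough sketch (not part of the original post), the same architecture and hyperparameters could be written with tf.keras under an assumed TensorFlow 2.x installation; the Keras MNIST loader returns integer labels, so a sparse cross-entropy loss replaces the one-hot version used above.

# Minimal tf.keras sketch of the same 784-30-10 MLP (assumes TensorFlow 2.x).
import tensorflow as tf

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape(-1, 784).astype('float32') / 255.0   # flatten and scale to [0, 1]
x_test = x_test.reshape(-1, 784).astype('float32') / 255.0

model = tf.keras.Sequential([
    tf.keras.layers.Dense(30, activation='relu', input_shape=(784,)),  # hidden ReLU layer
    tf.keras.layers.Dense(10)                                          # linear logits
])
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
model.fit(x_train, y_train, batch_size=200, epochs=50,
          validation_data=(x_test, y_test))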

            
